sentry-python-1.39.2/.craft.yml

minVersion: 0.34.1
targets:
  - name: pypi
    includeNames: /^sentry[_\-]sdk.*$/
  - name: gh-pages
  - name: registry
    sdks:
      pypi:sentry-sdk:
  - name: github
  - name: aws-lambda-layer
    includeNames: /^sentry-python-serverless-\d+(\.\d+)*\.zip$/
    layerName: SentryPythonServerlessSDK
    compatibleRuntimes:
      - name: python
        versions:
          # The number of versions must be, at most, the maximum number of
          # runtimes AWS Lambda permits for a layer (currently 15).
          # On the other hand, AWS Lambda does not support every Python runtime.
          # The supported runtimes are available in the following link:
          # https://docs.aws.amazon.com/lambda/latest/dg/lambda-python.html
          - python3.7
          - python3.8
          - python3.9
          - python3.10
          - python3.11
    license: MIT
  - name: sentry-pypi
    internalPypiRepo: getsentry/pypi
changelog: CHANGELOG.md
changelogPolicy: auto

sentry-python-1.39.2/.flake8

[flake8]
extend-ignore =
  # Handled by black (Whitespace before ':' -- handled by black)
  E203,
  # Handled by black (Line too long)
  E501,
  # Sometimes not possible due to execution order (Module level import is not at top of file)
  E402,
  # I don't care (Do not assign a lambda expression, use a def)
  E731,
  # does not apply to Python 2 (redundant exception types by flake8-bugbear)
  B014,
  # I don't care (Lowercase imported as non-lowercase by pep8-naming)
  N812,
  # is a worse version of and conflicts with B902 (first argument of a classmethod should be named cls)
  N804,
extend-exclude=checkouts,lol*
exclude =
  # gRPC generated files
  grpc_test_service_pb2.py
  grpc_test_service_pb2_grpc.py

sentry-python-1.39.2/.github/ISSUE_TEMPLATE/bug.yml

name: 🐞 Bug Report
description: Tell us about something that's not working the way we (probably) intend.
body:
  - type: dropdown
    id: type
    attributes:
      label: How do you use Sentry?
      options:
        - Sentry SaaS (sentry.io)
        - Self-hosted/on-premise
    validations:
      required: true
  - type: input
    id: version
    attributes:
      label: Version
      description: Which SDK version?
      placeholder: ex. 1.5.2
    validations:
      required: true
  - type: textarea
    id: repro
    attributes:
      label: Steps to Reproduce
      description: How can we see what you're seeing? Specific is terrific.
      placeholder: |-
        1. What
        2. you
        3. did.

        Extra points for also including the output of `pip freeze --all`.
    validations:
      required: true
  - type: textarea
    id: expected
    attributes:
      label: Expected Result
    validations:
      required: true
  - type: textarea
    id: actual
    attributes:
      label: Actual Result
      description: Logs? Screenshots? Yes, please.
    validations:
      required: true
  - type: markdown
    attributes:
      value: |-
        ## Thanks 🙏
    validations:
      required: false

sentry-python-1.39.2/.github/ISSUE_TEMPLATE/config.yml

blank_issues_enabled: false
contact_links:
  - name: Support Request
    url: https://sentry.io/support
    about: Use our dedicated support channel for paid accounts.

sentry-python-1.39.2/.github/ISSUE_TEMPLATE/feature.yml

name: 💡 Feature Request
description: Create a feature request for sentry-python SDK.
labels: 'enhancement'
body:
  - type: markdown
    attributes:
      value: Thanks for taking the time to file a feature request! Please fill out this form as completely as possible.
  - type: textarea
    id: problem
    attributes:
      label: Problem Statement
      description: A clear and concise description of what you want and what your use case is.
      placeholder: |-
        I want to make whirled peas, but Sentry doesn't blend.
    validations:
      required: true
  - type: textarea
    id: expected
    attributes:
      label: Solution Brainstorm
      description: We know you have bright ideas to share ... share away, friend.
      placeholder: |-
        Add a blender to Sentry.
    validations:
      required: true
  - type: markdown
    attributes:
      value: |-
        ## Thanks 🙏
        Check our [triage docs](https://open.sentry.io/triage/) for what to expect next.

sentry-python-1.39.2/.github/PULL_REQUEST_TEMPLATE.md

---

## General Notes

Thank you for contributing to `sentry-python`!

Please add tests to validate your changes, and lint your code using `tox -e linters`.

Running the test suite on your PR might require maintainer approval. Some tests (AWS Lambda) additionally require a maintainer to add a special label to run, and will fail if the label is not present.

#### For maintainers

Sensitive test suites require maintainer review to ensure that tests do not compromise our secrets. This review must be repeated after any code revisions.

Before running sensitive test suites, please carefully check the PR. Then, apply the `Trigger: tests using secrets` label. The label will be removed after any code changes to enforce our policy requiring maintainers to review all code revisions before running sensitive tests.
sentry-python-1.39.2/.github/dependabot.yml

version: 2
updates:
  - package-ecosystem: pip
    directory: "/"
    schedule:
      interval: weekly
    open-pull-requests-limit: 10
    allow:
      - dependency-type: direct
      - dependency-type: indirect
    ignore:
      - dependency-name: pytest
        versions:
          - "> 3.7.3"
      - dependency-name: flake8
        # Later versions dropped Python 2 support
        versions:
          - "> 5.0.4"
      - dependency-name: jsonschema
        # Later versions dropped Python 2 support
        versions:
          - "> 3.2.0"
      - dependency-name: pytest-cov
        versions:
          - "> 2.8.1"
      - dependency-name: pytest-forked
        versions:
          - "> 1.1.3"
      - dependency-name: sphinx
        versions:
          - ">= 2.4.a, < 2.5"
      - dependency-name: tox
        versions:
          - "> 3.7.0"
      - dependency-name: werkzeug
        versions:
          - "> 0.15.5, < 1"
      - dependency-name: werkzeug
        versions:
          - ">= 1.0.a, < 1.1"
      - dependency-name: mypy
        versions:
          - "0.800"
      - dependency-name: sphinx
        versions:
          - 3.4.3
  - package-ecosystem: gitsubmodule
    directory: "/"
    schedule:
      interval: weekly
    open-pull-requests-limit: 10
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: weekly
    open-pull-requests-limit: 10

sentry-python-1.39.2/.github/workflows/ci.yml

name: CI

on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  pull_request:

permissions:
  contents: read

env:
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless

jobs:
  lint:
    name: Lint Sources
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: 3.12
      - run: |
          pip install tox
          tox -e linters

  check-ci-config:
    name: Check CI config
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: 3.12
      - run: |
          pip install jinja2
          python scripts/split-tox-gh-actions/split-tox-gh-actions.py --fail-on-changes

  build_lambda_layer:
    name: Build Package
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: 3.12
      - name: Setup build cache
        uses: actions/cache@v3
        id: build_cache
        with:
          path: ${{ env.CACHED_BUILD_PATHS }}
          key: ${{ env.BUILD_CACHE_KEY }}
      - name: Build Packages
        run: |
          echo "Creating directory containing Python SDK Lambda Layer"
          pip install virtualenv
          # This will also trigger "make dist" that creates the Python packages
          make aws-lambda-layer
      - name: Upload Python Packages
        uses: actions/upload-artifact@v3
        with:
          name: ${{ github.sha }}
          path: |
            dist/*

  docs:
    name: Build SDK API Doc
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: 3.12
      - run: |
          pip install virtualenv
          make apidocs
          cd docs/_build && zip -r gh-pages ./
      - uses: actions/upload-artifact@v3.1.1
        with:
          name: ${{ github.sha }}
          path: docs/_build/gh-pages.zip

sentry-python-1.39.2/.github/workflows/codeql-analysis.yml

# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"

on:
  push:
    branches:
      - master
      - sentry-sdk-2.0
  pull_request:
    # The branches below must be a subset of the branches above
    branches:
      - master
      - sentry-sdk-2.0
  schedule:
    - cron: '18 18 * * 3'

permissions:
  contents: read

jobs:
  analyze:
    permissions:
      actions: read # for github/codeql-action/init to get workflow details
      contents: read # for actions/checkout to fetch code
      security-events: write # for github/codeql-action/autobuild to send a status report
    name: Analyze
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        language: [ 'python' ]
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
        # Learn more:
        # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v2
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.
          # queries: ./path/to/local/query, your-org/your-repo/queries@main

      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        uses: github/codeql-action/autobuild@v2

      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions

      # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
      #    and modify them (or add more) to build your code if your project
      #    uses a compiled language

      #- run: |
      #    make bootstrap
      #    make release

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v2

sentry-python-1.39.2/.github/workflows/enforce-license-compliance.yml

name: Enforce License Compliance

on:
  push:
    branches:
      - master
      - main
      - release/*
      - sentry-sdk-2.0
  pull_request:
    branches:
      - master
      - main
      - sentry-sdk-2.0

jobs:
  enforce-license-compliance:
    runs-on: ubuntu-latest
    steps:
      - name: 'Enforce License Compliance'
        uses: getsentry/action-enforce-license-compliance@main
        with:
          fossa_api_key: ${{ secrets.FOSSA_API_KEY }}

sentry-python-1.39.2/.github/workflows/release.yml

name: Release

on:
  workflow_dispatch:
    inputs:
      version:
        description: Version to release
        required: true
      force:
        description: Force a release even when there are release-blockers (optional)
        required: false

jobs:
  release:
    runs-on: ubuntu-latest
    name: "Release a new version"
    steps:
      - uses: actions/checkout@v4
        with:
          token: ${{ secrets.GH_RELEASE_PAT }}
          fetch-depth: 0
      - name: Prepare release
        uses: getsentry/action-prepare-release@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GH_RELEASE_PAT }}
        with:
          version: ${{ github.event.inputs.version }}
          force: ${{ github.event.inputs.force }}

sentry-python-1.39.2/.github/workflows/scripts/trigger_tests_on_label.py

#!/usr/bin/env python3
import argparse
import json
import os
from urllib.parse import quote
from urllib.request import Request, urlopen

LABEL = "Trigger: tests using secrets"


def _has_write(repo_id: int, username: str, *, token: str) -> bool:
    req = Request(
        f"https://api.github.com/repositories/{repo_id}/collaborators/{username}/permission",
        headers={"Authorization": f"token {token}"},
    )
    contents = json.load(urlopen(req, timeout=10))

    return contents["permission"] in {"admin", "write"}


def _remove_label(repo_id: int, pr: int, label: str, *, token: str) -> None:
    quoted_label = quote(label)
    req = Request(
        f"https://api.github.com/repositories/{repo_id}/issues/{pr}/labels/{quoted_label}",
        method="DELETE",
        headers={"Authorization": f"token {token}"},
    )
    urlopen(req)


def main() -> int:
    parser = argparse.ArgumentParser()
    parser.add_argument("--repo-id", type=int, required=True)
    parser.add_argument("--pr", type=int, required=True)
    parser.add_argument("--event", required=True)
    parser.add_argument("--username", required=True)
    parser.add_argument("--label-names", type=json.loads, required=True)
    args = parser.parse_args()

    token = os.environ["GITHUB_TOKEN"]

    write_permission = _has_write(args.repo_id, args.username, token=token)

    if (
        not write_permission
        # `reopened` is included here due to close => push => reopen
        and args.event in {"synchronize", "reopened"}
        and LABEL in args.label_names
    ):
        print(f"Invalidating label [{LABEL}] due to code change...")
        _remove_label(args.repo_id, args.pr, LABEL, token=token)
        args.label_names.remove(LABEL)

    if write_permission or LABEL in args.label_names:
        print("Permissions passed!")
        print(f"- has write permission: {write_permission}")
        print(f"- has [{LABEL}] label: {LABEL in args.label_names}")
        return 0
    else:
        print("Permissions failed!")
        print(f"- has write permission: {write_permission}")
        print(f"- has [{LABEL}] label: {LABEL in args.label_names}")
        print(f"- args.label_names: {args.label_names}")
        print(
            f"Please have a collaborator add the [{LABEL}] label once they "
            f"have reviewed the code to trigger tests."
        )
        return 1


if __name__ == "__main__":
    raise SystemExit(main())
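For reference, the AWS Lambda workflow below runs this script from its `check-permissions` job, passing the PR metadata from the `pull_request_target` event. A minimal sketch of an equivalent invocation follows; the repo id, PR number, username, and token value are illustrative placeholders, not real values:

# Hypothetical dry run of the permission check, mirroring the workflow call:
#
#   GITHUB_TOKEN="<token with repo read access>" \
#   python3 -uS .github/workflows/scripts/trigger_tests_on_label.py \
#       --repo-id 123456 \
#       --pr 42 \
#       --event synchronize \
#       --username some-contributor \
#       --label-names '["Trigger: tests using secrets"]'
#
# An exit status of 0 means the secret-using test suites may run; 1 means a
# maintainer still needs to review the changes and re-apply the label.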
sentry-python-1.39.2/.github/workflows/test-integrations-aws-lambda.yml

name: Test AWS Lambda

on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  # XXX: We are using `pull_request_target` instead of `pull_request` because we want
  # this to run on forks with access to the secrets necessary to run the test suite.
  # Prefer to use `pull_request` when possible.
  pull_request_target:
    types: [labeled, opened, reopened, synchronize]

# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

permissions:
  contents: read
  # `write` is needed to remove the `Trigger: tests using secrets` label
  pull-requests: write

env:
  SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID: ${{ secrets.SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID }}
  SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY: ${{ secrets.SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY }}
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless

jobs:
  check-permissions:
    name: permissions check
    runs-on: ubuntu-20.04
    steps:
      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # v3.1.0
        with:
          persist-credentials: false
      - name: Check permissions on PR
        if: github.event_name == 'pull_request_target'
        run: |
          python3 -uS .github/workflows/scripts/trigger_tests_on_label.py \
            --repo-id ${{ github.event.repository.id }} \
            --pr ${{ github.event.number }} \
            --event ${{ github.event.action }} \
            --username "$ARG_USERNAME" \
            --label-names "$ARG_LABEL_NAMES"
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          # these can contain special characters
          ARG_USERNAME: ${{ github.event.pull_request.user.login }}
          ARG_LABEL_NAMES: ${{ toJSON(github.event.pull_request.labels.*.name) }}
      - name: Check permissions on repo branch
        if: github.event_name == 'push'
        run: true
  test-aws_lambda-pinned:
    name: AWS Lambda (pinned)
    timeout-minutes: 30
    needs: check-permissions
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.9"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.sha || github.ref }}
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test aws_lambda pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-aws_lambda" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
  check_required_tests:
    name: All AWS Lambda tests passed
    needs: test-aws_lambda-pinned
    # Always run this, even if a dependent job failed
    if: always()
    runs-on: ubuntu-20.04
    steps:
      - name: Check for failures
        if: contains(needs.test-aws_lambda-pinned.result, 'failure') || contains(needs.test-aws_lambda-pinned.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1

sentry-python-1.39.2/.github/workflows/test-integrations-cloud-computing.yml

name: Test Cloud Computing

on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  pull_request:

# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

permissions:
  contents: read

env:
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless

jobs:
  test-cloud_computing-latest:
    name: Cloud Computing (latest)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.7","3.10","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test boto3 latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-boto3-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test chalice latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-chalice-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test cloud_resource_context latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-cloud_resource_context-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test gcp latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-gcp-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
  test-cloud_computing-pinned:
    name: Cloud Computing (pinned)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.6","3.7","3.9","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test boto3 pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-boto3" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test chalice pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-chalice" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test cloud_resource_context pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-cloud_resource_context" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test gcp pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-gcp" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
  test-cloud_computing-py27:
    name: Cloud Computing (py27)
    timeout-minutes: 30
    runs-on: ubuntu-20.04
    container: python:2.7
    steps:
      - uses: actions/checkout@v4
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test boto3 py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-boto3" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test chalice py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-chalice" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test cloud_resource_context py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-cloud_resource_context" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test gcp py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-gcp" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
  check_required_tests:
    name: All Cloud Computing tests passed
    needs: [test-cloud_computing-pinned, test-cloud_computing-py27]
    # Always run this, even if a dependent job failed
    if: always()
    runs-on: ubuntu-20.04
    steps:
      - name: Check for failures
        if: contains(needs.test-cloud_computing-pinned.result, 'failure') || contains(needs.test-cloud_computing-pinned.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
      - name: Check for 2.7 failures
        if: contains(needs.test-cloud_computing-py27.result, 'failure') || contains(needs.test-cloud_computing-py27.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1

sentry-python-1.39.2/.github/workflows/test-integrations-common.yml

name: Test Common

on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  pull_request:

# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

permissions:
  contents: read

env:
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless

jobs:
  test-common-pinned:
    name: Common (pinned)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test common pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-common" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
  test-common-py27:
    name: Common (py27)
    timeout-minutes: 30
    runs-on: ubuntu-20.04
    container: python:2.7
    steps:
      - uses: actions/checkout@v4
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test common py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-common" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
  check_required_tests:
    name: All Common tests passed
    needs: [test-common-pinned, test-common-py27]
    # Always run this, even if a dependent job failed
    if: always()
    runs-on: ubuntu-20.04
    steps:
      - name: Check for failures
        if: contains(needs.test-common-pinned.result, 'failure') || contains(needs.test-common-pinned.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
      - name: Check for 2.7 failures
        if: contains(needs.test-common-py27.result, 'failure') || contains(needs.test-common-py27.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1

sentry-python-1.39.2/.github/workflows/test-integrations-data-processing.yml

name: Test Data Processing

on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  pull_request:

# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

permissions:
  contents: read

env:
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless

jobs:
  test-data_processing-latest:
    name: Data Processing (latest)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.5","3.7","3.8","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test arq latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-arq-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test beam latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-beam-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test celery latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-celery-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test huey latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-huey-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test rq latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-rq-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
  test-data_processing-pinned:
    name: Data Processing (pinned)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test arq pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-arq" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test beam pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-beam" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test celery pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-celery" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test huey pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-huey" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test rq pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-rq" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
  test-data_processing-py27:
    name: Data Processing (py27)
    timeout-minutes: 30
    runs-on: ubuntu-20.04
    container: python:2.7
    steps:
      - uses: actions/checkout@v4
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test arq py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-arq" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test beam py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-beam" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test celery py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-celery" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test huey py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-huey" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test rq py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-rq" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
  check_required_tests:
    name: All Data Processing tests passed
    needs: [test-data_processing-pinned, test-data_processing-py27]
    # Always run this, even if a dependent job failed
    if: always()
    runs-on: ubuntu-20.04
    steps:
      - name: Check for failures
        if: contains(needs.test-data_processing-pinned.result, 'failure') || contains(needs.test-data_processing-pinned.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
      - name: Check for 2.7 failures
        if: contains(needs.test-data_processing-py27.result, 'failure') || contains(needs.test-data_processing-py27.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1

sentry-python-1.39.2/.github/workflows/test-integrations-databases.yml

name: Test Databases

on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  pull_request:

# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

permissions:
  contents: read

env:
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless

jobs:
  test-databases-latest:
    name: Databases (latest)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.7","3.8","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    services:
      postgres:
        image: postgres
        env:
          POSTGRES_PASSWORD: sentry
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        # Maps tcp port 5432 on service container to the host
        ports:
          - 5432:5432
    env:
      SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
      SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
      SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - uses: getsentry/action-clickhouse-in-ci@v1
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
          psql postgresql://postgres:sentry@localhost:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true
          psql postgresql://postgres:sentry@localhost:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test asyncpg latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-asyncpg-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test clickhouse_driver latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-clickhouse_driver-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test pymongo latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-pymongo-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test sqlalchemy latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-sqlalchemy-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
  test-databases-pinned:
    name: Databases (pinned)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    services:
      postgres:
        image: postgres
        env:
          POSTGRES_PASSWORD: sentry
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        # Maps tcp port 5432 on service container to the host
        ports:
          - 5432:5432
    env:
      SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
      SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
      SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - uses: getsentry/action-clickhouse-in-ci@v1
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
          psql postgresql://postgres:sentry@localhost:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true
          psql postgresql://postgres:sentry@localhost:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test asyncpg pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-asyncpg" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test clickhouse_driver pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-clickhouse_driver" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test pymongo pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-pymongo" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test sqlalchemy pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-sqlalchemy" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
  test-databases-py27:
    name: Databases (py27)
    timeout-minutes: 30
    runs-on: ubuntu-20.04
    container: python:2.7
    services:
      postgres:
        image: postgres
        env:
          POSTGRES_PASSWORD: sentry
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        # Maps tcp port 5432 on service container to the host
        ports:
          - 5432:5432
    env:
      SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
      SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
      SENTRY_PYTHON_TEST_POSTGRES_HOST: postgres
    steps:
      - uses: actions/checkout@v4
      - uses: getsentry/action-clickhouse-in-ci@v1
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
          psql postgresql://postgres:sentry@postgres:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true
          psql postgresql://postgres:sentry@postgres:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test asyncpg py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-asyncpg" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test clickhouse_driver py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-clickhouse_driver" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test pymongo py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-pymongo" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test sqlalchemy py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-sqlalchemy" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
  check_required_tests:
    name: All Databases tests passed
    needs: [test-databases-pinned, test-databases-py27]
    # Always run this, even if a dependent job failed
    if: always()
    runs-on: ubuntu-20.04
    steps:
      - name: Check for failures
        if: contains(needs.test-databases-pinned.result, 'failure') || contains(needs.test-databases-pinned.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
      - name: Check for 2.7 failures
        if: contains(needs.test-databases-py27.result, 'failure') || contains(needs.test-databases-py27.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1

sentry-python-1.39.2/.github/workflows/test-integrations-graphql.yml

name: Test GraphQL

on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  pull_request:

# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

permissions:
  contents: read

env:
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless

jobs:
  test-graphql-latest:
    name: GraphQL (latest)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.7","3.8","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test ariadne latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-ariadne-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test gql latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-gql-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test graphene latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-graphene-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test strawberry latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-strawberry-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
  test-graphql-pinned:
    name: GraphQL (pinned)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.7","3.8","3.11"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test ariadne pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-ariadne" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test gql pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-gql" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test graphene pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-graphene" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test strawberry pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-strawberry" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
  check_required_tests:
    name: All GraphQL tests passed
    needs: test-graphql-pinned
    # Always run this, even if a dependent job failed
    if: always()
    runs-on: ubuntu-20.04
    steps:
      - name: Check for failures
        if: contains(needs.test-graphql-pinned.result, 'failure') || contains(needs.test-graphql-pinned.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1

sentry-python-1.39.2/.github/workflows/test-integrations-miscellaneous.yml

name: Test Miscellaneous

on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  pull_request:

# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

permissions:
  contents: read

env:
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless

jobs:
  test-miscellaneous-latest:
    name: Miscellaneous (latest)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.5","3.8","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test loguru latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-loguru-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test opentelemetry latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-opentelemetry-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test pure_eval latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-pure_eval-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test trytond latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-trytond-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
  test-miscellaneous-pinned:
    name: Miscellaneous (pinned)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.5","3.6","3.7","3.8","3.9","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test loguru pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-loguru" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test opentelemetry pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-opentelemetry" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test pure_eval pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-pure_eval" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test trytond pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-trytond" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
  check_required_tests:
    name: All Miscellaneous tests passed
    needs: test-miscellaneous-pinned
    # Always run this, even if a dependent job failed
    if: always()
    runs-on: ubuntu-20.04
    steps:
      - name: Check for failures
        if: contains(needs.test-miscellaneous-pinned.result, 'failure') || contains(needs.test-miscellaneous-pinned.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1

sentry-python-1.39.2/.github/workflows/test-integrations-networking.yml

name: Test Networking

on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  pull_request:

# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

permissions:
  contents: read

env:
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless

jobs:
  test-networking-latest:
    name: Networking (latest)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.8","3.9","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test gevent latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-gevent-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test grpc latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-grpc-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test httpx latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-httpx-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test requests latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-requests-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
  test-networking-pinned:
    name: Networking (pinned)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test gevent pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-gevent" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test grpc pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-grpc" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test httpx pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-httpx" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test requests pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-requests" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
  test-networking-py27:
    name: Networking (py27)
    timeout-minutes: 30
    runs-on: ubuntu-20.04
    container: python:2.7
    steps:
      - uses: actions/checkout@v4
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test gevent py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-gevent" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test grpc py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-grpc" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test httpx py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-httpx" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test requests py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-requests" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
  check_required_tests:
    name: All Networking tests passed
    needs: [test-networking-pinned, test-networking-py27]
    # Always run this, even if a dependent job failed
    if: always()
    runs-on: ubuntu-20.04
    steps:
      - name: Check for failures
        if: contains(needs.test-networking-pinned.result, 'failure') || contains(needs.test-networking-pinned.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
      - name: Check for 2.7 failures
        if: contains(needs.test-networking-py27.result, 'failure') || contains(needs.test-networking-py27.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1

sentry-python-1.39.2/.github/workflows/test-integrations-web-frameworks-1.yml

name: Test Web Frameworks 1

on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  pull_request:

# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

permissions:
  contents: read

env:
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless

jobs:
  test-web_frameworks_1-latest:
    name: Web Frameworks 1 (latest)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.8","3.10","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    services:
      postgres:
        image: postgres
        env:
          POSTGRES_PASSWORD: sentry
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        # Maps tcp port 5432 on service container to the host
        ports:
          - 5432:5432
    env:
      SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
      SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
      SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
          psql postgresql://postgres:sentry@localhost:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true
          psql postgresql://postgres:sentry@localhost:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test django latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-django-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test fastapi latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-fastapi-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test flask latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-flask-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test starlette latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-starlette-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
  test-web_frameworks_1-pinned:
    name: Web Frameworks 1 (pinned)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    services:
      postgres:
        image: postgres
        env:
          POSTGRES_PASSWORD: sentry
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        # Maps tcp port 5432 on service container to the host
        ports:
          - 5432:5432
    env:
      SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
      SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
      SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
          psql postgresql://postgres:sentry@localhost:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true
          psql postgresql://postgres:sentry@localhost:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test django pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-django" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test fastapi pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-fastapi" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test flask pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-flask" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test starlette pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-starlette" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
  test-web_frameworks_1-py27:
    name: Web Frameworks 1 (py27)
    timeout-minutes: 30
    runs-on: ubuntu-20.04
    container: python:2.7
    services:
      postgres:
        image: postgres
        env:
          POSTGRES_PASSWORD: sentry
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        # Maps tcp port 5432 on service container to the host
        ports:
          - 5432:5432
    env:
      SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
      SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
      SENTRY_PYTHON_TEST_POSTGRES_HOST: postgres
    steps:
      - uses: actions/checkout@v4
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
          psql postgresql://postgres:sentry@postgres:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true
          psql postgresql://postgres:sentry@postgres:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test django py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-django" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test fastapi py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-fastapi" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test flask py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-flask" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test starlette py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-starlette" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses:
codecov/codecov-action@v3 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml check_required_tests: name: All Web Frameworks 1 tests passed needs: [test-web_frameworks_1-pinned, test-web_frameworks_1-py27] # Always run this, even if a dependent job failed if: always() runs-on: ubuntu-20.04 steps: - name: Check for failures if: contains(needs.test-web_frameworks_1-pinned.result, 'failure') || contains(needs.test-web_frameworks_1-pinned.result, 'skipped') run: | echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1 - name: Check for 2.7 failures if: contains(needs.test-web_frameworks_1-py27.result, 'failure') || contains(needs.test-web_frameworks_1-py27.result, 'skipped') run: | echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1 sentry-python-1.39.2/.github/workflows/test-integrations-web-frameworks-2.yml000066400000000000000000000270051454744723200273640ustar00rootroot00000000000000name: Test Web Frameworks 2 on: push: branches: - master - release/** - sentry-sdk-2.0 pull_request: # Cancel in progress workflows on pull_requests. # https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true permissions: contents: read env: BUILD_CACHE_KEY: ${{ github.sha }} CACHED_BUILD_PATHS: | ${{ github.workspace }}/dist-serverless jobs: test-web_frameworks_2-latest: name: Web Frameworks 2 (latest) timeout-minutes: 30 runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: python-version: ["3.5","3.6","3.7","3.8","3.11","3.12"] # python3.6 reached EOL and is no longer being supported on # new versions of hosted runners on Github Actions # ubuntu-20.04 is the last version that supported python3.6 # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-20.04] steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Setup Test Env run: | pip install coverage "tox>=3,<4" - name: Erase coverage run: | coverage erase - name: Test aiohttp latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-aiohttp-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test asgi latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-asgi-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test bottle latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-bottle-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test falcon latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-falcon-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test pyramid latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-pyramid-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test quart latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-quart-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test redis latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-redis-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test 
rediscluster latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-rediscluster-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test sanic latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-sanic-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test starlite latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-starlite-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test tornado latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-tornado-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Generate coverage XML run: | coverage combine .coverage* coverage xml -i - uses: codecov/codecov-action@v3 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml test-web_frameworks_2-pinned: name: Web Frameworks 2 (pinned) timeout-minutes: 30 runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: python-version: ["3.5","3.6","3.7","3.8","3.9","3.11","3.12"] # python3.6 reached EOL and is no longer being supported on # new versions of hosted runners on Github Actions # ubuntu-20.04 is the last version that supported python3.6 # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-20.04] steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Setup Test Env run: | pip install coverage "tox>=3,<4" - name: Erase coverage run: | coverage erase - name: Test aiohttp pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-aiohttp" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test asgi pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-asgi" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test bottle pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-bottle" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test falcon pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-falcon" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test pyramid pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-pyramid" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test quart pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-quart" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test redis pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-redis" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test rediscluster pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-rediscluster" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test sanic pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-sanic" --cov=tests --cov=sentry_sdk --cov-report= 
--cov-branch - name: Test starlite pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-starlite" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test tornado pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-tornado" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Generate coverage XML run: | coverage combine .coverage* coverage xml -i - uses: codecov/codecov-action@v3 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml test-web_frameworks_2-py27: name: Web Frameworks 2 (py27) timeout-minutes: 30 runs-on: ubuntu-20.04 container: python:2.7 steps: - uses: actions/checkout@v4 - name: Setup Test Env run: | pip install coverage "tox>=3,<4" - name: Erase coverage run: | coverage erase - name: Test aiohttp py27 run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py2.7-aiohttp" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test asgi py27 run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py2.7-asgi" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test bottle py27 run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py2.7-bottle" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test falcon py27 run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py2.7-falcon" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test pyramid py27 run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py2.7-pyramid" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test quart py27 run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py2.7-quart" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test redis py27 run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py2.7-redis" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test rediscluster py27 run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py2.7-rediscluster" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test sanic py27 run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py2.7-sanic" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test starlite py27 run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py2.7-starlite" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test tornado py27 run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py2.7-tornado" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Generate coverage XML run: | coverage combine .coverage* coverage xml -i - uses: codecov/codecov-action@v3 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml check_required_tests: name: All Web Frameworks 2 tests passed needs: [test-web_frameworks_2-pinned, test-web_frameworks_2-py27] # Always run this, even if a dependent job failed if: always() runs-on: ubuntu-20.04 steps: - name: Check for failures if: contains(needs.test-web_frameworks_2-pinned.result, 'failure') || contains(needs.test-web_frameworks_2-pinned.result, 'skipped') run: | echo "One of the dependent jobs has failed. You may need to re-run it." 
&& exit 1 - name: Check for 2.7 failures if: contains(needs.test-web_frameworks_2-py27.result, 'failure') || contains(needs.test-web_frameworks_2-py27.result, 'skipped') run: | echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1 sentry-python-1.39.2/.gitignore000066400000000000000000000004151454744723200164550ustar00rootroot00000000000000*.pyc *.log *.egg *.db *.pid .python-version .coverage* .DS_Store .tox pip-log.txt *.egg-info /build /dist /dist-serverless sentry-python-serverless*.zip .cache .idea .eggs venv .venv .vscode/tags .pytest_cache .hypothesis relay pip-wheel-metadata .mypy_cache .vscode/ sentry-python-1.39.2/.gitmodules000066400000000000000000000001741454744723200166440ustar00rootroot00000000000000[submodule "checkouts/data-schemas"] path = checkouts/data-schemas url = https://github.com/getsentry/sentry-data-schemas sentry-python-1.39.2/.pre-commit-config.yaml000066400000000000000000000011741454744723200207510ustar00rootroot00000000000000# See https://pre-commit.com for more information # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.3.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer - repo: https://github.com/psf/black rev: 22.6.0 hooks: - id: black exclude: ^(.*_pb2.py|.*_pb2_grpc.py) - repo: https://github.com/pycqa/flake8 rev: 5.0.4 hooks: - id: flake8 # Disabled for now, because it lists a lot of problems. #- repo: https://github.com/pre-commit/mirrors-mypy # rev: 'v0.931' # hooks: # - id: mypy sentry-python-1.39.2/.tool-versions000066400000000000000000000000161454744723200173060ustar00rootroot00000000000000python 3.7.12 sentry-python-1.39.2/CHANGELOG.md000066400000000000000000002573201454744723200163070ustar00rootroot00000000000000# Changelog ## 1.39.2 ### Various fixes & improvements - Fix timestamp in transaction created by OTel (#2627) by @antonpirker - Fix relative path in DB query source (#2624) by @antonpirker - Run more CI checks on 2.0 branch (#2625) by @sentrivana - Fix tracing `TypeError` for static and class methods (#2559) by @szokeasaurusrex - Fix missing `ctx` in Arq integration (#2600) by @ivanovart - Change `data_category` from `check_in` to `monitor` (#2598) by @sentrivana ## 1.39.1 ### Various fixes & improvements - Fix psycopg2 detection in the Django integration (#2593) by @sentrivana - Filter out empty string releases (#2591) by @sentrivana - Fixed local var not present when there is an error in a user's `error_sampler` function (#2511) by @antonpirker - Fixed typing in `aiohttp` (#2590) by @antonpirker ## 1.39.0 ### Various fixes & improvements - Add support for cluster clients from Redis SDK (#2394) by @md384 - Improve location reporting for timer metrics (#2552) by @mitsuhiko - Fix Celery `TypeError` with no-argument `apply_async` (#2575) by @szokeasaurusrex - Fix Lambda integration with EventBridge source (#2546) by @davidcroda - Add max tries to Spotlight (#2571) by @hazAT - Handle `os.path.devnull` access issues (#2579) by @sentrivana - Change `code.filepath` frame picking logic (#2568) by @sentrivana - Trigger AWS Lambda tests on label (#2538) by @sentrivana - Run permissions step on pull_request_target but not push (#2548) by @sentrivana - Hash AWS Lambda test functions based on current revision (#2557) by @sentrivana - Update Django version in tests (#2562) by @sentrivana - Make metrics tests non-flaky (#2572) by @antonpirker ## 1.38.0 ### Various fixes & improvements - Only add trace context to checkins and do not run 
`event_processors` for checkins (#2536) by @antonpirker - Metric span summaries (#2522) by @mitsuhiko - Add source context to code locations (#2539) by @jan-auer - Use in-app filepath instead of absolute path (#2541) by @antonpirker - Switch to `jinja2` for generating CI yamls (#2534) by @sentrivana ## 1.37.1 ### Various fixes & improvements - Fix `NameError` on `parse_version` with eventlet (#2532) by @sentrivana - build(deps): bump checkouts/data-schemas from `68def1e` to `e9f7d58` (#2501) by @dependabot ## 1.37.0 ### Various fixes & improvements - Move installed modules code to utils (#2429) by @sentrivana Note: We moved the internal function `_get_installed_modules` from `sentry_sdk.integrations.modules` to `sentry_sdk.utils`. So if you use this function, you have to update your imports. - Add code locations for metrics (#2526) by @jan-auer - Add query source to DB spans (#2521) by @antonpirker - Send events to Spotlight sidecar (#2524) by @HazAT - Run integration tests with newest `pytest` (#2518) by @sentrivana - Bring tests up to date (#2512) by @sentrivana - Fix: Prevent global var from being discarded at shutdown (#2530) by @antonpirker - Fix: Scope transaction source not being updated in scope.span setter (#2519) by @sl0thentr0py ## 1.36.0 ### Various fixes & improvements - Django: Support Django 5.0 (#2490) by @sentrivana - Django: Handling ASGI body in the right way. (#2513) by @antonpirker - Flask: Test with Flask 3.0 (#2506) by @sentrivana - Celery: Do not create a span when task is triggered by Celery Beat (#2510) by @antonpirker - Redis: Ensure `RedisIntegration` is disabled, unless `redis` is installed (#2504) by @szokeasaurusrex - Quart: Fix Quart integration for Quart 0.19.4 (#2516) by @antonpirker - gRPC: Make async gRPC less noisy (#2507) by @jyggen ## 1.35.0 ### Various fixes & improvements - **Updated gRPC integration:** Asyncio interceptors and easier setup (#2369) by @fdellekart Our gRPC integration now instruments incoming unary-unary grpc requests and outgoing unary-unary, unary-stream grpc requests using grpcio channels. Everything now works for sync and async code. Before this release you had to add Sentry interceptors to your gRPC code by hand; now the only thing you need to do is add the `GRPCIntegration` to your `sentry_sdk.init()` call. (See [documentation](https://docs.sentry.io/platforms/python/integrations/grpc/) for more information): ```python import sentry_sdk from sentry_sdk.integrations.grpc import GRPCIntegration sentry_sdk.init( dsn="___PUBLIC_DSN___", enable_tracing=True, integrations=[ GRPCIntegration(), ], ) ``` The old way still works, but we strongly encourage you to update your code to the way described above. - Python 3.12: Replace deprecated datetime functions (#2502) by @sentrivana - Metrics: Unify datetime format (#2409) by @mitsuhiko - Celery: Set correct data in `check_in`s (#2500) by @antonpirker - Celery: Read timezone for Crons monitors from `celery_schedule` if existing (#2497) by @antonpirker - Django: Removing redundant code in Django tests (#2491) by @vagi8 - Django: Make reading the request body work in Django ASGI apps. (#2495) by @antonpirker - FastAPI: Use wraps on fastapi request call wrapper (#2476) by @nkaras - Fix: Probe for psycopg2 and psycopg3 parameters function.
(#2492) by @antonpirker - Fix: Remove unnecessary TYPE_CHECKING alias (#2467) by @rafrafek ## 1.34.0 ### Various fixes & improvements - Added Python 3.12 support (#2471, #2483) - Handle missing `connection_kwargs` in `patch_redis_client` (#2482) by @szokeasaurusrex - Run common test suite on Python 3.12 (#2479) by @sentrivana ## 1.33.1 ### Various fixes & improvements - Make parse_version work in utils.py itself. (#2474) by @antonpirker ## 1.33.0 ### Various fixes & improvements - New: Added `error_sampler` option (#2456) by @szokeasaurusrex - Python 3.12: Detect interpreter in shutdown state on thread spawn (#2468) by @mitsuhiko - Patch eventlet under Sentry SDK (#2464) by @szokeasaurusrex - Mitigate CPU spikes when sending lots of events with lots of data (#2449) by @antonpirker - Make `debug` option also configurable via environment (#2450) by @antonpirker - Make sure `get_dsn_parameters` is an actual function (#2441) by @sentrivana - Bump pytest-localserver, add compat comment (#2448) by @sentrivana - AWS Lambda: Update compatible runtimes for AWS Lambda layer (#2453) by @antonpirker - AWS Lambda: Load AWS Lambda secrets in Github CI (#2153) by @antonpirker - Redis: Connection attributes in `redis` database spans (#2398) by @antonpirker - Falcon: Falcon integration checks response status before reporting error (#2465) by @szokeasaurusrex - Quart: Support Quart 0.19 onwards (#2403) by @pgjones - Sanic: Sanic integration initial version (#2419) by @szokeasaurusrex - Django: Fix parsing of Django `path` patterns (#2452) by @sentrivana - Django: Add Django 4.2 to test suite (#2462) by @sentrivana - Polish changelog (#2434) by @sentrivana - Update CONTRIBUTING.md (#2443) by @krishvsoni - Update README.md (#2435) by @sentrivana ## 1.32.0 ### Various fixes & improvements - **New:** Error monitoring for some of the most popular Python GraphQL libraries: - Add [GQL GraphQL integration](https://docs.sentry.io/platforms/python/integrations/gql/) (#2368) by @szokeasaurusrex Usage: ```python import sentry_sdk from sentry_sdk.integrations.gql import GQLIntegration sentry_sdk.init( dsn='___PUBLIC_DSN___', integrations=[ GQLIntegration(), ], ) ``` - Add [Graphene GraphQL error integration](https://docs.sentry.io/platforms/python/integrations/graphene/) (#2389) by @sentrivana Usage: ```python import sentry_sdk from sentry_sdk.integrations.graphene import GrapheneIntegration sentry_sdk.init( dsn='___PUBLIC_DSN___', integrations=[ GrapheneIntegration(), ], ) ``` - Add [Strawberry GraphQL error & tracing integration](https://docs.sentry.io/platforms/python/integrations/strawberry/) (#2393) by @sentrivana Usage: ```python import sentry_sdk from sentry_sdk.integrations.strawberry import StrawberryIntegration sentry_sdk.init( dsn='___PUBLIC_DSN___', integrations=[ # make sure to set async_execution to False if you're executing # GraphQL queries synchronously StrawberryIntegration(async_execution=True), ], traces_sample_rate=1.0, ) ``` - Add [Ariadne GraphQL error integration](https://docs.sentry.io/platforms/python/integrations/ariadne/) (#2387) by @sentrivana Usage: ```python import sentry_sdk from sentry_sdk.integrations.ariadne import AriadneIntegration sentry_sdk.init( dsn='___PUBLIC_DSN___', integrations=[ AriadneIntegration(), ], ) ``` - Capture multiple named groups again (#2432) by @sentrivana - Don't fail when upstream scheme is unusual (#2371) by @vanschelven - Support new RQ version (#2405) by @antonpirker - Remove `utcnow`, `utcfromtimestamp` deprecated in Python 3.12 (#2415) by @rmad17 - Add 
`trace` to `__all__` in top-level `__init__.py` (#2401) by @lobsterkatie - Move minimetrics code to the SDK (#2385) by @mitsuhiko - Add configurable compression levels (#2382) by @mitsuhiko - Shift flushing by up to a rollup window (#2396) by @mitsuhiko - Make a consistent noop flush behavior (#2428) by @mitsuhiko - Stronger recursion protection (#2426) by @mitsuhiko - Remove `OpenTelemetryIntegration` from `__init__.py` (#2379) by @sentrivana - Update API docs (#2397) by @antonpirker - Pin some test requirements because new majors break our tests (#2404) by @antonpirker - Run more `requests`, `celery`, `falcon` tests (#2414) by @sentrivana - Move `importorskip`s in tests to `__init__.py` files (#2412) by @sentrivana - Fix `mypy` errors (#2433) by @sentrivana - Fix pre-commit issues (#2424) by @bukzor-sentryio - Update [CONTRIBUTING.md](https://github.com/getsentry/sentry-python/blob/master/CONTRIBUTING.md) (#2411) by @sentrivana - Bump `sphinx` from 7.2.5 to 7.2.6 (#2378) by @dependabot - [Experimental] Add explain plan to DB spans (#2315) by @antonpirker ## 1.31.0 ### Various fixes & improvements - **New:** Add integration for `clickhouse-driver` (#2167) by @mimre25 For more information, see the [clickhouse-driver documentation](https://docs.sentry.io/platforms/python/configuration/integrations/clickhouse-driver). Usage: ```python import sentry_sdk from sentry_sdk.integrations.clickhouse_driver import ClickhouseDriverIntegration sentry_sdk.init( dsn='___PUBLIC_DSN___', integrations=[ ClickhouseDriverIntegration(), ], ) ``` - **New:** Add integration for `asyncpg` (#2314) by @mimre25 For more information, see the [asyncpg documentation](https://docs.sentry.io/platforms/python/configuration/integrations/asyncpg/). Usage: ```python import sentry_sdk from sentry_sdk.integrations.asyncpg import AsyncPGIntegration sentry_sdk.init( dsn='___PUBLIC_DSN___', integrations=[ AsyncPGIntegration(), ], ) ``` - **New:** Allow overriding `propagate_traces` in `Celery` per task (#2331) by @jan-auer For more information, see the [Celery documentation](https://docs.sentry.io//platforms/python/guides/celery/#distributed-traces). Usage: ```python import sentry_sdk from sentry_sdk.integrations.celery import CeleryIntegration # Enable global distributed traces (this is the default, just to be explicit.) sentry_sdk.init( dsn='___PUBLIC_DSN___', integrations=[ CeleryIntegration(propagate_traces=True), ], ) ... # This will NOT propagate the trace. (The task will start its own trace): my_task_b.apply_async( args=("some_parameter", ), headers={"sentry-propagate-traces": False}, ) ``` - Prevent Falcon integration from breaking ASGI apps (#2359) by @szokeasaurusrex - Backpressure: only downsample a max of 10 times (#2347) by @sl0thentr0py - Made NoOpSpan compatible with Transactions. (#2364) by @antonpirker - Cleanup ASGI integration (#2335) by @antonpirker - Pin anyio in tests (dep of httpx), because new major 4.0.0 breaks tests. (#2336) by @antonpirker - Added link to backpressure section in docs.
(#2354) by @antonpirker - Add .vscode to .gitignore (#2317) by @shoaib-mohd - Documenting Spans and Transactions (#2358) by @antonpirker - Fix in profiler: do not call getcwd from module root (#2329) by @Zylphrex - Fix deprecated version attribute (#2338) by @vagi8 - Fix transaction name in Starlette and FastAPI (#2341) by @antonpirker - Fix tests using Postgres (#2362) by @antonpirker - build(deps): Updated linting tooling (#2350) by @antonpirker - build(deps): bump sphinx from 7.2.4 to 7.2.5 (#2344) by @dependabot - build(deps): bump actions/checkout from 2 to 4 (#2352) by @dependabot - build(deps): bump checkouts/data-schemas from `ebc77d3` to `68def1e` (#2351) by @dependabot ## 1.30.0 ### Various fixes & improvements - Officially support Python 3.11 (#2300) by @sentrivana - Context manager monitor (#2290) by @szokeasaurusrex - Set response status code in transaction `response` context. (#2312) by @antonpirker - Add missing context kwarg to `_sentry_task_factory` (#2267) by @JohnnyDeuss - In Postgres take the connection params from the connection (#2308) by @antonpirker - Experimental: Allow using OTel for performance instrumentation (#2272) by @sentrivana This release includes experimental support for replacing Sentry's default performance monitoring solution with one powered by OpenTelemetry without having to do any manual setup. Try it out by installing `pip install sentry-sdk[opentelemetry-experimental]` and then initializing the SDK with: ```python sentry_sdk.init( # ...your usual options... _experiments={"otel_powered_performance": True}, ) ``` This enables OpenTelemetry performance monitoring support for some of the most popular frameworks and libraries (Flask, Django, FastAPI, requests...). We're looking forward to your feedback! Please let us know about your experience in this discussion: https://github.com/getsentry/sentry/discussions/55023 **Important note:** Please note that this feature is experimental and in a proof-of-concept stage and is not meant for production use. It may be changed or removed at any point. - Enable backpressure handling by default (#2298) by @sl0thentr0py The SDK now dynamically downsamples transactions to reduce backpressure in high throughput systems. It starts a new `Monitor` thread to perform some health checks which decide to downsample (halved each time) in 10 second intervals till the system is healthy again. To disable this behavior, use: ```python sentry_sdk.init( # ...your usual options... enable_backpressure_handling=False, ) ``` If your system serves heavy load, please let us know how this feature works for you! Check out the [documentation](https://docs.sentry.io/platforms/python/configuration/options/#enable-backpressure-handling) for more information. 
- Stop recording spans for internal web requests to Sentry (#2297) by @szokeasaurusrex - Add test for `ThreadPoolExecutor` (#2259) by @gggritso - Add docstrings for `Scope.update_from_*` (#2311) by @sentrivana - Moved `is_sentry_url` to utils (#2304) by @szokeasaurusrex - Fix: arq attribute error on settings, support worker args (#2260) by @rossmacarthur - Fix: Exceptions include detail property for their value (#2193) by @nicolassanmar - build(deps): bump mypy from 1.4.1 to 1.5.1 (#2319) by @dependabot - build(deps): bump sphinx from 7.1.2 to 7.2.4 (#2322) by @dependabot - build(deps): bump sphinx from 7.0.1 to 7.1.2 (#2296) by @dependabot - build(deps): bump checkouts/data-schemas from `1b85152` to `ebc77d3` (#2254) by @dependabot ## 1.29.2 ### Various fixes & improvements - Revert GraphQL integration (#2287) by @sentrivana ## 1.29.1 ### Various fixes & improvements - Fix GraphQL integration swallowing responses (#2286) by @sentrivana - Fix typo (#2283) by @sentrivana ## 1.29.0 ### Various fixes & improvements - Capture GraphQL client errors (#2243) by @sentrivana - The SDK will now create dedicated errors whenever an HTTP client makes a request to a `/graphql` endpoint and the response contains an error. You can opt out of this by providing `capture_graphql_errors=False` to the HTTP client integration. - Read MAX_VALUE_LENGTH from client options (#2121) (#2171) by @puittenbroek - Rename `request_bodies` to `max_request_body_size` (#2247) by @mgaligniana - Always sample checkin regardless of `sample_rate` (#2279) by @szokeasaurusrex - Add information to short-interval cron error message (#2246) by @lobsterkatie - Add DB connection attributes in spans (#2274) by @antonpirker - Add `db.system` to remaining Redis spans (#2271) by @AbhiPrasad - Clarified the procedure for running tests (#2276) by @szokeasaurusrex - Fix Chalice tests (#2278) by @sentrivana - Bump Black from 23.3.0 to 23.7.0 (#2256) by @dependabot - Remove py3.4 from tox.ini (#2248) by @sentrivana ## 1.28.1 ### Various fixes & improvements - Redis: Add support for redis.asyncio (#1933) by @Zhenay - Make sure each task that is started by Celery Beat has its own trace. (#2249) by @antonpirker - Add Sampling Decision to Trace Envelope Header (#2239) by @antonpirker - Do not add trace headers (`sentry-trace` and `baggage`) to HTTP requests to Sentry (#2240) by @antonpirker - Prevent adding `sentry-trace` header multiple times (#2235) by @antonpirker - Skip distributions with incomplete metadata (#2231) by @rominf - Remove stale.yml (#2245) by @hubertdeng123 - Django: Fix 404 handler being labeled as "generic ASGI request" (#1277) by @BeryJu ## 1.28.0 ### Various fixes & improvements - Add support for cron jobs in ARQ integration (#2088) by @lewazo - Backpressure handling prototype (#2189) by @sl0thentr0py - Add "replay" context to event payload (#2234) by @antonpirker - Update test Django app to be compatible with Django 4.x (#1794) by @DilLip-Chowdary-Codes ## 1.27.1 ### Various fixes & improvements - Add Starlette/FastAPI template tag for adding Sentry tracing information (#2225) by @antonpirker - By adding `{{ sentry_trace_meta }}` to your Starlette/FastAPI Jinja2 templates we will include Sentry trace information as a meta tag in the rendered HTML to allow your frontend to pick up and continue the trace started in the backend.
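- For illustration, a minimal FastAPI sketch (the app wiring, route, and template name here are made up for the example; `{{ sentry_trace_meta }}` in the template is what the SDK provides, and `jinja2` must be installed):

  ```python
  from fastapi import FastAPI, Request
  from fastapi.templating import Jinja2Templates

  app = FastAPI()
  templates = Jinja2Templates(directory="templates")

  @app.get("/")
  async def index(request: Request):
      # templates/index.html contains {{ sentry_trace_meta }} in its <head>;
      # with the FastAPI/Starlette integration enabled it renders as a <meta>
      # tag that the frontend can use to continue the backend trace.
      return templates.TemplateResponse("index.html", {"request": request})
  ```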
- Fixed generation of baggage when a DSC is already in propagation context (#2232) by @antonpirker - Handle explicitly passing `None` for `trace_configs` in `aiohttp` (#2230) by @Harmon758 - Support newest Starlette versions (#2227) by @antonpirker ## 1.27.0 ### Various fixes & improvements - Support for SQLAlchemy 2.0 (#2200) by @antonpirker - Add instrumentation of `aiohttp` client requests (#1761) by @md384 - Add Django template tag for adding Sentry tracing information (#2222) by @antonpirker - By adding `{{ sentry_trace_meta }}` to your Django templates we will include Sentry trace information as a meta tag in the rendered HTML to allow your frontend to pick up and continue the trace started in the backend. - Update Flask HTML meta helper (#2203) by @antonpirker - Take trace ID always from propagation context (#2209) by @antonpirker - Fix trace context in event payload (#2205) by @antonpirker - Use new top level API in `trace_propagation_meta` (#2202) by @antonpirker - Do not overwrite existing baggage on outgoing requests (#2191, #2214) by @sentrivana - Set the transaction/span status from an OTel span (#2115) by @daniil-konovalenko - Fix propagation of OTel `NonRecordingSpan` (#2187) by @hartungstenio - Fix `TaskLockedException` handling in Huey integration (#2206) by @Zhenay - Add message format configuration arguments to Loguru integration (#2208) by @Gwill - Profiling: Add client reports for profiles (#2207) by @Zylphrex - CI: Fix CI (#2220) by @antonpirker - Dependencies: Bump `checkouts/data-schemas` from `7fdde87` to `1b85152` (#2218) by @dependabot - Dependencies: Bump `mypy` from 1.3.0 to 1.4.1 (#2194) by @dependabot - Docs: Change API doc theme (#2210) by @sentrivana - Docs: Allow (some) autocompletion for top-level API (#2213) by @sentrivana - Docs: Revert autocomplete hack (#2224) by @sentrivana ## 1.26.0 ### Various fixes & improvements - Tracing without performance (#2136) by @antonpirker - Load tracing information from environment (#2176) by @antonpirker - Auto-enable HTTPX integration if HTTPX installed (#2177) by @sentrivana - Support for SOCKS proxies (#1050) by @Roguelazer - Wrap `parse_url` calls in `capture_internal_exceptions` (#2162) by @sentrivana - Run 2.7 tests in CI again (#2181) by @sentrivana - Crons: Do not support sub-minute cron intervals (#2172) by @antonpirker - Profile: Add function name to profiler frame cache (#2164) by @Zylphrex - Dependencies: bump checkouts/data-schemas from `0ed3357` to `7fdde87` (#2165) by @dependabot - Update changelog (#2163) by @sentrivana ## 1.25.1 ### Django update (ongoing) Collections of improvements to our Django integration. By: @mgaligniana (#1773) ### Various fixes & improvements - Fix `parse_url` (#2161) by @sentrivana and @antonpirker Our URL sanitization used in multiple integrations broke with the recent Python security update. If you started seeing `ValueError`s with `"'Filtered' does not appear to be an IPv4 or IPv6 address"`, this release fixes that. See [the original issue](https://github.com/getsentry/sentry-python/issues/2160) for more context. - Better version parsing in integrations (#2152) by @antonpirker We now properly support all integration versions that conform to [PEP 440](https://peps.python.org/pep-0440/). This replaces our naïve version parsing that wouldn't accept versions such as `2.0.0rc1` or `2.0.5.post1`. 
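- As a rough illustration of what now parses (`parse_version` is an internal helper in `sentry_sdk.utils`, referenced elsewhere in this changelog; the return values shown are indicative, not guaranteed):

  ```python
  from sentry_sdk.utils import parse_version

  # PEP 440 pre- and post-releases now yield a usable version tuple
  # instead of failing to parse.
  print(parse_version("2.0.0rc1"))     # e.g. (2, 0, 0)
  print(parse_version("2.0.5.post1"))  # e.g. (2, 0, 5)
  ```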
- Align HTTP status code as span data field `http.response.status_code` (#2113) by @antonpirker - Do not encode cached value to determine size (#2143) by @sentrivana - Fix using `unittest.mock` whenever available (#1926) by @mgorny - Fix 2.7 `common` tests (#2145) by @sentrivana - Bump `actions/stale` from `6` to `8` (#1978) by @dependabot - Bump `black` from `22.12.0` to `23.3.0` (#1984) by @dependabot - Bump `mypy` from `1.2.0` to `1.3.0` (#2110) by @dependabot - Bump `sphinx` from `5.3.0` to `7.0.1` (#2112) by @dependabot ## 1.25.0 ### Various fixes & improvements - Support urllib3>=2.0.0 (#2148) by @asottile-sentry We're now supporting urllib3's new major version, 2.0.0. If you encounter issues (e.g. some of your dependencies not supporting the new urllib3 version yet) you might consider pinning the urllib3 version to `<2.0.0` manually in your project. Check out [the urllib3 migration guide](https://urllib3.readthedocs.io/en/latest/v2-migration-guide.html#migrating-as-an-application-developer) for details. - Auto-retry tests on failure (#2134) by @sentrivana - Correct `importlib.metadata` check in `test_modules` (#2149) by @asottile-sentry - Fix distribution name normalization (PEP-0503) (#2144) by @rominf - Fix `functions_to_trace` typing (#2141) by @rcmarron ## 1.24.0 ### Various fixes & improvements - **New:** Celery Beat exclude tasks option (#2130) by @antonpirker You can exclude Celery Beat tasks from being auto-instrumented. To do this, add a list of tasks you want to exclude as option `exclude_beat_tasks` when creating `CeleryIntegration`. The list can contain simple strings with the full task name, as specified in the Celery Beat schedule, or regular expressions to match multiple tasks. For more information, see the [Crons documentation](https://docs.sentry.io/platforms/python/guides/celery/crons/). Usage: ```python exclude_beat_tasks = [ "some-task-a", "payment-check-.*", ] sentry_sdk.init( dsn='___PUBLIC_DSN___', integrations=[ CeleryIntegration( monitor_beat_tasks=True, exclude_beat_tasks=exclude_beat_tasks, ), ], ) ``` In this example, the task `some-task-a` and all tasks with a name starting with `payment-check-` will be ignored. - **New:** Add support for **ExceptionGroups** (#2025) by @antonpirker _Note:_ If running Self-Hosted Sentry, you should wait to adopt this SDK update until after updating to the 23.6.0 (est. June 2023) release of Sentry. Updating early will not break anything, but you will not get the full benefit of the Exception Groups improvements to issue grouping that were added to the Sentry backend. - Prefer `importlib.metadata` over `pkg_resources` if available (#2081) by @sentrivana - Work with a copy of request, vars in the event (#2125) by @sentrivana - Pinned version of dependency that broke the build (#2133) by @antonpirker ## 1.23.1 ### Various fixes & improvements - Disable Django Cache spans by default. (#2120) by @antonpirker ## 1.23.0 ### Various fixes & improvements - **New:** Add `loguru` integration (#1994) by @PerchunPak Check [the documentation](https://docs.sentry.io/platforms/python/configuration/integrations/loguru/) for more information.
Usage: ```python from loguru import logger import sentry_sdk from sentry_sdk.integrations.loguru import LoguruIntegration sentry_sdk.init( dsn="___PUBLIC_DSN___", integrations=[ LoguruIntegration(), ], ) logger.debug("I am ignored") logger.info("I am a breadcrumb") logger.error("I am an event", extra=dict(bar=43)) logger.exception("An exception happened") ``` - An error event with the message `"I am an event"` will be created. - `"I am a breadcrumb"` will be attached as a breadcrumb to that event. - `bar` will end up in the `extra` attributes of that event. - `"An exception happened"` will send the current exception from `sys.exc_info()` with the stack trace to Sentry. If there's no exception, the current stack will be attached. - The debug message `"I am ignored"` will not be captured by Sentry. To capture it, set `level` to `DEBUG` or lower in `LoguruIntegration`. - Do not truncate request body if `request_bodies` is `"always"` (#2092) by @sentrivana - Fixed Celery headers for Beat auto-instrumentation (#2102) by @antonpirker - Add `db.operation` to Redis and MongoDB spans (#2089) by @antonpirker - Make sure we're importing `redis` the library (#2106) by @sentrivana - Add `include_source_context` option (#2020) by @farhat-nawaz and @sentrivana - Import `Markup` from `markupsafe` (#2047) by @rco-ableton - Fix `__qualname__` missing attribute in asyncio integration (#2105) by @sl0thentr0py - Remove relay extension from AWS Layer (#2068) by @sl0thentr0py - Add a note about `pip freeze` to the bug template (#2103) by @sentrivana ## 1.22.2 ### Various fixes & improvements - Fix: Django caching spans when using keyword arguments (#2086) by @antonpirker - Fix: Duration in Celery Beat tasks monitoring (#2087) by @antonpirker - Fix: Docstrings of SPANDATA (#2084) by @antonpirker ## 1.22.1 ### Various fixes & improvements - Fix: Handle a list of keys (not just a single key) in Django cache spans (#2082) by @antonpirker ## 1.22.0 ### Various fixes & improvements - Add `cache.hit` and `cache.item_size` to Django (#2057) by @antonpirker _Note:_ This will add spans for all requests to the caches configured in Django. This will probably add some overhead to your server and also add multiple spans to your performance waterfall diagrams. If you do not want this, you can disable this feature in the DjangoIntegration: ```python sentry_sdk.init( dsn="...", integrations=[ DjangoIntegration(cache_spans=False), ] ) ``` - Use `http.method` instead of `method` (#2054) by @AbhiPrasad - Handle non-int `exc.status_code` in Starlette (#2075) by @sentrivana - Handle SQLAlchemy `engine.name` being bytes (#2074) by @sentrivana - Fix `KeyError` in `capture_checkin` if SDK is not initialized (#2073) by @antonpirker - Use `functools.wrap` for `ThreadingIntegration` patches to fix attributes (#2080) by @EpicWink - Pin `urllib3` to <2.0.0 for now (#2069) by @sl0thentr0py ## 1.21.1 ### Various fixes & improvements - Do not send monitor_config when unset (#2058) by @evanpurkhiser - Add `db.system` span data (#2040, #2042) by @antonpirker - Fix memory leak in profiling (#2049) by @Zylphrex - Fix crash loop when returning none in before_send (#2045) by @sentrivana ## 1.21.0 ### Various fixes & improvements - Better handling of redis span/breadcrumb data (#2033) by @antonpirker _Note:_ With this release we will limit the description of redis db spans and the data in breadcrumbs representing redis db operations to 1024 characters. This can lead to truncated data.
If you do not want this, there is a new parameter `max_data_size` in `RedisIntegration`. You can set this to `None` to disable trimming. Example for **disabling** trimming of redis commands in spans or breadcrumbs: ```python sentry_sdk.init( integrations=[ RedisIntegration(max_data_size=None), ] ) ``` Example for custom trim size of redis commands in spans or breadcrumbs: ```python sentry_sdk.init( integrations=[ RedisIntegration(max_data_size=50), ] ) ``` - Add `db.system` to redis and SQLAlchemy db spans (#2037, #2038, #2039) by @AbhiPrasad - Upgraded linting tooling (#2026) by @antonpirker - Made code more resilient. (#2031) by @antonpirker ## 1.20.0 ### Various fixes & improvements - Send all events to /envelope endpoint when tracing is enabled (#2009) by @antonpirker _Note:_ If you’re self-hosting Sentry 9, you need to stay in the previous version of the SDK or update your self-hosted to at least 20.6.0 - Profiling: Remove profile context from SDK (#2013) by @Zylphrex - Profiling: Additional performance improvements to the profiler (#1991) by @Zylphrex - Fix: Celery Beat monitoring without restarting the Beat process (#2001) by @antonpirker - Fix: Using the Codecov uploader instead of deprecated python package (#2011) by @antonpirker - Fix: Support for Quart (#2003) by @antonpirker ## 1.19.1 ### Various fixes & improvements - Make auto monitoring beat update support Celery 4 and 5 (#1989) by @antonpirker ## 1.19.0 ### Various fixes & improvements - **New:** [Celery Beat](https://docs.celeryq.dev/en/stable/userguide/periodic-tasks.html) auto monitoring (#1967) by @antonpirker The CeleryIntegration can now also monitor your Celery Beat scheduled tasks automatically using the new [Crons](https://blog.sentry.io/2023/01/04/cron-job-monitoring-beta-because-scheduled-jobs-fail-too/) feature of Sentry. To learn more see our [Celery Beat Auto Discovery](https://docs.sentry.io/platforms/python/guides/celery/crons/) documentation. Usage: ```python from celery import Celery, signals from celery.schedules import crontab import sentry_sdk from sentry_sdk.integrations.celery import CeleryIntegration app = Celery('tasks', broker='...') app.conf.beat_schedule = { 'set-in-beat-schedule': { 'task': 'tasks.some_important_task', 'schedule': crontab(...), }, } @signals.celeryd_init.connect def init_sentry(**kwargs): sentry_sdk.init( dsn='...', integrations=[CeleryIntegration(monitor_beat_tasks=True)], # 👈 here environment="local.dev.grace", release="v1.0", ) ``` This will auto-detect all scheduled tasks in your `beat_schedule` and monitor them with Sentry [Crons](https://blog.sentry.io/2023/01/04/cron-job-monitoring-beta-because-scheduled-jobs-fail-too/). - **New:** [gRPC](https://grpc.io/) integration (#1911) by @hossein-raeisi The [gRPC](https://grpc.io/) integration instruments all incoming requests and outgoing unary-unary, unary-stream grpc requests using grpcio channels. To learn more see our [gRPC Integration](https://docs.sentry.io/platforms/python/configuration/integrations/grpc/) documentation.
On the server: ```python import grpc from sentry_sdk.integrations.grpc.server import ServerInterceptor server = grpc.server( thread_pool=..., interceptors=[ServerInterceptor()], ) ``` On the client: ```python import grpc from sentry_sdk.integrations.grpc.client import ClientInterceptor with grpc.insecure_channel("example.com:12345") as channel: channel = grpc.intercept_channel(channel, *[ClientInterceptor()]) ``` - **New:** socket integration (#1911) by @hossein-raeisi Use this integration to create spans for DNS resolves (`socket.getaddrinfo()`) and connection creations (`socket.create_connection()`). To learn more see our [Socket Integration](https://docs.sentry.io/platforms/python/configuration/integrations/socket/) documentation. Usage: ```python import sentry_sdk from sentry_sdk.integrations.socket import SocketIntegration sentry_sdk.init( dsn="___PUBLIC_DSN___", integrations=[ SocketIntegration(), ], ) ``` - Fix: Do not trim span descriptions. (#1983) by @antonpirker ## 1.18.0 ### Various fixes & improvements - **New:** Implement `EventScrubber` (#1943) by @sl0thentr0py To learn more see our [Scrubbing Sensitive Data](https://docs.sentry.io/platforms/python/data-management/sensitive-data/#event-scrubber) documentation. Add a new `EventScrubber` class that scrubs certain potentially sensitive interfaces with a `DEFAULT_DENYLIST`. The default scrubber is automatically run if `send_default_pii = False`: ```python import sentry_sdk from sentry_sdk.scrubber import EventScrubber sentry_sdk.init( # ... send_default_pii=False, event_scrubber=EventScrubber(), # this is set by default ) ``` You can also pass in a custom `denylist` to the `EventScrubber` class and filter additional fields that you want. ```python from sentry_sdk.scrubber import EventScrubber, DEFAULT_DENYLIST # custom denylist denylist = DEFAULT_DENYLIST + ["my_sensitive_var"] sentry_sdk.init( # ... send_default_pii=False, event_scrubber=EventScrubber(denylist=denylist), ) ``` - **New:** Added new `functions_to_trace` option for a central way of performance instrumentation (#1960) by @antonpirker To learn more see our [Tracing Options](https://docs.sentry.io/platforms/python/configuration/options/#functions-to-trace) documentation. An optional list of functions that should be set up for performance monitoring. For each function in the list, a span will be created when the function is executed. ```python functions_to_trace = [ {"qualified_name": "tests.test_basics._hello_world_counter"}, {"qualified_name": "time.sleep"}, {"qualified_name": "collections.Counter.most_common"}, ] sentry_sdk.init( # ... traces_sample_rate=1.0, functions_to_trace=functions_to_trace, ) ``` - Updated denylist to include other widely used cookies/headers (#1972) by @antonpirker - Forward all `sentry-` baggage items (#1970) by @cleptric - Update OSS licensing (#1973) by @antonpirker - Profiling: Handle non frame types in profiler (#1965) by @Zylphrex - Tests: Bad arq dependency in tests (#1966) by @Zylphrex - Better naming (#1962) by @antonpirker ## 1.17.0 ### Various fixes & improvements - **New:** Monitor Celery Beat tasks with Sentry [Cron Monitoring](https://docs.sentry.io/product/crons/). With this feature you can make sure that your Celery beat tasks run at the right time and see if they were successful or not. > **Warning** > Cron Monitoring is currently in beta. Beta features are still in-progress and may have bugs. We recognize the irony.
> If you have any questions or feedback, please email us at crons-feedback@sentry.io, reach out via Discord (#cronjobs), or open an issue. Usage: ```python # File: tasks.py from celery import Celery, signals from celery.schedules import crontab import sentry_sdk from sentry_sdk.crons import monitor from sentry_sdk.integrations.celery import CeleryIntegration # 1. Setup your Celery beat configuration app = Celery('mytasks', broker='redis://localhost:6379/0') app.conf.beat_schedule = { 'set-in-beat-schedule': { 'task': 'tasks.tell_the_world', 'schedule': crontab(hour='10', minute='15'), 'args': ("in beat_schedule set", ), }, } # 2. Initialize Sentry either in `celeryd_init` or `beat_init` signal. #@signals.celeryd_init.connect @signals.beat_init.connect def init_sentry(**kwargs): sentry_sdk.init( dsn='...', integrations=[CeleryIntegration()], environment="local.dev.grace", release="v1.0.7-a1", ) # 3. Link your Celery task to a Sentry Cron Monitor @app.task @monitor(monitor_slug='3b861d62-ff82-4aa0-9cd6-b2b6403bd0cf') def tell_the_world(msg): print(msg) ``` - **New:** Add decorator for Sentry tracing (#1089) by @ynouri This allows you to use a decorator to set up custom performance instrumentation. To learn more see [Custom Instrumentation](https://docs.sentry.io/platforms/python/performance/instrumentation/custom-instrumentation/). Usage: Just add the new decorator to your function, and a span will be created for it: ```python import sentry_sdk @sentry_sdk.trace def my_complex_function(): # do stuff ... ``` - Make Django signals tracing optional (#1929) by @antonpirker See the [Django Guide](https://docs.sentry.io/platforms/python/guides/django) to learn more. - Deprecated `with_locals` in favor of `include_local_variables` (#1924) by @antonpirker - Added top level API to get current span (#1954) by @antonpirker - Profiling: Add profiler options to init (#1947) by @Zylphrex - Profiling: Set active thread id for quart (#1830) by @Zylphrex - Fix: Update `get_json` function call for werkzeug 2.1.0+ (#1939) by @michielderoos - Fix: Returning the tasks result. (#1931) by @antonpirker - Fix: Rename MYPY to TYPE_CHECKING (#1934) by @untitaker - Fix: Fix type annotation for ignore_errors in sentry_sdk.init() (#1928) by @tiangolo - Tests: Start a real http server instead of mocking libs (#1938) by @antonpirker ## 1.16.0 ### Various fixes & improvements - **New:** Add [arq](https://arq-docs.helpmanual.io/) Integration (#1872) by @Zhenay This integration will create performance spans when arq jobs are enqueued and when they are run. It will also capture errors in jobs and link them to the performance spans.
Usage: ```python import asyncio from httpx import AsyncClient from arq import create_pool from arq.connections import RedisSettings import sentry_sdk from sentry_sdk.integrations.arq import ArqIntegration from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT sentry_sdk.init( dsn="...", integrations=[ArqIntegration()], ) async def download_content(ctx, url): session: AsyncClient = ctx['session'] response = await session.get(url) print(f'{url}: {response.text:.80}...') return len(response.text) async def startup(ctx): ctx['session'] = AsyncClient() async def shutdown(ctx): await ctx['session'].aclose() async def main(): with sentry_sdk.start_transaction(name="testing_arq_tasks", source=TRANSACTION_SOURCE_COMPONENT): redis = await create_pool(RedisSettings()) for url in ('https://facebook.com', 'https://microsoft.com', 'https://github.com', "asdf" ): await redis.enqueue_job('download_content', url) class WorkerSettings: functions = [download_content] on_startup = startup on_shutdown = shutdown if __name__ == '__main__': asyncio.run(main()) ``` - Update of [Falcon](https://falconframework.org/) Integration (#1733) by @bartolootrit - Adding [Cloud Resource Context](https://docs.sentry.io/platforms/python/configuration/integrations/cloudresourcecontext/) integration (#1882) by @antonpirker - Profiling: Use the transaction timestamps to anchor the profile (#1898) by @Zylphrex - Profiling: Add debug logs to profiling (#1883) by @Zylphrex - Profiling: Start profiler thread lazily (#1903) by @Zylphrex - Fixed checks for structured http data (#1905) by @antonpirker - Make `set_measurement` public api and remove experimental status (#1909) by @sl0thentr0py - Add `trace_propagation_targets` option (#1916) by @antonpirker - Add `enable_tracing` to default traces_sample_rate to 1.0 (#1900) by @sl0thentr0py - Remove deprecated `tracestate` (#1907) by @sl0thentr0py - Sanitize URLs in Span description and breadcrumbs (#1876) by @antonpirker - Mechanism should default to true unless set explicitly (#1889) by @sl0thentr0py - Better setting of in-app in stack frames (#1894) by @antonpirker - Add workflow to test gevent (#1870) by @Zylphrex - Updated outdated HTTPX test matrix (#1917) by @antonpirker - Switch to MIT license (#1908) by @cleptric ## 1.15.0 ### Various fixes & improvements - New: Add [Huey](https://huey.readthedocs.io/en/latest/) Integration (#1555) by @Zhenay This integration will create performance spans when Huey tasks are enqueued and when they are executed.
## 1.15.0 ### Various fixes & improvements - New: Add [Huey](https://huey.readthedocs.io/en/latest/) Integration (#1555) by @Zhenay This integration creates performance spans when Huey tasks are enqueued and when they are executed. Usage: Task definition in `demo.py`: ```python from huey import SqliteHuey import sentry_sdk from sentry_sdk.integrations.huey import HueyIntegration sentry_sdk.init( dsn="...", integrations=[ HueyIntegration(), ], traces_sample_rate=1.0, ) huey = SqliteHuey(filename='/tmp/demo.db') @huey.task() def add_numbers(a, b): return a + b ``` Running the tasks in `run.py`: ```python from demo import add_numbers import sentry_sdk from sentry_sdk.integrations.huey import HueyIntegration from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT def main(): sentry_sdk.init( dsn="...", integrations=[ HueyIntegration(), ], traces_sample_rate=1.0, ) with sentry_sdk.start_transaction(name="testing_huey_tasks", source=TRANSACTION_SOURCE_COMPONENT): add_numbers(1, 2) if __name__ == "__main__": main() ``` - Profiling: Do not send single sample profiles (#1879) by @Zylphrex - Profiling: Add additional test coverage for profiler (#1877) by @Zylphrex - Profiling: Always use builtin time.sleep (#1869) by @Zylphrex - Profiling: Default in_app decision to None (#1855) by @Zylphrex - Profiling: Remove use of threading.Event (#1864) by @Zylphrex - Profiling: Enable profiling on all transactions (#1797) by @Zylphrex - FastAPI: Fix check for Starlette in FastAPI integration (#1868) by @antonpirker - Flask: Do not overwrite default for username with email address in FlaskIntegration (#1873) by @homeworkprod - Tests: Add py3.11 to test-common (#1871) by @Zylphrex - Fix: Don't log whole event in before_send / event_processor drops (#1863) by @sl0thentr0py ## 1.14.0 ### Various fixes & improvements - Add `before_send_transaction` (#1840) by @antonpirker Adds a hook (similar to `before_send`) that is called for all transaction events (performance-related data). Usage: ```python import sentry_sdk def strip_sensitive_data(event, hint): # modify event here (or return `None` if you want to drop the event entirely) return event sentry_sdk.init( # ... before_send_transaction=strip_sensitive_data, ) ``` See also: https://docs.sentry.io/platforms/python/configuration/filtering/#using-platformidentifier-namebefore-send-transaction- - Django: Always remove values of Django session related cookies. (#1842) by @antonpirker - Profiling: Enable profiling for ASGI frameworks (#1824) by @Zylphrex - Profiling: Better gevent support (#1822) by @Zylphrex - Profiling: Add profile context to transaction (#1860) by @Zylphrex - Profiling: Use co_qualname in python 3.11 (#1831) by @Zylphrex - OpenTelemetry: fix Use dict for sentry-trace context instead of tuple (#1847) by @AbhiPrasad - OpenTelemetry: fix extra dependency (#1825) by @bernardotorres - OpenTelemetry: fix NoOpSpan updates scope (#1834) by @Zylphrex - OpenTelemetry: Make sure to noop when there is no DSN (#1852) by @antonpirker - FastAPI: Fix middleware being patched multiple times (#1841) by @JohnnyDeuss - Starlette: Avoid import of pkg_resource with Starlette integration (#1836) by @mgu - Removed code coverage target (#1862) by @antonpirker ## 1.13.0 ### Various fixes & improvements - Add Starlite integration (#1748) by @gazorby Adding support for the [Starlite](https://starlite-api.github.io/starlite/1.48/) framework. Unhandled errors are captured. Performance spans for Starlite middleware are also captured. Thanks @gazorby for the great work!
Usage: ```python from starlite import Starlite, get import sentry_sdk from sentry_sdk.integrations.starlite import StarliteIntegration sentry_sdk.init( dsn="...", traces_sample_rate=1.0, integrations=[ StarliteIntegration(), ], ) @get("/") def hello_world() -> dict[str, str]: """Keeping the tradition alive with hello world.""" bla = 1/0 # causing an error return {"hello": "world"} app = Starlite(route_handlers=[hello_world]) ``` - Profiling: Remove sample buffer from profiler (#1791) by @Zylphrex - Profiling: Performance tweaks to profile sampler (#1789) by @Zylphrex - Add span for Django SimpleTemplateResponse rendering (#1818) by @chdsbd - Use @wraps for Django Signal receivers (#1815) by @meanmail - Add enqueued_at and started_at to rq job extra (#1024) by @kruvasyan - Remove sanic v22 pin (#1819) by @sl0thentr0py - Add support for `bytearray` and `memoryview` built-in types (#1833) by @Tarty - Handle `"rc"` in SQLAlchemy version. (#1812) by @peterschutt - Doc: Use .venv (not .env) as a virtual env location in CONTRIBUTING.md (#1790) by @tonyo - Auto publish to internal pypi on release (#1823) by @asottile-sentry - Added Python 3.11 to test suite (#1795) by @antonpirker - Update test/linting dependencies (#1801) by @antonpirker - Deps: bump sphinx from 5.2.3 to 5.3.0 (#1686) by @dependabot ## 1.12.1 ### Various fixes & improvements - Link errors to OTel spans (#1787) by @antonpirker ## 1.12.0 ### Basic OTel support This adds support to automatically integrate OpenTelemetry performance tracing with Sentry. See the documentation on how to set it up: https://docs.sentry.io/platforms/python/performance/instrumentation/opentelemetry/ Give it a try and let us know if you have any feedback or problems with using it. By: @antonpirker (#1772, #1766, #1765)
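For reference, a minimal setup sketch, assuming the `SentrySpanProcessor`/`SentryPropagator` classes and the `instrumenter` init option described in the documentation linked above:

```python
from opentelemetry import trace
from opentelemetry.propagate import set_global_textmap
from opentelemetry.sdk.trace import TracerProvider

import sentry_sdk
from sentry_sdk.integrations.opentelemetry import SentryPropagator, SentrySpanProcessor

sentry_sdk.init(
    dsn="...",
    traces_sample_rate=1.0,
    instrumenter="otel",  # let OpenTelemetry drive instrumentation
)

provider = TracerProvider()
provider.add_span_processor(SentrySpanProcessor())  # forward OTel spans to Sentry
trace.set_tracer_provider(provider)
set_global_textmap(SentryPropagator())  # propagate sentry-trace / baggage headers
```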
### Various fixes & improvements - Tox Cleanup (#1749) by @antonpirker - CI: Fix Github action checks (#1780) by @Zylphrex - Profiling: Introduce active thread id on scope (#1764) by @Zylphrex - Profiling: Eagerly hash stack for profiles (#1755) by @Zylphrex - Profiling: Resolve inherited method class names (#1756) by @Zylphrex ## 1.11.1 ### Various fixes & improvements - Move set_transaction_name out of event processor in fastapi/starlette (#1751) by @sl0thentr0py - Expose proxy_headers as top level config and use in ProxyManager: https://docs.sentry.io/platforms/python/configuration/options/#proxy-headers (#1746) by @sl0thentr0py ## 1.11.0 ### Various fixes & improvements - Fix signals problem on sentry.io (#1732) by @antonpirker - Fix reading FastAPI request body twice. (#1724) by @antonpirker - ref(profiling): Do not error if already setup (#1731) by @Zylphrex - ref(profiling): Use sleep scheduler by default (#1729) by @Zylphrex - feat(profiling): Extract more frame info (#1702) by @Zylphrex - Update actions/upload-artifact to v3.1.1 (#1718) by @mattgauntseo-sentry - Performance optimizations (#1725) by @antonpirker - feat(pymongo): add PyMongo integration (#1590) by @Agalin - Move relay to port 5333 to avoid collisions (#1716) by @sl0thentr0py - fix(utils): strip_string() checks text length counting bytes not chars (#1711) by @mgaligniana - chore: remove jira workflow (#1707) by @vladanpaunovic - build(deps): bump checkouts/data-schemas from `a214fbc` to `20ff3b9` (#1703) by @dependabot - perf(profiling): Tune the sample profile generation code for performance (#1694) by @Zylphrex ## 1.10.1 ### Various fixes & improvements - Bug fixes for FastAPI and Sentry SDK 1.10.0 (#1699) by @antonpirker - The wrapped receive() did not return anything. (#1698) by @antonpirker ## 1.10.0 ### Various fixes & improvements - Unified naming for span ops (#1661) by @antonpirker We have unified the strings of our span operations. See https://develop.sentry.dev/sdk/performance/span-operations/ **WARNING**: If you have Sentry Dashboards or Sentry Discover queries that use `transaction.op` in their fields, conditions, aggregates or columns, this change could potentially break your Dashboards/Discover setup. Here is a list of the changes we made to the `op`s. Please adjust your dashboards and Discover queries accordingly:

| Old operation (`op`)     | New operation (`op`)   |
| ------------------------ | ---------------------- |
| `asgi.server`            | `http.server`          |
| `aws.request`            | `http.client`          |
| `aws.request.stream`     | `http.client.stream`   |
| `celery.submit`          | `queue.submit.celery`  |
| `celery.task`            | `queue.task.celery`    |
| `django.middleware`      | `middleware.django`    |
| `django.signals`         | `event.django`         |
| `django.template.render` | `template.render`      |
| `django.view`            | `view.render`          |
| `http`                   | `http.client`          |
| `redis`                  | `db.redis`             |
| `rq.task`                | `queue.task.rq`        |
| `serverless.function`    | `function.aws`         |
| `serverless.function`    | `function.gcp`         |
| `starlette.middleware`   | `middleware.starlette` |

- Include framework in SDK name (#1662) by @antonpirker - Asyncio integration (#1671) by @antonpirker - Add exception handling to Asyncio Integration (#1695) by @antonpirker - Fix asyncio task factory (#1689) by @antonpirker - Have instrumentation for ASGI middleware receive/send callbacks. (#1673) by @antonpirker - Use Django internal ASGI handling from Channels version 4.0.0. (#1688) by @antonpirker - fix(integrations): Fix http putrequest when url is None (#1693) by @MattFlower - build(deps): bump checkouts/data-schemas from `f0a57f2` to `a214fbc` (#1627) by @dependabot - build(deps): bump flake8-bugbear from 22.9.11 to 22.9.23 (#1637) by @dependabot - build(deps): bump sphinx from 5.1.1 to 5.2.3 (#1653) by @dependabot - build(deps): bump actions/stale from 5 to 6 (#1638) by @dependabot - build(deps): bump black from 22.8.0 to 22.10.0 (#1670) by @dependabot - Remove unused node setup from ci. (#1681) by @antonpirker - Check for Decimal in is_valid_sample_rate (#1672) by @Arvind2222 - Add session for aiohttp integration (#1605) by @denys-pidlisnyi - feat(profiling): Extract qualified name for each frame (#1669) by @Zylphrex - feat(profiling): Attach thread metadata to profiles (#1660) by @Zylphrex - ref(profiling): Rename profiling frame keys (#1680) by @Zylphrex - fix(profiling): get_frame_name only look at arguments (#1684) by @Zylphrex - fix(profiling): Need to sample profile correctly (#1679) by @Zylphrex - fix(profiling): Race condition spawning multiple profiling threads (#1676) by @Zylphrex - tests(profiling): Add basic profiling tests (#1677) by @Zylphrex - tests(profiling): Add tests for thread schedulers (#1683) by @Zylphrex ## 1.9.10 ### Various fixes & improvements - Use content-length header in ASGI instead of reading request body (#1646, #1631, #1595, #1573) (#1649) by @antonpirker - Added newer Celery versions to test suite (#1655) by @antonpirker - Django 4.x support (#1632) by @antonpirker - Cancel old CI runs when new one is started. (#1651) by @antonpirker - Increase max string size for desc (#1647) by @k-fish - Pin Sanic version for CI (#1650) by @antonpirker - Fix for partial signals in old Django and old Python versions.
(#1641) by @antonpirker - Convert profile output to the sample format (#1611) by @phacops - Dynamically adjust profiler sleep time (#1634) by @Zylphrex ## 1.9.9 ### Django update (ongoing) - Instrument Django Signals so they show up in "Performance" view (#1526) by @BeryJu - include other Django enhancements brought up by the community ### Various fixes & improvements - fix(profiling): Profiler mode type hints (#1633) by @Zylphrex - New ASGIMiddleware tests (#1600) by @antonpirker - build(deps): bump mypy from 0.961 to 0.971 (#1517) by @dependabot - build(deps): bump black from 22.3.0 to 22.8.0 (#1596) by @dependabot - build(deps): bump sphinx from 5.0.2 to 5.1.1 (#1524) by @dependabot - ref: upgrade linters to flake8 5.x (#1610) by @asottile-sentry - feat(profiling): Introduce different profiler schedulers (#1616) by @Zylphrex - fix(profiling): Check transaction sampled status before profiling (#1624) by @Zylphrex - Wrap Baggage ser/deser in capture_internal_exceptions (#1630) by @sl0thentr0py - Faster Tests (DjangoCon) (#1602) by @antonpirker - feat(profiling): Add support for profiles_sample_rate (#1613) by @Zylphrex - feat(profiling): Support for multithreaded profiles (#1570) by @Zylphrex ## 1.9.8 ### Various fixes & improvements - Baggage creation for head of trace (#1589) by @sl0thentr0py - The SDK now also generates new baggage entries for dynamic sampling when it is the first (head) SDK in the pipeline. ## 1.9.7 ### Various fixes & improvements - Let SentryAsgiMiddleware work with Starlette and FastAPI integrations (#1594) by @antonpirker **Note:** The last version 1.9.6 introduced a breaking change where projects that used Starlette or FastAPI and had manually set up `SentryAsgiMiddleware` could not start. This version fixes this behaviour. With this version, if you have a manual `SentryAsgiMiddleware` setup and are using Starlette or FastAPI, everything just works out of the box (see the sketch below). Sorry for any inconvenience the last version might have brought to you. We can do better and in the future we will do our best to not break your code again.
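A minimal sketch of such a manual setup that now works alongside the auto-enabled integrations — the empty route list is a placeholder:

```python
from starlette.applications import Starlette

import sentry_sdk
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware

# The Starlette integration is auto-enabled when starlette is installed.
sentry_sdk.init(dsn="...")

app = Starlette(routes=[])  # add your routes here
# Manually wrapping the app no longer conflicts with the integration.
app = SentryAsgiMiddleware(app)
```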
## 1.9.6 ### Various fixes & improvements - Auto-enable Starlette and FastAPI (#1533) by @antonpirker - Add more version constraints (#1574) by @isra17 - Fix typo in starlette attribute check (#1566) by @sl0thentr0py ## 1.9.5 ### Various fixes & improvements - fix(redis): import redis pipeline using full path (#1565) by @olksdr - Fix side effects for parallel tests (#1554) by @sl0thentr0py ## 1.9.4 ### Various fixes & improvements - Remove TRANSACTION_SOURCE_UNKNOWN and default to CUSTOM (#1558) by @sl0thentr0py - feat(redis): Add instrumentation for redis pipeline (#1543) by @jjbayer - Handle no release when uploading profiles (#1548) by @szokeasaurusrex ## 1.9.3 ### Various fixes & improvements - Wrap StarletteRequestExtractor in capture_internal_exceptions (#1551) by @sl0thentr0py ## 1.9.2 ### Various fixes & improvements - chore: remove quotes (#1545) by @vladanpaunovic ## 1.9.1 ### Various fixes & improvements - Fix FastAPI issues (#1532) (#1514) by @antonpirker - Add deprecation warning for 3.4, 3.5 (#1541) by @sl0thentr0py - Fast tests (#1504) by @antonpirker - Replace Travis CI badge with GitHub Actions badge (#1538) by @153957 - chore(deps): update urllib3 minimum version with environment markers (#1312) by @miketheman - Update Flask and Quart integrations (#1520) by @pgjones - chore: Remove ancient examples from tracing prototype (#1528) by @sl0thentr0py - fix(django): Send correct "url" transaction source if Django resolver fails to resolve (#1525) by @sl0thentr0py ## 1.9.0 ### Various fixes & improvements - feat(profiler): Add experimental profiler under experiments.enable_profiling (#1481) by @szokeasaurusrex - Fixed problem with broken response and python-multipart (#1516) by @antonpirker ## 1.8.0 ### Various fixes & improvements - feat(starlette): add Starlette integration (#1441) by @sl0thentr0py **Important:** Remove manual usage of `SentryAsgiMiddleware`! This is now done by the Starlette integration. Usage: ```python from starlette.applications import Starlette import sentry_sdk from sentry_sdk.integrations.starlette import StarletteIntegration sentry_sdk.init( dsn="...", integrations=[StarletteIntegration()], ) app = Starlette(debug=True, routes=[...]) ``` - feat(fastapi): add FastAPI integration (#829) by @antonpirker **Important:** Remove manual usage of `SentryAsgiMiddleware`! This is now done by the FastAPI integration. Usage: ```python from fastapi import FastAPI import sentry_sdk from sentry_sdk.integrations.starlette import StarletteIntegration from sentry_sdk.integrations.fastapi import FastApiIntegration sentry_sdk.init( dsn="...", integrations=[StarletteIntegration(), FastApiIntegration()], ) app = FastAPI() ``` Yes, you have to add both the `StarletteIntegration` **AND** the `FastApiIntegration`! - fix: avoid sending empty Baggage header (#1507) by @intgr - fix: properly freeze Baggage object (#1508) by @intgr - docs: fix simple typo, collecter | collector (#1505) by @timgates42 ## 1.7.2 ### Various fixes & improvements - feat(transactions): Transaction Source (#1490) by @antonpirker - Removed (unused) sentry_timestamp header (#1494) by @antonpirker ## 1.7.1 ### Various fixes & improvements - Skip malformed baggage items (#1491) by @robyoung ## 1.7.0 ### Various fixes & improvements - feat(tracing): Dynamic Sampling Context / Baggage continuation (#1485) by @sl0thentr0py The SDK now propagates the [W3C Baggage Header](https://www.w3.org/TR/baggage/) from incoming transactions to outgoing requests.
It also extracts Sentry-specific [sampling information](https://develop.sentry.dev/sdk/performance/dynamic-sampling-context/) and adds it to the transaction headers to enable Dynamic Sampling in the product. ## 1.6.0 ### Various fixes & improvements - Fix Deployment (#1474) by @antonpirker - Serverless V2 (#1450) by @antonpirker - Use logging levelno instead of levelname. Levelnames can be overridden (#1449) by @rrauenza ## 1.5.12 ### Various fixes & improvements - feat(measurements): Add experimental set_measurement api on transaction (#1359) by @sl0thentr0py - fix: Remove incorrect usage from flask helper example (#1434) by @BYK ## 1.5.11 ### Various fixes & improvements - chore: Bump mypy and fix abstract ContextManager typing (#1421) by @sl0thentr0py - chore(issues): add link to Sentry support (#1420) by @vladanpaunovic - fix: replace git.io links with redirect targets (#1412) by @asottile-sentry - ref: Update error verbose for sentry init (#1361) by @targhs - fix(sessions): Update session also for non sampled events and change filter order (#1394) by @adinauer ## 1.5.10 ### Various fixes & improvements - Remove Flask version constraint (#1395) by @antonpirker - Change ordering of event drop mechanisms (#1390) by @adinauer ## 1.5.9 ### Various fixes & improvements - fix(sqlalchemy): Use context instead of connection in sqlalchemy integration (#1388) by @sl0thentr0py - Update correct test command in contributing docs (#1377) by @targhs - Update black (#1379) by @antonpirker - build(deps): bump sphinx from 4.1.1 to 4.5.0 (#1376) by @dependabot - fix: Auto-enabling Redis and Pyramid integration (#737) by @untitaker - feat(testing): Add pytest-watch (#853) by @lobsterkatie - Treat x-api-key header as sensitive (#1236) by @simonschmidt - fix: Remove obsolete MAX_FORMAT_PARAM_LENGTH (#1375) by @blueyed ## 1.5.8 ### Various fixes & improvements - feat(asgi): Add support for setting transaction name to path in FastAPI (#1349) by @tiangolo - fix(sqlalchemy): Change context manager type to avoid race in threads (#1368) by @Fofanko - fix(perf): Fix transaction setter on scope to use containing_transaction to match with getter (#1366) by @sl0thentr0py - chore(ci): Change stale GitHub workflow to run once a day (#1367) by @kamilogorek - feat(django): Make django middleware expose more wrapped attributes (#1202) by @MattFisher ## 1.5.7 ### Various fixes & improvements - fix(serializer): Make sentry_repr dunder method to avoid mock problems (#1364) by @sl0thentr0py ## 1.5.6 ### Various fixes & improvements - Create feature.yml (#1350) by @vladanpaunovic - Update contribution guide (#1346) by @antonpirker - chore: add bug issue template (#1345) by @vladanpaunovic - Added default value for auto_session_tracking (#1337) by @antonpirker - docs(readme): reordered content (#1343) by @antonpirker - fix(tests): Removed unsupported Django 1.6 from tests to avoid confusion (#1338) by @antonpirker - Group captured warnings under separate issues (#1324) by @mnito - build(changelogs): Use automated changelogs from Craft (#1340) by @BYK - fix(aiohttp): AioHttpIntegration sentry_app_handle() now ignores ConnectionResetError (#1331) by @cmalek - meta: Remove black GH action (#1339) by @sl0thentr0py - feat(flask): Add `sentry_trace()` template helper (#1336) by @BYK ## 1.5.5 - Add session tracking to ASGI integration (#1329) - Pinning test requirements versions (#1330) - Allow classes to short circuit serializer with `sentry_repr` (#1322) - Set default on json.dumps in compute_tracestate_value to ensure string
conversion (#1318) Work in this release contributed by @tomchuk. Thank you for your contribution! ## 1.5.4 - Add Python 3.10 to test suite (#1309) - Capture only 5xx HTTP errors in Falcon Integration (#1314) - Attempt custom urlconf resolve in `got_request_exception` as well (#1317) ## 1.5.3 - Pick up custom urlconf set by Django middlewares from request if any (#1308) ## 1.5.2 - Record event_processor client reports #1281 - Add a Quart integration #1248 - Sanic v21.12 support #1292 - Support Celery abstract tasks #1287 Work in this release contributed by @johnzeringue, @pgjones and @ahopkins. Thank you for your contribution! ## 1.5.1 - Fix django legacy url resolver regex substitution due to upstream CVE-2021-44420 fix #1272 - Record lost `sample_rate` events only if tracing is enabled #1268 - Fix gevent version parsing for non-numeric parts #1243 - Record span and breadcrumb when Django opens db connection #1250 ## 1.5.0 - Also record client outcomes for before send #1211 - Add support for implicitly sized envelope items #1229 - Fix integration with Apache Beam 2.32, 2.33 #1233 - Remove Python 2.7 support for AWS Lambda layers in craft config #1241 - Refactor Sanic integration for v21.9 support #1212 - AWS Lambda Python 3.9 runtime support #1239 - Fix "shutdown_timeout" typing #1256 Work in this release contributed by @galuszkak, @kianmeng, @ahopkins, @razumeiko, @tomscytale, and @seedofjoy. Thank you for your contribution! ## 1.4.3 - Turned client reports on by default. ## 1.4.2 - Made envelope modifications in the HTTP transport non observable #1206 ## 1.4.1 - Fix race condition between `finish` and `start_child` in tracing #1203 ## 1.4.0 - No longer set the last event id for transactions #1186 - Added support for client reports (disabled by default for now) #1181 - Added `tracestate` header handling #1179 - Added real ip detection to asgi integration #1199 ## 1.3.1 - Fix detection of contextvars compatibility with Gevent versions >=20.9.0 #1157 ## 1.3.0 - Add support for Sanic versions 20 and 21 #1146 ## 1.2.0 - Fix for `AWSLambda` Integration to handle other path formats for function initial handler #1139 - Fix for worker to set daemon attribute instead of deprecated setDaemon method #1093 - Fix for `bottle` Integration that discards `-dev` for version extraction #1085 - Fix for transport that adds a unified hook for capturing metrics about dropped events #1100 - Add `Httpx` Integration #1119 - Add support for china domains in `AWSLambda` Integration #1051 ## 1.1.0 - Fix for `AWSLambda` integration returns value of original handler #1106 - Fix for `RQ` integration that only captures exception if RQ job has failed and ignore retries #1076 - Feature that supports Tracing for the `Tornado` integration #1060 - Feature that supports wild cards in `ignore_logger` in the `Logging` Integration #1053 - Fix for django that deals with template span description names that are either lists or tuples #1054 ## 1.0.0 This release contains a breaking change - **BREAKING CHANGE**: Feat: Moved `auto_session_tracking` experimental flag to a proper option and removed explicitly setting experimental `session_mode` in favor of auto detecting its value, hence enabling release health by default #994 - Fixed Django transaction name by setting the name to `request.path_info` rather than `request.path` - Fix for tracing by getting HTTP headers from span rather than transaction when possible #1035 - Fix for Flask transactions missing request body in non errored transactions #1034 - Fix for honoring the 
`X-Forwarded-For` header #1037 - Fix for worker that logs data dropping of events with level error #1032 ## 0.20.3 - Added scripts to support auto instrumentation of no-code AWS Lambda Python functions ## 0.20.2 - Fix incorrect regex in craft to include wheel file in pypi release ## 0.20.1 - Fix for error that occurs with Async Middlewares when the middleware is a function rather than a class ## 0.20.0 - Fix for header extraction for AWS lambda/API extraction - Fix multiple \*\*kwargs type hints #967 - Fix that corrects AWS lambda integration failure to detect the aws-lambda-ric 1.0 bootstrap #976 - Fix AWSLambda integration: variable "timeout_thread" referenced before assignment #977 - Use full git sha as release name #960 - **BREAKING CHANGE**: The default environment is now production, not based on release - Django integration now creates transaction spans for template rendering - Fix headers not parsed correctly in ASGI middleware, Decode headers before creating transaction #984 - Restored ability to have tracing disabled #991 - Fix Django async views not behaving asynchronously - Performance improvement: supported pre-aggregated sessions ## 0.19.5 - Fix two regressions added in 0.19.2 with regard to sampling behavior when reading the sampling decision from headers. - Increase internal transport queue size and make it configurable. ## 0.19.4 - Fix a bug that would make applications crash if an old version of `boto3` was installed. ## 0.19.3 - Automatically pass integration-relevant data to `traces_sampler` for AWS, AIOHTTP, ASGI, Bottle, Celery, Django, Falcon, Flask, GCP, Pyramid, Tryton, RQ, and WSGI integrations - Fix a bug where the AWS integration would crash if event was anything besides a dictionary - Fix the Django integration's ASGI handler for Channels 3.0. Thanks Luke Pomfrey! ## 0.19.2 - Add `traces_sampler` option (see the sketch after this list). - The SDK now attempts to infer a default release from various environment variables and the current git repo. - Fix a crash with async views in Django 3.1. - Fix a bug where complex URL patterns in Django would create malformed transaction names. - Add options for transaction styling in AIOHTTP. - Add basic attachment support (documentation tbd). - Fix a crash in the `pure_eval` integration. - Integration for creating spans from `boto3`.
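A short sketch of the `traces_sampler` option — the `wsgi_environ` key in the sampling context is as we understand it from the docs, and the path and rates here are illustrative:

```python
import sentry_sdk

def traces_sampler(sampling_context):
    # Drop health checks entirely, sample everything else at 10%.
    path = sampling_context.get("wsgi_environ", {}).get("PATH_INFO", "")
    if path == "/health":
        return 0
    return 0.1

sentry_sdk.init(dsn="...", traces_sampler=traces_sampler)
```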
## 0.19.1 - Fix dependency check for `blinker` fixes #858 - Fix incorrect timeout warnings in AWS Lambda and GCP integrations #854 ## 0.19.0 - Removed `_experiments.auto_enabling_integrations` in favor of just `auto_enabling_integrations` which is now enabled by default. ## 0.18.0 - **Breaking change**: The `no_proxy` environment variable is now honored when inferring proxy settings from the system. Thanks Xavier Fernandez! - Added Performance/Tracing support for AWS and GCP functions. - Fix an issue with Django instrumentation where the SDK modified `resolver_match.callback` and broke user code. ## 0.17.8 - Fix yet another bug with disjoint traces in Celery. - Added support for Chalice 1.20. Thanks again to the folks at Cuenca MX! ## 0.17.7 - Internal: Change data category for transaction envelopes. - Fix a bug under Celery 4.2+ that may have caused disjoint traces or missing transactions. ## 0.17.6 - Support for Flask 0.10 (only relaxing version check) ## 0.17.5 - Work around an issue in the Python stdlib that makes the entire process deadlock during garbage collection if events are sent from a `__del__` implementation. - Add the possibility to wrap the ASGI application twice in middleware, to split up request scope data and exception catching. ## 0.17.4 - New integration for the Chalice web framework for AWS Lambda. Thanks to the folks at Cuenca MX! ## 0.17.3 - Fix an issue with the `pure_eval` integration in interaction with trimming where `pure_eval` would create a lot of useless local variables that then drown out the useful ones in trimming. ## 0.17.2 - Fix timezone bugs in GCP integration. ## 0.17.1 - Fix timezone bugs in AWS Lambda integration. - Fix crash on GCP integration because of missing parameter `timeout_warning`. ## 0.17.0 - Fix a bug where class-based callables used as Django views (without using Django's regular class-based views) would not have `csrf_exempt` applied. - New integration for Google Cloud Functions. - Fix a bug where a recently released version of `urllib3` would cause the SDK to enter an infinite loop on networking and SSL errors. - **Breaking change**: Remove the `traceparent_v2` option. The option has been ignored since 0.16.3, just remove it from your code. ## 0.16.5 - Fix a bug that caused Django apps to crash if the view didn't have a `__name__` attribute. ## 0.16.4 - Add experiment to avoid truncating span descriptions. Initialize with `init(_experiments={"smart_transaction_trimming": True})`. - Add a span around the Django view in transactions to distinguish its operations from middleware operations. ## 0.16.3 - Fix AWS Lambda support for Python 3.8. - The AWS Lambda integration now captures initialization/import errors for Python 3. - The AWS Lambda integration now supports an option to warn about functions likely to time out. - Testing for RQ 1.5 - Flip default of `traceparent_v2`. This change should have zero impact. The flag will be removed in 0.17. - Fix compatibility bug with Django 3.1. ## 0.16.2 - New (optional) integrations for richer stacktraces: `pure_eval` for additional variables, `executing` for better function names. ## 0.16.1 - Flask integration: Fix a bug that prevented custom tags from being attached to transactions. ## 0.16.0 - Redis integration: add tags for more commands - Redis integration: Patch rediscluster package if installed. - Session tracking: A session is no longer considered crashed if there has been a fatal log message (only unhandled exceptions count). - **Breaking change**: Revamping of the tracing API. - **Breaking change**: `before_send` is no longer called for transactions. ## 0.15.1 - Fix fatal crash in Pyramid integration on 404. ## 0.15.0 - **Breaking change:** The ASGI middleware will now raise an exception if contextvars are not available, like it is already the case for other asyncio integrations. - Contextvars are now used in more circumstances following a bugfix release of `gevent`. This will fix a few instances of wrong request data being attached to events while using an asyncio-based web framework. - APM: Fix a bug in the SQLAlchemy integration where a span was left open if the database transaction had to be rolled back. This could have led to deeply nested span trees under that db query span. - Fix a bug in the Pyramid integration where the transaction name could not be overridden at all. - Fix a broken type annotation on `capture_exception`. - Basic support for Django 3.1. More work is required for async middlewares to be instrumented properly for APM. ## 0.14.4 - Fix bugs in transport rate limit enforcement for specific data categories.
The bug should not have affected anybody because we do not yet emit rate limits for specific event types/data categories. - Fix a bug in `capture_event` where it would crash if given additional kwargs. Thanks to Tatiana Vasilevskaya! - Fix a bug where contextvars from the request handler were inaccessible in AIOHTTP error handlers. - Fix a bug where the Celery integration would crash if newrelic instrumented Celery as well. ## 0.14.3 - Attempt to use a monotonic clock to measure span durations in Performance/APM. - Avoid overwriting explicitly set user data in web framework integrations. - Allow passing keyword arguments to `capture_event` instead of configuring the scope. - Feature development for session tracking. ## 0.14.2 - Fix a crash in Django Channels instrumentation when SDK is reinitialized. - More contextual data for AWS Lambda (cloudwatch logs link). ## 0.14.1 - Fix a crash in the Django integration when used in combination with Django Rest Framework's test utilities for request. - Fix high memory consumption when sending a lot of errors in the same process. Particularly noticeable in async environments. ## 0.14.0 - Show ASGI request data in Django 3.0 - New integration for the Trytond ERP framework. Thanks n1ngu! ## 0.13.5 - Fix trace continuation bugs in APM. - No longer report `asyncio.CancelledError` as part of AIOHTTP integration. ## 0.13.4 - Fix package classifiers to mark this package as supporting Python 3.8. The SDK supported 3.8 before though. - Update schema sent for transaction events (transaction status). - Fix a bug where `None` inside request data was skipped/omitted. ## 0.13.3 - Fix an issue with the ASGI middleware that would cause Uvicorn to infer the wrong ASGI versions and call the wrapped application with the wrong argument count. - Do not ignore the `tornado.application` logger. - The Redis integration now instruments Redis blaster for breadcrumbs and transaction spans. ## 0.13.2 - Fix a bug in APM that would cause wrong durations to be displayed on non-UTC servers. ## 0.13.1 - Add new global functions for setting scope/context data. - Fix a bug that would make Django 1.11+ apps crash when using function-based middleware. ## 0.13.0 - Remove an old deprecation warning (the behavior itself changed a long time ago). - The AIOHTTP integration now attaches the request body to crash reports. Thanks to Vitali Rebkavets! - Add an experimental PySpark integration. - First release to be tested under Python 3.8. No code changes were necessary though, so previous releases also might have worked. ## 0.12.3 - Various performance improvements to event sending. - Avoid crashes when scope or hub is racy. - Revert a change that broke applications using gevent and channels (in the same virtualenv, but different processes). - Fix a bug that made the SDK crash on unicode in SQL. ## 0.12.2 - Fix a crash with ASGI (Django Channels) when the ASGI request type is neither HTTP nor Websockets. ## 0.12.1 - Temporarily remove sending of SQL parameters (as part of breadcrumbs or spans for APM) to Sentry to avoid memory consumption issues. ## 0.12.0 - Sentry now has a [Discord server](https://discord.gg/cWnMQeA)! Join the server to get involved in SDK development and ask questions. - Fix a bug where the response object for httplib (or requests) was held onto for an unnecessarily long amount of time. - APM: Add spans for more methods on `subprocess.Popen` objects. - APM: Add spans for Django middlewares. - APM: Add spans for ASGI requests.
- Automatically inject the ASGI middleware for Django Channels 2.0. This will **break your Channels 2.0 application if it is running on Python 3.5 or 3.6** (while previously it would "only" leak a lot of memory for each ASGI request). **Install `aiocontextvars` from PyPI to make it work again.** ## 0.11.2 - Fix a bug where the SDK would throw an exception on shutdown when running under eventlet. - Add missing data to Redis breadcrumbs. ## 0.11.1 - Remove a faulty assertion (observed in environment with Django Channels and ASGI). ## 0.11.0 - Fix type hints for the logging integration. Thanks Steven Dignam! - Fix an issue where scope/context data would leak in applications that use `gevent` with its threading monkeypatch. The fix is to avoid usage of contextvars in such environments. Thanks Ran Benita! - Fix a reference cycle in the `ThreadingIntegration` that led to exceptions on interpreter shutdown. Thanks Guang Tian Li! - Fix a series of bugs in the stdlib integration that broke usage of `subprocess`. - More instrumentation for APM. - New integration for SQLAlchemy (creates breadcrumbs from queries). - New (experimental) integration for Apache Beam. - Fix a bug in the `LoggingIntegration` that would send breadcrumbs timestamps in the wrong timezone. - The `AiohttpIntegration` now sets the event's transaction name. - Fix a bug that caused infinite recursion when serializing local variables that logged errors or otherwise created Sentry events. ## 0.10.2 - Fix a bug where a log record with non-strings as `extra` keys would make the SDK crash. - Added ASGI integration for better hub propagation, request data for your events and capturing uncaught exceptions. Using this middleware explicitly in your code will also fix a few issues with Django Channels. - Fix a bug where `celery-once` was deadlocking when used in combination with the celery integration. - Fix a memory leak in the new tracing feature when it is not enabled. ## 0.10.1 - Fix bug where the SDK would yield a deprecation warning about `collections.abc` vs `collections`. - Fix bug in stdlib integration that would cause spawned subprocesses to not inherit the environment variables from the parent process. ## 0.10.0 - Massive refactor in preparation for tracing. There are no intentional breaking changes, but there is a risk of breakage (hence the minor version bump). Two new client options `traces_sample_rate` and `traceparent_v2` have been added. Do not change the defaults in production; they will bring your application down or at least fill your Sentry project up with nonsense events. ## 0.9.5 - Do not use `getargspec` on Python 3 to evade deprecation warning. ## 0.9.4 - Revert a change in 0.9.3 that prevented passing a `unicode` string as DSN to `init()`. ## 0.9.3 - Add type hints for `init()`. - Include user agent header when sending events. ## 0.9.2 - Fix a bug in the Django integration that would prevent the user from initializing the SDK at the top of `settings.py`. This bug was introduced in 0.9.1 for all Django versions, but has been there for much longer for Django 1.6 in particular. ## 0.9.1 - Fix a bug on Python 3.7 where gunicorn with gevent would cause the SDK to leak event data between requests. - Fix a bug where the GNU backtrace integration would not parse certain frames. - Fix a bug where the SDK would not pick up request bodies for Django Rest Framework based apps. - Remove a few more headers containing sensitive data by default. - Various improvements to type hints. Thanks Ran Benita!
- Add an event hint to access the log record from `before_send`. - Fix a bug that would ignore `__tracebackhide__`. Thanks Matt Millican! - Fix distribution information for mypy support (add `py.typed` file). Thanks Ran Benita! ## 0.9.0 - The SDK now captures `SystemExit` and other `BaseException`s when coming from within a WSGI app (Flask, Django, ...) - Pyramid: No longer report an exception if there exists an exception view for it. ## 0.8.1 - Fix infinite recursion bug in Celery integration. ## 0.8.0 - Add the always_run option in excepthook integration. - Fix performance issues when attaching large data to events. This is not really intended to be a breaking change, but this release does include a rewrite of a larger chunk of code, therefore the minor version bump. ## 0.7.14 - Fix crash when using Celery integration (`TypeError` when using `apply_async`). ## 0.7.13 - Fix a bug where `Ignore` raised in a Celery task would be reported to Sentry. - Add experimental support for tracing PoC. ## 0.7.12 - Read from `X-Real-IP` for user IP address. - Fix a bug that would not apply in-app rules for attached callstacks. - It's now possible to disable automatic proxy support by passing `http_proxy=""`. Thanks Marco Neumann! ## 0.7.11 - Fix a bug that would send `errno` in an invalid format to the server. - Fix import-time crash when running Python with `-O` flag. - Fix a bug that would prevent the logging integration from attaching `extra` keys called `data`. - Fix order in which exception chains are reported to match Raven behavior. - New integration for the Falcon web framework. Thanks to Jacob Magnusson! ## 0.7.10 - Add more event trimming. - Log Sentry's response body in debug mode. - Fix a few bad typehints causing issues in IDEs. - Fix a bug in the Bottle integration that would report HTTP exceptions (e.g. redirects) as errors. - Fix a bug that would prevent use of `in_app_exclude` without setting `in_app_include`. - Fix a bug where request bodies of Django Rest Framework apps were not captured. - Suppress errors during SQL breadcrumb capturing in Django integration. Also change order in which formatting strategies are tried. ## 0.7.9 - New integration for the Bottle web framework. Thanks to Stepan Henek! - Self-protect against broken mapping implementations and other broken reprs instead of dropping all local vars from a stacktrace. Thanks to Marco Neumann! ## 0.7.8 - Add support for Sanic versions 18 and 19. - Fix a bug that caused an SDK crash when using composed SQL from psycopg2. ## 0.7.7 - Fix a bug that would not capture request bodies if they were empty JSON arrays, objects or strings. - New GNU backtrace integration parses stacktraces from exception messages and appends them to existing stacktrace. - Capture Tornado formdata. - Support Python 3.6 in Sanic and AIOHTTP integration. - Clear breadcrumbs before starting a new request. - Fix a bug in the Celery integration that would drop pending events during worker shutdown (particularly an issue when running with `max_tasks_per_child = 1`) - Fix a bug with `repr`ing locals whose `__repr__` simultaneously changes the WSGI environment or other data that we're also trying to serialize at the same time. ## 0.7.6 - Fix a bug where artificial frames for Django templates would not be marked as in-app and would always appear as the innermost frame. Implement a heuristic to show template frame closer to `render` or `parse` invocation. ## 0.7.5 - Fix a bug in the Tornado integration that would send broken cookies to the server.
- Fix a bug in the logging integration that would ignore the client option `with_locals`. ## 0.7.4 - Read release and environment from process environment like the Raven SDK does. The keys are called `SENTRY_RELEASE` and `SENTRY_ENVIRONMENT`. - Fix a bug in the `serverless` integration where it would not push a new scope for each function call (leaking tags and other things across calls). - Experimental support for type hints. ## 0.7.3 - Fix crash in AIOHTTP integration when integration was set up but disabled. - Flask integration now adds usernames, email addresses based on the protocol Flask-User defines on top of Flask-Login. - New threading integration catches exceptions from crashing threads. - New method `flush` on hubs and clients. New global `flush` function. - Add decorator for serverless functions to fix common problems in those environments. - Fix a bug in the logging integration where using explicit handlers required enabling the integration. ## 0.7.2 - Fix `celery.exceptions.Retry` spamming in Celery integration. ## 0.7.1 - Fix `UnboundLocalError` crash in Celery integration. ## 0.7.0 - Properly display chained exceptions (PEP-3134). - Rewrite celery integration to monkeypatch instead of using signals due to bugs in Celery 3's signal handling. The Celery scope is also now available in prerun and postrun signals. - Fix Tornado integration to work with Tornado 6. - Do not evaluate Django `QuerySet` when trying to capture local variables. Also an internal hook was added to overwrite `repr` for local vars. ## 0.6.9 - Second attempt at fixing the bug that was supposed to be fixed in 0.6.8. > No longer access arbitrary sequences in local vars due to possible side effects. ## 0.6.8 - No longer access arbitrary sequences in local vars due to possible side effects. ## 0.6.7 - Source code of Django templates is now displayed in stack frames, like Jinja templates in Flask already were. - Updates to AWS Lambda integration for changes Amazon did to their Python 3.7 runtime. - Fix a bug in the AIOHTTP integration that would report 300s and other HTTP status codes as errors. - Fix a bug where a crashing `before_send` would crash the SDK and app. - Fix a bug where cyclic references in e.g. local variables or `extra` data would crash the SDK. ## 0.6.6 - Un-break API of internal `Auth` object that we use in Sentry itself. ## 0.6.5 - Capture WSGI request data eagerly to save memory and avoid issues with uWSGI. - Ability to use subpaths in DSN. - Ignore `django.request` logger. ## 0.6.4 - Fix bug that would lead to an `AssertionError: stack must have at least one layer`, at least in testsuites for Flask apps. ## 0.6.3 - New integration for Tornado - Fix request data in Django, Flask and other WSGI frameworks leaking between events. - Fix infinite recursion when sending more events in `before_send`. ## 0.6.2 - Fix crash in AWS Lambda integration when using Zappa. This only silences the error, the underlying bug is still in Zappa. ## 0.6.1 - New integration for aiohttp-server. - Fix crash when reading hostname in broken WSGI environments. ## 0.6.0 - Fix bug where a 429 without Retry-After would not be honored. - Fix bug where proxy setting would not fall back to `http_proxy` for HTTPS traffic. - A WSGI middleware is now available for catching errors and adding context about the current request to them. - Using `logging.debug("test", exc_info=True)` will now attach the current stacktrace if no `sys.exc_info` is available. - The Python 3.7 runtime for AWS Lambda is now supported.
- Fix a bug that would drop an event or parts of it when it contained bytes that were not UTF-8 encoded. - Logging an exception will no longer add the exception as breadcrumb to the exception's own event. ## 0.5.5 - New client option `ca_certs`. - Fix crash with Django and psycopg2. ## 0.5.4 - Fix deprecation warning in relation to the `collections` stdlib module. - Fix bug that would crash Django and Flask when streaming responses are failing halfway through. ## 0.5.3 - Fix bug where using `push_scope` with a callback would not pop the scope. - Fix crash when initializing the SDK in `push_scope`. - Fix bug where IP addresses were sent when `send_default_pii=False`. ## 0.5.2 - Fix bug where events sent through the RQ integration were sometimes lost. - Remove a deprecation warning about usage of `logger.warn`. - Fix bug where large frame local variables would lead to the event being rejected by Sentry. ## 0.5.1 - Integration for Redis Queue (RQ) ## 0.5.0 - Fix a bug that would omit several debug logs during SDK initialization. - Fix issue that sent an event key `""` Sentry wouldn't understand. - **Breaking change:** The `level` and `event_level` options in the logging integration now work separately from each other. - Fix a bug in the Sanic integration that would report the exception behind any HTTP error code. - Fix a bug that would spam breadcrumbs in the Celery integration. Ignore logger `celery.worker.job`. - Additional attributes on log records are now put into `extra`. - Integration for Pyramid. - `sys.argv` is put into extra automatically. ## 0.4.3 - Fix a bug that would leak WSGI responses. ## 0.4.2 - Fix a bug in the Sanic integration that would leak data between requests. - Fix a bug that would hide all debug logging happening inside of the built-in transport. - Fix a bug that would report errors for typos in Django's shell. ## 0.4.1 - Fix bug that would only show filenames in stacktraces but not the parent directories. ## 0.4.0 - Changed how integrations are initialized. Integrations are now configured and enabled per-client. ## 0.3.11 - Fix issue with certain deployment tools and the AWS Lambda integration. ## 0.3.10 - Set transactions for Django like in Raven. Which transaction behavior is used can be configured. - Fix a bug which would omit frame local variables from stacktraces in Celery. - New option: `attach_stacktrace` ## 0.3.9 - Bugfixes for AWS Lambda integration: Using Zappa did not catch any exceptions. ## 0.3.8 - Nicer log level for internal errors. ## 0.3.7 - Remove `repos` configuration option. There was never a way to make use of this feature. - Fix a bug in `last_event_id`. - Add Django SQL queries to breadcrumbs. - Django integration won't set user attributes if they were already set. - Report correct SDK version to Sentry. ## 0.3.6 - Integration for Sanic ## 0.3.5 - Integration for AWS Lambda - Fix mojibake when encoding local variable values ## 0.3.4 - Performance improvement when storing breadcrumbs ## 0.3.3 - Fix crash when breadcrumbs had to be truncated ## 0.3.2 - Fixed an issue where some paths were not properly sent as absolute paths sentry-python-1.39.2/CONTRIBUTING-aws-lambda.md000066400000000000000000000022211454744723200207210ustar00rootroot00000000000000# Contributing to Sentry AWS Lambda Layer All the general terms of the [CONTRIBUTING.md](CONTRIBUTING.md) apply. ## Development environment You need to have an AWS account and the AWS CLI installed and set up.
We put together two helper scripts that can help you with development: - `./scripts/aws-deploy-local-layer.sh` This script [scripts/aws-deploy-local-layer.sh](scripts/aws-deploy-local-layer.sh) will take the code you have checked out locally, create a Lambda layer out of it and deploy it to the `eu-central-1` region of your configured AWS account using the `aws` CLI. The Lambda layer will have the name `SentryPythonServerlessSDK-local-dev` - `./scripts/aws-attach-layer-to-lambda-function.sh` You can use this script [scripts/aws-attach-layer-to-lambda-function.sh](scripts/aws-attach-layer-to-lambda-function.sh) to attach the Lambda layer you just deployed (using the first script) onto one of your existing Lambda functions. You will have to give the name of the Lambda function to attach onto as an argument. (See the script for details.) With these two helper scripts, it should be easy to rapidly iterate your development on the Lambda layer. sentry-python-1.39.2/CONTRIBUTING.md000066400000000000000000000233571454744723200167260ustar00rootroot00000000000000# Contributing to Sentry SDK for Python We welcome contributions to `sentry-python` by the community. This file outlines the process to contribute to the SDK itself. For contributing to the documentation, please see the [Contributing to Docs](https://docs.sentry.io/contributing/) page. ## How to Report a Problem Please search the [issue tracker](https://github.com/getsentry/sentry-python/issues) before creating a new issue (a problem or an improvement request). Please also ask in our [Sentry Community on Discord](https://discord.com/invite/Ww9hbqr) before submitting a new issue. There are a ton of great people in our Discord community ready to help you! ## Submitting Changes - Fork the `sentry-python` repo and prepare your changes. - Add tests for your changes to `tests/`. - Run tests and make sure all of them pass. - Submit a pull request, referencing any issues your changes address. Please follow our [commit message format](https://develop.sentry.dev/commit-messages/#commit-message-format) when naming your pull request. We will review your pull request as soon as possible. Thank you for contributing! ## Development Environment ### Set up Python Make sure that you have Python 3 installed. Version 3.7 or higher is required to run style checkers on pre-commit. On macOS, we recommend using `brew` to install Python. For Windows, we recommend an official [python.org](https://www.python.org/downloads/) release. ### Fork and Clone the Repo Before you can contribute, you will need to [fork the `sentry-python` repository](https://github.com/getsentry/sentry-python/fork). Then, clone the forked repository to your local development environment. ### Create a Virtual Environment To keep your Python development environment and packages separate from the ones used by your operating system, create a [virtual environment](https://docs.python.org/3/tutorial/venv.html): ```bash cd sentry-python python -m venv .venv ``` Then, activate your virtual environment with the following command. You will need to repeat this step every time you wish to work on your changes for `sentry-python`. ```bash source .venv/bin/activate ``` ### Install `sentry-python` in Editable Mode Install `sentry-python` in [editable mode](https://pip.pypa.io/en/latest/topics/local-project-installs/#editable-installs). This will make any changes you make to the SDK code locally immediately effective without you having to reinstall or copy anything. ```bash pip install -e .
``` **Hint:** Sometimes you need a sample project to run your new changes to `sentry-python`. In this case, install the sample project in the same virtualenv and you should be good to go. ### Install Coding Style Pre-commit Hooks This will make sure that your commits will have the correct coding style. ```bash cd sentry-python pip install -r linter-requirements.txt pip install pre-commit pre-commit install ``` That's it. You should be ready to make changes, run tests, and make commits! If you experience any problems, please don't hesitate to ping us in our [Discord Community](https://discord.com/invite/Ww9hbqr). ## Running Tests To run the tests, first set up your development environment according to the instructions above. Then, install the required packages for running tests with the following command: ```bash pip install -r test-requirements.txt ``` Once the requirements are installed, you can run all tests with the following command: ```bash pytest tests/ ``` If you would like to run the tests for a specific integration, use a command similar to the one below: ```bash pytest -rs tests/integrations/flask/ # Replace "flask" with the specific integration you wish to test ``` **Hint:** Tests of integrations need additional dependencies. The switch `-rs` will show you why tests were skipped and what dependencies you need to install for the tests to run. (You can also consult the [tox.ini](tox.ini) file to see what dependencies are installed for each integration) ## Adding a New Integration 1. Write the integration. - Instrument all application instances by default. Prefer global signals/patches instead of configuring a specific instance. Don't make the user pass anything to your integration for anything to work. Aim for zero configuration. - Everybody monkeypatches. That means: - Make sure to think about conflicts with other monkeypatches when monkeypatching. - You don't need to feel bad about it. - Make sure your changes don't break end user contracts. The SDK should never alter the expected behavior of the underlying library or framework from the user's perspective and it shouldn't have any side effects. - Avoid modifying the hub, registering a new client or the like. The user drives the client, and the client owns integrations. - Allow the user to turn off the integration by changing the client. Check `Hub.current.get_integration(MyIntegration)` from within your signal handlers to see if your integration is still active before you do anything impactful (such as sending an event). (A minimal integration skeleton is sketched after this list.) 2. Write tests. - Consider the minimum versions supported, and test each version in a separate env in `tox.ini`. - Create a new folder in `tests/integrations/`, with an `__init__` file that skips the entire suite if the package is not installed. 3. Update package metadata. - We use `extras_require` in `setup.py` to communicate minimum version requirements for integrations. People can use this in combination with tools like Poetry or Pipenv to detect conflicts between our supported versions and their used versions programmatically. Do not set upper bounds on version requirements as people are often faster in adopting new versions of a web framework than we are in adding them to the test matrix or our package metadata. 4. Write the [docs](https://github.com/getsentry/sentry-docs). Follow the structure of [existing integration docs](https://docs.sentry.io/platforms/python/integrations/). And, please **make sure to add your integration to the table in `python/integrations/index.md`** (people often forget this step 🙂). 5. Merge docs after the new version has been released. The docs are built and deployed after each merge, so your changes should go live in a few minutes. 6. (optional, if possible) Update data in [`sdk_updates.py`](https://github.com/getsentry/sentry/blob/master/src/sentry/sdk_updates.py) to give users in-app suggestions to use your integration. This step will only apply to some integrations.
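To make the rules in step 1 concrete, here is a minimal sketch of an integration — `somelib` and its `handle_error` hook are hypothetical stand-ins, not a real package:

```python
import sentry_sdk
from sentry_sdk.hub import Hub
from sentry_sdk.integrations import Integration

import somelib  # hypothetical library being instrumented


class SomeLibIntegration(Integration):
    identifier = "somelib"

    @staticmethod
    def setup_once():
        # Called once per process: patch globally, not per instance.
        old_handle_error = somelib.handle_error

        def sentry_handle_error(exc):
            # Only act if the integration is active on the current client.
            if Hub.current.get_integration(SomeLibIntegration) is not None:
                sentry_sdk.capture_exception(exc)
            return old_handle_error(exc)

        somelib.handle_error = sentry_handle_error
```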
## Releasing a New Version _(only relevant for Sentry employees)_ ### Prerequisites - All the changes that should be released must be on the `master` branch. - Every commit should follow the [Commit Message Format](https://develop.sentry.dev/commit-messages/#commit-message-format) convention. - CHANGELOG.md is updated automatically. No human intervention is necessary, but you might want to consider polishing the changelog by hand to make it more user-friendly by grouping related things together, adding small code snippets and links to docs, etc. ### Manual Process - On GitHub in the `sentry-python` repository, go to "Actions" and select the "Release" workflow. - Click on "Run workflow" on the right side, and make sure the `master` branch is selected. - Set the "Version to release" input field. Here you decide if it is a major, minor or patch release. (See "Versioning Policy" below) - Click "Run Workflow". This will trigger [Craft](https://github.com/getsentry/craft) to prepare everything needed for a release. (For more information, see [craft prepare](https://github.com/getsentry/craft#craft-prepare-preparing-a-new-release).) At the end of this process, a release issue is created in the [Publish](https://github.com/getsentry/publish) repository. (Example release issue: https://github.com/getsentry/publish/issues/815) Now one of the people with release privileges (most probably your engineering manager) will review this issue and then add the `accepted` label to the issue. There are always two people involved in a release. If you are in a hurry and the release should be out immediately, there is a Slack channel called `#proj-release-approval` where you can see your release issue and where you can ping people to please have a look immediately. When the release issue is labeled `accepted`, [Craft](https://github.com/getsentry/craft) is triggered again to publish the release to all the right platforms. (See [craft publish](https://github.com/getsentry/craft#craft-publish-publishing-the-release) for more information.) At the end of this process the release issue on GitHub will be closed and the release is completed! Congratulations! There is a sequence diagram visualizing all this in the [README.md](https://github.com/getsentry/publish) of the `Publish` repository. ### Versioning Policy This project follows [semver](https://semver.org/), with three additions: - Semver says that major version `0` can include breaking changes at any time. Still, it is common practice to assume that only `0.x` releases (minor versions) can contain breaking changes while `0.x.y` releases (patch versions) are used for backwards-compatible changes (bugfixes and features). This project also follows that practice. - All undocumented APIs are considered internal. They are not part of this contract. - Certain features (e.g. integrations) may be explicitly called out as "experimental" or "unstable" in the documentation. They come with their own versioning policy described in the documentation. We recommend pinning your version requirements against `1.x.*` or `1.x.y`.
Either one of the following is fine: ``` sentry-sdk>=1.0.0,<2.0.0 sentry-sdk==1.5.0 ``` A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. sentry-python-1.39.2/LICENSE000066400000000000000000000021051454744723200154700ustar00rootroot00000000000000MIT License Copyright (c) 2018 Functional Software, Inc. dba Sentry Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. sentry-python-1.39.2/MANIFEST.in000066400000000000000000000000541454744723200162220ustar00rootroot00000000000000include LICENSE include sentry_sdk/py.typed sentry-python-1.39.2/Makefile000066400000000000000000000033031454744723200161240ustar00rootroot00000000000000SHELL = /bin/bash VENV_PATH = .venv help: @echo "Thanks for your interest in the Sentry Python SDK!" @echo @echo "make lint: Run linters" @echo "make test: Run basic tests (not testing most integrations)" @echo "make test-all: Run ALL tests (slow, closest to CI)" @echo "make format: Run code formatters (destructive)" @echo "make aws-lambda-layer: Build AWS Lambda layer directory for serverless integration" @echo @echo "Also make sure to read ./CONTRIBUTING.md" @false .venv: virtualenv -ppython3 $(VENV_PATH) $(VENV_PATH)/bin/pip install tox dist: .venv rm -rf dist dist-serverless build $(VENV_PATH)/bin/pip install wheel setuptools $(VENV_PATH)/bin/python setup.py sdist bdist_wheel .PHONY: dist format: .venv $(VENV_PATH)/bin/tox -e linters --notest .tox/linters/bin/black . .PHONY: format test: .venv @$(VENV_PATH)/bin/tox -e py3.9 .PHONY: test test-all: .venv @TOXPATH=$(VENV_PATH)/bin/tox sh ./scripts/runtox.sh .PHONY: test-all check: lint test .PHONY: check lint: .venv @set -e && $(VENV_PATH)/bin/tox -e linters || ( \ echo "================================"; \ echo "Bad formatting? Run: make format"; \ echo "================================"; \ false) .PHONY: lint apidocs: .venv @$(VENV_PATH)/bin/pip install --editable . 
@$(VENV_PATH)/bin/pip install -U -r ./docs-requirements.txt @$(VENV_PATH)/bin/sphinx-build -vv -W -b html docs/ docs/_build .PHONY: apidocs apidocs-hotfix: apidocs @$(VENV_PATH)/bin/pip install ghp-import @$(VENV_PATH)/bin/ghp-import -pf docs/_build .PHONY: apidocs-hotfix aws-lambda-layer: dist $(VENV_PATH)/bin/pip install -r aws-lambda-layer-requirements.txt $(VENV_PATH)/bin/python -m scripts.build_aws_lambda_layer .PHONY: aws-lambda-layer sentry-python-1.39.2/README.md000066400000000000000000000115561454744723200157540ustar00rootroot00000000000000

Sentry

_Bad software is everywhere, and we're tired of it. Sentry is on a mission to help developers write better software faster, so we can get back to enjoying technology. If you want to join us [**Check out our open positions**](https://sentry.io/careers/)_ # Official Sentry SDK for Python [![Build Status](https://github.com/getsentry/sentry-python/actions/workflows/ci.yml/badge.svg)](https://github.com/getsentry/sentry-python/actions/workflows/ci.yml) [![PyPi page link -- version](https://img.shields.io/pypi/v/sentry-sdk.svg)](https://pypi.python.org/pypi/sentry-sdk) [![Discord](https://img.shields.io/discord/621778831602221064)](https://discord.gg/cWnMQeA) This is the official Python SDK for [Sentry](http://sentry.io/) --- ## Getting Started ### Install ```bash pip install --upgrade sentry-sdk ``` ### Configuration ```python import sentry_sdk sentry_sdk.init( "https://12927b5f211046b575ee51fd8b1ac34f@o1.ingest.sentry.io/1", # Set traces_sample_rate to 1.0 to capture 100% # of transactions for performance monitoring. traces_sample_rate=1.0, ) ``` ### Usage ```python from sentry_sdk import capture_message capture_message("Hello World") # Will create an event in Sentry. raise ValueError() # Will also create an event in Sentry. ``` - To learn more about how to use the SDK [refer to our docs](https://docs.sentry.io/platforms/python/). - Are you coming from `raven-python`? [Use this migration guide](https://docs.sentry.io/platforms/python/migration/). - To learn about internals use the [API Reference](https://getsentry.github.io/sentry-python/). ## Integrations (If you want to create a new integration, have a look at the [Adding a new integration checklist](https://github.com/getsentry/sentry-python/blob/master/CONTRIBUTING.md#adding-a-new-integration).) See [the documentation](https://docs.sentry.io/platforms/python/integrations/) for an up-to-date list of libraries and frameworks we support. Here are some examples: - [Django](https://docs.sentry.io/platforms/python/integrations/django/) - [Flask](https://docs.sentry.io/platforms/python/integrations/flask/) - [FastAPI](https://docs.sentry.io/platforms/python/integrations/fastapi/) - [AIOHTTP](https://docs.sentry.io/platforms/python/integrations/aiohttp/) - [SQLAlchemy](https://docs.sentry.io/platforms/python/integrations/sqlalchemy/) - [asyncpg](https://docs.sentry.io/platforms/python/integrations/asyncpg/) - [Redis](https://docs.sentry.io/platforms/python/integrations/redis/) - [Celery](https://docs.sentry.io/platforms/python/integrations/celery/) - [Apache Airflow](https://docs.sentry.io/platforms/python/integrations/airflow/) - [Apache Spark](https://docs.sentry.io/platforms/python/integrations/pyspark/) - [asyncio](https://docs.sentry.io/platforms/python/integrations/asyncio/) - [Graphene](https://docs.sentry.io/platforms/python/integrations/graphene/) - [Logging](https://docs.sentry.io/platforms/python/integrations/logging/) - [Loguru](https://docs.sentry.io/platforms/python/integrations/loguru/) - [HTTPX](https://docs.sentry.io/platforms/python/integrations/httpx/) - [AWS Lambda](https://docs.sentry.io/platforms/python/integrations/aws-lambda/) - [Google Cloud Functions](https://docs.sentry.io/platforms/python/integrations/gcp-functions/) ## Migrating From `raven-python` The old `raven-python` client has entered maintenance mode and was moved [here](https://github.com/getsentry/raven-python). If you're using `raven-python`, we recommend you to migrate to this new SDK. 
You can find the benefits of migrating and how to do it in our [migration guide](https://docs.sentry.io/platforms/python/migration/). ## Contributing to the SDK Please refer to [CONTRIBUTING.md](CONTRIBUTING.md). ## Getting Help/Support If you need help setting up or configuring the Python SDK (or anything else in the Sentry universe) please head over to the [Sentry Community on Discord](https://discord.com/invite/Ww9hbqr). There is a ton of great people in our Discord community ready to help you! ## Resources - [![Documentation](https://img.shields.io/badge/documentation-sentry.io-green.svg)](https://docs.sentry.io/quickstart/) - [![Forum](https://img.shields.io/badge/forum-sentry-green.svg)](https://forum.sentry.io/c/sdks) - [![Discord](https://img.shields.io/discord/621778831602221064)](https://discord.gg/Ww9hbqr) - [![Stack Overflow](https://img.shields.io/badge/stack%20overflow-sentry-green.svg)](http://stackoverflow.com/questions/tagged/sentry) - [![Twitter Follow](https://img.shields.io/twitter/follow/getsentry?label=getsentry&style=social)](https://twitter.com/intent/follow?screen_name=getsentry) ## License Licensed under the MIT license, see [`LICENSE`](LICENSE) sentry-python-1.39.2/aws-lambda-layer-requirements.txt000066400000000000000000000003361454744723200230730ustar00rootroot00000000000000certifi # In Lambda functions botocore is used, and botocore is not # yet supporting urllib3 1.27.0 never mind 2+. # So we pin this here to make our Lambda layer work with # Lambda Function using Python 3.7+ urllib3<1.27 sentry-python-1.39.2/checkouts/000077500000000000000000000000001454744723200164555ustar00rootroot00000000000000sentry-python-1.39.2/checkouts/data-schemas/000077500000000000000000000000001454744723200210075ustar00rootroot00000000000000sentry-python-1.39.2/codecov.yml000066400000000000000000000004551454744723200166360ustar00rootroot00000000000000comment: false coverage: status: project: default: target: auto # auto compares coverage to the previous base commit threshold: 10% # this allows a 10% drop from the previous base commit coverage informational: true ignore: - "tests" - "sentry_sdk/_types.py" sentry-python-1.39.2/docs-requirements.txt000066400000000000000000000001271454744723200206770ustar00rootroot00000000000000shibuya sphinx==7.2.6 sphinx-autodoc-typehints[type_comments]>=1.8.0 typing-extensions sentry-python-1.39.2/docs/000077500000000000000000000000001454744723200154155ustar00rootroot00000000000000sentry-python-1.39.2/docs/.gitignore000066400000000000000000000000071454744723200174020ustar00rootroot00000000000000_build sentry-python-1.39.2/docs/_static/000077500000000000000000000000001454744723200170435ustar00rootroot00000000000000sentry-python-1.39.2/docs/_static/.gitkeep000066400000000000000000000000001454744723200204620ustar00rootroot00000000000000sentry-python-1.39.2/docs/api.rst000066400000000000000000000024421454744723200167220ustar00rootroot00000000000000============= Top Level API ============= This is the user facing API of the SDK. It's exposed as ``sentry_sdk``. With this API you can implement a custom performance monitoring or error reporting solution. Capturing Data ============== .. autofunction:: sentry_sdk.api.capture_event .. autofunction:: sentry_sdk.api.capture_exception .. autofunction:: sentry_sdk.api.capture_message Enriching Events ================ .. autofunction:: sentry_sdk.api.add_breadcrumb .. autofunction:: sentry_sdk.api.set_context .. autofunction:: sentry_sdk.api.set_extra .. autofunction:: sentry_sdk.api.set_level .. 
autofunction:: sentry_sdk.api.set_tag .. autofunction:: sentry_sdk.api.set_user Performance Monitoring ====================== .. autofunction:: sentry_sdk.api.continue_trace .. autofunction:: sentry_sdk.api.get_current_span .. autofunction:: sentry_sdk.api.start_span .. autofunction:: sentry_sdk.api.start_transaction Distributed Tracing =================== .. autofunction:: sentry_sdk.api.get_baggage .. autofunction:: sentry_sdk.api.get_traceparent Managing Scope (advanced) ========================= .. autofunction:: sentry_sdk.api.configure_scope .. autofunction:: sentry_sdk.api.push_scope .. Not documented (On purpose. Not sure if anyone should use those) .. last_event_id() .. flush() sentry-python-1.39.2/docs/apidocs.rst000066400000000000000000000014311454744723200175700ustar00rootroot00000000000000======== API Docs ======== .. autoclass:: sentry_sdk.Hub :members: .. autoclass:: sentry_sdk.Scope :members: .. autoclass:: sentry_sdk.Client :members: .. autoclass:: sentry_sdk.Transport :members: .. autoclass:: sentry_sdk.HttpTransport :members: .. autoclass:: sentry_sdk.tracing.Transaction :members: .. autoclass:: sentry_sdk.tracing.Span :members: .. autoclass:: sentry_sdk.profiler.Profile :members: .. autoclass:: sentry_sdk.session.Session :members: .. autoclass:: sentry_sdk.attachments.Attachment :members: .. autoclass:: sentry_sdk.scrubber.EventScrubber :members: .. autoclass:: sentry_sdk.monitor.Monitor :members: .. autoclass:: sentry_sdk.envelope.Envelope :members: .. autoclass:: sentry_sdk.envelope.Item :members: sentry-python-1.39.2/docs/conf.py000066400000000000000000000130061454744723200167140ustar00rootroot00000000000000# -*- coding: utf-8 -*- import os import sys import typing from datetime import datetime # prevent circular imports import sphinx.builders.html import sphinx.builders.latex import sphinx.builders.texinfo import sphinx.builders.text import sphinx.ext.autodoc # noqa: F401 import urllib3.exceptions # noqa: F401 typing.TYPE_CHECKING = True # # Configuration file for the Sphinx documentation builder. # # This file does only contain a selection of the most common options. For a # full list see the documentation: # http://www.sphinx-doc.org/en/master/config sys.path.insert(0, os.path.abspath("..")) # -- Project information ----------------------------------------------------- project = "sentry-python" copyright = "2019-{}, Sentry Team and Contributors".format(datetime.now().year) author = "Sentry Team and Contributors" release = "1.39.2" version = ".".join(release.split(".")[:2]) # The short X.Y version. # -- General configuration --------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. # # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ "sphinx.ext.autodoc", "sphinx_autodoc_typehints", "sphinx.ext.viewcode", "sphinx.ext.githubpages", "sphinx.ext.intersphinx", ] # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] source_suffix = ".rst" # The master toctree document. master_doc = "index" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. 
# Usually you set "language" from the command line for these cases. language = "en" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] # The name of the Pygments (syntax highlighting) style to use. pygments_style = None # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # on_rtd = os.environ.get("READTHEDOCS", None) == "True" html_theme = "shibuya" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = { "github_url": "https://github.com/getsentry/sentry-python", } # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ["_static"] # Custom sidebar templates, must be a dictionary that maps document names # to template names. # # The default sidebars (for documents that don't match any pattern) are # defined by theme itself. Builtin themes are using these templates by # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', # 'searchbox.html']``. # # html_sidebars = {} # -- Options for HTMLHelp output --------------------------------------------- # Output file base name for HTML help builder. htmlhelp_basename = "sentry-pythondoc" # -- Options for LaTeX output ------------------------------------------------ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ( master_doc, "sentry-python.tex", "sentry-python Documentation", "Sentry Team and Contributors", "manual", ) ] # -- Options for manual page output ------------------------------------------ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [(master_doc, "sentry-python", "sentry-python Documentation", [author], 1)] # -- Options for Texinfo output ---------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ( master_doc, "sentry-python", "sentry-python Documentation", author, "sentry-python", "The official Sentry SDK for Python.", "Miscellaneous", ) ] # -- Options for Epub output ------------------------------------------------- # Bibliographic Dublin Core info. epub_title = project # The unique identifier of the text. This can be a ISBN number # or the project homepage. # # epub_identifier = '' # A unique identification for the text. # # epub_uid = '' # A list of files that should not be packed into the epub file. 
epub_exclude_files = ["search.html"] intersphinx_mapping = {"python": ("https://docs.python.org/3", None)} sentry-python-1.39.2/docs/index.rst000066400000000000000000000005701454744723200172600ustar00rootroot00000000000000===================================== sentry-python - Sentry SDK for Python ===================================== This is the API documentation for `Sentry's Python SDK `_. For full documentation and other resources visit the `GitHub repository `_. .. toctree:: api integrations apidocs sentry-python-1.39.2/docs/integrations.rst000066400000000000000000000003121454744723200206510ustar00rootroot00000000000000============ Integrations ============ TBD Logging ======= .. module:: sentry_sdk.integrations.logging .. autofunction:: ignore_logger .. autoclass:: EventHandler .. autoclass:: BreadcrumbHandler sentry-python-1.39.2/linter-requirements.txt000066400000000000000000000005661454744723200212530ustar00rootroot00000000000000mypy black flake8==5.0.4 # flake8 depends on pyflakes>=3.0.0 and this dropped support for Python 2 "# type:" comments types-certifi types-protobuf==4.24.0.4 # newer raises an error on mypy sentry_sdk types-redis types-setuptools pymongo # There is no separate types module. loguru # There is no separate types module. flake8-bugbear pep8-naming pre-commit # local linting sentry-python-1.39.2/mypy.ini000066400000000000000000000034761454744723200161760ustar00rootroot00000000000000[mypy] python_version = 3.11 allow_redefinition = True check_untyped_defs = True ; disallow_any_decorated = True ; disallow_any_explicit = True ; disallow_any_expr = True disallow_any_generics = True ; disallow_any_unimported = True disallow_incomplete_defs = True disallow_subclassing_any = True ; disallow_untyped_calls = True disallow_untyped_decorators = True disallow_untyped_defs = True no_implicit_optional = True strict_equality = True strict_optional = True warn_redundant_casts = True ; warn_return_any = True warn_unused_configs = True warn_unused_ignores = True ; Relaxations for code written before mypy was introduced ; ; Do not use wildcards in module paths, otherwise added modules will ; automatically have the same set of relaxed rules as the rest [mypy-django.*] ignore_missing_imports = True [mypy-pyramid.*] ignore_missing_imports = True [mypy-psycopg2.*] ignore_missing_imports = True [mypy-pytest.*] ignore_missing_imports = True [mypy-aiohttp.*] ignore_missing_imports = True [mypy-sanic.*] ignore_missing_imports = True [mypy-tornado.*] ignore_missing_imports = True [mypy-fakeredis.*] ignore_missing_imports = True [mypy-rq.*] ignore_missing_imports = True [mypy-pyspark.*] ignore_missing_imports = True [mypy-asgiref.*] ignore_missing_imports = True [mypy-executing.*] ignore_missing_imports = True [mypy-asttokens.*] ignore_missing_imports = True [mypy-pure_eval.*] ignore_missing_imports = True [mypy-blinker.*] ignore_missing_imports = True [mypy-sentry_sdk._queue] ignore_missing_imports = True disallow_untyped_defs = False [mypy-sentry_sdk._lru_cache] disallow_untyped_defs = False [mypy-celery.app.trace] ignore_missing_imports = True [mypy-flask.signals] ignore_missing_imports = True [mypy-huey.*] ignore_missing_imports = True [mypy-arq.*] ignore_missing_imports = True [mypy-grpc.*] ignore_missing_imports = True sentry-python-1.39.2/pyproject.toml000066400000000000000000000006121454744723200174000ustar00rootroot00000000000000[tool.black] # 'extend-exclude' excludes files or directories in addition to the defaults extend-exclude = ''' # A regex preceded with ^/ will apply only to 
files and directories # in the root of the project.
(
  .*_pb2.py       # exclude autogenerated Protocol Buffer files anywhere in the project
  | .*_pb2_grpc.py  # exclude autogenerated Protocol Buffer files anywhere in the project
)
'''

sentry-python-1.39.2/pytest.ini

[pytest]
DJANGO_SETTINGS_MODULE = tests.integrations.django.myapp.settings
addopts = --tb=short
markers =
    tests_internal_exceptions: Handle internal exceptions just as the SDK does, to test it. (Otherwise internal exceptions are recorded and reraised.)
    only: A temporary marker, to make pytest only run the tests with the mark, similar to jest's `it.only`. To use, run `pytest -v -m only`.
asyncio_mode = strict

[pytest-watch]
; Enable this to drop into pdb on errors
; pdb = True
verbose = True
nobeep = True

sentry-python-1.39.2/scripts/aws-attach-layer-to-lambda-function.sh

#!/usr/bin/env bash
#
# Attaches the layer `SentryPythonServerlessSDK-local-dev` to a given Lambda function.
#

set -euo pipefail

# Check for argument
if [ $# -eq 0 ]
  then
    SCRIPT_NAME=$(basename "$0")
    echo "ERROR: No argument supplied. Please give the name of a Lambda function!"
    echo ""
    echo "Usage: $SCRIPT_NAME <lambda-function-name>"
    echo ""
    exit 1
fi

FUNCTION_NAME=$1

echo "Getting ARN of newest Sentry lambda layer..."
LAYER_ARN=$(aws lambda list-layer-versions --layer-name SentryPythonServerlessSDK-local-dev --query "LayerVersions[0].LayerVersionArn" | tr -d '"')
echo "Done getting ARN of newest Sentry lambda layer $LAYER_ARN."

echo "Attaching Lambda layer to function $FUNCTION_NAME..."
echo "Warning: This removes all other layers!"
aws lambda update-function-configuration \
    --function-name "$FUNCTION_NAME" \
    --layers "$LAYER_ARN" \
    --no-cli-pager

echo "Done attaching Lambda layer to function '$FUNCTION_NAME'."

echo "All done. Have a nice day!"

sentry-python-1.39.2/scripts/aws-cleanup.sh

#!/bin/sh
#
# Helper script to clean up AWS Lambda functions created
# by the test suite (tests/integrations/aws_lambda/test_aws.py).
#
# This will delete all Lambda functions named `test_function_*`.
#

export AWS_DEFAULT_REGION="us-east-1"
export AWS_ACCESS_KEY_ID="$SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID"
export AWS_SECRET_ACCESS_KEY="$SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY"

for func in $(aws lambda list-functions --output text --query 'Functions[?starts_with(FunctionName, `test_`) == `true`].FunctionName'); do
  echo "Deleting $func"
  aws lambda delete-function --function-name "$func"
done

echo "All done! Have a nice day!"

sentry-python-1.39.2/scripts/aws-delete-lamba-layer-versions.sh

#!/usr/bin/env bash
#
# Deletes all versions of the layer specified in LAYER_NAME in one region.
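# Note: the loop below repeatedly looks up the newest remaining version of the
# layer and deletes it, only stopping after version 1 has been deleted as well.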
#

set -euo pipefail

# override default AWS region
export AWS_REGION=eu-central-1

LAYER_NAME=SentryPythonServerlessSDK-local-dev
VERSION="0"

while [[ $VERSION != "1" ]]
do
  VERSION=$(aws lambda list-layer-versions --layer-name $LAYER_NAME | jq '.LayerVersions[0].Version')
  aws lambda delete-layer-version --layer-name $LAYER_NAME --version-number $VERSION
done

sentry-python-1.39.2/scripts/aws-deploy-local-layer.sh

#!/usr/bin/env bash
#
# Builds and deploys the Sentry AWS Lambda layer (including the Sentry SDK and the Sentry Lambda Extension)
#
# The currently checked out version of the SDK in your local directory is used.
# The latest version of the Lambda Extension is fetched from the Sentry Release Registry.
#

set -euo pipefail

# Creating Lambda layer
echo "Creating Lambda layer in ./dist ..."
make aws-lambda-layer
echo "Done creating Lambda layer in ./dist"

# Deploying zipped Lambda layer to AWS
ZIP=$(ls dist | grep serverless | head -n 1)
echo "Deploying zipped Lambda layer $ZIP to AWS..."

aws lambda publish-layer-version \
    --layer-name "SentryPythonServerlessSDK-local-dev" \
    --region "eu-central-1" \
    --zip-file "fileb://dist/$ZIP" \
    --description "Local test build of SentryPythonServerlessSDK (can be deleted)" \
    --compatible-runtimes python3.7 python3.8 python3.9 python3.10 python3.11 \
    --no-cli-pager

echo "Done deploying zipped Lambda layer to AWS as 'SentryPythonServerlessSDK-local-dev'."

echo "All done. Have a nice day!"

sentry-python-1.39.2/scripts/build_aws_lambda_layer.py

import os
import shutil
import subprocess
import sys
import tempfile
from typing import TYPE_CHECKING

from sentry_sdk.consts import VERSION as SDK_VERSION

if TYPE_CHECKING:
    from typing import Optional


DIST_PATH = "dist"  # created by "make dist" that is called by "make aws-lambda-layer"
PYTHON_SITE_PACKAGES = "python"  # see https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html#configuration-layers-path


class LayerBuilder:
    def __init__(
        self,
        base_dir,  # type: str
        out_zip_filename=None,  # type: Optional[str]
    ):
        # type: (...) -> None
        self.base_dir = base_dir
        self.python_site_packages = os.path.join(self.base_dir, PYTHON_SITE_PACKAGES)
        self.out_zip_filename = (
            f"sentry-python-serverless-{SDK_VERSION}.zip"
            if out_zip_filename is None
            else out_zip_filename
        )

    def make_directories(self):
        # type: (...) -> None
        os.makedirs(self.python_site_packages)

    def install_python_packages(self):
        # type: (...) -> None
        # Install requirements for Lambda Layer (these are more limited than the SDK requirements,
        # because Lambda does not support the newest versions of some packages)
        subprocess.check_call(
            [
                sys.executable,
                "-m",
                "pip",
                "install",
                "-r",
                "aws-lambda-layer-requirements.txt",
                "--target",
                self.python_site_packages,
            ],
        )

        sentry_python_sdk = os.path.join(
            DIST_PATH,
            f"sentry_sdk-{SDK_VERSION}-py2.py3-none-any.whl",  # this is generated by "make dist" that is called by "make aws-lambda-layer"
        )
        subprocess.run(
            [
                "pip",
                "install",
                "--no-cache-dir",  # always access PyPI
                "--no-deps",  # the right dependencies have been installed in the call above
                "--quiet",
                sentry_python_sdk,
                "--target",
                self.python_site_packages,
            ],
            check=True,
        )

    def create_init_serverless_sdk_package(self):
        # type: (...) -> None
        """
        Method that creates the init_serverless_sdk pkg in the
        sentry-python-serverless zip
        """
        serverless_sdk_path = (
            f"{self.python_site_packages}/sentry_sdk/"
            f"integrations/init_serverless_sdk"
        )
        if not os.path.exists(serverless_sdk_path):
            os.makedirs(serverless_sdk_path)
        shutil.copy(
            "scripts/init_serverless_sdk.py", f"{serverless_sdk_path}/__init__.py"
        )

    def zip(self):
        # type: (...) -> None
        subprocess.run(
            [
                "zip",
                "-q",  # Quiet
                "-x",  # Exclude files
                "**/__pycache__/*",  # Files to be excluded
                "-r",  # Recurse paths
                self.out_zip_filename,  # Output filename
                PYTHON_SITE_PACKAGES,  # Files to be zipped
            ],
            cwd=self.base_dir,
            check=True,  # Raises CalledProcessError if exit status is non-zero
        )

        shutil.copy(
            os.path.join(self.base_dir, self.out_zip_filename),
            os.path.abspath(DIST_PATH),
        )


def build_packaged_zip(base_dir=None, make_dist=False, out_zip_filename=None):
    if base_dir is None:
        base_dir = tempfile.mkdtemp()

    if make_dist:
        # Same thing that is done by "make dist"
        # (which is a dependency of "make aws-lambda-layer")
        subprocess.check_call(
            [sys.executable, "setup.py", "sdist", "bdist_wheel", "-d", DIST_PATH],
        )

    layer_builder = LayerBuilder(base_dir, out_zip_filename=out_zip_filename)
    layer_builder.make_directories()
    layer_builder.install_python_packages()
    layer_builder.create_init_serverless_sdk_package()
    layer_builder.zip()

    # Just for debugging
    dist_path = os.path.abspath(DIST_PATH)
    print("Created Lambda Layer package with this information:")
    print(" - Base directory for generating package: {}".format(layer_builder.base_dir))
    print(
        " - Created Python SDK distribution (in `{}`): {}".format(dist_path, make_dist)
    )
    if not make_dist:
        print("   If 'False' we assume it was already created (by 'make dist')")
    print(" - Package zip filename: {}".format(layer_builder.out_zip_filename))
    print(" - Copied package zip to: {}".format(dist_path))


if __name__ == "__main__":
    build_packaged_zip()

sentry-python-1.39.2/scripts/bump-version.sh

#!/bin/bash
set -eux

if [ "$(uname -s)" != "Linux" ]; then
    echo "Please use the GitHub Action."
    exit 1
fi

SCRIPT_DIR="$( dirname "$0" )"
cd $SCRIPT_DIR/..

OLD_VERSION="${1}"
NEW_VERSION="${2}"

echo "Current version: $OLD_VERSION"
echo "Bumping version: $NEW_VERSION"

function replace() {
    ! grep "$2" $3
    perl -i -pe "s/$1/$2/g" $3
    grep "$2" $3  # verify that replacement was successful
}

replace "version=\"[0-9.]+\"" "version=\"$NEW_VERSION\"" ./setup.py
replace "VERSION = \"[0-9.]+\"" "VERSION = \"$NEW_VERSION\"" ./sentry_sdk/consts.py
replace "release = \"[0-9.]+\"" "release = \"$NEW_VERSION\"" ./docs/conf.py

sentry-python-1.39.2/scripts/init_serverless_sdk.py

"""
For manual instrumentation: the handler function string of an AWS Lambda
function should be added as an environment variable with the key
'SENTRY_INITIAL_HANDLER', along with the 'DSN'. Then the handler function
string should be replaced with
'sentry_sdk.integrations.init_serverless_sdk.sentry_lambda_handler'.
"""
import os
import sys
import re

import sentry_sdk
from sentry_sdk._types import TYPE_CHECKING
from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration

if TYPE_CHECKING:
    from typing import Any


# Configure Sentry SDK
sentry_sdk.init(
    dsn=os.environ["SENTRY_DSN"],
    integrations=[AwsLambdaIntegration(timeout_warning=True)],
    traces_sample_rate=float(os.environ["SENTRY_TRACES_SAMPLE_RATE"]),
)


class AWSLambdaModuleLoader:
    DIR_PATH_REGEX = r"^(.+)\/([^\/]+)$"

    def __init__(self, sentry_initial_handler):
        try:
            module_path, self.handler_name = sentry_initial_handler.rsplit(".", 1)
        except ValueError:
            raise ValueError("Incorrect AWS Handler path (not a path)")

        self.extract_and_load_lambda_function_module(module_path)

    def extract_and_load_lambda_function_module(self, module_path):
        """
        Method that extracts and loads the lambda function module from module_path
        """
        py_version = sys.version_info

        if re.match(self.DIR_PATH_REGEX, module_path):
            # With a path like -> `scheduler/scheduler/event`
            # `module_name` is `event`, and `module_file_path` is `scheduler/scheduler/event.py`
            module_name = module_path.split(os.path.sep)[-1]
            module_file_path = module_path + ".py"

            # Supported python versions are 2.7, 3.6, 3.7, 3.8
            if py_version >= (3, 5):
                import importlib.util

                spec = importlib.util.spec_from_file_location(
                    module_name, module_file_path
                )
                self.lambda_function_module = importlib.util.module_from_spec(spec)
                spec.loader.exec_module(self.lambda_function_module)
            elif py_version[0] < 3:
                import imp

                self.lambda_function_module = imp.load_source(
                    module_name, module_file_path
                )
            else:
                raise ValueError("Python version %s is not supported." % py_version)
        else:
            import importlib

            self.lambda_function_module = importlib.import_module(module_path)

    def get_lambda_handler(self):
        return getattr(self.lambda_function_module, self.handler_name)


def sentry_lambda_handler(event, context):
    # type: (Any, Any) -> Any
    """
    Handler function that invokes a Lambda handler whose path is defined in
    an environment variable as "SENTRY_INITIAL_HANDLER"
    """
    module_loader = AWSLambdaModuleLoader(os.environ["SENTRY_INITIAL_HANDLER"])
    return module_loader.get_lambda_handler()(event, context)

sentry-python-1.39.2/scripts/runtox.sh

#!/bin/bash

# Usage: sh scripts/runtox.sh py3.12
# Runs all environments with substring py3.12 and the given arguments for pytest

set -ex

if [ -n "$TOXPATH" ]; then
    true
elif which tox &> /dev/null; then
    TOXPATH=tox
else
    TOXPATH=./.venv/bin/tox
fi

excludelatest=false
for arg in "$@"
do
    if [ "$arg" = "--exclude-latest" ]; then
        excludelatest=true
        shift
        break
    fi
done

searchstring="$1"

export TOX_PARALLEL_NO_SPINNER=1

if $excludelatest; then
    echo "Excluding latest"
    ENV="$($TOXPATH -l | grep -- "$searchstring" | grep -v -- '-latest' | tr $'\n' ',')"
else
    echo "Including latest"
    ENV="$($TOXPATH -l | grep -- "$searchstring" | tr $'\n' ',')"
fi

if [ -z "${ENV}" ]; then
    echo "No targets found. Skipping."
    exit 0
fi

exec $TOXPATH -vv -e "$ENV" -- "${@:2}"

sentry-python-1.39.2/scripts/split-tox-gh-actions/split-tox-gh-actions.py

"""Split Tox to GitHub Actions

This is a small script to split a tox.ini config file into multiple GitHub
Actions configuration files. This way each group of frameworks defined in
tox.ini will get its own GitHub Actions configuration file, which allows them
to be run in parallel in GitHub Actions.

This will generate/update several configuration files that need to be
committed to Git afterwards. Whenever tox.ini is changed, this script needs
to be run.

Usage: python split-tox-gh-actions.py [--fail-on-changes]

If the parameter `--fail-on-changes` is set, the script will raise a
RuntimeError in case the yaml files have been changed by the script's
execution. This is used in CI to check if the yaml files represent the
current tox.ini file. (And if not, the CI run fails.)
"""

import configparser
import hashlib
import sys
from collections import defaultdict
from functools import reduce
from glob import glob
from pathlib import Path

from jinja2 import Environment, FileSystemLoader

OUT_DIR = Path(__file__).resolve().parent.parent.parent / ".github" / "workflows"
TOX_FILE = Path(__file__).resolve().parent.parent.parent / "tox.ini"
TEMPLATE_DIR = Path(__file__).resolve().parent / "templates"

FRAMEWORKS_NEEDING_POSTGRES = {
    "django",
    "asyncpg",
}

FRAMEWORKS_NEEDING_CLICKHOUSE = {
    "clickhouse_driver",
}

FRAMEWORKS_NEEDING_AWS = {
    "aws_lambda",
}

FRAMEWORKS_NEEDING_GITHUB_SECRETS = {
    "aws_lambda",
}

# Frameworks grouped here will be tested together to not hog all GitHub runners.
# If you add or remove a group, make sure to git rm the generated YAML file as
# well.
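# Each key below is a human-readable group name (it becomes part of the name of
# one generated workflow file); each value lists the tox framework names that
# are tested together in that workflow.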
GROUPS = { "Common": [ "common", ], "AWS Lambda": [ # this is separate from Cloud Computing because only this one test suite # needs to run with access to GitHub secrets "aws_lambda", ], "Cloud Computing": [ "boto3", "chalice", "cloud_resource_context", "gcp", ], "Data Processing": [ "arq", "beam", "celery", "huey", "rq", ], "Databases": [ "asyncpg", "clickhouse_driver", "pymongo", "sqlalchemy", ], "GraphQL": [ "ariadne", "gql", "graphene", "strawberry", ], "Networking": [ "gevent", "grpc", "httpx", "requests", ], "Web Frameworks 1": [ "django", "fastapi", "flask", "starlette", ], "Web Frameworks 2": [ "aiohttp", "asgi", "bottle", "falcon", "pyramid", "quart", "redis", "rediscluster", "sanic", "starlite", "tornado", ], "Miscellaneous": [ "loguru", "opentelemetry", "pure_eval", "trytond", ], } ENV = Environment( loader=FileSystemLoader(TEMPLATE_DIR), ) def main(fail_on_changes): """Create one CI workflow for each framework defined in tox.ini.""" if fail_on_changes: old_hash = get_files_hash() print("Parsing tox.ini...") py_versions_pinned, py_versions_latest = parse_tox() if fail_on_changes: print("Checking if all frameworks belong in a group...") missing_frameworks = find_frameworks_missing_from_groups( py_versions_pinned, py_versions_latest ) if missing_frameworks: raise RuntimeError( "Please add the following frameworks to the corresponding group " "in `GROUPS` in `scripts/split-tox-gh-actions/split-tox-gh-actions.py: " + ", ".join(missing_frameworks) ) print("Rendering templates...") for group, frameworks in GROUPS.items(): contents = render_template( group, frameworks, py_versions_pinned, py_versions_latest ) filename = write_file(contents, group) print(f"Created {filename}") if fail_on_changes: new_hash = get_files_hash() if old_hash != new_hash: raise RuntimeError( "The yaml configuration files have changed. This means that tox.ini has changed " "but the changes have not been propagated to the GitHub actions config files. " "Please run `python scripts/split-tox-gh-actions/split-tox-gh-actions.py` " "locally and commit the changes of the yaml configuration files to continue. " ) print("All done. 
Have a nice day!") def parse_tox(): config = configparser.ConfigParser() config.read(TOX_FILE) lines = [ line for line in config["tox"]["envlist"].split("\n") if line.strip() and not line.strip().startswith("#") ] py_versions_pinned = defaultdict(set) py_versions_latest = defaultdict(set) for line in lines: # normalize lines line = line.strip().lower() try: # parse tox environment definition try: (raw_python_versions, framework, framework_versions) = line.split("-") except ValueError: (raw_python_versions, framework) = line.split("-") framework_versions = [] # collect python versions to test the framework in raw_python_versions = set( raw_python_versions.replace("{", "").replace("}", "").split(",") ) if "latest" in framework_versions: py_versions_latest[framework] |= raw_python_versions else: py_versions_pinned[framework] |= raw_python_versions except ValueError: print(f"ERROR reading line {line}") py_versions_pinned = _normalize_py_versions(py_versions_pinned) py_versions_latest = _normalize_py_versions(py_versions_latest) return py_versions_pinned, py_versions_latest def find_frameworks_missing_from_groups(py_versions_pinned, py_versions_latest): frameworks_in_a_group = _union(GROUPS.values()) all_frameworks = set(py_versions_pinned.keys()) | set(py_versions_latest.keys()) return all_frameworks - frameworks_in_a_group def _normalize_py_versions(py_versions): def replace_and_sort(versions): return sorted( [py.replace("py", "") for py in versions], key=lambda v: tuple(map(int, v.split("."))), ) if isinstance(py_versions, dict): normalized = defaultdict(set) normalized |= { framework: replace_and_sort(versions) for framework, versions in py_versions.items() } elif isinstance(py_versions, set): normalized = replace_and_sort(py_versions) return normalized def get_files_hash(): """Calculate a hash of all the yaml configuration files""" hasher = hashlib.md5() path_pattern = (OUT_DIR / "test-integration-*.yml").as_posix() for file in glob(path_pattern): with open(file, "rb") as f: buf = f.read() hasher.update(buf) return hasher.hexdigest() def _union(seq): return reduce(lambda x, y: set(x) | set(y), seq) def render_template(group, frameworks, py_versions_pinned, py_versions_latest): template = ENV.get_template("base.jinja") categories = set() py_versions = defaultdict(set) for framework in frameworks: if py_versions_pinned[framework]: categories.add("pinned") py_versions["pinned"] |= set(py_versions_pinned[framework]) if py_versions_latest[framework]: categories.add("latest") py_versions["latest"] |= set(py_versions_latest[framework]) if "2.7" in py_versions_pinned[framework]: categories.add("py27") py_versions["pinned"].discard("2.7") py_versions["latest"].discard("2.7") context = { "group": group, "frameworks": frameworks, "categories": sorted(categories), "needs_aws_credentials": bool(set(frameworks) & FRAMEWORKS_NEEDING_AWS), "needs_clickhouse": bool(set(frameworks) & FRAMEWORKS_NEEDING_CLICKHOUSE), "needs_postgres": bool(set(frameworks) & FRAMEWORKS_NEEDING_POSTGRES), "needs_github_secrets": bool( set(frameworks) & FRAMEWORKS_NEEDING_GITHUB_SECRETS ), "py_versions": { category: [f'"{version}"' for version in _normalize_py_versions(versions)] for category, versions in py_versions.items() }, } rendered = template.render(context) rendered = postprocess_template(rendered) return rendered def postprocess_template(rendered): return "\n".join([line for line in rendered.split("\n") if line.strip()]) + "\n" def write_file(contents, group): group = group.lower().replace(" ", "-") outfile = OUT_DIR / 
f"test-integrations-{group}.yml" with open(outfile, "w") as file: file.write(contents) return outfile if __name__ == "__main__": fail_on_changes = len(sys.argv) == 2 and sys.argv[1] == "--fail-on-changes" main(fail_on_changes) sentry-python-1.39.2/scripts/split-tox-gh-actions/templates/000077500000000000000000000000001454744723200241475ustar00rootroot00000000000000sentry-python-1.39.2/scripts/split-tox-gh-actions/templates/base.jinja000066400000000000000000000031271454744723200261010ustar00rootroot00000000000000{% with lowercase_group=group | replace(" ", "_") | lower %} name: Test {{ group }} on: push: branches: - master - release/** - sentry-sdk-2.0 {% if needs_github_secrets %} # XXX: We are using `pull_request_target` instead of `pull_request` because we want # this to run on forks with access to the secrets necessary to run the test suite. # Prefer to use `pull_request` when possible. pull_request_target: types: [labeled, opened, reopened, synchronize] {% else %} pull_request: {% endif %} # Cancel in progress workflows on pull_requests. # https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value concurrency: group: {% raw %}${{ github.workflow }}-${{ github.head_ref || github.run_id }}{% endraw %} cancel-in-progress: true permissions: contents: read {% if needs_github_secrets %} # `write` is needed to remove the `Trigger: tests using secrets` label pull-requests: write {% endif %} env: {% if needs_aws_credentials %} {% raw %} SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID: ${{ secrets.SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID }} SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY: ${{ secrets.SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY }} {% endraw %} {% endif %} BUILD_CACHE_KEY: {% raw %}${{ github.sha }}{% endraw %} CACHED_BUILD_PATHS: | {% raw %}${{ github.workspace }}/dist-serverless{% endraw %} jobs: {% if needs_github_secrets %} {% include "check_permissions.jinja" %} {% endif %} {% for category in categories %} {% include "test_group.jinja" %} {% endfor %} {% include "check_required.jinja" %} {% endwith %} sentry-python-1.39.2/scripts/split-tox-gh-actions/templates/check_permissions.jinja000066400000000000000000000021241454744723200306730ustar00rootroot00000000000000 check-permissions: name: permissions check runs-on: ubuntu-20.04 steps: - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # v3.1.0 with: persist-credentials: false - name: Check permissions on PR if: github.event_name == 'pull_request_target' run: | {% raw %} python3 -uS .github/workflows/scripts/trigger_tests_on_label.py \ --repo-id ${{ github.event.repository.id }} \ --pr ${{ github.event.number }} \ --event ${{ github.event.action }} \ --username "$ARG_USERNAME" \ --label-names "$ARG_LABEL_NAMES" {% endraw %} env: {% raw %} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # these can contain special characters ARG_USERNAME: ${{ github.event.pull_request.user.login }} ARG_LABEL_NAMES: ${{ toJSON(github.event.pull_request.labels.*.name) }} {% endraw %} - name: Check permissions on repo branch if: github.event_name == 'push' run: true sentry-python-1.39.2/scripts/split-tox-gh-actions/templates/check_required.jinja000066400000000000000000000021151454744723200301400ustar00rootroot00000000000000 check_required_tests: name: All {{ group }} tests passed {% if "pinned" in categories and "py27" in categories %} needs: [test-{{ group | replace(" ", "_") | lower }}-pinned, test-{{ group | replace(" ", "_") | lower }}-py27] {% elif "pinned" in categories %} needs: test-{{ group | replace(" ", "_") | lower 
}}-pinned {% endif %} # Always run this, even if a dependent job failed if: always() runs-on: ubuntu-20.04 steps: - name: Check for failures if: contains(needs.test-{{ lowercase_group }}-pinned.result, 'failure') || contains(needs.test-{{ lowercase_group }}-pinned.result, 'skipped') run: | echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1 {% if "py27" in categories %} - name: Check for 2.7 failures if: contains(needs.test-{{ lowercase_group }}-py27.result, 'failure') || contains(needs.test-{{ lowercase_group }}-py27.result, 'skipped') run: | echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1 {% endif %} sentry-python-1.39.2/scripts/split-tox-gh-actions/templates/test_group.jinja000066400000000000000000000075721454744723200273720ustar00rootroot00000000000000 test-{{ lowercase_group }}-{{ category }}: name: {{ group }} ({{ category }}) timeout-minutes: 30 {% if needs_github_secrets %} needs: check-permissions {% endif %} {% if category == "py27" %} runs-on: ubuntu-20.04 container: python:2.7 {% else %} runs-on: {% raw %}${{ matrix.os }}{% endraw %} strategy: fail-fast: false matrix: python-version: [{{ py_versions.get(category)|join(",") }}] # python3.6 reached EOL and is no longer being supported on # new versions of hosted runners on Github Actions # ubuntu-20.04 is the last version that supported python3.6 # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-20.04] {% endif %} {% if needs_postgres %} services: postgres: image: postgres env: POSTGRES_PASSWORD: sentry # Set health checks to wait until postgres has started options: >- --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 # Maps tcp port 5432 on service container to the host ports: - 5432:5432 env: SENTRY_PYTHON_TEST_POSTGRES_USER: postgres SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test SENTRY_PYTHON_TEST_POSTGRES_HOST: {% if category == "py27" %}postgres{% else %}localhost{% endif %} {% endif %} steps: - uses: actions/checkout@v4 {% if needs_github_secrets %} {% raw %} with: ref: ${{ github.event.pull_request.head.sha || github.ref }} {% endraw %} {% endif %} {% if category != "py27" %} - uses: actions/setup-python@v4 with: python-version: {% raw %}${{ matrix.python-version }}{% endraw %} {% endif %} {% if needs_clickhouse %} - uses: getsentry/action-clickhouse-in-ci@v1 {% endif %} - name: Setup Test Env run: | pip install coverage "tox>=3,<4" {% if needs_postgres %} {% if category == "py27" %} psql postgresql://postgres:sentry@postgres:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true psql postgresql://postgres:sentry@postgres:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true {% else %} psql postgresql://postgres:sentry@localhost:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true psql postgresql://postgres:sentry@localhost:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true {% endif %} {% endif %} - name: Erase coverage run: | coverage erase {% for framework in frameworks %} - name: Test {{ framework }} {{ category }} run: | set -x # print commands that are executed {% if category == "py27" %} ./scripts/runtox.sh --exclude-latest "py2.7-{{ framework }}" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch {% elif category == "pinned" %} ./scripts/runtox.sh 
--exclude-latest "{% raw %}py${{ matrix.python-version }}{% endraw %}-{{ framework }}" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch {% elif category == "latest" %} ./scripts/runtox.sh "{% raw %}py${{ matrix.python-version }}{% endraw %}-{{ framework }}-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch {% endif %} {% endfor %} - name: Generate coverage XML run: | coverage combine .coverage* coverage xml -i - uses: codecov/codecov-action@v3 with: token: {% raw %}${{ secrets.CODECOV_TOKEN }}{% endraw %} files: coverage.xml sentry-python-1.39.2/sentry_sdk/000077500000000000000000000000001454744723200166525ustar00rootroot00000000000000sentry-python-1.39.2/sentry_sdk/__init__.py000066400000000000000000000020551454744723200207650ustar00rootroot00000000000000from sentry_sdk.hub import Hub, init from sentry_sdk.scope import Scope from sentry_sdk.transport import Transport, HttpTransport from sentry_sdk.client import Client from sentry_sdk.api import * # noqa from sentry_sdk.consts import VERSION # noqa from sentry_sdk.crons import monitor # noqa from sentry_sdk.tracing import trace # noqa __all__ = [ # noqa "Hub", "Scope", "Client", "Transport", "HttpTransport", "init", "integrations", # From sentry_sdk.api "capture_event", "capture_message", "capture_exception", "add_breadcrumb", "configure_scope", "push_scope", "flush", "last_event_id", "start_span", "start_transaction", "set_tag", "set_context", "set_extra", "set_user", "set_level", "set_measurement", "get_current_span", "get_traceparent", "get_baggage", "continue_trace", "trace", ] # Initialize the debug support after everything is loaded from sentry_sdk.debug import init_debug_support init_debug_support() del init_debug_support sentry-python-1.39.2/sentry_sdk/_compat.py000066400000000000000000000113731454744723200206530ustar00rootroot00000000000000import sys import contextlib from datetime import datetime from functools import wraps from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Optional from typing import Tuple from typing import Any from typing import Type from typing import TypeVar from typing import Callable T = TypeVar("T") PY2 = sys.version_info[0] == 2 PY33 = sys.version_info[0] == 3 and sys.version_info[1] >= 3 PY37 = sys.version_info[0] == 3 and sys.version_info[1] >= 7 PY310 = sys.version_info[0] == 3 and sys.version_info[1] >= 10 PY311 = sys.version_info[0] == 3 and sys.version_info[1] >= 11 if PY2: import urlparse text_type = unicode # noqa string_types = (str, text_type) number_types = (int, long, float) # noqa int_types = (int, long) # noqa iteritems = lambda x: x.iteritems() # noqa: B301 binary_sequence_types = (bytearray, memoryview) def datetime_utcnow(): return datetime.utcnow() def utc_from_timestamp(timestamp): return datetime.utcfromtimestamp(timestamp) def implements_str(cls): # type: (T) -> T cls.__unicode__ = cls.__str__ cls.__str__ = lambda x: unicode(x).encode("utf-8") # noqa return cls # The line below is written as an "exec" because it triggers a syntax error in Python 3 exec("def reraise(tp, value, tb=None):\n raise tp, value, tb") def contextmanager(func): # type: (Callable) -> Callable """ Decorator which creates a contextmanager that can also be used as a decorator, similar to how the built-in contextlib.contextmanager function works in Python 3.2+. """ contextmanager_func = contextlib.contextmanager(func) @wraps(func) class DecoratorContextManager: def __init__(self, *args, **kwargs): # type: (...) 
-> None self.the_contextmanager = contextmanager_func(*args, **kwargs) def __enter__(self): # type: () -> None self.the_contextmanager.__enter__() def __exit__(self, *args, **kwargs): # type: (...) -> None self.the_contextmanager.__exit__(*args, **kwargs) def __call__(self, decorated_func): # type: (Callable) -> Callable[...] @wraps(decorated_func) def when_called(*args, **kwargs): # type: (...) -> Any with self.the_contextmanager: return_val = decorated_func(*args, **kwargs) return return_val return when_called return DecoratorContextManager else: from datetime import timezone import urllib.parse as urlparse # noqa text_type = str string_types = (text_type,) # type: Tuple[type] number_types = (int, float) # type: Tuple[type, type] int_types = (int,) iteritems = lambda x: x.items() binary_sequence_types = (bytes, bytearray, memoryview) def datetime_utcnow(): # type: () -> datetime return datetime.now(timezone.utc) def utc_from_timestamp(timestamp): # type: (float) -> datetime return datetime.fromtimestamp(timestamp, timezone.utc) def implements_str(x): # type: (T) -> T return x def reraise(tp, value, tb=None): # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[Any]) -> None assert value is not None if value.__traceback__ is not tb: raise value.with_traceback(tb) raise value # contextlib.contextmanager already can be used as decorator in Python 3.2+ contextmanager = contextlib.contextmanager def with_metaclass(meta, *bases): # type: (Any, *Any) -> Any class MetaClass(type): def __new__(metacls, name, this_bases, d): # type: (Any, Any, Any, Any) -> Any return meta(name, bases, d) return type.__new__(MetaClass, "temporary_class", (), {}) def check_thread_support(): # type: () -> None try: from uwsgi import opt # type: ignore except ImportError: return # When `threads` is passed in as a uwsgi option, # `enable-threads` is implied on. if "threads" in opt: return # put here because of circular import from sentry_sdk.consts import FALSE_VALUES if str(opt.get("enable-threads", "0")).lower() in FALSE_VALUES: from warnings import warn warn( Warning( "We detected the use of uwsgi with disabled threads. " "This will cause issues with the transport you are " "trying to use. Please enable threading for uwsgi. " '(Add the "enable-threads" flag).' ) ) sentry-python-1.39.2/sentry_sdk/_functools.py000066400000000000000000000115351454744723200214040ustar00rootroot00000000000000""" A backport of Python 3 functools to Python 2/3. The only important change we rely upon is that `update_wrapper` handles AttributeError gracefully. Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation; All Rights Reserved PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 -------------------------------------------- 1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and the Individual or Organization ("Licensee") accessing and otherwise using this software ("Python") in source or binary form and its associated documentation. 2. 
Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of copyright, i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation; All Rights Reserved" are retained in Python alone or in any derivative version prepared by Licensee. 3. In the event Licensee prepares a derivative work that is based on or incorporates Python or any part thereof, and wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in any such work a brief summary of the changes made to Python. 4. PSF is making Python available to Licensee on an "AS IS" basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. 6. This License Agreement will automatically terminate upon a material breach of its terms and conditions. 7. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint venture between PSF and Licensee. This License Agreement does not grant permission to use PSF trademarks or trade name in a trademark sense to endorse or promote products or services of Licensee, or any third party. 8. By copying, installing or otherwise using Python, Licensee agrees to be bound by the terms and conditions of this License Agreement. 
""" from functools import partial from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Callable WRAPPER_ASSIGNMENTS = ( "__module__", "__name__", "__qualname__", "__doc__", "__annotations__", ) WRAPPER_UPDATES = ("__dict__",) def update_wrapper( wrapper, wrapped, assigned=WRAPPER_ASSIGNMENTS, updated=WRAPPER_UPDATES ): # type: (Any, Any, Any, Any) -> Any """Update a wrapper function to look like the wrapped function wrapper is the function to be updated wrapped is the original function assigned is a tuple naming the attributes assigned directly from the wrapped function to the wrapper function (defaults to functools.WRAPPER_ASSIGNMENTS) updated is a tuple naming the attributes of the wrapper that are updated with the corresponding attribute from the wrapped function (defaults to functools.WRAPPER_UPDATES) """ for attr in assigned: try: value = getattr(wrapped, attr) except AttributeError: pass else: setattr(wrapper, attr, value) for attr in updated: getattr(wrapper, attr).update(getattr(wrapped, attr, {})) # Issue #17482: set __wrapped__ last so we don't inadvertently copy it # from the wrapped function when updating __dict__ wrapper.__wrapped__ = wrapped # Return the wrapper so this can be used as a decorator via partial() return wrapper def wraps(wrapped, assigned=WRAPPER_ASSIGNMENTS, updated=WRAPPER_UPDATES): # type: (Callable[..., Any], Any, Any) -> Callable[[Callable[..., Any]], Callable[..., Any]] """Decorator factory to apply update_wrapper() to a wrapper function Returns a decorator that invokes update_wrapper() with the decorated function as the wrapper argument and the arguments to wraps() as the remaining arguments. Default arguments are as for update_wrapper(). This is a convenience function to simplify applying partial() to update_wrapper(). """ return partial(update_wrapper, wrapped=wrapped, assigned=assigned, updated=updated) sentry-python-1.39.2/sentry_sdk/_lru_cache.py000066400000000000000000000124161454744723200213140ustar00rootroot00000000000000""" A fork of Python 3.6's stdlib lru_cache (found in Python's 'cpython/Lib/functools.py') adapted into a data structure for single threaded uses. https://github.com/python/cpython/blob/v3.6.12/Lib/functools.py Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation; All Rights Reserved PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 -------------------------------------------- 1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and the Individual or Organization ("Licensee") accessing and otherwise using this software ("Python") in source or binary form and its associated documentation. 2. Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of copyright, i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation; All Rights Reserved" are retained in Python alone or in any derivative version prepared by Licensee. 3. 
In the event Licensee prepares a derivative work that is based on or
incorporates Python or any part thereof, and wants to make the
derivative work available to others as provided herein, then Licensee
hereby agrees to include in any such work a brief summary of the
changes made to Python.

4. PSF is making Python available to Licensee on an "AS IS" basis.
PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY
WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY
REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY
PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT INFRINGE ANY
THIRD PARTY RIGHTS.

5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A
RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, OR ANY
DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.

6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.

7. Nothing in this License Agreement shall be deemed to create any
relationship of agency, partnership, or joint venture between PSF and
Licensee. This License Agreement does not grant permission to use PSF
trademarks or trade name in a trademark sense to endorse or promote
products or services of Licensee, or any third party.

8. By copying, installing or otherwise using Python, Licensee agrees
to be bound by the terms and conditions of this License Agreement.
"""

SENTINEL = object()


# aliases to the entries in a node
PREV = 0
NEXT = 1
KEY = 2
VALUE = 3


class LRUCache(object):
    def __init__(self, max_size):
        assert max_size > 0

        self.max_size = max_size
        self.full = False

        self.cache = {}

        # root of the circularly linked list to keep track of
        # the least recently used key
        self.root = []  # type: ignore
        # the node looks like [PREV, NEXT, KEY, VALUE]
        self.root[:] = [self.root, self.root, None, None]

        self.hits = self.misses = 0

    def set(self, key, value):
        link = self.cache.get(key, SENTINEL)

        if link is not SENTINEL:
            # have to move the node to the front of the linked list
            link_prev, link_next, _key, _value = link

            # first remove the node from the linked list
            link_prev[NEXT] = link_next
            link_next[PREV] = link_prev

            # insert the node between the root and the last
            last = self.root[PREV]
            last[NEXT] = self.root[PREV] = link
            link[PREV] = last
            link[NEXT] = self.root

            # update the value
            link[VALUE] = value

        elif self.full:
            # reuse the root node, so update its key/value
            old_root = self.root
            old_root[KEY] = key
            old_root[VALUE] = value

            self.root = old_root[NEXT]
            old_key = self.root[KEY]

            self.root[KEY] = self.root[VALUE] = None
            del self.cache[old_key]

            self.cache[key] = old_root

        else:
            # insert new node after last
            last = self.root[PREV]
            link = [last, self.root, key, value]
            last[NEXT] = self.root[PREV] = self.cache[key] = link
            self.full = len(self.cache) >= self.max_size

    def get(self, key, default=None):
        link = self.cache.get(key, SENTINEL)

        if link is SENTINEL:
            self.misses += 1
            return default

        # have to move the node to the front of the linked list
        link_prev, link_next, _key, _value = link

        # first remove the node from the linked list
        link_prev[NEXT] = link_next
        link_next[PREV] = link_prev

        # insert the node between the root and the last
        last = self.root[PREV]
        last[NEXT] = self.root[PREV] = link
        link[PREV] = last
        link[NEXT] = self.root

        self.hits += 1

        return link[VALUE]
sentry-python-1.39.2/sentry_sdk/_queue.py000066400000000000000000000260031454744723200205100ustar00rootroot00000000000000"""
A fork of Python 3.6's stdlib
queue (found in Pythons 'cpython/Lib/queue.py') with Lock swapped out for RLock to avoid a deadlock while garbage collecting. https://github.com/python/cpython/blob/v3.6.12/Lib/queue.py See also https://codewithoutrules.com/2017/08/16/concurrency-python/ https://bugs.python.org/issue14976 https://github.com/sqlalchemy/sqlalchemy/blob/4eb747b61f0c1b1c25bdee3856d7195d10a0c227/lib/sqlalchemy/queue.py#L1 We also vendor the code to evade eventlet's broken monkeypatching, see https://github.com/getsentry/sentry-python/pull/484 Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation; All Rights Reserved PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 -------------------------------------------- 1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and the Individual or Organization ("Licensee") accessing and otherwise using this software ("Python") in source or binary form and its associated documentation. 2. Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of copyright, i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation; All Rights Reserved" are retained in Python alone or in any derivative version prepared by Licensee. 3. In the event Licensee prepares a derivative work that is based on or incorporates Python or any part thereof, and wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in any such work a brief summary of the changes made to Python. 4. PSF is making Python available to Licensee on an "AS IS" basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. 6. This License Agreement will automatically terminate upon a material breach of its terms and conditions. 7. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint venture between PSF and Licensee. This License Agreement does not grant permission to use PSF trademarks or trade name in a trademark sense to endorse or promote products or services of Licensee, or any third party. 8. By copying, installing or otherwise using Python, Licensee agrees to be bound by the terms and conditions of this License Agreement. """ import threading from collections import deque from time import time from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Any __all__ = ["EmptyError", "FullError", "Queue"] class EmptyError(Exception): "Exception raised by Queue.get(block=0)/get_nowait()." pass class FullError(Exception): "Exception raised by Queue.put(block=0)/put_nowait()." 
pass class Queue(object): """Create a queue object with a given maximum size. If maxsize is <= 0, the queue size is infinite. """ def __init__(self, maxsize=0): self.maxsize = maxsize self._init(maxsize) # mutex must be held whenever the queue is mutating. All methods # that acquire mutex must release it before returning. mutex # is shared between the three conditions, so acquiring and # releasing the conditions also acquires and releases mutex. self.mutex = threading.RLock() # Notify not_empty whenever an item is added to the queue; a # thread waiting to get is notified then. self.not_empty = threading.Condition(self.mutex) # Notify not_full whenever an item is removed from the queue; # a thread waiting to put is notified then. self.not_full = threading.Condition(self.mutex) # Notify all_tasks_done whenever the number of unfinished tasks # drops to zero; thread waiting to join() is notified to resume self.all_tasks_done = threading.Condition(self.mutex) self.unfinished_tasks = 0 def task_done(self): """Indicate that a formerly enqueued task is complete. Used by Queue consumer threads. For each get() used to fetch a task, a subsequent call to task_done() tells the queue that the processing on the task is complete. If a join() is currently blocking, it will resume when all items have been processed (meaning that a task_done() call was received for every item that had been put() into the queue). Raises a ValueError if called more times than there were items placed in the queue. """ with self.all_tasks_done: unfinished = self.unfinished_tasks - 1 if unfinished <= 0: if unfinished < 0: raise ValueError("task_done() called too many times") self.all_tasks_done.notify_all() self.unfinished_tasks = unfinished def join(self): """Blocks until all items in the Queue have been gotten and processed. The count of unfinished tasks goes up whenever an item is added to the queue. The count goes down whenever a consumer thread calls task_done() to indicate the item was retrieved and all work on it is complete. When the count of unfinished tasks drops to zero, join() unblocks. """ with self.all_tasks_done: while self.unfinished_tasks: self.all_tasks_done.wait() def qsize(self): """Return the approximate size of the queue (not reliable!).""" with self.mutex: return self._qsize() def empty(self): """Return True if the queue is empty, False otherwise (not reliable!). This method is likely to be removed at some point. Use qsize() == 0 as a direct substitute, but be aware that either approach risks a race condition where a queue can grow before the result of empty() or qsize() can be used. To create code that needs to wait for all queued tasks to be completed, the preferred technique is to use the join() method. """ with self.mutex: return not self._qsize() def full(self): """Return True if the queue is full, False otherwise (not reliable!). This method is likely to be removed at some point. Use qsize() >= n as a direct substitute, but be aware that either approach risks a race condition where a queue can shrink before the result of full() or qsize() can be used. """ with self.mutex: return 0 < self.maxsize <= self._qsize() def put(self, item, block=True, timeout=None): """Put an item into the queue. If optional args 'block' is true and 'timeout' is None (the default), block if necessary until a free slot is available. If 'timeout' is a non-negative number, it blocks at most 'timeout' seconds and raises the FullError exception if no free slot was available within that time. 
Otherwise ('block' is false), put an item on the queue if a free slot is immediately available, else raise the FullError exception ('timeout' is ignored in that case). """ with self.not_full: if self.maxsize > 0: if not block: if self._qsize() >= self.maxsize: raise FullError() elif timeout is None: while self._qsize() >= self.maxsize: self.not_full.wait() elif timeout < 0: raise ValueError("'timeout' must be a non-negative number") else: endtime = time() + timeout while self._qsize() >= self.maxsize: remaining = endtime - time() if remaining <= 0.0: raise FullError() self.not_full.wait(remaining) self._put(item) self.unfinished_tasks += 1 self.not_empty.notify() def get(self, block=True, timeout=None): """Remove and return an item from the queue. If optional args 'block' is true and 'timeout' is None (the default), block if necessary until an item is available. If 'timeout' is a non-negative number, it blocks at most 'timeout' seconds and raises the EmptyError exception if no item was available within that time. Otherwise ('block' is false), return an item if one is immediately available, else raise the EmptyError exception ('timeout' is ignored in that case). """ with self.not_empty: if not block: if not self._qsize(): raise EmptyError() elif timeout is None: while not self._qsize(): self.not_empty.wait() elif timeout < 0: raise ValueError("'timeout' must be a non-negative number") else: endtime = time() + timeout while not self._qsize(): remaining = endtime - time() if remaining <= 0.0: raise EmptyError() self.not_empty.wait(remaining) item = self._get() self.not_full.notify() return item def put_nowait(self, item): """Put an item into the queue without blocking. Only enqueue the item if a free slot is immediately available. Otherwise raise the FullError exception. """ return self.put(item, block=False) def get_nowait(self): """Remove and return an item from the queue without blocking. Only get an item if one is immediately available. Otherwise raise the EmptyError exception. """ return self.get(block=False) # Override these methods to implement other queue organizations # (e.g. stack or priority queue). # These will only be called with appropriate locks held # Initialize the queue representation def _init(self, maxsize): self.queue = deque() # type: Any def _qsize(self): return len(self.queue) # Put a new item in the queue def _put(self, item): self.queue.append(item) # Get an item from the queue def _get(self): return self.queue.popleft() sentry-python-1.39.2/sentry_sdk/_types.py000066400000000000000000000061561454744723200205370ustar00rootroot00000000000000try: from typing import TYPE_CHECKING except ImportError: TYPE_CHECKING = False # Re-exported for compat, since code out there in the wild might use this variable. 
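#
# A minimal sketch of the historical pattern this alias preserves
# (hypothetical downstream code, not part of the SDK):
#
#     from sentry_sdk._types import MYPY
#
#     if MYPY:
#         from typing import Optional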
MYPY = TYPE_CHECKING if TYPE_CHECKING: from types import TracebackType from typing import Any from typing import Callable from typing import Dict from typing import List from typing import Mapping from typing import Optional from typing import Tuple from typing import Type from typing import Union from typing_extensions import Literal ExcInfo = Tuple[ Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType] ] Event = Dict[str, Any] Hint = Dict[str, Any] Breadcrumb = Dict[str, Any] BreadcrumbHint = Dict[str, Any] SamplingContext = Dict[str, Any] EventProcessor = Callable[[Event, Hint], Optional[Event]] ErrorProcessor = Callable[[Event, ExcInfo], Optional[Event]] BreadcrumbProcessor = Callable[[Breadcrumb, BreadcrumbHint], Optional[Breadcrumb]] TransactionProcessor = Callable[[Event, Hint], Optional[Event]] TracesSampler = Callable[[SamplingContext], Union[float, int, bool]] # https://github.com/python/mypy/issues/5710 NotImplementedType = Any EventDataCategory = Literal[ "default", "error", "crash", "transaction", "security", "attachment", "session", "internal", "profile", "statsd", "monitor", ] SessionStatus = Literal["ok", "exited", "crashed", "abnormal"] EndpointType = Literal["store", "envelope"] DurationUnit = Literal[ "nanosecond", "microsecond", "millisecond", "second", "minute", "hour", "day", "week", ] InformationUnit = Literal[ "bit", "byte", "kilobyte", "kibibyte", "megabyte", "mebibyte", "gigabyte", "gibibyte", "terabyte", "tebibyte", "petabyte", "pebibyte", "exabyte", "exbibyte", ] FractionUnit = Literal["ratio", "percent"] MeasurementUnit = Union[DurationUnit, InformationUnit, FractionUnit, str] ProfilerMode = Literal["sleep", "thread", "gevent", "unknown"] # Type of the metric. MetricType = Literal["d", "s", "g", "c"] # Value of the metric. MetricValue = Union[int, float, str] # Internal representation of tags as a tuple of tuples (this is done in order to allow for the same key to exist # multiple times). MetricTagsInternal = Tuple[Tuple[str, str], ...] # External representation of tags as a dictionary. MetricTagValue = Union[ str, int, float, None, List[Union[int, str, float, None]], Tuple[Union[int, str, float, None], ...], ] MetricTags = Mapping[str, MetricTagValue] # Value inside the generator for the metric value. FlushedMetricValue = Union[int, float] BucketKey = Tuple[MetricType, str, MeasurementUnit, MetricTagsInternal] MetricMetaKey = Tuple[MetricType, str, MeasurementUnit] sentry-python-1.39.2/sentry_sdk/_werkzeug.py000066400000000000000000000073161454744723200212350ustar00rootroot00000000000000""" Copyright (c) 2007 by the Pallets team. Some rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 
THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ from sentry_sdk._compat import iteritems from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Dict from typing import Iterator from typing import Tuple # # `get_headers` comes from `werkzeug.datastructures.EnvironHeaders` # https://github.com/pallets/werkzeug/blob/0.14.1/werkzeug/datastructures.py#L1361 # # We need this function because Django does not give us a "pure" http header # dict. So we might as well use it for all WSGI integrations. # def _get_headers(environ): # type: (Dict[str, str]) -> Iterator[Tuple[str, str]] """ Returns only proper HTTP headers. """ for key, value in iteritems(environ): key = str(key) if key.startswith("HTTP_") and key not in ( "HTTP_CONTENT_TYPE", "HTTP_CONTENT_LENGTH", ): yield key[5:].replace("_", "-").title(), value elif key in ("CONTENT_TYPE", "CONTENT_LENGTH"): yield key.replace("_", "-").title(), value # # `get_host` comes from `werkzeug.wsgi.get_host` # https://github.com/pallets/werkzeug/blob/1.0.1/src/werkzeug/wsgi.py#L145 # def get_host(environ, use_x_forwarded_for=False): # type: (Dict[str, str], bool) -> str """ Return the host for the given WSGI environment. """ if use_x_forwarded_for and "HTTP_X_FORWARDED_HOST" in environ: rv = environ["HTTP_X_FORWARDED_HOST"] if environ["wsgi.url_scheme"] == "http" and rv.endswith(":80"): rv = rv[:-3] elif environ["wsgi.url_scheme"] == "https" and rv.endswith(":443"): rv = rv[:-4] elif environ.get("HTTP_HOST"): rv = environ["HTTP_HOST"] if environ["wsgi.url_scheme"] == "http" and rv.endswith(":80"): rv = rv[:-3] elif environ["wsgi.url_scheme"] == "https" and rv.endswith(":443"): rv = rv[:-4] elif environ.get("SERVER_NAME"): rv = environ["SERVER_NAME"] if (environ["wsgi.url_scheme"], environ["SERVER_PORT"]) not in ( ("https", "443"), ("http", "80"), ): rv += ":" + environ["SERVER_PORT"] else: # In spite of the WSGI spec, SERVER_NAME might not be present. 
rv = "unknown" return rv sentry-python-1.39.2/sentry_sdk/api.py000066400000000000000000000140601454744723200177760ustar00rootroot00000000000000import inspect from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.hub import Hub from sentry_sdk.scope import Scope from sentry_sdk.tracing import NoOpSpan, Transaction if TYPE_CHECKING: from typing import Any from typing import Dict from typing import Optional from typing import overload from typing import Callable from typing import TypeVar from typing import ContextManager from typing import Union from sentry_sdk._types import ( Event, Hint, Breadcrumb, BreadcrumbHint, ExcInfo, MeasurementUnit, ) from sentry_sdk.tracing import Span T = TypeVar("T") F = TypeVar("F", bound=Callable[..., Any]) else: def overload(x): # type: (T) -> T return x # When changing this, update __all__ in __init__.py too __all__ = [ "capture_event", "capture_message", "capture_exception", "add_breadcrumb", "configure_scope", "push_scope", "flush", "last_event_id", "start_span", "start_transaction", "set_tag", "set_context", "set_extra", "set_user", "set_level", "set_measurement", "get_current_span", "get_traceparent", "get_baggage", "continue_trace", ] def hubmethod(f): # type: (F) -> F f.__doc__ = "%s\n\n%s" % ( "Alias for :py:meth:`sentry_sdk.Hub.%s`" % f.__name__, inspect.getdoc(getattr(Hub, f.__name__)), ) return f def scopemethod(f): # type: (F) -> F f.__doc__ = "%s\n\n%s" % ( "Alias for :py:meth:`sentry_sdk.Scope.%s`" % f.__name__, inspect.getdoc(getattr(Scope, f.__name__)), ) return f @hubmethod def capture_event( event, # type: Event hint=None, # type: Optional[Hint] scope=None, # type: Optional[Any] **scope_args # type: Any ): # type: (...) -> Optional[str] return Hub.current.capture_event(event, hint, scope=scope, **scope_args) @hubmethod def capture_message( message, # type: str level=None, # type: Optional[str] scope=None, # type: Optional[Any] **scope_args # type: Any ): # type: (...) -> Optional[str] return Hub.current.capture_message(message, level, scope=scope, **scope_args) @hubmethod def capture_exception( error=None, # type: Optional[Union[BaseException, ExcInfo]] scope=None, # type: Optional[Any] **scope_args # type: Any ): # type: (...) -> Optional[str] return Hub.current.capture_exception(error, scope=scope, **scope_args) @hubmethod def add_breadcrumb( crumb=None, # type: Optional[Breadcrumb] hint=None, # type: Optional[BreadcrumbHint] **kwargs # type: Any ): # type: (...) -> None return Hub.current.add_breadcrumb(crumb, hint, **kwargs) @overload def configure_scope(): # type: () -> ContextManager[Scope] pass @overload def configure_scope( # noqa: F811 callback, # type: Callable[[Scope], None] ): # type: (...) -> None pass @hubmethod def configure_scope( # noqa: F811 callback=None, # type: Optional[Callable[[Scope], None]] ): # type: (...) -> Optional[ContextManager[Scope]] return Hub.current.configure_scope(callback) @overload def push_scope(): # type: () -> ContextManager[Scope] pass @overload def push_scope( # noqa: F811 callback, # type: Callable[[Scope], None] ): # type: (...) -> None pass @hubmethod def push_scope( # noqa: F811 callback=None, # type: Optional[Callable[[Scope], None]] ): # type: (...) 
-> Optional[ContextManager[Scope]] return Hub.current.push_scope(callback) @scopemethod def set_tag(key, value): # type: (str, Any) -> None return Hub.current.scope.set_tag(key, value) @scopemethod def set_context(key, value): # type: (str, Dict[str, Any]) -> None return Hub.current.scope.set_context(key, value) @scopemethod def set_extra(key, value): # type: (str, Any) -> None return Hub.current.scope.set_extra(key, value) @scopemethod def set_user(value): # type: (Optional[Dict[str, Any]]) -> None return Hub.current.scope.set_user(value) @scopemethod def set_level(value): # type: (str) -> None return Hub.current.scope.set_level(value) @hubmethod def flush( timeout=None, # type: Optional[float] callback=None, # type: Optional[Callable[[int, float], None]] ): # type: (...) -> None return Hub.current.flush(timeout=timeout, callback=callback) @hubmethod def last_event_id(): # type: () -> Optional[str] return Hub.current.last_event_id() @hubmethod def start_span( span=None, # type: Optional[Span] **kwargs # type: Any ): # type: (...) -> Span return Hub.current.start_span(span=span, **kwargs) @hubmethod def start_transaction( transaction=None, # type: Optional[Transaction] **kwargs # type: Any ): # type: (...) -> Union[Transaction, NoOpSpan] return Hub.current.start_transaction(transaction, **kwargs) def set_measurement(name, value, unit=""): # type: (str, float, MeasurementUnit) -> None transaction = Hub.current.scope.transaction if transaction is not None: transaction.set_measurement(name, value, unit) def get_current_span(hub=None): # type: (Optional[Hub]) -> Optional[Span] """ Returns the currently active span if there is one running, otherwise `None` """ if hub is None: hub = Hub.current current_span = hub.scope.span return current_span def get_traceparent(): # type: () -> Optional[str] """ Returns the traceparent either from the active span or from the scope. """ return Hub.current.get_traceparent() def get_baggage(): # type: () -> Optional[str] """ Returns Baggage either from the active span or from the scope. """ return Hub.current.get_baggage() def continue_trace(environ_or_headers, op=None, name=None, source=None): # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str]) -> Transaction """ Sets the propagation context from environment or headers and returns a transaction. """ return Hub.current.continue_trace(environ_or_headers, op, name, source) sentry-python-1.39.2/sentry_sdk/attachments.py000066400000000000000000000034231454744723200215410ustar00rootroot00000000000000import os import mimetypes from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.envelope import Item, PayloadRef if TYPE_CHECKING: from typing import Optional, Union, Callable class Attachment(object): def __init__( self, bytes=None, # type: Union[None, bytes, Callable[[], bytes]] filename=None, # type: Optional[str] path=None, # type: Optional[str] content_type=None, # type: Optional[str] add_to_transactions=False, # type: bool ): # type: (...) 
-> None
        if bytes is None and path is None:
            raise TypeError("path or raw bytes required for attachment")
        if filename is None and path is not None:
            filename = os.path.basename(path)
        if filename is None:
            raise TypeError("filename is required for attachment")
        if content_type is None:
            content_type = mimetypes.guess_type(filename)[0]
        self.bytes = bytes
        self.filename = filename
        self.path = path
        self.content_type = content_type
        self.add_to_transactions = add_to_transactions

    def to_envelope_item(self):
        # type: () -> Item
        """Returns an envelope item for this attachment."""
        payload = None  # type: Union[None, PayloadRef, bytes]
        if self.bytes is not None:
            if callable(self.bytes):
                payload = self.bytes()
            else:
                payload = self.bytes
        else:
            payload = PayloadRef(path=self.path)

        return Item(
            payload=payload,
            type="attachment",
            content_type=self.content_type,
            filename=self.filename,
        )

    def __repr__(self):
        # type: () -> str
        return "<Attachment %r>" % (self.filename,)
sentry-python-1.39.2/sentry_sdk/client.py000066400000000000000000000632361454744723200205110ustar00rootroot00000000000000from importlib import import_module
import os
import uuid
import random
import socket

from sentry_sdk._compat import datetime_utcnow, string_types, text_type, iteritems
from sentry_sdk.utils import (
    capture_internal_exceptions,
    current_stacktrace,
    disable_capture_event,
    format_timestamp,
    get_sdk_name,
    get_type_name,
    get_default_release,
    handle_in_app,
    logger,
)
from sentry_sdk.serializer import serialize
from sentry_sdk.tracing import trace, has_tracing_enabled
from sentry_sdk.transport import make_transport
from sentry_sdk.consts import (
    DEFAULT_MAX_VALUE_LENGTH,
    DEFAULT_OPTIONS,
    INSTRUMENTER,
    VERSION,
    ClientConstructor,
)
from sentry_sdk.integrations import _DEFAULT_INTEGRATIONS, setup_integrations
from sentry_sdk.utils import ContextVar
from sentry_sdk.sessions import SessionFlusher
from sentry_sdk.envelope import Envelope
from sentry_sdk.profiler import has_profiling_enabled, setup_profiler
from sentry_sdk.scrubber import EventScrubber
from sentry_sdk.monitor import Monitor
from sentry_sdk.spotlight import setup_spotlight

from sentry_sdk._types import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Any
    from typing import Callable
    from typing import Dict
    from typing import Optional
    from typing import Sequence

    from sentry_sdk.scope import Scope
    from sentry_sdk._types import Event, Hint
    from sentry_sdk.session import Session


_client_init_debug = ContextVar("client_init_debug")


SDK_INFO = {
    "name": "sentry.python",  # SDK name will be overridden after integrations have been loaded with sentry_sdk.integrations.setup_integrations()
    "version": VERSION,
    "packages": [{"name": "pypi:sentry-sdk", "version": VERSION}],
}


def _get_options(*args, **kwargs):
    # type: (*Optional[str], **Any) -> Dict[str, Any]
    if args and (isinstance(args[0], (text_type, bytes, str)) or args[0] is None):
        dsn = args[0]  # type: Optional[str]
        args = args[1:]
    else:
        dsn = None

    if len(args) > 1:
        raise TypeError("Only single positional argument is expected")

    rv = dict(DEFAULT_OPTIONS)
    options = dict(*args, **kwargs)
    if dsn is not None and options.get("dsn") is None:
        options["dsn"] = dsn

    for key, value in iteritems(options):
        if key not in rv:
            # Option "with_locals" was renamed to "include_local_variables"
            if key == "with_locals":
                msg = (
                    "Deprecated: The option 'with_locals' was renamed to 'include_local_variables'. "
                    "Please use 'include_local_variables'. The option 'with_locals' will be removed in the future."
) logger.warning(msg) rv["include_local_variables"] = value continue # Option "request_bodies" was renamed to "max_request_body_size" if key == "request_bodies": msg = ( "Deprecated: The option 'request_bodies' was renamed to 'max_request_body_size'. " "Please use 'max_request_body_size'. The option 'request_bodies' will be removed in the future." ) logger.warning(msg) rv["max_request_body_size"] = value continue raise TypeError("Unknown option %r" % (key,)) rv[key] = value if rv["dsn"] is None: rv["dsn"] = os.environ.get("SENTRY_DSN") if rv["release"] is None: rv["release"] = get_default_release() if rv["environment"] is None: rv["environment"] = os.environ.get("SENTRY_ENVIRONMENT") or "production" if rv["debug"] is None: rv["debug"] = os.environ.get("SENTRY_DEBUG", "False").lower() in ( "true", "1", "t", ) if rv["server_name"] is None and hasattr(socket, "gethostname"): rv["server_name"] = socket.gethostname() if rv["instrumenter"] is None: rv["instrumenter"] = INSTRUMENTER.SENTRY if rv["project_root"] is None: try: project_root = os.getcwd() except Exception: project_root = None rv["project_root"] = project_root if rv["enable_tracing"] is True and rv["traces_sample_rate"] is None: rv["traces_sample_rate"] = 1.0 if rv["event_scrubber"] is None: rv["event_scrubber"] = EventScrubber() return rv try: # Python 3.6+ module_not_found_error = ModuleNotFoundError except Exception: # Older Python versions module_not_found_error = ImportError # type: ignore class _Client(object): """The client is internally responsible for capturing the events and forwarding them to sentry through the configured transport. It takes the client options as keyword arguments and optionally the DSN as first argument. """ def __init__(self, *args, **kwargs): # type: (*Any, **Any) -> None self.options = get_options(*args, **kwargs) # type: Dict[str, Any] self._init_impl() def __getstate__(self): # type: () -> Any return {"options": self.options} def __setstate__(self, state): # type: (Any) -> None self.options = state["options"] self._init_impl() def _setup_instrumentation(self, functions_to_trace): # type: (Sequence[Dict[str, str]]) -> None """ Instruments the functions given in the list `functions_to_trace` with the `@sentry_sdk.tracing.trace` decorator. """ for function in functions_to_trace: class_name = None function_qualname = function["qualified_name"] module_name, function_name = function_qualname.rsplit(".", 1) try: # Try to import module and function # ex: "mymodule.submodule.funcname" module_obj = import_module(module_name) function_obj = getattr(module_obj, function_name) setattr(module_obj, function_name, trace(function_obj)) logger.debug("Enabled tracing for %s", function_qualname) except module_not_found_error: try: # Try to import a class # ex: "mymodule.submodule.MyClassName.member_function" module_name, class_name = module_name.rsplit(".", 1) module_obj = import_module(module_name) class_obj = getattr(module_obj, class_name) function_obj = getattr(class_obj, function_name) function_type = type(class_obj.__dict__[function_name]) traced_function = trace(function_obj) if function_type in (staticmethod, classmethod): traced_function = staticmethod(traced_function) setattr(class_obj, function_name, traced_function) setattr(module_obj, class_name, class_obj) logger.debug("Enabled tracing for %s", function_qualname) except Exception as e: logger.warning( "Can not enable tracing for '%s'. 
(%s) Please check your `functions_to_trace` parameter.", function_qualname, e, ) except Exception as e: logger.warning( "Can not enable tracing for '%s'. (%s) Please check your `functions_to_trace` parameter.", function_qualname, e, ) def _init_impl(self): # type: () -> None old_debug = _client_init_debug.get(False) def _capture_envelope(envelope): # type: (Envelope) -> None if self.transport is not None: self.transport.capture_envelope(envelope) try: _client_init_debug.set(self.options["debug"]) self.transport = make_transport(self.options) self.monitor = None if self.transport: if self.options["enable_backpressure_handling"]: self.monitor = Monitor(self.transport) self.session_flusher = SessionFlusher(capture_func=_capture_envelope) self.metrics_aggregator = None # type: Optional[MetricsAggregator] experiments = self.options.get("_experiments", {}) if experiments.get("enable_metrics"): from sentry_sdk.metrics import MetricsAggregator self.metrics_aggregator = MetricsAggregator( capture_func=_capture_envelope, enable_code_locations=bool( experiments.get("metric_code_locations") ), ) max_request_body_size = ("always", "never", "small", "medium") if self.options["max_request_body_size"] not in max_request_body_size: raise ValueError( "Invalid value for max_request_body_size. Must be one of {}".format( max_request_body_size ) ) if self.options["_experiments"].get("otel_powered_performance", False): logger.debug( "[OTel] Enabling experimental OTel-powered performance monitoring." ) self.options["instrumenter"] = INSTRUMENTER.OTEL _DEFAULT_INTEGRATIONS.append( "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration", ) self.integrations = setup_integrations( self.options["integrations"], with_defaults=self.options["default_integrations"], with_auto_enabling_integrations=self.options[ "auto_enabling_integrations" ], ) self.spotlight = None if self.options.get("spotlight"): self.spotlight = setup_spotlight(self.options) sdk_name = get_sdk_name(list(self.integrations.keys())) SDK_INFO["name"] = sdk_name logger.debug("Setting SDK name to '%s'", sdk_name) if has_profiling_enabled(self.options): try: setup_profiler(self.options) except Exception as e: logger.debug("Can not set up profiler. (%s)", e) finally: _client_init_debug.set(old_debug) self._setup_instrumentation(self.options.get("functions_to_trace", [])) @property def dsn(self): # type: () -> Optional[str] """Returns the configured DSN as string.""" return self.options["dsn"] def _prepare_event( self, event, # type: Event hint, # type: Hint scope, # type: Optional[Scope] ): # type: (...) 
-> Optional[Event] if event.get("timestamp") is None: event["timestamp"] = datetime_utcnow() if scope is not None: is_transaction = event.get("type") == "transaction" event_ = scope.apply_to_event(event, hint, self.options) # one of the event/error processors returned None if event_ is None: if self.transport: self.transport.record_lost_event( "event_processor", data_category=("transaction" if is_transaction else "error"), ) return None event = event_ if ( self.options["attach_stacktrace"] and "exception" not in event and "stacktrace" not in event and "threads" not in event ): with capture_internal_exceptions(): event["threads"] = { "values": [ { "stacktrace": current_stacktrace( include_local_variables=self.options.get( "include_local_variables", True ), max_value_length=self.options.get( "max_value_length", DEFAULT_MAX_VALUE_LENGTH ), ), "crashed": False, "current": True, } ] } for key in "release", "environment", "server_name", "dist": if event.get(key) is None and self.options[key] is not None: event[key] = text_type(self.options[key]).strip() if event.get("sdk") is None: sdk_info = dict(SDK_INFO) sdk_info["integrations"] = sorted(self.integrations.keys()) event["sdk"] = sdk_info if event.get("platform") is None: event["platform"] = "python" event = handle_in_app( event, self.options["in_app_exclude"], self.options["in_app_include"], self.options["project_root"], ) if event is not None: event_scrubber = self.options["event_scrubber"] if event_scrubber and not self.options["send_default_pii"]: event_scrubber.scrub_event(event) # Postprocess the event here so that annotated types do # generally not surface in before_send if event is not None: event = serialize( event, max_request_body_size=self.options.get("max_request_body_size"), max_value_length=self.options.get("max_value_length"), ) before_send = self.options["before_send"] if ( before_send is not None and event is not None and event.get("type") != "transaction" ): new_event = None with capture_internal_exceptions(): new_event = before_send(event, hint or {}) if new_event is None: logger.info("before send dropped event") if self.transport: self.transport.record_lost_event( "before_send", data_category="error" ) event = new_event # type: ignore before_send_transaction = self.options["before_send_transaction"] if ( before_send_transaction is not None and event is not None and event.get("type") == "transaction" ): new_event = None with capture_internal_exceptions(): new_event = before_send_transaction(event, hint or {}) if new_event is None: logger.info("before send transaction dropped event") if self.transport: self.transport.record_lost_event( "before_send", data_category="transaction" ) event = new_event # type: ignore return event def _is_ignored_error(self, event, hint): # type: (Event, Hint) -> bool exc_info = hint.get("exc_info") if exc_info is None: return False error = exc_info[0] error_type_name = get_type_name(exc_info[0]) error_full_name = "%s.%s" % (exc_info[0].__module__, error_type_name) for ignored_error in self.options["ignore_errors"]: # String types are matched against the type name in the # exception only if isinstance(ignored_error, string_types): if ignored_error == error_full_name or ignored_error == error_type_name: return True else: if issubclass(error, ignored_error): return True return False def _should_capture( self, event, # type: Event hint, # type: Hint scope=None, # type: Optional[Scope] ): # type: (...) -> bool # Transactions are sampled independent of error events. 
is_transaction = event.get("type") == "transaction" if is_transaction: return True ignoring_prevents_recursion = scope is not None and not scope._should_capture if ignoring_prevents_recursion: return False ignored_by_config_option = self._is_ignored_error(event, hint) if ignored_by_config_option: return False return True def _should_sample_error( self, event, # type: Event hint, # type: Hint ): # type: (...) -> bool error_sampler = self.options.get("error_sampler", None) if callable(error_sampler): with capture_internal_exceptions(): sample_rate = error_sampler(event, hint) else: sample_rate = self.options["sample_rate"] try: not_in_sample_rate = sample_rate < 1.0 and random.random() >= sample_rate except NameError: logger.warning( "The provided error_sampler raised an error. Defaulting to sampling the event." ) # If the error_sampler raised an error, we should sample the event, since the default behavior # (when no sample_rate or error_sampler is provided) is to sample all events. not_in_sample_rate = False except TypeError: parameter, verb = ( ("error_sampler", "returned") if callable(error_sampler) else ("sample_rate", "contains") ) logger.warning( "The provided %s %s an invalid value of %s. The value should be a float or a bool. Defaulting to sampling the event." % (parameter, verb, repr(sample_rate)) ) # If the sample_rate has an invalid value, we should sample the event, since the default behavior # (when no sample_rate or error_sampler is provided) is to sample all events. not_in_sample_rate = False if not_in_sample_rate: # because we will not sample this event, record a "lost event". if self.transport: self.transport.record_lost_event("sample_rate", data_category="error") return False return True def _update_session_from_event( self, session, # type: Session event, # type: Event ): # type: (...) -> None crashed = False errored = False user_agent = None exceptions = (event.get("exception") or {}).get("values") if exceptions: errored = True for error in exceptions: mechanism = error.get("mechanism") if mechanism and mechanism.get("handled") is False: crashed = True break user = event.get("user") if session.user_agent is None: headers = (event.get("request") or {}).get("headers") for k, v in iteritems(headers or {}): if k.lower() == "user-agent": user_agent = v break session.update( status="crashed" if crashed else None, user=user, user_agent=user_agent, errors=session.errors + (errored or crashed), ) def capture_event( self, event, # type: Event hint=None, # type: Optional[Hint] scope=None, # type: Optional[Scope] ): # type: (...) -> Optional[str] """Captures an event. :param event: A ready-made event that can be directly sent to Sentry. :param hint: Contains metadata about the event that can be read from `before_send`, such as the original exception object or a HTTP request object. :param scope: An optional scope to use for determining whether this event should be captured. :returns: An event ID. May be `None` if there is no DSN set or of if the SDK decided to discard the event for other reasons. In such situations setting `debug=True` on `init()` may help. 
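
        Example (a minimal sketch using the module-level alias; the event
        payload shown is illustrative)::

            import sentry_sdk

            event_id = sentry_sdk.capture_event(
                {"message": "something happened", "level": "warning"}
            )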
""" if disable_capture_event.get(False): return None if hint is None: hint = {} event_id = event.get("event_id") hint = dict(hint or ()) # type: Hint if event_id is None: event["event_id"] = event_id = uuid.uuid4().hex if not self._should_capture(event, hint, scope): return None profile = event.pop("profile", None) event_opt = self._prepare_event(event, hint, scope) if event_opt is None: return None # whenever we capture an event we also check if the session needs # to be updated based on that information. session = scope._session if scope else None if session: self._update_session_from_event(session, event) is_transaction = event_opt.get("type") == "transaction" is_checkin = event_opt.get("type") == "check_in" if ( not is_transaction and not is_checkin and not self._should_sample_error(event, hint) ): return None tracing_enabled = has_tracing_enabled(self.options) attachments = hint.get("attachments") trace_context = event_opt.get("contexts", {}).get("trace") or {} dynamic_sampling_context = trace_context.pop("dynamic_sampling_context", {}) # If tracing is enabled all events should go to /envelope endpoint. # If no tracing is enabled only transactions, events with attachments, and checkins should go to the /envelope endpoint. should_use_envelope_endpoint = ( tracing_enabled or is_transaction or is_checkin or bool(attachments) or bool(self.spotlight) ) if should_use_envelope_endpoint: headers = { "event_id": event_opt["event_id"], "sent_at": format_timestamp(datetime_utcnow()), } if dynamic_sampling_context: headers["trace"] = dynamic_sampling_context envelope = Envelope(headers=headers) if is_transaction: if profile is not None: envelope.add_profile(profile.to_json(event_opt, self.options)) envelope.add_transaction(event_opt) elif is_checkin: envelope.add_checkin(event_opt) else: envelope.add_event(event_opt) for attachment in attachments or (): envelope.add_item(attachment.to_envelope_item()) if self.spotlight: self.spotlight.capture_envelope(envelope) if self.transport is None: return None self.transport.capture_envelope(envelope) else: if self.transport is None: return None # All other events go to the legacy /store/ endpoint (will be removed in the future). self.transport.capture_event(event_opt) return event_id def capture_session( self, session # type: Session ): # type: (...) -> None if not session.release: logger.info("Discarded session update because of missing release") else: self.session_flusher.add_session(session) def close( self, timeout=None, # type: Optional[float] callback=None, # type: Optional[Callable[[int, float], None]] ): # type: (...) -> None """ Close the client and shut down the transport. Arguments have the same semantics as :py:meth:`Client.flush`. """ if self.transport is not None: self.flush(timeout=timeout, callback=callback) self.session_flusher.kill() if self.metrics_aggregator is not None: self.metrics_aggregator.kill() if self.monitor: self.monitor.kill() self.transport.kill() self.transport = None def flush( self, timeout=None, # type: Optional[float] callback=None, # type: Optional[Callable[[int, float], None]] ): # type: (...) -> None """ Wait for the current events to be sent. :param timeout: Wait for at most `timeout` seconds. If no `timeout` is provided, the `shutdown_timeout` option value is used. :param callback: Is invoked with the number of pending events and the configured timeout. 
""" if self.transport is not None: if timeout is None: timeout = self.options["shutdown_timeout"] self.session_flusher.flush() if self.metrics_aggregator is not None: self.metrics_aggregator.flush() self.transport.flush(timeout=timeout, callback=callback) def __enter__(self): # type: () -> _Client return self def __exit__(self, exc_type, exc_value, tb): # type: (Any, Any, Any) -> None self.close() from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: # Make mypy, PyCharm and other static analyzers think `get_options` is a # type to have nicer autocompletion for params. # # Use `ClientConstructor` to define the argument types of `init` and # `Dict[str, Any]` to tell static analyzers about the return type. class get_options(ClientConstructor, Dict[str, Any]): # noqa: N801 pass class Client(ClientConstructor, _Client): pass else: # Alias `get_options` for actual usage. Go through the lambda indirection # to throw PyCharm off of the weakly typed signature (it would otherwise # discover both the weakly typed signature of `_init` and our faked `init` # type). get_options = (lambda: _get_options)() Client = (lambda: _Client)() sentry-python-1.39.2/sentry_sdk/consts.py000066400000000000000000000253631454744723200205460ustar00rootroot00000000000000from sentry_sdk._types import TYPE_CHECKING # up top to prevent circular import due to integration import DEFAULT_MAX_VALUE_LENGTH = 1024 if TYPE_CHECKING: import sentry_sdk from typing import Optional from typing import Callable from typing import Union from typing import List from typing import Type from typing import Dict from typing import Any from typing import Sequence from typing_extensions import TypedDict from sentry_sdk.integrations import Integration from sentry_sdk._types import ( BreadcrumbProcessor, Event, EventProcessor, Hint, ProfilerMode, TracesSampler, TransactionProcessor, MetricTags, ) # Experiments are feature flags to enable and disable certain unstable SDK # functionality. Changing them from the defaults (`None`) in production # code is highly discouraged. They are not subject to any stability # guarantees such as the ones from semantic versioning. Experiments = TypedDict( "Experiments", { "attach_explain_plans": dict[str, Any], "max_spans": Optional[int], "record_sql_params": Optional[bool], # TODO: Remove these 2 profiling related experiments "profiles_sample_rate": Optional[float], "profiler_mode": Optional[ProfilerMode], "otel_powered_performance": Optional[bool], "transport_zlib_compression_level": Optional[int], "transport_num_pools": Optional[int], "enable_metrics": Optional[bool], "metrics_summary_sample_rate": Optional[float], "should_summarize_metric": Optional[Callable[[str, MetricTags], bool]], "before_emit_metric": Optional[Callable[[str, MetricTags], bool]], "metric_code_locations": Optional[bool], }, total=False, ) DEFAULT_QUEUE_SIZE = 100 DEFAULT_MAX_BREADCRUMBS = 100 MATCH_ALL = r".*" FALSE_VALUES = [ "false", "no", "off", "n", "0", ] class INSTRUMENTER: SENTRY = "sentry" OTEL = "otel" class SPANDATA: """ Additional information describing the type of the span. See: https://develop.sentry.dev/sdk/performance/span-data-conventions/ """ DB_NAME = "db.name" """ The name of the database being accessed. For commands that switch the database, this should be set to the target database (even if the command fails). Example: myDatabase """ DB_USER = "db.user" """ The name of the database user used for connecting to the database. 
See: https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/database.md Example: my_user """ DB_OPERATION = "db.operation" """ The name of the operation being executed, e.g. the MongoDB command name such as findAndModify, or the SQL keyword. See: https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/database.md Example: findAndModify, HMSET, SELECT """ DB_SYSTEM = "db.system" """ An identifier for the database management system (DBMS) product being used. See: https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/database.md Example: postgresql """ CACHE_HIT = "cache.hit" """ A boolean indicating whether the requested data was found in the cache. Example: true """ CACHE_ITEM_SIZE = "cache.item_size" """ The size of the requested data in bytes. Example: 58 """ HTTP_QUERY = "http.query" """ The Query string present in the URL. Example: ?foo=bar&bar=baz """ HTTP_FRAGMENT = "http.fragment" """ The Fragments present in the URL. Example: #foo=bar """ HTTP_METHOD = "http.method" """ The HTTP method used. Example: GET """ HTTP_STATUS_CODE = "http.response.status_code" """ The HTTP status code as an integer. Example: 418 """ SERVER_ADDRESS = "server.address" """ Name of the database host. Example: example.com """ SERVER_PORT = "server.port" """ Logical server port number Example: 80; 8080; 443 """ SERVER_SOCKET_ADDRESS = "server.socket.address" """ Physical server IP address or Unix socket address. Example: 10.5.3.2 """ SERVER_SOCKET_PORT = "server.socket.port" """ Physical server port. Recommended: If different than server.port. Example: 16456 """ CODE_FILEPATH = "code.filepath" """ The source code file name that identifies the code unit as uniquely as possible (preferably an absolute file path). Example: "/app/myapplication/http/handler/server.py" """ CODE_LINENO = "code.lineno" """ The line number in `code.filepath` best representing the operation. It SHOULD point within the code unit named in `code.function`. Example: 42 """ CODE_FUNCTION = "code.function" """ The method or function name, or equivalent (usually rightmost part of the code unit's name). Example: "server_request" """ CODE_NAMESPACE = "code.namespace" """ The "namespace" within which `code.function` is defined. Usually the qualified class or module name, such that `code.namespace` + some separator + `code.function` form a unique identifier for the code unit. 
Example: "http.handler" """ class OP: CACHE_GET_ITEM = "cache.get_item" DB = "db" DB_REDIS = "db.redis" EVENT_DJANGO = "event.django" FUNCTION = "function" FUNCTION_AWS = "function.aws" FUNCTION_GCP = "function.gcp" GRAPHQL_EXECUTE = "graphql.execute" GRAPHQL_MUTATION = "graphql.mutation" GRAPHQL_PARSE = "graphql.parse" GRAPHQL_RESOLVE = "graphql.resolve" GRAPHQL_SUBSCRIPTION = "graphql.subscription" GRAPHQL_QUERY = "graphql.query" GRAPHQL_VALIDATE = "graphql.validate" GRPC_CLIENT = "grpc.client" GRPC_SERVER = "grpc.server" HTTP_CLIENT = "http.client" HTTP_CLIENT_STREAM = "http.client.stream" HTTP_SERVER = "http.server" MIDDLEWARE_DJANGO = "middleware.django" MIDDLEWARE_STARLETTE = "middleware.starlette" MIDDLEWARE_STARLETTE_RECEIVE = "middleware.starlette.receive" MIDDLEWARE_STARLETTE_SEND = "middleware.starlette.send" MIDDLEWARE_STARLITE = "middleware.starlite" MIDDLEWARE_STARLITE_RECEIVE = "middleware.starlite.receive" MIDDLEWARE_STARLITE_SEND = "middleware.starlite.send" QUEUE_SUBMIT_ARQ = "queue.submit.arq" QUEUE_TASK_ARQ = "queue.task.arq" QUEUE_SUBMIT_CELERY = "queue.submit.celery" QUEUE_TASK_CELERY = "queue.task.celery" QUEUE_TASK_RQ = "queue.task.rq" QUEUE_SUBMIT_HUEY = "queue.submit.huey" QUEUE_TASK_HUEY = "queue.task.huey" SUBPROCESS = "subprocess" SUBPROCESS_WAIT = "subprocess.wait" SUBPROCESS_COMMUNICATE = "subprocess.communicate" TEMPLATE_RENDER = "template.render" VIEW_RENDER = "view.render" VIEW_RESPONSE_RENDER = "view.response.render" WEBSOCKET_SERVER = "websocket.server" SOCKET_CONNECTION = "socket.connection" SOCKET_DNS = "socket.dns" # This type exists to trick mypy and PyCharm into thinking `init` and `Client` # take these arguments (even though they take opaque **kwargs) class ClientConstructor(object): def __init__( self, dsn=None, # type: Optional[str] max_breadcrumbs=DEFAULT_MAX_BREADCRUMBS, # type: int release=None, # type: Optional[str] environment=None, # type: Optional[str] server_name=None, # type: Optional[str] shutdown_timeout=2, # type: float integrations=[], # type: Sequence[Integration] # noqa: B006 in_app_include=[], # type: List[str] # noqa: B006 in_app_exclude=[], # type: List[str] # noqa: B006 default_integrations=True, # type: bool dist=None, # type: Optional[str] transport=None, # type: Optional[Union[sentry_sdk.transport.Transport, Type[sentry_sdk.transport.Transport], Callable[[Event], None]]] transport_queue_size=DEFAULT_QUEUE_SIZE, # type: int sample_rate=1.0, # type: float send_default_pii=False, # type: bool http_proxy=None, # type: Optional[str] https_proxy=None, # type: Optional[str] ignore_errors=[], # type: Sequence[Union[type, str]] # noqa: B006 max_request_body_size="medium", # type: str before_send=None, # type: Optional[EventProcessor] before_breadcrumb=None, # type: Optional[BreadcrumbProcessor] debug=None, # type: Optional[bool] attach_stacktrace=False, # type: bool ca_certs=None, # type: Optional[str] propagate_traces=True, # type: bool traces_sample_rate=None, # type: Optional[float] traces_sampler=None, # type: Optional[TracesSampler] profiles_sample_rate=None, # type: Optional[float] profiles_sampler=None, # type: Optional[TracesSampler] profiler_mode=None, # type: Optional[ProfilerMode] auto_enabling_integrations=True, # type: bool auto_session_tracking=True, # type: bool send_client_reports=True, # type: bool _experiments={}, # type: Experiments # noqa: B006 proxy_headers=None, # type: Optional[Dict[str, str]] instrumenter=INSTRUMENTER.SENTRY, # type: Optional[str] before_send_transaction=None, # type: 
Optional[TransactionProcessor] project_root=None, # type: Optional[str] enable_tracing=None, # type: Optional[bool] include_local_variables=True, # type: Optional[bool] include_source_context=True, # type: Optional[bool] trace_propagation_targets=[ # noqa: B006 MATCH_ALL ], # type: Optional[Sequence[str]] functions_to_trace=[], # type: Sequence[Dict[str, str]] # noqa: B006 event_scrubber=None, # type: Optional[sentry_sdk.scrubber.EventScrubber] max_value_length=DEFAULT_MAX_VALUE_LENGTH, # type: int enable_backpressure_handling=True, # type: bool error_sampler=None, # type: Optional[Callable[[Event, Hint], Union[float, bool]]] enable_db_query_source=False, # type: bool db_query_source_threshold_ms=100, # type: int spotlight=None, # type: Optional[Union[bool, str]] ): # type: (...) -> None pass def _get_default_options(): # type: () -> Dict[str, Any] import inspect if hasattr(inspect, "getfullargspec"): getargspec = inspect.getfullargspec else: getargspec = inspect.getargspec # type: ignore a = getargspec(ClientConstructor.__init__) defaults = a.defaults or () return dict(zip(a.args[-len(defaults) :], defaults)) DEFAULT_OPTIONS = _get_default_options() del _get_default_options VERSION = "1.39.2" sentry-python-1.39.2/sentry_sdk/crons/000077500000000000000000000000001454744723200177765ustar00rootroot00000000000000sentry-python-1.39.2/sentry_sdk/crons/__init__.py000066400000000000000000000002521454744723200221060ustar00rootroot00000000000000from sentry_sdk.crons.api import capture_checkin # noqa from sentry_sdk.crons.consts import MonitorStatus # noqa from sentry_sdk.crons.decorator import monitor # noqa sentry-python-1.39.2/sentry_sdk/crons/api.py000066400000000000000000000027111454744723200211220ustar00rootroot00000000000000import uuid from sentry_sdk import Hub from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Any, Dict, Optional def _create_check_in_event( monitor_slug=None, check_in_id=None, status=None, duration_s=None, monitor_config=None, ): # type: (Optional[str], Optional[str], Optional[str], Optional[float], Optional[Dict[str, Any]]) -> Dict[str, Any] options = Hub.current.client.options if Hub.current.client else {} check_in_id = check_in_id or uuid.uuid4().hex # type: str check_in = { "type": "check_in", "monitor_slug": monitor_slug, "check_in_id": check_in_id, "status": status, "duration": duration_s, "environment": options.get("environment", None), "release": options.get("release", None), } if monitor_config: check_in["monitor_config"] = monitor_config return check_in def capture_checkin( monitor_slug=None, check_in_id=None, status=None, duration=None, monitor_config=None, ): # type: (Optional[str], Optional[str], Optional[str], Optional[float], Optional[Dict[str, Any]]) -> str check_in_event = _create_check_in_event( monitor_slug=monitor_slug, check_in_id=check_in_id, status=status, duration_s=duration, monitor_config=monitor_config, ) hub = Hub.current hub.capture_event(check_in_event) return check_in_event["check_in_id"] sentry-python-1.39.2/sentry_sdk/crons/consts.py000066400000000000000000000001271454744723200216610ustar00rootroot00000000000000class MonitorStatus: IN_PROGRESS = "in_progress" OK = "ok" ERROR = "error" sentry-python-1.39.2/sentry_sdk/crons/decorator.py000066400000000000000000000032511454744723200223330ustar00rootroot00000000000000import sys from sentry_sdk._compat import contextmanager, reraise from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.crons import capture_checkin from sentry_sdk.crons.consts import 
MonitorStatus
from sentry_sdk.utils import now

if TYPE_CHECKING:
    from typing import Generator, Optional


@contextmanager
def monitor(monitor_slug=None):
    # type: (Optional[str]) -> Generator[None, None, None]
    """
    Decorator/context manager to capture checkin events for a monitor.

    Usage (as decorator):
    ```
    import sentry_sdk

    app = Celery()

    @app.task
    @sentry_sdk.monitor(monitor_slug='my-fancy-slug')
    def test(arg):
        print(arg)
    ```

    This does not have to be used with Celery, but if you do use it with
    Celery, put the `@sentry_sdk.monitor` decorator below Celery's
    `@app.task` decorator.

    Usage (as context manager):
    ```
    import sentry_sdk

    def test(arg):
        with sentry_sdk.monitor(monitor_slug='my-fancy-slug'):
            print(arg)
    ```
    """
    start_timestamp = now()
    check_in_id = capture_checkin(
        monitor_slug=monitor_slug, status=MonitorStatus.IN_PROGRESS
    )

    try:
        yield
    except Exception:
        duration_s = now() - start_timestamp
        capture_checkin(
            monitor_slug=monitor_slug,
            check_in_id=check_in_id,
            status=MonitorStatus.ERROR,
            duration=duration_s,
        )
        exc_info = sys.exc_info()
        reraise(*exc_info)

    duration_s = now() - start_timestamp
    capture_checkin(
        monitor_slug=monitor_slug,
        check_in_id=check_in_id,
        status=MonitorStatus.OK,
        duration=duration_s,
    )
sentry-python-1.39.2/sentry_sdk/db/000077500000000000000000000000001454744723200172375ustar00rootroot00000000000000sentry-python-1.39.2/sentry_sdk/db/__init__.py000066400000000000000000000000001454744723200213360ustar00rootroot00000000000000sentry-python-1.39.2/sentry_sdk/db/explain_plan/000077500000000000000000000000001454744723200217115ustar00rootroot00000000000000sentry-python-1.39.2/sentry_sdk/db/explain_plan/__init__.py000066400000000000000000000027761454744723200240320ustar00rootroot00000000000000import datetime

from sentry_sdk._compat import datetime_utcnow
from sentry_sdk.consts import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Any


EXPLAIN_CACHE = {}
EXPLAIN_CACHE_SIZE = 50
EXPLAIN_CACHE_TIMEOUT_SECONDS = 60 * 60 * 24


def cache_statement(statement, options):
    # type: (str, dict[str, Any]) -> None
    global EXPLAIN_CACHE

    now = datetime_utcnow()
    explain_cache_timeout_seconds = options.get(
        "explain_cache_timeout_seconds", EXPLAIN_CACHE_TIMEOUT_SECONDS
    )
    expiration_time = now + datetime.timedelta(seconds=explain_cache_timeout_seconds)

    EXPLAIN_CACHE[hash(statement)] = expiration_time


def remove_expired_cache_items():
    # type: () -> None
    """
    Remove expired cache items from the cache.
    """
    global EXPLAIN_CACHE

    now = datetime_utcnow()

    # Iterate over a snapshot of the items; deleting from the dict while
    # iterating over it directly raises a RuntimeError on Python 3.
    for key, expiration_time in list(EXPLAIN_CACHE.items()):
        expiration_in_the_past = expiration_time < now
        if expiration_in_the_past:
            del EXPLAIN_CACHE[key]


def should_run_explain_plan(statement, options):
    # type: (str, dict[str, Any]) -> bool
    """
    Check the cache to determine whether the explain plan for the given statement should be run.
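    Returns `False` when the statement's cache entry has not expired yet, or
    when the cache is already full; returns `True` otherwise.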
""" global EXPLAIN_CACHE remove_expired_cache_items() key = hash(statement) if key in EXPLAIN_CACHE: return False explain_cache_size = options.get("explain_cache_size", EXPLAIN_CACHE_SIZE) cache_is_full = len(EXPLAIN_CACHE.keys()) >= explain_cache_size if cache_is_full: return False return True sentry-python-1.39.2/sentry_sdk/db/explain_plan/django.py000066400000000000000000000030751454744723200235320ustar00rootroot00000000000000from sentry_sdk.consts import TYPE_CHECKING from sentry_sdk.db.explain_plan import cache_statement, should_run_explain_plan if TYPE_CHECKING: from typing import Any from typing import Callable from sentry_sdk.tracing import Span def attach_explain_plan_to_span( span, connection, statement, parameters, mogrify, options ): # type: (Span, Any, str, Any, Callable[[str, Any], bytes], dict[str, Any]) -> None """ Run EXPLAIN or EXPLAIN ANALYZE on the given statement and attach the explain plan to the span data. Usage: ``` sentry_sdk.init( dsn="...", _experiments={ "attach_explain_plans": { "explain_cache_size": 1000, # Run explain plan for the 1000 most run queries "explain_cache_timeout_seconds": 60 * 60 * 24, # Run the explain plan for each statement only every 24 hours "use_explain_analyze": True, # Run "explain analyze" instead of only "explain" } } ``` """ if not statement.strip().upper().startswith("SELECT"): return if not should_run_explain_plan(statement, options): return analyze = "ANALYZE" if options.get("use_explain_analyze", False) else "" explain_statement = ("EXPLAIN %s " % analyze) + mogrify( statement, parameters ).decode("utf-8") with connection.cursor() as cursor: cursor.execute(explain_statement) explain_plan = [row for row in cursor.fetchall()] span.set_data("db.explain_plan", explain_plan) cache_statement(statement, options) sentry-python-1.39.2/sentry_sdk/db/explain_plan/sqlalchemy.py000066400000000000000000000031701454744723200244260ustar00rootroot00000000000000from __future__ import absolute_import from sentry_sdk.consts import TYPE_CHECKING from sentry_sdk.db.explain_plan import cache_statement, should_run_explain_plan from sentry_sdk.integrations import DidNotEnable try: from sqlalchemy.sql import text # type: ignore except ImportError: raise DidNotEnable("SQLAlchemy not installed.") if TYPE_CHECKING: from typing import Any from sentry_sdk.tracing import Span def attach_explain_plan_to_span(span, connection, statement, parameters, options): # type: (Span, Any, str, Any, dict[str, Any]) -> None """ Run EXPLAIN or EXPLAIN ANALYZE on the given statement and attach the explain plan to the span data. 
Usage: ``` sentry_sdk.init( dsn="...", _experiments={ "attach_explain_plans": { "explain_cache_size": 1000, # Run explain plan for the 1000 most run queries "explain_cache_timeout_seconds": 60 * 60 * 24, # Run the explain plan for each statement only every 24 hours "use_explain_analyze": True, # Run "explain analyze" instead of only "explain" } } ``` """ if not statement.strip().upper().startswith("SELECT"): return if not should_run_explain_plan(statement, options): return analyze = "ANALYZE" if options.get("use_explain_analyze", False) else "" explain_statement = (("EXPLAIN %s " % analyze) + statement) % parameters result = connection.execute(text(explain_statement)) explain_plan = [row for row in result] span.set_data("db.explain_plan", explain_plan) cache_statement(statement, options) sentry-python-1.39.2/sentry_sdk/debug.py000066400000000000000000000021541454744723200203140ustar00rootroot00000000000000import sys import logging from sentry_sdk import utils from sentry_sdk.hub import Hub from sentry_sdk.utils import logger from sentry_sdk.client import _client_init_debug from logging import LogRecord class _HubBasedClientFilter(logging.Filter): def filter(self, record): # type: (LogRecord) -> bool if _client_init_debug.get(False): return True hub = Hub.current if hub is not None and hub.client is not None: return hub.client.options["debug"] return False def init_debug_support(): # type: () -> None if not logger.handlers: configure_logger() configure_debug_hub() def configure_logger(): # type: () -> None _handler = logging.StreamHandler(sys.stderr) _handler.setFormatter(logging.Formatter(" [sentry] %(levelname)s: %(message)s")) logger.addHandler(_handler) logger.setLevel(logging.DEBUG) logger.addFilter(_HubBasedClientFilter()) def configure_debug_hub(): # type: () -> None def _get_debug_hub(): # type: () -> Hub return Hub.current utils._get_debug_hub = _get_debug_hub sentry-python-1.39.2/sentry_sdk/envelope.py000066400000000000000000000227541454744723200210530ustar00rootroot00000000000000import io import json import mimetypes from sentry_sdk._compat import text_type, PY2 from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.session import Session from sentry_sdk.utils import json_dumps, capture_internal_exceptions if TYPE_CHECKING: from typing import Any from typing import Optional from typing import Union from typing import Dict from typing import List from typing import Iterator from sentry_sdk._types import Event, EventDataCategory def parse_json(data): # type: (Union[bytes, text_type]) -> Any # on some python 3 versions this needs to be bytes if not PY2 and isinstance(data, bytes): data = data.decode("utf-8", "replace") return json.loads(data) class Envelope(object): def __init__( self, headers=None, # type: Optional[Dict[str, Any]] items=None, # type: Optional[List[Item]] ): # type: (...) -> None if headers is not None: headers = dict(headers) self.headers = headers or {} if items is None: items = [] else: items = list(items) self.items = items @property def description(self): # type: (...) -> str return "envelope with %s items (%s)" % ( len(self.items), ", ".join(x.data_category for x in self.items), ) def add_event( self, event # type: Event ): # type: (...) -> None self.add_item(Item(payload=PayloadRef(json=event), type="event")) def add_transaction( self, transaction # type: Event ): # type: (...) -> None self.add_item(Item(payload=PayloadRef(json=transaction), type="transaction")) def add_profile( self, profile # type: Any ): # type: (...) 
-> None self.add_item(Item(payload=PayloadRef(json=profile), type="profile")) def add_checkin( self, checkin # type: Any ): # type: (...) -> None self.add_item(Item(payload=PayloadRef(json=checkin), type="check_in")) def add_session( self, session # type: Union[Session, Any] ): # type: (...) -> None if isinstance(session, Session): session = session.to_json() self.add_item(Item(payload=PayloadRef(json=session), type="session")) def add_sessions( self, sessions # type: Any ): # type: (...) -> None self.add_item(Item(payload=PayloadRef(json=sessions), type="sessions")) def add_item( self, item # type: Item ): # type: (...) -> None self.items.append(item) def get_event(self): # type: (...) -> Optional[Event] for items in self.items: event = items.get_event() if event is not None: return event return None def get_transaction_event(self): # type: (...) -> Optional[Event] for item in self.items: event = item.get_transaction_event() if event is not None: return event return None def __iter__(self): # type: (...) -> Iterator[Item] return iter(self.items) def serialize_into( self, f # type: Any ): # type: (...) -> None f.write(json_dumps(self.headers)) f.write(b"\n") for item in self.items: item.serialize_into(f) def serialize(self): # type: (...) -> bytes out = io.BytesIO() self.serialize_into(out) return out.getvalue() @classmethod def deserialize_from( cls, f # type: Any ): # type: (...) -> Envelope headers = parse_json(f.readline()) items = [] while 1: item = Item.deserialize_from(f) if item is None: break items.append(item) return cls(headers=headers, items=items) @classmethod def deserialize( cls, bytes # type: bytes ): # type: (...) -> Envelope return cls.deserialize_from(io.BytesIO(bytes)) def __repr__(self): # type: (...) -> str return "" % (self.headers, self.items) class PayloadRef(object): def __init__( self, bytes=None, # type: Optional[bytes] path=None, # type: Optional[Union[bytes, text_type]] json=None, # type: Optional[Any] ): # type: (...) -> None self.json = json self.bytes = bytes self.path = path def get_bytes(self): # type: (...) -> bytes if self.bytes is None: if self.path is not None: with capture_internal_exceptions(): with open(self.path, "rb") as f: self.bytes = f.read() elif self.json is not None: self.bytes = json_dumps(self.json) else: self.bytes = b"" return self.bytes @property def inferred_content_type(self): # type: (...) -> str if self.json is not None: return "application/json" elif self.path is not None: path = self.path if isinstance(path, bytes): path = path.decode("utf-8", "replace") ty = mimetypes.guess_type(path)[0] if ty: return ty return "application/octet-stream" def __repr__(self): # type: (...) 
-> str return "" % (self.inferred_content_type,) class Item(object): def __init__( self, payload, # type: Union[bytes, text_type, PayloadRef] headers=None, # type: Optional[Dict[str, Any]] type=None, # type: Optional[str] content_type=None, # type: Optional[str] filename=None, # type: Optional[str] ): if headers is not None: headers = dict(headers) elif headers is None: headers = {} self.headers = headers if isinstance(payload, bytes): payload = PayloadRef(bytes=payload) elif isinstance(payload, text_type): payload = PayloadRef(bytes=payload.encode("utf-8")) else: payload = payload if filename is not None: headers["filename"] = filename if type is not None: headers["type"] = type if content_type is not None: headers["content_type"] = content_type elif "content_type" not in headers: headers["content_type"] = payload.inferred_content_type self.payload = payload def __repr__(self): # type: (...) -> str return "" % ( self.headers, self.payload, self.data_category, ) @property def type(self): # type: (...) -> Optional[str] return self.headers.get("type") @property def data_category(self): # type: (...) -> EventDataCategory ty = self.headers.get("type") if ty == "session": return "session" elif ty == "attachment": return "attachment" elif ty == "transaction": return "transaction" elif ty == "event": return "error" elif ty == "client_report": return "internal" elif ty == "profile": return "profile" elif ty == "statsd": return "statsd" elif ty == "check_in": return "monitor" else: return "default" def get_bytes(self): # type: (...) -> bytes return self.payload.get_bytes() def get_event(self): # type: (...) -> Optional[Event] """ Returns an error event if there is one. """ if self.type == "event" and self.payload.json is not None: return self.payload.json return None def get_transaction_event(self): # type: (...) -> Optional[Event] if self.type == "transaction" and self.payload.json is not None: return self.payload.json return None def serialize_into( self, f # type: Any ): # type: (...) -> None headers = dict(self.headers) bytes = self.get_bytes() headers["length"] = len(bytes) f.write(json_dumps(headers)) f.write(b"\n") f.write(bytes) f.write(b"\n") def serialize(self): # type: (...) -> bytes out = io.BytesIO() self.serialize_into(out) return out.getvalue() @classmethod def deserialize_from( cls, f # type: Any ): # type: (...) -> Optional[Item] line = f.readline().rstrip() if not line: return None headers = parse_json(line) length = headers.get("length") if length is not None: payload = f.read(length) f.readline() else: # if no length was specified we need to read up to the end of line # and remove it (if it is present, i.e. not the very last char in an eof terminated envelope) payload = f.readline().rstrip(b"\n") if headers.get("type") in ("event", "transaction", "metric_buckets"): rv = cls(headers=headers, payload=PayloadRef(json=parse_json(payload))) else: rv = cls(headers=headers, payload=payload) return rv @classmethod def deserialize( cls, bytes # type: bytes ): # type: (...) 
-> Optional[Item] return cls.deserialize_from(io.BytesIO(bytes)) sentry-python-1.39.2/sentry_sdk/hub.py000066400000000000000000000656451454744723200200220ustar00rootroot00000000000000import copy import sys from contextlib import contextmanager from sentry_sdk._compat import datetime_utcnow, with_metaclass from sentry_sdk.consts import INSTRUMENTER from sentry_sdk.scope import Scope from sentry_sdk.client import Client from sentry_sdk.profiler import Profile from sentry_sdk.tracing import ( NoOpSpan, Span, Transaction, BAGGAGE_HEADER_NAME, SENTRY_TRACE_HEADER_NAME, ) from sentry_sdk.session import Session from sentry_sdk.tracing_utils import ( has_tracing_enabled, normalize_incoming_data, ) from sentry_sdk.utils import ( exc_info_from_error, event_from_exception, logger, ContextVar, ) from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Union from typing import Any from typing import Optional from typing import Tuple from typing import Dict from typing import List from typing import Callable from typing import Generator from typing import Type from typing import TypeVar from typing import overload from typing import ContextManager from sentry_sdk.integrations import Integration from sentry_sdk._types import ( Event, Hint, Breadcrumb, BreadcrumbHint, ExcInfo, ) from sentry_sdk.consts import ClientConstructor T = TypeVar("T") else: def overload(x): # type: (T) -> T return x _local = ContextVar("sentry_current_hub") def _update_scope(base, scope_change, scope_kwargs): # type: (Scope, Optional[Any], Dict[str, Any]) -> Scope if scope_change and scope_kwargs: raise TypeError("cannot provide scope and kwargs") if scope_change is not None: final_scope = copy.copy(base) if callable(scope_change): scope_change(final_scope) else: final_scope.update_from_scope(scope_change) elif scope_kwargs: final_scope = copy.copy(base) final_scope.update_from_kwargs(**scope_kwargs) else: final_scope = base return final_scope def _should_send_default_pii(): # type: () -> bool client = Hub.current.client if not client: return False return client.options["send_default_pii"] class _InitGuard(object): def __init__(self, client): # type: (Client) -> None self._client = client def __enter__(self): # type: () -> _InitGuard return self def __exit__(self, exc_type, exc_value, tb): # type: (Any, Any, Any) -> None c = self._client if c is not None: c.close() def _check_python_deprecations(): # type: () -> None version = sys.version_info[:2] if version == (3, 4) or version == (3, 5): logger.warning( "sentry-sdk 2.0.0 will drop support for Python %s.", "{}.{}".format(*version), ) logger.warning( "Please upgrade to the latest version to continue receiving upgrades and bugfixes." ) def _init(*args, **kwargs): # type: (*Optional[str], **Any) -> ContextManager[Any] """Initializes the SDK and optionally integrations. This takes the same arguments as the client constructor. """ client = Client(*args, **kwargs) # type: ignore Hub.current.bind_client(client) _check_python_deprecations() rv = _InitGuard(client) return rv from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: # Make mypy, PyCharm and other static analyzers think `init` is a type to # have nicer autocompletion for params. # # Use `ClientConstructor` to define the argument types of `init` and # `ContextManager[Any]` to tell static analyzers about the return type. class init(ClientConstructor, _InitGuard): # noqa: N801 pass else: # Alias `init` for actual usage. 
Go through the lambda indirection to throw # PyCharm off of the weakly typed signature (it would otherwise discover # both the weakly typed signature of `_init` and our faked `init` type). init = (lambda: _init)() class HubMeta(type): @property def current(cls): # type: () -> Hub """Returns the current instance of the hub.""" rv = _local.get(None) if rv is None: rv = Hub(GLOBAL_HUB) _local.set(rv) return rv @property def main(cls): # type: () -> Hub """Returns the main instance of the hub.""" return GLOBAL_HUB class _ScopeManager(object): def __init__(self, hub): # type: (Hub) -> None self._hub = hub self._original_len = len(hub._stack) self._layer = hub._stack[-1] def __enter__(self): # type: () -> Scope scope = self._layer[1] assert scope is not None return scope def __exit__(self, exc_type, exc_value, tb): # type: (Any, Any, Any) -> None current_len = len(self._hub._stack) if current_len < self._original_len: logger.error( "Scope popped too soon. Popped %s scopes too many.", self._original_len - current_len, ) return elif current_len > self._original_len: logger.warning( "Leaked %s scopes: %s", current_len - self._original_len, self._hub._stack[self._original_len :], ) layer = self._hub._stack[self._original_len - 1] del self._hub._stack[self._original_len - 1 :] if layer[1] != self._layer[1]: logger.error( "Wrong scope found. Meant to pop %s, but popped %s.", layer[1], self._layer[1], ) elif layer[0] != self._layer[0]: warning = ( "init() called inside of pushed scope. This might be entirely " "legitimate but usually occurs when initializing the SDK inside " "a request handler or task/job function. Try to initialize the " "SDK as early as possible instead." ) logger.warning(warning) class Hub(with_metaclass(HubMeta)): # type: ignore """The hub wraps the concurrency management of the SDK. Each thread has its own hub but the hub might transfer with the flow of execution if context vars are available. If the hub is used with a with statement it's temporarily activated. """ _stack = None # type: List[Tuple[Optional[Client], Scope]] # Mypy doesn't pick up on the metaclass. if TYPE_CHECKING: current = None # type: Hub main = None # type: Hub def __init__( self, client_or_hub=None, # type: Optional[Union[Hub, Client]] scope=None, # type: Optional[Any] ): # type: (...) -> None if isinstance(client_or_hub, Hub): hub = client_or_hub client, other_scope = hub._stack[-1] if scope is None: scope = copy.copy(other_scope) else: client = client_or_hub if scope is None: scope = Scope() self._stack = [(client, scope)] self._last_event_id = None # type: Optional[str] self._old_hubs = [] # type: List[Hub] def __enter__(self): # type: () -> Hub self._old_hubs.append(Hub.current) _local.set(self) return self def __exit__( self, exc_type, # type: Optional[type] exc_value, # type: Optional[BaseException] tb, # type: Optional[Any] ): # type: (...) -> None old = self._old_hubs.pop() _local.set(old) def run( self, callback # type: Callable[[], T] ): # type: (...) -> T """Runs a callback in the context of the hub. Alternatively the with statement can be used on the hub directly. """ with self: return callback() def get_integration( self, name_or_class # type: Union[str, Type[Integration]] ): # type: (...) -> Any """Returns the integration for this hub by name or class. If there is no client bound or the client does not have that integration then `None` is returned. If the return value is not `None` the hub is guaranteed to have a client attached. 
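        Example (illustrative): `Hub.current.get_integration("flask")` looks
        the integration up by its string identifier; passing the integration
        class itself works the same way, via its `identifier` attribute.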
""" if isinstance(name_or_class, str): integration_name = name_or_class elif name_or_class.identifier is not None: integration_name = name_or_class.identifier else: raise ValueError("Integration has no name") client = self.client if client is not None: rv = client.integrations.get(integration_name) if rv is not None: return rv @property def client(self): # type: () -> Optional[Client] """Returns the current client on the hub.""" return self._stack[-1][0] @property def scope(self): # type: () -> Scope """Returns the current scope on the hub.""" return self._stack[-1][1] def last_event_id(self): # type: () -> Optional[str] """Returns the last event ID.""" return self._last_event_id def bind_client( self, new # type: Optional[Client] ): # type: (...) -> None """Binds a new client to the hub.""" top = self._stack[-1] self._stack[-1] = (new, top[1]) def capture_event(self, event, hint=None, scope=None, **scope_args): # type: (Event, Optional[Hint], Optional[Scope], Any) -> Optional[str] """ Captures an event. Alias of :py:meth:`sentry_sdk.Client.capture_event`. :param scope_args: For supported `**scope_args` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`. """ client, top_scope = self._stack[-1] scope = _update_scope(top_scope, scope, scope_args) if client is not None: is_transaction = event.get("type") == "transaction" rv = client.capture_event(event, hint, scope) if rv is not None and not is_transaction: self._last_event_id = rv return rv return None def capture_message(self, message, level=None, scope=None, **scope_args): # type: (str, Optional[str], Optional[Scope], Any) -> Optional[str] """ Captures a message. :param message: The string to send as the message. :param level: If no level is provided, the default level is `info`. :param scope: An optional :py:class:`sentry_sdk.Scope` to use. :param scope_args: For supported `**scope_args` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`. :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.Client.capture_event`). """ if self.client is None: return None if level is None: level = "info" return self.capture_event( {"message": message, "level": level}, scope=scope, **scope_args ) def capture_exception(self, error=None, scope=None, **scope_args): # type: (Optional[Union[BaseException, ExcInfo]], Optional[Scope], Any) -> Optional[str] """Captures an exception. :param error: An exception to catch. If `None`, `sys.exc_info()` will be used. :param scope_args: For supported `**scope_args` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`. :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.Client.capture_event`). """ client = self.client if client is None: return None if error is not None: exc_info = exc_info_from_error(error) else: exc_info = sys.exc_info() event, hint = event_from_exception(exc_info, client_options=client.options) try: return self.capture_event(event, hint=hint, scope=scope, **scope_args) except Exception: self._capture_internal_exception(sys.exc_info()) return None def _capture_internal_exception( self, exc_info # type: Any ): # type: (...) -> Any """ Capture an exception that is likely caused by a bug in the SDK itself. These exceptions do not end up in Sentry and are just logged instead. """ logger.error("Internal error in sentry_sdk", exc_info=exc_info) def add_breadcrumb(self, crumb=None, hint=None, **kwargs): # type: (Optional[Breadcrumb], Optional[BreadcrumbHint], Any) -> None """ Adds a breadcrumb. 
:param crumb: Dictionary with the data as the sentry v7/v8 protocol expects. :param hint: An optional value that can be used by `before_breadcrumb` to customize the breadcrumbs that are emitted. """ client, scope = self._stack[-1] if client is None: logger.info("Dropped breadcrumb because no client bound") return crumb = dict(crumb or ()) # type: Breadcrumb crumb.update(kwargs) if not crumb: return hint = dict(hint or ()) # type: Hint if crumb.get("timestamp") is None: crumb["timestamp"] = datetime_utcnow() if crumb.get("type") is None: crumb["type"] = "default" if client.options["before_breadcrumb"] is not None: new_crumb = client.options["before_breadcrumb"](crumb, hint) else: new_crumb = crumb if new_crumb is not None: scope._breadcrumbs.append(new_crumb) else: logger.info("before breadcrumb dropped breadcrumb (%s)", crumb) max_breadcrumbs = client.options["max_breadcrumbs"] # type: int while len(scope._breadcrumbs) > max_breadcrumbs: scope._breadcrumbs.popleft() def start_span(self, span=None, instrumenter=INSTRUMENTER.SENTRY, **kwargs): # type: (Optional[Span], str, Any) -> Span """ Start a span whose parent is the currently active span or transaction, if any. The return value is a :py:class:`sentry_sdk.tracing.Span` instance, typically used as a context manager to start and stop timing in a `with` block. Only spans contained in a transaction are sent to Sentry. Most integrations start a transaction at the appropriate time, for example for every incoming HTTP request. Use :py:meth:`sentry_sdk.start_transaction` to start a new transaction when one is not already in progress. For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Span`. """ configuration_instrumenter = self.client and self.client.options["instrumenter"] if instrumenter != configuration_instrumenter: return NoOpSpan() # THIS BLOCK IS DEPRECATED # TODO: consider removing this in a future release. # This is for backwards compatibility with releases before # start_transaction existed, to allow for a smoother transition. if isinstance(span, Transaction) or "transaction" in kwargs: deprecation_msg = ( "Deprecated: use start_transaction to start transactions and " "Transaction.start_child to start spans." ) if isinstance(span, Transaction): logger.warning(deprecation_msg) return self.start_transaction(span) if "transaction" in kwargs: logger.warning(deprecation_msg) name = kwargs.pop("transaction") return self.start_transaction(name=name, **kwargs) # THIS BLOCK IS DEPRECATED # We do not pass a span into start_span in our code base, so I deprecate this. if span is not None: deprecation_msg = "Deprecated: passing a span into `start_span` is deprecated and will be removed in the future." logger.warning(deprecation_msg) return span kwargs.setdefault("hub", self) active_span = self.scope.span if active_span is not None: new_child_span = active_span.start_child(**kwargs) return new_child_span # If there is already a trace_id in the propagation context, use it. # This does not need to be done for `start_child` above because it takes # the trace_id from the parent span. if "trace_id" not in kwargs: traceparent = self.get_traceparent() trace_id = traceparent.split("-")[0] if traceparent else None if trace_id is not None: kwargs["trace_id"] = trace_id return Span(**kwargs) def start_transaction( self, transaction=None, instrumenter=INSTRUMENTER.SENTRY, **kwargs ): # type: (Optional[Transaction], str, Any) -> Union[Transaction, NoOpSpan] """ Start and return a transaction. 
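        A minimal, illustrative use:
        `with hub.start_transaction(op="task", name="process-batch"): ...`
        finishes the transaction automatically at the end of the `with` block.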
Start an existing transaction if given, otherwise create and start a new transaction with kwargs. This is the entry point to manual tracing instrumentation. A tree structure can be built by adding child spans to the transaction, and child spans to other spans. To start a new child span within the transaction or any span, call the respective `.start_child()` method. Every child span must be finished before the transaction is finished, otherwise the unfinished spans are discarded. When used as context managers, spans and transactions are automatically finished at the end of the `with` block. If not using context managers, call the `.finish()` method. When the transaction is finished, it will be sent to Sentry with all its finished child spans. For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Transaction`. """ configuration_instrumenter = self.client and self.client.options["instrumenter"] if instrumenter != configuration_instrumenter: return NoOpSpan() custom_sampling_context = kwargs.pop("custom_sampling_context", {}) # if we haven't been given a transaction, make one if transaction is None: kwargs.setdefault("hub", self) transaction = Transaction(**kwargs) # use traces_sample_rate, traces_sampler, and/or inheritance to make a # sampling decision sampling_context = { "transaction_context": transaction.to_json(), "parent_sampled": transaction.parent_sampled, } sampling_context.update(custom_sampling_context) transaction._set_initial_sampling_decision(sampling_context=sampling_context) profile = Profile(transaction, hub=self) profile._set_initial_sampling_decision(sampling_context=sampling_context) # we don't bother to keep spans if we already know we're not going to # send the transaction if transaction.sampled: max_spans = ( self.client and self.client.options["_experiments"].get("max_spans") ) or 1000 transaction.init_span_recorder(maxlen=max_spans) return transaction def continue_trace(self, environ_or_headers, op=None, name=None, source=None): # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str]) -> Transaction """ Sets the propagation context from environment or headers and returns a transaction. """ with self.configure_scope() as scope: scope.generate_propagation_context(environ_or_headers) transaction = Transaction.continue_from_headers( normalize_incoming_data(environ_or_headers), op=op, name=name, source=source, ) return transaction @overload def push_scope( self, callback=None # type: Optional[None] ): # type: (...) -> ContextManager[Scope] pass @overload def push_scope( # noqa: F811 self, callback # type: Callable[[Scope], None] ): # type: (...) -> None pass def push_scope( # noqa self, callback=None, # type: Optional[Callable[[Scope], None]] continue_trace=True, # type: bool ): # type: (...) -> Optional[ContextManager[Scope]] """ Pushes a new layer on the scope stack. :param callback: If provided, this method pushes a scope, calls `callback`, and pops the scope again. :returns: If no `callback` is provided, a context manager that should be used to pop the scope again. """ if callback is not None: with self.push_scope() as scope: callback(scope) return None client, scope = self._stack[-1] new_scope = copy.copy(scope) if continue_trace: new_scope.generate_propagation_context() new_layer = (client, new_scope) self._stack.append(new_layer) return _ScopeManager(self) def pop_scope_unsafe(self): # type: () -> Tuple[Optional[Client], Scope] """ Pops a scope layer from the stack. Try to use the context manager :py:meth:`push_scope` instead. 
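        For example (illustrative):
        `with Hub.current.push_scope() as scope: scope.set_tag("my-tag", "my-value")`
        pops the pushed scope automatically when the block exits.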
""" rv = self._stack.pop() assert self._stack, "stack must have at least one layer" return rv @overload def configure_scope( self, callback=None # type: Optional[None] ): # type: (...) -> ContextManager[Scope] pass @overload def configure_scope( # noqa: F811 self, callback # type: Callable[[Scope], None] ): # type: (...) -> None pass def configure_scope( # noqa self, callback=None, # type: Optional[Callable[[Scope], None]] continue_trace=True, # type: bool ): # type: (...) -> Optional[ContextManager[Scope]] """ Reconfigures the scope. :param callback: If provided, call the callback with the current scope. :returns: If no callback is provided, returns a context manager that returns the scope. """ client, scope = self._stack[-1] if continue_trace: scope.generate_propagation_context() if callback is not None: if client is not None: callback(scope) return None @contextmanager def inner(): # type: () -> Generator[Scope, None, None] if client is not None: yield scope else: yield Scope() return inner() def start_session( self, session_mode="application" # type: str ): # type: (...) -> None """Starts a new session.""" self.end_session() client, scope = self._stack[-1] scope._session = Session( release=client.options["release"] if client else None, environment=client.options["environment"] if client else None, user=scope._user, session_mode=session_mode, ) def end_session(self): # type: (...) -> None """Ends the current session if there is one.""" client, scope = self._stack[-1] session = scope._session self.scope._session = None if session is not None: session.close() if client is not None: client.capture_session(session) def stop_auto_session_tracking(self): # type: (...) -> None """Stops automatic session tracking. This temporarily session tracking for the current scope when called. To resume session tracking call `resume_auto_session_tracking`. """ self.end_session() client, scope = self._stack[-1] scope._force_auto_session_tracking = False def resume_auto_session_tracking(self): # type: (...) -> None """Resumes automatic session tracking for the current scope if disabled earlier. This requires that generally automatic session tracking is enabled. """ client, scope = self._stack[-1] scope._force_auto_session_tracking = None def flush( self, timeout=None, # type: Optional[float] callback=None, # type: Optional[Callable[[int, float], None]] ): # type: (...) -> None """ Alias for :py:meth:`sentry_sdk.Client.flush` """ client, scope = self._stack[-1] if client is not None: return client.flush(timeout=timeout, callback=callback) def get_traceparent(self): # type: () -> Optional[str] """ Returns the traceparent either from the active span or from the scope. """ if self.client is not None: if has_tracing_enabled(self.client.options) and self.scope.span is not None: return self.scope.span.to_traceparent() return self.scope.get_traceparent() def get_baggage(self): # type: () -> Optional[str] """ Returns Baggage either from the active span or from the scope. """ if ( self.client is not None and has_tracing_enabled(self.client.options) and self.scope.span is not None ): baggage = self.scope.span.to_baggage() else: baggage = self.scope.get_baggage() if baggage is not None: return baggage.serialize() return None def iter_trace_propagation_headers(self, span=None): # type: (Optional[Span]) -> Generator[Tuple[str, str], None, None] """ Return HTTP headers which allow propagation of trace data. Data taken from the span representing the request, if available, or the current span on the scope if not. 
""" client = self._stack[-1][0] propagate_traces = client and client.options["propagate_traces"] if not propagate_traces: return span = span or self.scope.span if client and has_tracing_enabled(client.options) and span is not None: for header in span.iter_headers(): yield header else: for header in self.scope.iter_headers(): yield header def trace_propagation_meta(self, span=None): # type: (Optional[Span]) -> str """ Return meta tags which should be injected into HTML templates to allow propagation of trace information. """ if span is not None: logger.warning( "The parameter `span` in trace_propagation_meta() is deprecated and will be removed in the future." ) meta = "" sentry_trace = self.get_traceparent() if sentry_trace is not None: meta += '' % ( SENTRY_TRACE_HEADER_NAME, sentry_trace, ) baggage = self.get_baggage() if baggage is not None: meta += '' % ( BAGGAGE_HEADER_NAME, baggage, ) return meta GLOBAL_HUB = Hub() _local.set(GLOBAL_HUB) sentry-python-1.39.2/sentry_sdk/integrations/000077500000000000000000000000001454744723200213605ustar00rootroot00000000000000sentry-python-1.39.2/sentry_sdk/integrations/__init__.py000066400000000000000000000161121454744723200234720ustar00rootroot00000000000000from __future__ import absolute_import from threading import Lock from sentry_sdk._compat import iteritems from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.utils import logger if TYPE_CHECKING: from typing import Callable from typing import Dict from typing import Iterator from typing import List from typing import Set from typing import Type _installer_lock = Lock() # Set of all integration identifiers we have attempted to install _processed_integrations = set() # type: Set[str] # Set of all integration identifiers we have actually installed _installed_integrations = set() # type: Set[str] def _generate_default_integrations_iterator( integrations, # type: List[str] auto_enabling_integrations, # type: List[str] ): # type: (...) 
-> Callable[[bool], Iterator[Type[Integration]]] def iter_default_integrations(with_auto_enabling_integrations): # type: (bool) -> Iterator[Type[Integration]] """Returns an iterator of the default integration classes:""" from importlib import import_module if with_auto_enabling_integrations: all_import_strings = integrations + auto_enabling_integrations else: all_import_strings = integrations for import_string in all_import_strings: try: module, cls = import_string.rsplit(".", 1) yield getattr(import_module(module), cls) except (DidNotEnable, SyntaxError) as e: logger.debug( "Did not import default integration %s: %s", import_string, e ) if isinstance(iter_default_integrations.__doc__, str): for import_string in integrations: iter_default_integrations.__doc__ += "\n- `{}`".format(import_string) return iter_default_integrations _DEFAULT_INTEGRATIONS = [ # stdlib/base runtime integrations "sentry_sdk.integrations.argv.ArgvIntegration", "sentry_sdk.integrations.atexit.AtexitIntegration", "sentry_sdk.integrations.dedupe.DedupeIntegration", "sentry_sdk.integrations.excepthook.ExcepthookIntegration", "sentry_sdk.integrations.logging.LoggingIntegration", "sentry_sdk.integrations.modules.ModulesIntegration", "sentry_sdk.integrations.stdlib.StdlibIntegration", "sentry_sdk.integrations.threading.ThreadingIntegration", ] _AUTO_ENABLING_INTEGRATIONS = [ "sentry_sdk.integrations.aiohttp.AioHttpIntegration", "sentry_sdk.integrations.boto3.Boto3Integration", "sentry_sdk.integrations.bottle.BottleIntegration", "sentry_sdk.integrations.celery.CeleryIntegration", "sentry_sdk.integrations.django.DjangoIntegration", "sentry_sdk.integrations.falcon.FalconIntegration", "sentry_sdk.integrations.fastapi.FastApiIntegration", "sentry_sdk.integrations.flask.FlaskIntegration", "sentry_sdk.integrations.httpx.HttpxIntegration", "sentry_sdk.integrations.pyramid.PyramidIntegration", "sentry_sdk.integrations.redis.RedisIntegration", "sentry_sdk.integrations.rq.RqIntegration", "sentry_sdk.integrations.sanic.SanicIntegration", "sentry_sdk.integrations.sqlalchemy.SqlalchemyIntegration", "sentry_sdk.integrations.starlette.StarletteIntegration", "sentry_sdk.integrations.tornado.TornadoIntegration", ] iter_default_integrations = _generate_default_integrations_iterator( integrations=_DEFAULT_INTEGRATIONS, auto_enabling_integrations=_AUTO_ENABLING_INTEGRATIONS, ) del _generate_default_integrations_iterator def setup_integrations( integrations, with_defaults=True, with_auto_enabling_integrations=False ): # type: (List[Integration], bool, bool) -> Dict[str, Integration] """ Given a list of integration instances, this installs them all. When `with_defaults` is set to `True` all default integrations are added unless they were already provided before. """ integrations = dict( (integration.identifier, integration) for integration in integrations or () ) logger.debug("Setting up integrations (with default = %s)", with_defaults) # Integrations that are not explicitly set up by the user. 
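    # We track these so that, below, a `DidNotEnable` raised while setting up
    # a default integration is only logged, while the same failure in an
    # explicitly requested integration is re-raised to the caller.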
used_as_default_integration = set() if with_defaults: for integration_cls in iter_default_integrations( with_auto_enabling_integrations ): if integration_cls.identifier not in integrations: instance = integration_cls() integrations[instance.identifier] = instance used_as_default_integration.add(instance.identifier) for identifier, integration in iteritems(integrations): with _installer_lock: if identifier not in _processed_integrations: logger.debug( "Setting up previously not enabled integration %s", identifier ) try: type(integration).setup_once() except NotImplementedError: if getattr(integration, "install", None) is not None: logger.warning( "Integration %s: The install method is " "deprecated. Use `setup_once`.", identifier, ) integration.install() else: raise except DidNotEnable as e: if identifier not in used_as_default_integration: raise logger.debug( "Did not enable default integration %s: %s", identifier, e ) else: _installed_integrations.add(identifier) _processed_integrations.add(identifier) integrations = { identifier: integration for identifier, integration in iteritems(integrations) if identifier in _installed_integrations } for identifier in integrations: logger.debug("Enabling integration %s", identifier) return integrations class DidNotEnable(Exception): # noqa: N818 """ The integration could not be enabled due to a trivial user error like `flask` not being installed for the `FlaskIntegration`. This exception is silently swallowed for default integrations, but reraised for explicitly enabled integrations. """ class Integration(object): """Baseclass for all integrations. To accept options for an integration, implement your own constructor that saves those options on `self`. """ install = None """Legacy method, do not implement.""" identifier = None # type: str """String unique ID of integration type""" @staticmethod def setup_once(): # type: () -> None """ Initialize the integration. This function is only called once, ever. Configuration is not available at this point, so the only thing to do here is to hook into exception handlers, and perhaps do monkeypatches. Inside those hooks `Integration.current` can be used to access the instance again. """ raise NotImplementedError() sentry-python-1.39.2/sentry_sdk/integrations/_asgi_common.py000066400000000000000000000060301454744723200243630ustar00rootroot00000000000000import urllib from sentry_sdk.hub import _should_send_default_pii from sentry_sdk.integrations._wsgi_common import _filter_headers from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Dict from typing import Optional from typing_extensions import Literal def _get_headers(asgi_scope): # type: (Any) -> Dict[str, str] """ Extract headers from the ASGI scope, in the format that the Sentry protocol expects. """ headers = {} # type: Dict[str, str] for raw_key, raw_value in asgi_scope["headers"]: key = raw_key.decode("latin-1") value = raw_value.decode("latin-1") if key in headers: headers[key] = headers[key] + ", " + value else: headers[key] = value return headers def _get_url(asgi_scope, default_scheme, host): # type: (Dict[str, Any], Literal["ws", "http"], Optional[str]) -> str """ Extract URL from the ASGI scope, without also including the querystring. 
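    For example (illustrative): a scope with `scheme="https"` and
    `server=("example.com", 443)` yields `https://example.com/<path>` (the
    default port is omitted), while a non-default port is kept, e.g.
    `https://example.com:8443/<path>`.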
""" scheme = asgi_scope.get("scheme", default_scheme) server = asgi_scope.get("server", None) path = asgi_scope.get("root_path", "") + asgi_scope.get("path", "") if host: return "%s://%s%s" % (scheme, host, path) if server is not None: host, port = server default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}.get(scheme) if port != default_port: return "%s://%s:%s%s" % (scheme, host, port, path) return "%s://%s%s" % (scheme, host, path) return path def _get_query(asgi_scope): # type: (Any) -> Any """ Extract querystring from the ASGI scope, in the format that the Sentry protocol expects. """ qs = asgi_scope.get("query_string") if not qs: return None return urllib.parse.unquote(qs.decode("latin-1")) def _get_ip(asgi_scope): # type: (Any) -> str """ Extract IP Address from the ASGI scope based on request headers with fallback to scope client. """ headers = _get_headers(asgi_scope) try: return headers["x-forwarded-for"].split(",")[0].strip() except (KeyError, IndexError): pass try: return headers["x-real-ip"] except KeyError: pass return asgi_scope.get("client")[0] def _get_request_data(asgi_scope): # type: (Any) -> Dict[str, Any] """ Returns data related to the HTTP request from the ASGI scope. """ request_data = {} # type: Dict[str, Any] ty = asgi_scope["type"] if ty in ("http", "websocket"): request_data["method"] = asgi_scope.get("method") request_data["headers"] = headers = _filter_headers(_get_headers(asgi_scope)) request_data["query_string"] = _get_query(asgi_scope) request_data["url"] = _get_url( asgi_scope, "http" if ty == "http" else "ws", headers.get("host") ) client = asgi_scope.get("client") if client and _should_send_default_pii(): request_data["env"] = {"REMOTE_ADDR": _get_ip(asgi_scope)} return request_data sentry-python-1.39.2/sentry_sdk/integrations/_wsgi_common.py000066400000000000000000000122071454744723200244140ustar00rootroot00000000000000from __future__ import absolute_import import json from copy import deepcopy from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.utils import AnnotatedValue from sentry_sdk._compat import text_type, iteritems from sentry_sdk._types import TYPE_CHECKING try: from django.http.request import RawPostDataException except ImportError: RawPostDataException = None if TYPE_CHECKING: import sentry_sdk from typing import Any from typing import Dict from typing import Optional from typing import Union SENSITIVE_ENV_KEYS = ( "REMOTE_ADDR", "HTTP_X_FORWARDED_FOR", "HTTP_SET_COOKIE", "HTTP_COOKIE", "HTTP_AUTHORIZATION", "HTTP_X_API_KEY", "HTTP_X_FORWARDED_FOR", "HTTP_X_REAL_IP", ) SENSITIVE_HEADERS = tuple( x[len("HTTP_") :] for x in SENSITIVE_ENV_KEYS if x.startswith("HTTP_") ) def request_body_within_bounds(client, content_length): # type: (Optional[sentry_sdk.Client], int) -> bool if client is None: return False bodies = client.options["max_request_body_size"] return not ( bodies == "never" or (bodies == "small" and content_length > 10**3) or (bodies == "medium" and content_length > 10**4) ) class RequestExtractor(object): def __init__(self, request): # type: (Any) -> None self.request = request def extract_into_event(self, event): # type: (Dict[str, Any]) -> None client = Hub.current.client if client is None: return data = None # type: Optional[Union[AnnotatedValue, Dict[str, Any]]] content_length = self.content_length() request_info = event.get("request", {}) if _should_send_default_pii(): request_info["cookies"] = dict(self.cookies()) if not request_body_within_bounds(client, content_length): data = 
AnnotatedValue.removed_because_over_size_limit() else: # First read the raw body data # It is important to read this first because if it is Django # it will cache the body and then we can read the cached version # again in parsed_body() (or json() or wherever). raw_data = None try: raw_data = self.raw_data() except (RawPostDataException, ValueError): # If DjangoRestFramework is used it already read the body for us # so reading it here will fail. We can ignore this. pass parsed_body = self.parsed_body() if parsed_body is not None: data = parsed_body elif raw_data: data = AnnotatedValue.removed_because_raw_data() else: data = None if data is not None: request_info["data"] = data event["request"] = deepcopy(request_info) def content_length(self): # type: () -> int try: return int(self.env().get("CONTENT_LENGTH", 0)) except ValueError: return 0 def cookies(self): # type: () -> Dict[str, Any] raise NotImplementedError() def raw_data(self): # type: () -> Optional[Union[str, bytes]] raise NotImplementedError() def form(self): # type: () -> Optional[Dict[str, Any]] raise NotImplementedError() def parsed_body(self): # type: () -> Optional[Dict[str, Any]] form = self.form() files = self.files() if form or files: data = dict(iteritems(form)) for key, _ in iteritems(files): data[key] = AnnotatedValue.removed_because_raw_data() return data return self.json() def is_json(self): # type: () -> bool return _is_json_content_type(self.env().get("CONTENT_TYPE")) def json(self): # type: () -> Optional[Any] try: if not self.is_json(): return None raw_data = self.raw_data() if raw_data is None: return None if isinstance(raw_data, text_type): return json.loads(raw_data) else: return json.loads(raw_data.decode("utf-8")) except ValueError: pass return None def files(self): # type: () -> Optional[Dict[str, Any]] raise NotImplementedError() def size_of_file(self, file): # type: (Any) -> int raise NotImplementedError() def env(self): # type: () -> Dict[str, Any] raise NotImplementedError() def _is_json_content_type(ct): # type: (Optional[str]) -> bool mt = (ct or "").split(";", 1)[0] return ( mt == "application/json" or (mt.startswith("application/")) and mt.endswith("+json") ) def _filter_headers(headers): # type: (Dict[str, str]) -> Dict[str, str] if _should_send_default_pii(): return headers return { k: ( v if k.upper().replace("-", "_") not in SENSITIVE_HEADERS else AnnotatedValue.removed_because_over_size_limit() ) for k, v in iteritems(headers) } sentry-python-1.39.2/sentry_sdk/integrations/aiohttp.py000066400000000000000000000264771454744723200234220ustar00rootroot00000000000000import sys import weakref from sentry_sdk.api import continue_trace from sentry_sdk._compat import reraise from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.hub import Hub from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.sessions import auto_session_tracking from sentry_sdk.integrations._wsgi_common import ( _filter_headers, request_body_within_bounds, ) from sentry_sdk.tracing import ( BAGGAGE_HEADER_NAME, SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE, ) from sentry_sdk.tracing_utils import should_propagate_trace from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, logger, parse_url, parse_version, transaction_from_function, HAS_REAL_CONTEXTVARS, CONTEXTVARS_ERROR_MESSAGE, SENSITIVE_DATA_SUBSTITUTE, AnnotatedValue, ) try: import asyncio from aiohttp import __version__ as AIOHTTP_VERSION from aiohttp import 
ClientSession, TraceConfig from aiohttp.web import Application, HTTPException, UrlDispatcher except ImportError: raise DidNotEnable("AIOHTTP not installed") from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from aiohttp.web_request import Request from aiohttp.web_urldispatcher import UrlMappingMatchInfo from aiohttp import TraceRequestStartParams, TraceRequestEndParams from types import SimpleNamespace from typing import Any from typing import Dict from typing import Optional from typing import Tuple from typing import Union from sentry_sdk.utils import ExcInfo from sentry_sdk._types import EventProcessor TRANSACTION_STYLE_VALUES = ("handler_name", "method_and_path_pattern") class AioHttpIntegration(Integration): identifier = "aiohttp" def __init__(self, transaction_style="handler_name"): # type: (str) -> None if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" % (transaction_style, TRANSACTION_STYLE_VALUES) ) self.transaction_style = transaction_style @staticmethod def setup_once(): # type: () -> None version = parse_version(AIOHTTP_VERSION) if version is None: raise DidNotEnable("Unparsable AIOHTTP version: {}".format(AIOHTTP_VERSION)) if version < (3, 4): raise DidNotEnable("AIOHTTP 3.4 or newer required.") if not HAS_REAL_CONTEXTVARS: # We better have contextvars or we're going to leak state between # requests. raise DidNotEnable( "The aiohttp integration for Sentry requires Python 3.7+ " " or aiocontextvars package." + CONTEXTVARS_ERROR_MESSAGE ) ignore_logger("aiohttp.server") old_handle = Application._handle async def sentry_app_handle(self, request, *args, **kwargs): # type: (Any, Request, *Any, **Any) -> Any hub = Hub.current if hub.get_integration(AioHttpIntegration) is None: return await old_handle(self, request, *args, **kwargs) weak_request = weakref.ref(request) with Hub(hub) as hub: with auto_session_tracking(hub, session_mode="request"): # Scope data will not leak between requests because aiohttp # create a task to wrap each request. with hub.configure_scope() as scope: scope.clear_breadcrumbs() scope.add_event_processor(_make_request_processor(weak_request)) headers = dict(request.headers) transaction = continue_trace( headers, op=OP.HTTP_SERVER, # If this transaction name makes it to the UI, AIOHTTP's # URL resolver did not find a route or died trying. name="generic AIOHTTP request", source=TRANSACTION_SOURCE_ROUTE, ) with hub.start_transaction( transaction, custom_sampling_context={"aiohttp_request": request}, ): try: response = await old_handle(self, request) except HTTPException as e: transaction.set_http_status(e.status_code) raise except (asyncio.CancelledError, ConnectionResetError): transaction.set_status("cancelled") raise except Exception: # This will probably map to a 500 but seems like we # have no way to tell. Do not set span status. 
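                        # _capture_exception() records the error event and
                        # returns the sys.exc_info() tuple, which is re-raised
                        # unchanged here.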
reraise(*_capture_exception(hub)) transaction.set_http_status(response.status) return response Application._handle = sentry_app_handle old_urldispatcher_resolve = UrlDispatcher.resolve async def sentry_urldispatcher_resolve(self, request): # type: (UrlDispatcher, Request) -> UrlMappingMatchInfo rv = await old_urldispatcher_resolve(self, request) hub = Hub.current integration = hub.get_integration(AioHttpIntegration) name = None try: if integration.transaction_style == "handler_name": name = transaction_from_function(rv.handler) elif integration.transaction_style == "method_and_path_pattern": route_info = rv.get_info() pattern = route_info.get("path") or route_info.get("formatter") name = "{} {}".format(request.method, pattern) except Exception: pass if name is not None: with Hub.current.configure_scope() as scope: scope.set_transaction_name( name, source=SOURCE_FOR_STYLE[integration.transaction_style], ) return rv UrlDispatcher.resolve = sentry_urldispatcher_resolve old_client_session_init = ClientSession.__init__ def init(*args, **kwargs): # type: (Any, Any) -> None hub = Hub.current if hub.get_integration(AioHttpIntegration) is None: return old_client_session_init(*args, **kwargs) client_trace_configs = list(kwargs.get("trace_configs") or ()) trace_config = create_trace_config() client_trace_configs.append(trace_config) kwargs["trace_configs"] = client_trace_configs return old_client_session_init(*args, **kwargs) ClientSession.__init__ = init def create_trace_config(): # type: () -> TraceConfig async def on_request_start(session, trace_config_ctx, params): # type: (ClientSession, SimpleNamespace, TraceRequestStartParams) -> None hub = Hub.current if hub.get_integration(AioHttpIntegration) is None: return method = params.method.upper() parsed_url = None with capture_internal_exceptions(): parsed_url = parse_url(str(params.url), sanitize=False) span = hub.start_span( op=OP.HTTP_CLIENT, description="%s %s" % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE), ) span.set_data(SPANDATA.HTTP_METHOD, method) span.set_data("url", parsed_url.url) span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query) span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment) if should_propagate_trace(hub, str(params.url)): for key, value in hub.iter_trace_propagation_headers(span): logger.debug( "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format( key=key, value=value, url=params.url ) ) if key == BAGGAGE_HEADER_NAME and params.headers.get( BAGGAGE_HEADER_NAME ): # do not overwrite any existing baggage, just append to it params.headers[key] += "," + value else: params.headers[key] = value trace_config_ctx.span = span async def on_request_end(session, trace_config_ctx, params): # type: (ClientSession, SimpleNamespace, TraceRequestEndParams) -> None if trace_config_ctx.span is None: return span = trace_config_ctx.span span.set_http_status(int(params.response.status)) span.set_data("reason", params.response.reason) span.finish() trace_config = TraceConfig() trace_config.on_request_start.append(on_request_start) trace_config.on_request_end.append(on_request_end) return trace_config def _make_request_processor(weak_request): # type: (weakref.ReferenceType[Request]) -> EventProcessor def aiohttp_processor( event, # type: Dict[str, Any] hint, # type: Dict[str, Tuple[type, BaseException, Any]] ): # type: (...) 
-> Dict[str, Any] request = weak_request() if request is None: return event with capture_internal_exceptions(): request_info = event.setdefault("request", {}) request_info["url"] = "%s://%s%s" % ( request.scheme, request.host, request.path, ) request_info["query_string"] = request.query_string request_info["method"] = request.method request_info["env"] = {"REMOTE_ADDR": request.remote} hub = Hub.current request_info["headers"] = _filter_headers(dict(request.headers)) # Just attach raw data here if it is within bounds, if available. # Unfortunately there's no way to get structured data from aiohttp # without awaiting on some coroutine. request_info["data"] = get_aiohttp_request_data(hub, request) return event return aiohttp_processor def _capture_exception(hub): # type: (Hub) -> ExcInfo exc_info = sys.exc_info() event, hint = event_from_exception( exc_info, client_options=hub.client.options, # type: ignore mechanism={"type": "aiohttp", "handled": False}, ) hub.capture_event(event, hint=hint) return exc_info BODY_NOT_READ_MESSAGE = "[Can't show request body due to implementation details.]" def get_aiohttp_request_data(hub, request): # type: (Hub, Request) -> Union[Optional[str], AnnotatedValue] bytes_body = request._read_bytes if bytes_body is not None: # we have body to show if not request_body_within_bounds(hub.client, len(bytes_body)): return AnnotatedValue.removed_because_over_size_limit() encoding = request.charset or "utf-8" return bytes_body.decode(encoding, "replace") if request.can_read_body: # body exists but we can't show it return BODY_NOT_READ_MESSAGE # request has no body return None sentry-python-1.39.2/sentry_sdk/integrations/argv.py000066400000000000000000000017031454744723200226720ustar00rootroot00000000000000from __future__ import absolute_import import sys from sentry_sdk.hub import Hub from sentry_sdk.integrations import Integration from sentry_sdk.scope import add_global_event_processor from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Optional from sentry_sdk._types import Event, Hint class ArgvIntegration(Integration): identifier = "argv" @staticmethod def setup_once(): # type: () -> None @add_global_event_processor def processor(event, hint): # type: (Event, Optional[Hint]) -> Optional[Event] if Hub.current.get_integration(ArgvIntegration) is not None: extra = event.setdefault("extra", {}) # If some event processor decided to set extra to e.g. an # `int`, don't crash. Not here. 
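# (we silently skip attaching sys.argv in that case rather than raising
# a TypeError on a non-dict "extra")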
if isinstance(extra, dict): extra["sys.argv"] = sys.argv return event sentry-python-1.39.2/sentry_sdk/integrations/ariadne.py000066400000000000000000000141611454744723200233400ustar00rootroot00000000000000from importlib import import_module from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.integrations._wsgi_common import request_body_within_bounds from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, package_version, ) from sentry_sdk._types import TYPE_CHECKING try: # importing like this is necessary due to name shadowing in ariadne # (ariadne.graphql is also a function) ariadne_graphql = import_module("ariadne.graphql") except ImportError: raise DidNotEnable("ariadne is not installed") if TYPE_CHECKING: from typing import Any, Dict, List, Optional from ariadne.types import GraphQLError, GraphQLResult, GraphQLSchema, QueryParser # type: ignore from graphql.language.ast import DocumentNode # type: ignore from sentry_sdk._types import EventProcessor class AriadneIntegration(Integration): identifier = "ariadne" @staticmethod def setup_once(): # type: () -> None version = package_version("ariadne") if version is None: raise DidNotEnable("Unparsable ariadne version.") if version < (0, 20): raise DidNotEnable("ariadne 0.20 or newer required.") ignore_logger("ariadne") _patch_graphql() def _patch_graphql(): # type: () -> None old_parse_query = ariadne_graphql.parse_query old_handle_errors = ariadne_graphql.handle_graphql_errors old_handle_query_result = ariadne_graphql.handle_query_result def _sentry_patched_parse_query(context_value, query_parser, data): # type: (Optional[Any], Optional[QueryParser], Any) -> DocumentNode hub = Hub.current integration = hub.get_integration(AriadneIntegration) if integration is None: return old_parse_query(context_value, query_parser, data) with hub.configure_scope() as scope: event_processor = _make_request_event_processor(data) scope.add_event_processor(event_processor) result = old_parse_query(context_value, query_parser, data) return result def _sentry_patched_handle_graphql_errors(errors, *args, **kwargs): # type: (List[GraphQLError], Any, Any) -> GraphQLResult hub = Hub.current integration = hub.get_integration(AriadneIntegration) if integration is None: return old_handle_errors(errors, *args, **kwargs) result = old_handle_errors(errors, *args, **kwargs) with hub.configure_scope() as scope: event_processor = _make_response_event_processor(result[1]) scope.add_event_processor(event_processor) if hub.client: with capture_internal_exceptions(): for error in errors: event, hint = event_from_exception( error, client_options=hub.client.options, mechanism={ "type": integration.identifier, "handled": False, }, ) hub.capture_event(event, hint=hint) return result def _sentry_patched_handle_query_result(result, *args, **kwargs): # type: (Any, Any, Any) -> GraphQLResult hub = Hub.current integration = hub.get_integration(AriadneIntegration) if integration is None: return old_handle_query_result(result, *args, **kwargs) query_result = old_handle_query_result(result, *args, **kwargs) with hub.configure_scope() as scope: event_processor = _make_response_event_processor(query_result[1]) scope.add_event_processor(event_processor) if hub.client: with capture_internal_exceptions(): for error in result.errors or []: event, hint = event_from_exception( error, client_options=hub.client.options, mechanism={ 
"type": integration.identifier, "handled": False, }, ) hub.capture_event(event, hint=hint) return query_result ariadne_graphql.parse_query = _sentry_patched_parse_query # type: ignore ariadne_graphql.handle_graphql_errors = _sentry_patched_handle_graphql_errors # type: ignore ariadne_graphql.handle_query_result = _sentry_patched_handle_query_result # type: ignore def _make_request_event_processor(data): # type: (GraphQLSchema) -> EventProcessor """Add request data and api_target to events.""" def inner(event, hint): # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] if not isinstance(data, dict): return event with capture_internal_exceptions(): try: content_length = int( (data.get("headers") or {}).get("Content-Length", 0) ) except (TypeError, ValueError): return event if _should_send_default_pii() and request_body_within_bounds( Hub.current.client, content_length ): request_info = event.setdefault("request", {}) request_info["api_target"] = "graphql" request_info["data"] = data elif event.get("request", {}).get("data"): del event["request"]["data"] return event return inner def _make_response_event_processor(response): # type: (Dict[str, Any]) -> EventProcessor """Add response data to the event's response context.""" def inner(event, hint): # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] with capture_internal_exceptions(): if _should_send_default_pii() and response.get("errors"): contexts = event.setdefault("contexts", {}) contexts["response"] = { "data": response, } return event return inner sentry-python-1.39.2/sentry_sdk/integrations/arq.py000066400000000000000000000157741454744723200225330ustar00rootroot00000000000000from __future__ import absolute_import import sys from sentry_sdk._compat import reraise from sentry_sdk._types import TYPE_CHECKING from sentry_sdk import Hub from sentry_sdk.consts import OP from sentry_sdk.hub import _should_send_default_pii from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_TASK from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, SENSITIVE_DATA_SUBSTITUTE, parse_version, ) try: import arq.worker from arq.version import VERSION as ARQ_VERSION from arq.connections import ArqRedis from arq.worker import JobExecutionFailed, Retry, RetryJob, Worker except ImportError: raise DidNotEnable("Arq is not installed") if TYPE_CHECKING: from typing import Any, Dict, Optional, Union from sentry_sdk._types import EventProcessor, Event, ExcInfo, Hint from arq.cron import CronJob from arq.jobs import Job from arq.typing import WorkerCoroutine from arq.worker import Function ARQ_CONTROL_FLOW_EXCEPTIONS = (JobExecutionFailed, Retry, RetryJob) class ArqIntegration(Integration): identifier = "arq" @staticmethod def setup_once(): # type: () -> None try: if isinstance(ARQ_VERSION, str): version = parse_version(ARQ_VERSION) else: version = ARQ_VERSION.version[:2] except (TypeError, ValueError): version = None if version is None: raise DidNotEnable("Unparsable arq version: {}".format(ARQ_VERSION)) if version < (0, 23): raise DidNotEnable("arq 0.23 or newer required.") patch_enqueue_job() patch_run_job() patch_create_worker() ignore_logger("arq.worker") def patch_enqueue_job(): # type: () -> None old_enqueue_job = ArqRedis.enqueue_job async def _sentry_enqueue_job(self, function, *args, **kwargs): # type: (ArqRedis, str, *Any, **Any) -> Optional[Job] hub = Hub.current if 
hub.get_integration(ArqIntegration) is None: return await old_enqueue_job(self, function, *args, **kwargs) with hub.start_span(op=OP.QUEUE_SUBMIT_ARQ, description=function): return await old_enqueue_job(self, function, *args, **kwargs) ArqRedis.enqueue_job = _sentry_enqueue_job def patch_run_job(): # type: () -> None old_run_job = Worker.run_job async def _sentry_run_job(self, job_id, score): # type: (Worker, str, int) -> None hub = Hub(Hub.current) if hub.get_integration(ArqIntegration) is None: return await old_run_job(self, job_id, score) with hub.push_scope() as scope: scope._name = "arq" scope.clear_breadcrumbs() transaction = Transaction( name="unknown arq task", status="ok", op=OP.QUEUE_TASK_ARQ, source=TRANSACTION_SOURCE_TASK, ) with hub.start_transaction(transaction): return await old_run_job(self, job_id, score) Worker.run_job = _sentry_run_job def _capture_exception(exc_info): # type: (ExcInfo) -> None hub = Hub.current if hub.scope.transaction is not None: if exc_info[0] in ARQ_CONTROL_FLOW_EXCEPTIONS: hub.scope.transaction.set_status("aborted") return hub.scope.transaction.set_status("internal_error") event, hint = event_from_exception( exc_info, client_options=hub.client.options if hub.client else None, mechanism={"type": ArqIntegration.identifier, "handled": False}, ) hub.capture_event(event, hint=hint) def _make_event_processor(ctx, *args, **kwargs): # type: (Dict[Any, Any], *Any, **Any) -> EventProcessor def event_processor(event, hint): # type: (Event, Hint) -> Optional[Event] hub = Hub.current with capture_internal_exceptions(): if hub.scope.transaction is not None: hub.scope.transaction.name = ctx["job_name"] event["transaction"] = ctx["job_name"] tags = event.setdefault("tags", {}) tags["arq_task_id"] = ctx["job_id"] tags["arq_task_retry"] = ctx["job_try"] > 1 extra = event.setdefault("extra", {}) extra["arq-job"] = { "task": ctx["job_name"], "args": args if _should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE, "kwargs": kwargs if _should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE, "retry": ctx["job_try"], } return event return event_processor def _wrap_coroutine(name, coroutine): # type: (str, WorkerCoroutine) -> WorkerCoroutine async def _sentry_coroutine(ctx, *args, **kwargs): # type: (Dict[Any, Any], *Any, **Any) -> Any hub = Hub.current if hub.get_integration(ArqIntegration) is None: return await coroutine(ctx, *args, **kwargs) hub.scope.add_event_processor( _make_event_processor({**ctx, "job_name": name}, *args, **kwargs) ) try: result = await coroutine(ctx, *args, **kwargs) except Exception: exc_info = sys.exc_info() _capture_exception(exc_info) reraise(*exc_info) return result return _sentry_coroutine def patch_create_worker(): # type: () -> None old_create_worker = arq.worker.create_worker def _sentry_create_worker(*args, **kwargs): # type: (*Any, **Any) -> Worker hub = Hub.current if hub.get_integration(ArqIntegration) is None: return old_create_worker(*args, **kwargs) settings_cls = args[0] if hasattr(settings_cls, "functions"): settings_cls.functions = [ _get_arq_function(func) for func in settings_cls.functions ] if hasattr(settings_cls, "cron_jobs"): settings_cls.cron_jobs = [ _get_arq_cron_job(cron_job) for cron_job in settings_cls.cron_jobs ] if "functions" in kwargs: kwargs["functions"] = [ _get_arq_function(func) for func in kwargs["functions"] ] if "cron_jobs" in kwargs: kwargs["cron_jobs"] = [ _get_arq_cron_job(cron_job) for cron_job in kwargs["cron_jobs"] ] return old_create_worker(*args, **kwargs) arq.worker.create_worker = 
_sentry_create_worker def _get_arq_function(func): # type: (Union[str, Function, WorkerCoroutine]) -> Function arq_func = arq.worker.func(func) arq_func.coroutine = _wrap_coroutine(arq_func.name, arq_func.coroutine) return arq_func def _get_arq_cron_job(cron_job): # type: (CronJob) -> CronJob cron_job.coroutine = _wrap_coroutine(cron_job.name, cron_job.coroutine) return cron_job sentry-python-1.39.2/sentry_sdk/integrations/asgi.py000066400000000000000000000271411454744723200226620ustar00rootroot00000000000000""" An ASGI middleware. Based on Tom Christie's `sentry-asgi `. """ import asyncio import inspect from copy import deepcopy from sentry_sdk._functools import partial from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP from sentry_sdk.hub import Hub from sentry_sdk.integrations._asgi_common import ( _get_headers, _get_request_data, _get_url, ) from sentry_sdk.sessions import auto_session_tracking from sentry_sdk.tracing import ( SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE, TRANSACTION_SOURCE_URL, TRANSACTION_SOURCE_COMPONENT, ) from sentry_sdk.utils import ( ContextVar, event_from_exception, HAS_REAL_CONTEXTVARS, CONTEXTVARS_ERROR_MESSAGE, logger, transaction_from_function, _get_installed_modules, ) from sentry_sdk.tracing import Transaction if TYPE_CHECKING: from typing import Any from typing import Callable from typing import Dict from typing import Optional from typing import Tuple from sentry_sdk._types import Event, Hint _asgi_middleware_applied = ContextVar("sentry_asgi_middleware_applied") _DEFAULT_TRANSACTION_NAME = "generic ASGI request" TRANSACTION_STYLE_VALUES = ("endpoint", "url") def _capture_exception(hub, exc, mechanism_type="asgi"): # type: (Hub, Any, str) -> None # Check client here as it might have been unset while streaming response if hub.client is not None: event, hint = event_from_exception( exc, client_options=hub.client.options, mechanism={"type": mechanism_type, "handled": False}, ) hub.capture_event(event, hint=hint) def _looks_like_asgi3(app): # type: (Any) -> bool """ Try to figure out if an application object supports ASGI3. This is how uvicorn figures out the application version as well. """ if inspect.isclass(app): return hasattr(app, "__await__") elif inspect.isfunction(app): return asyncio.iscoroutinefunction(app) else: call = getattr(app, "__call__", None) # noqa return asyncio.iscoroutinefunction(call) class SentryAsgiMiddleware: __slots__ = ("app", "__call__", "transaction_style", "mechanism_type") def __init__( self, app, unsafe_context_data=False, transaction_style="endpoint", mechanism_type="asgi", ): # type: (Any, bool, str, str) -> None """ Instrument an ASGI application with Sentry. Provides HTTP/websocket data to sent events and basic handling for exceptions bubbling up through the middleware. :param unsafe_context_data: Disable errors when a proper contextvars installation could not be found. We do not recommend changing this from the default. """ if not unsafe_context_data and not HAS_REAL_CONTEXTVARS: # We better have contextvars or we're going to leak state between # requests. raise RuntimeError( "The ASGI middleware for Sentry requires Python 3.7+ " "or the aiocontextvars package." 
+ CONTEXTVARS_ERROR_MESSAGE ) if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" % (transaction_style, TRANSACTION_STYLE_VALUES) ) asgi_middleware_while_using_starlette_or_fastapi = ( mechanism_type == "asgi" and "starlette" in _get_installed_modules() ) if asgi_middleware_while_using_starlette_or_fastapi: logger.warning( "The Sentry Python SDK can now automatically support ASGI frameworks like Starlette and FastAPI. " "Please remove 'SentryAsgiMiddleware' from your project. " "See https://docs.sentry.io/platforms/python/guides/asgi/ for more information." ) self.transaction_style = transaction_style self.mechanism_type = mechanism_type self.app = app if _looks_like_asgi3(app): self.__call__ = self._run_asgi3 # type: Callable[..., Any] else: self.__call__ = self._run_asgi2 def _run_asgi2(self, scope): # type: (Any) -> Any async def inner(receive, send): # type: (Any, Any) -> Any return await self._run_app(scope, receive, send, asgi_version=2) return inner async def _run_asgi3(self, scope, receive, send): # type: (Any, Any, Any) -> Any return await self._run_app(scope, receive, send, asgi_version=3) async def _run_app(self, scope, receive, send, asgi_version): # type: (Any, Any, Any, Any, int) -> Any is_recursive_asgi_middleware = _asgi_middleware_applied.get(False) is_lifespan = scope["type"] == "lifespan" if is_recursive_asgi_middleware or is_lifespan: try: if asgi_version == 2: return await self.app(scope)(receive, send) else: return await self.app(scope, receive, send) except Exception as exc: _capture_exception(Hub.current, exc, mechanism_type=self.mechanism_type) raise exc from None _asgi_middleware_applied.set(True) try: hub = Hub(Hub.current) with auto_session_tracking(hub, session_mode="request"): with hub: with hub.configure_scope() as sentry_scope: sentry_scope.clear_breadcrumbs() sentry_scope._name = "asgi" processor = partial(self.event_processor, asgi_scope=scope) sentry_scope.add_event_processor(processor) ty = scope["type"] ( transaction_name, transaction_source, ) = self._get_transaction_name_and_source( self.transaction_style, scope, ) if ty in ("http", "websocket"): transaction = continue_trace( _get_headers(scope), op="{}.server".format(ty), name=transaction_name, source=transaction_source, ) logger.debug( "[ASGI] Created transaction (continuing trace): %s", transaction, ) else: transaction = Transaction( op=OP.HTTP_SERVER, name=transaction_name, source=transaction_source, ) logger.debug( "[ASGI] Created transaction (new): %s", transaction ) transaction.set_tag("asgi.type", ty) logger.debug( "[ASGI] Set transaction name and source on transaction: '%s' / '%s'", transaction.name, transaction.source, ) with hub.start_transaction( transaction, custom_sampling_context={"asgi_scope": scope} ): logger.debug("[ASGI] Started transaction: %s", transaction) try: async def _sentry_wrapped_send(event): # type: (Dict[str, Any]) -> Any is_http_response = ( event.get("type") == "http.response.start" and transaction is not None and "status" in event ) if is_http_response: transaction.set_http_status(event["status"]) return await send(event) if asgi_version == 2: return await self.app(scope)( receive, _sentry_wrapped_send ) else: return await self.app( scope, receive, _sentry_wrapped_send ) except Exception as exc: _capture_exception( hub, exc, mechanism_type=self.mechanism_type ) raise exc from None finally: _asgi_middleware_applied.set(False) def event_processor(self, event, hint, asgi_scope): # type: (Event, 
Hint, Any) -> Optional[Event] request_data = event.get("request", {}) request_data.update(_get_request_data(asgi_scope)) event["request"] = deepcopy(request_data) # Only set transaction name if not already set by Starlette or FastAPI (or other frameworks) already_set = event["transaction"] != _DEFAULT_TRANSACTION_NAME and event[ "transaction_info" ].get("source") in [ TRANSACTION_SOURCE_COMPONENT, TRANSACTION_SOURCE_ROUTE, ] if not already_set: name, source = self._get_transaction_name_and_source( self.transaction_style, asgi_scope ) event["transaction"] = name event["transaction_info"] = {"source": source} logger.debug( "[ASGI] Set transaction name and source in event_processor: '%s' / '%s'", event["transaction"], event["transaction_info"]["source"], ) return event # Helper functions. # # Note: Those functions are not public API. If you want to mutate request # data to your liking it's recommended to use the `before_send` callback # for that. def _get_transaction_name_and_source(self, transaction_style, asgi_scope): # type: (SentryAsgiMiddleware, str, Any) -> Tuple[str, str] name = None source = SOURCE_FOR_STYLE[transaction_style] ty = asgi_scope.get("type") if transaction_style == "endpoint": endpoint = asgi_scope.get("endpoint") # Webframeworks like Starlette mutate the ASGI env once routing is # done, which is sometime after the request has started. If we have # an endpoint, overwrite our generic transaction name. if endpoint: name = transaction_from_function(endpoint) or "" else: name = _get_url(asgi_scope, "http" if ty == "http" else "ws", host=None) source = TRANSACTION_SOURCE_URL elif transaction_style == "url": # FastAPI includes the route object in the scope to let Sentry extract the # path from it for the transaction name route = asgi_scope.get("route") if route: path = getattr(route, "path", None) if path is not None: name = path else: name = _get_url(asgi_scope, "http" if ty == "http" else "ws", host=None) source = TRANSACTION_SOURCE_URL if name is None: name = _DEFAULT_TRANSACTION_NAME source = TRANSACTION_SOURCE_ROUTE return name, source return name, source sentry-python-1.39.2/sentry_sdk/integrations/asyncio.py000066400000000000000000000062011454744723200233760ustar00rootroot00000000000000from __future__ import absolute_import import sys from sentry_sdk._compat import reraise from sentry_sdk.consts import OP from sentry_sdk.hub import Hub from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.utils import event_from_exception try: import asyncio from asyncio.tasks import Task except ImportError: raise DidNotEnable("asyncio not available") if TYPE_CHECKING: from typing import Any from collections.abc import Coroutine from sentry_sdk._types import ExcInfo def get_name(coro): # type: (Any) -> str return ( getattr(coro, "__qualname__", None) or getattr(coro, "__name__", None) or "coroutine without __name__" ) def patch_asyncio(): # type: () -> None orig_task_factory = None try: loop = asyncio.get_running_loop() orig_task_factory = loop.get_task_factory() def _sentry_task_factory(loop, coro, **kwargs): # type: (asyncio.AbstractEventLoop, Coroutine[Any, Any, Any], Any) -> asyncio.Future[Any] async def _coro_creating_hub_and_span(): # type: () -> Any hub = Hub(Hub.current) result = None with hub: with hub.start_span(op=OP.FUNCTION, description=get_name(coro)): try: result = await coro except Exception: reraise(*_capture_exception(hub)) return result # Trying to use user set task factory (if there is one) if 
orig_task_factory: return orig_task_factory(loop, _coro_creating_hub_and_span(), **kwargs) # The default task factory in `asyncio` does not have its own function # but is just a couple of lines in `asyncio.base_events.create_task()` # Those lines are copied here. # WARNING: # If the default behavior of the task creation in asyncio changes, # this will break! task = Task(_coro_creating_hub_and_span(), loop=loop, **kwargs) if task._source_traceback: # type: ignore del task._source_traceback[-1] # type: ignore return task loop.set_task_factory(_sentry_task_factory) # type: ignore except RuntimeError: # When there is no running loop, we have nothing to patch. pass def _capture_exception(hub): # type: (Hub) -> ExcInfo exc_info = sys.exc_info() integration = hub.get_integration(AsyncioIntegration) if integration is not None: # If an integration is there, a client has to be there. client = hub.client # type: Any event, hint = event_from_exception( exc_info, client_options=client.options, mechanism={"type": "asyncio", "handled": False}, ) hub.capture_event(event, hint=hint) return exc_info class AsyncioIntegration(Integration): identifier = "asyncio" @staticmethod def setup_once(): # type: () -> None patch_asyncio() sentry-python-1.39.2/sentry_sdk/integrations/asyncpg.py000066400000000000000000000142311454744723200233770ustar00rootroot00000000000000from __future__ import annotations import contextlib from typing import Any, TypeVar, Callable, Awaitable, Iterator from asyncpg.cursor import BaseCursor # type: ignore from sentry_sdk import Hub from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.tracing import Span from sentry_sdk.tracing_utils import add_query_source, record_sql_queries from sentry_sdk.utils import parse_version, capture_internal_exceptions try: import asyncpg # type: ignore[import-not-found] except ImportError: raise DidNotEnable("asyncpg not installed.") # asyncpg.__version__ is a string containing the semantic version in the form of ".." asyncpg_version = parse_version(asyncpg.__version__) if asyncpg_version is not None and asyncpg_version < (0, 23, 0): raise DidNotEnable("asyncpg >= 0.23.0 required") class AsyncPGIntegration(Integration): identifier = "asyncpg" _record_params = False def __init__(self, *, record_params: bool = False): AsyncPGIntegration._record_params = record_params @staticmethod def setup_once() -> None: asyncpg.Connection.execute = _wrap_execute( asyncpg.Connection.execute, ) asyncpg.Connection._execute = _wrap_connection_method( asyncpg.Connection._execute ) asyncpg.Connection._executemany = _wrap_connection_method( asyncpg.Connection._executemany, executemany=True ) asyncpg.Connection.cursor = _wrap_cursor_creation(asyncpg.Connection.cursor) asyncpg.Connection.prepare = _wrap_connection_method(asyncpg.Connection.prepare) asyncpg.connect_utils._connect_addr = _wrap_connect_addr( asyncpg.connect_utils._connect_addr ) T = TypeVar("T") def _wrap_execute(f: Callable[..., Awaitable[T]]) -> Callable[..., Awaitable[T]]: async def _inner(*args: Any, **kwargs: Any) -> T: hub = Hub.current integration = hub.get_integration(AsyncPGIntegration) # Avoid recording calls to _execute twice. 
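# (hence the `len(args) > 2` guard below: positional query arguments
# mean the call is delegated to and recorded by the _execute wrapper)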
# Calls to Connection.execute with args also call # Connection._execute, which is recorded separately # args[0] = the connection object, args[1] is the query if integration is None or len(args) > 2: return await f(*args, **kwargs) query = args[1] with record_sql_queries( hub, None, query, None, None, executemany=False ) as span: res = await f(*args, **kwargs) with capture_internal_exceptions(): add_query_source(hub, span) return res return _inner SubCursor = TypeVar("SubCursor", bound=BaseCursor) @contextlib.contextmanager def _record( hub: Hub, cursor: SubCursor | None, query: str, params_list: tuple[Any, ...] | None, *, executemany: bool = False, ) -> Iterator[Span]: integration = hub.get_integration(AsyncPGIntegration) if not integration._record_params: params_list = None param_style = "pyformat" if params_list else None with record_sql_queries( hub, cursor, query, params_list, param_style, executemany=executemany, record_cursor_repr=cursor is not None, ) as span: yield span def _wrap_connection_method( f: Callable[..., Awaitable[T]], *, executemany: bool = False ) -> Callable[..., Awaitable[T]]: async def _inner(*args: Any, **kwargs: Any) -> T: hub = Hub.current integration = hub.get_integration(AsyncPGIntegration) if integration is None: return await f(*args, **kwargs) query = args[1] params_list = args[2] if len(args) > 2 else None with _record(hub, None, query, params_list, executemany=executemany) as span: _set_db_data(span, args[0]) res = await f(*args, **kwargs) return res return _inner def _wrap_cursor_creation(f: Callable[..., T]) -> Callable[..., T]: def _inner(*args: Any, **kwargs: Any) -> T: # noqa: N807 hub = Hub.current integration = hub.get_integration(AsyncPGIntegration) if integration is None: return f(*args, **kwargs) query = args[1] params_list = args[2] if len(args) > 2 else None with _record( hub, None, query, params_list, executemany=False, ) as span: _set_db_data(span, args[0]) res = f(*args, **kwargs) span.set_data("db.cursor", res) return res return _inner def _wrap_connect_addr(f: Callable[..., Awaitable[T]]) -> Callable[..., Awaitable[T]]: async def _inner(*args: Any, **kwargs: Any) -> T: hub = Hub.current integration = hub.get_integration(AsyncPGIntegration) if integration is None: return await f(*args, **kwargs) user = kwargs["params"].user database = kwargs["params"].database with hub.start_span(op=OP.DB, description="connect") as span: span.set_data(SPANDATA.DB_SYSTEM, "postgresql") addr = kwargs.get("addr") if addr: try: span.set_data(SPANDATA.SERVER_ADDRESS, addr[0]) span.set_data(SPANDATA.SERVER_PORT, addr[1]) except IndexError: pass span.set_data(SPANDATA.DB_NAME, database) span.set_data(SPANDATA.DB_USER, user) with capture_internal_exceptions(): hub.add_breadcrumb(message="connect", category="query", data=span._data) res = await f(*args, **kwargs) return res return _inner def _set_db_data(span: Span, conn: Any) -> None: span.set_data(SPANDATA.DB_SYSTEM, "postgresql") addr = conn._addr if addr: try: span.set_data(SPANDATA.SERVER_ADDRESS, addr[0]) span.set_data(SPANDATA.SERVER_PORT, addr[1]) except IndexError: pass database = conn._params.database if database: span.set_data(SPANDATA.DB_NAME, database) user = conn._params.user if user: span.set_data(SPANDATA.DB_USER, user) sentry-python-1.39.2/sentry_sdk/integrations/atexit.py000066400000000000000000000034661454744723200232410ustar00rootroot00000000000000from __future__ import absolute_import import os import sys import atexit from sentry_sdk.hub import Hub from sentry_sdk.utils import logger from 
sentry_sdk.integrations import Integration from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Optional def default_callback(pending, timeout): # type: (int, int) -> None """This is the default shutdown callback that is set on the options. It prints out a message to stderr that informs the user that some events are still pending and the process is waiting for them to flush out. """ def echo(msg): # type: (str) -> None sys.stderr.write(msg + "\n") echo("Sentry is attempting to send %i pending events" % pending) echo("Waiting up to %s seconds" % timeout) echo("Press Ctrl-%s to quit" % (os.name == "nt" and "Break" or "C")) sys.stderr.flush() class AtexitIntegration(Integration): identifier = "atexit" def __init__(self, callback=None): # type: (Optional[Any]) -> None if callback is None: callback = default_callback self.callback = callback @staticmethod def setup_once(): # type: () -> None @atexit.register def _shutdown(): # type: () -> None logger.debug("atexit: got shutdown signal") hub = Hub.main integration = hub.get_integration(AtexitIntegration) if integration is not None: logger.debug("atexit: shutting down client") # If there is a session on the hub, close it now. hub.end_session() # If an integration is there, a client has to be there. client = hub.client # type: Any client.close(callback=integration.callback) sentry-python-1.39.2/sentry_sdk/integrations/aws_lambda.py000066400000000000000000000370321454744723200240310ustar00rootroot00000000000000import sys from copy import deepcopy from datetime import timedelta from os import environ from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT from sentry_sdk.utils import ( AnnotatedValue, capture_internal_exceptions, event_from_exception, logger, TimeoutThread, ) from sentry_sdk.integrations import Integration from sentry_sdk.integrations._wsgi_common import _filter_headers from sentry_sdk._compat import datetime_utcnow, reraise from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from datetime import datetime from typing import Any from typing import TypeVar from typing import Callable from typing import Optional from sentry_sdk._types import EventProcessor, Event, Hint F = TypeVar("F", bound=Callable[..., Any]) # Constants TIMEOUT_WARNING_BUFFER = 1500 # Buffer time required to send timeout warning to Sentry MILLIS_TO_SECONDS = 1000.0 def _wrap_init_error(init_error): # type: (F) -> F def sentry_init_error(*args, **kwargs): # type: (*Any, **Any) -> Any hub = Hub.current integration = hub.get_integration(AwsLambdaIntegration) if integration is None: return init_error(*args, **kwargs) # If an integration is there, a client has to be there. 
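# (the Any annotation lets type checkers accept attribute access on a
# value whose declared type is Optional[Client])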
client = hub.client # type: Any with capture_internal_exceptions(): with hub.configure_scope() as scope: scope.clear_breadcrumbs() exc_info = sys.exc_info() if exc_info and all(exc_info): sentry_event, hint = event_from_exception( exc_info, client_options=client.options, mechanism={"type": "aws_lambda", "handled": False}, ) hub.capture_event(sentry_event, hint=hint) return init_error(*args, **kwargs) return sentry_init_error # type: ignore def _wrap_handler(handler): # type: (F) -> F def sentry_handler(aws_event, aws_context, *args, **kwargs): # type: (Any, Any, *Any, **Any) -> Any # Per https://docs.aws.amazon.com/lambda/latest/dg/python-handler.html, # `event` here is *likely* a dictionary, but also might be a number of # other types (str, int, float, None). # # In some cases, it is a list (if the user is batch-invoking their # function, for example), in which case we'll use the first entry as a # representative from which to try pulling request data. (Presumably it # will be the same for all events in the list, since they're all hitting # the lambda in the same request.) if isinstance(aws_event, list): request_data = aws_event[0] batch_size = len(aws_event) else: request_data = aws_event batch_size = 1 if not isinstance(request_data, dict): # If we're not dealing with a dictionary, we won't be able to get # headers, path, http method, etc in any case, so it's fine that # this is empty request_data = {} hub = Hub.current integration = hub.get_integration(AwsLambdaIntegration) if integration is None: return handler(aws_event, aws_context, *args, **kwargs) # If an integration is there, a client has to be there. client = hub.client # type: Any configured_time = aws_context.get_remaining_time_in_millis() with hub.push_scope() as scope: timeout_thread = None with capture_internal_exceptions(): scope.clear_breadcrumbs() scope.add_event_processor( _make_request_event_processor( request_data, aws_context, configured_time ) ) scope.set_tag( "aws_region", aws_context.invoked_function_arn.split(":")[3] ) if batch_size > 1: scope.set_tag("batch_request", True) scope.set_tag("batch_size", batch_size) # Starting the Timeout thread only if the configured time is greater than Timeout warning # buffer and timeout_warning parameter is set True. if ( integration.timeout_warning and configured_time > TIMEOUT_WARNING_BUFFER ): waiting_time = ( configured_time - TIMEOUT_WARNING_BUFFER ) / MILLIS_TO_SECONDS timeout_thread = TimeoutThread( waiting_time, configured_time / MILLIS_TO_SECONDS, ) # Starting the thread to raise timeout warning exception timeout_thread.start() headers = request_data.get("headers", {}) # Some AWS Services (ie. 
EventBridge) set headers as a list # or None, so we must ensure it is a dict if not isinstance(headers, dict): headers = {} transaction = continue_trace( headers, op=OP.FUNCTION_AWS, name=aws_context.function_name, source=TRANSACTION_SOURCE_COMPONENT, ) with hub.start_transaction( transaction, custom_sampling_context={ "aws_event": aws_event, "aws_context": aws_context, }, ): try: return handler(aws_event, aws_context, *args, **kwargs) except Exception: exc_info = sys.exc_info() sentry_event, hint = event_from_exception( exc_info, client_options=client.options, mechanism={"type": "aws_lambda", "handled": False}, ) hub.capture_event(sentry_event, hint=hint) reraise(*exc_info) finally: if timeout_thread: timeout_thread.stop() return sentry_handler # type: ignore def _drain_queue(): # type: () -> None with capture_internal_exceptions(): hub = Hub.current integration = hub.get_integration(AwsLambdaIntegration) if integration is not None: # Flush out the event queue before AWS kills the # process. hub.flush() class AwsLambdaIntegration(Integration): identifier = "aws_lambda" def __init__(self, timeout_warning=False): # type: (bool) -> None self.timeout_warning = timeout_warning @staticmethod def setup_once(): # type: () -> None lambda_bootstrap = get_lambda_bootstrap() if not lambda_bootstrap: logger.warning( "Not running in AWS Lambda environment, " "AwsLambdaIntegration disabled (could not find bootstrap module)" ) return if not hasattr(lambda_bootstrap, "handle_event_request"): logger.warning( "Not running in AWS Lambda environment, " "AwsLambdaIntegration disabled (could not find handle_event_request)" ) return pre_37 = hasattr(lambda_bootstrap, "handle_http_request") # Python 3.6 or 2.7 if pre_37: old_handle_event_request = lambda_bootstrap.handle_event_request def sentry_handle_event_request(request_handler, *args, **kwargs): # type: (Any, *Any, **Any) -> Any request_handler = _wrap_handler(request_handler) return old_handle_event_request(request_handler, *args, **kwargs) lambda_bootstrap.handle_event_request = sentry_handle_event_request old_handle_http_request = lambda_bootstrap.handle_http_request def sentry_handle_http_request(request_handler, *args, **kwargs): # type: (Any, *Any, **Any) -> Any request_handler = _wrap_handler(request_handler) return old_handle_http_request(request_handler, *args, **kwargs) lambda_bootstrap.handle_http_request = sentry_handle_http_request # Patch to_json to drain the queue. This should work even when the # SDK is initialized inside of the handler old_to_json = lambda_bootstrap.to_json def sentry_to_json(*args, **kwargs): # type: (*Any, **Any) -> Any _drain_queue() return old_to_json(*args, **kwargs) lambda_bootstrap.to_json = sentry_to_json else: lambda_bootstrap.LambdaRuntimeClient.post_init_error = _wrap_init_error( lambda_bootstrap.LambdaRuntimeClient.post_init_error ) old_handle_event_request = lambda_bootstrap.handle_event_request def sentry_handle_event_request( # type: ignore lambda_runtime_client, request_handler, *args, **kwargs ): request_handler = _wrap_handler(request_handler) return old_handle_event_request( lambda_runtime_client, request_handler, *args, **kwargs ) lambda_bootstrap.handle_event_request = sentry_handle_event_request # Patch the runtime client to drain the queue. 
This should work # even when the SDK is initialized inside of the handler def _wrap_post_function(f): # type: (F) -> F def inner(*args, **kwargs): # type: (*Any, **Any) -> Any _drain_queue() return f(*args, **kwargs) return inner # type: ignore lambda_bootstrap.LambdaRuntimeClient.post_invocation_result = ( _wrap_post_function( lambda_bootstrap.LambdaRuntimeClient.post_invocation_result ) ) lambda_bootstrap.LambdaRuntimeClient.post_invocation_error = ( _wrap_post_function( lambda_bootstrap.LambdaRuntimeClient.post_invocation_error ) ) def get_lambda_bootstrap(): # type: () -> Optional[Any] # Python 2.7: Everything is in `__main__`. # # Python 3.7: If the bootstrap module is *already imported*, it is the # one we actually want to use (no idea what's in __main__) # # Python 3.8: bootstrap is also importable, but will be the same file # as __main__ imported under a different name: # # sys.modules['__main__'].__file__ == sys.modules['bootstrap'].__file__ # sys.modules['__main__'] is not sys.modules['bootstrap'] # # Python 3.9: bootstrap is in __main__.awslambdaricmain # # On container builds using the `aws-lambda-python-runtime-interface-client` # (awslamdaric) module, bootstrap is located in sys.modules['__main__'].bootstrap # # Such a setup would then make all monkeypatches useless. if "bootstrap" in sys.modules: return sys.modules["bootstrap"] elif "__main__" in sys.modules: module = sys.modules["__main__"] # python3.9 runtime if hasattr(module, "awslambdaricmain") and hasattr( module.awslambdaricmain, "bootstrap" ): return module.awslambdaricmain.bootstrap elif hasattr(module, "bootstrap"): # awslambdaric python module in container builds return module.bootstrap # python3.8 runtime return module else: return None def _make_request_event_processor(aws_event, aws_context, configured_timeout): # type: (Any, Any, Any) -> EventProcessor start_time = datetime_utcnow() def event_processor(sentry_event, hint, start_time=start_time): # type: (Event, Hint, datetime) -> Optional[Event] remaining_time_in_milis = aws_context.get_remaining_time_in_millis() exec_duration = configured_timeout - remaining_time_in_milis extra = sentry_event.setdefault("extra", {}) extra["lambda"] = { "function_name": aws_context.function_name, "function_version": aws_context.function_version, "invoked_function_arn": aws_context.invoked_function_arn, "aws_request_id": aws_context.aws_request_id, "execution_duration_in_millis": exec_duration, "remaining_time_in_millis": remaining_time_in_milis, } extra["cloudwatch logs"] = { "url": _get_cloudwatch_logs_url(aws_context, start_time), "log_group": aws_context.log_group_name, "log_stream": aws_context.log_stream_name, } request = sentry_event.get("request", {}) if "httpMethod" in aws_event: request["method"] = aws_event["httpMethod"] request["url"] = _get_url(aws_event, aws_context) if "queryStringParameters" in aws_event: request["query_string"] = aws_event["queryStringParameters"] if "headers" in aws_event: request["headers"] = _filter_headers(aws_event["headers"]) if _should_send_default_pii(): user_info = sentry_event.setdefault("user", {}) identity = aws_event.get("identity") if identity is None: identity = {} id = identity.get("userArn") if id is not None: user_info.setdefault("id", id) ip = identity.get("sourceIp") if ip is not None: user_info.setdefault("ip_address", ip) if "body" in aws_event: request["data"] = aws_event.get("body", "") else: if aws_event.get("body", None): # Unfortunately couldn't find a way to get structured body from AWS # event. 
Meaning every body is unstructured to us. request["data"] = AnnotatedValue.removed_because_raw_data() sentry_event["request"] = deepcopy(request) return sentry_event return event_processor def _get_url(aws_event, aws_context): # type: (Any, Any) -> str path = aws_event.get("path", None) headers = aws_event.get("headers") if headers is None: headers = {} host = headers.get("Host", None) proto = headers.get("X-Forwarded-Proto", None) if proto and host and path: return "{}://{}{}".format(proto, host, path) return "awslambda:///{}".format(aws_context.function_name) def _get_cloudwatch_logs_url(aws_context, start_time): # type: (Any, datetime) -> str """ Generates a CloudWatchLogs console URL based on the context object Arguments: aws_context {Any} -- context from lambda handler Returns: str -- AWS Console URL to logs. """ formatstring = "%Y-%m-%dT%H:%M:%SZ" region = environ.get("AWS_REGION", "") url = ( "https://console.{domain}/cloudwatch/home?region={region}" "#logEventViewer:group={log_group};stream={log_stream}" ";start={start_time};end={end_time}" ).format( domain="amazonaws.cn" if region.startswith("cn-") else "aws.amazon.com", region=region, log_group=aws_context.log_group_name, log_stream=aws_context.log_stream_name, start_time=(start_time - timedelta(seconds=1)).strftime(formatstring), end_time=(datetime_utcnow() + timedelta(seconds=2)).strftime(formatstring), ) return url sentry-python-1.39.2/sentry_sdk/integrations/beam.py000066400000000000000000000130711454744723200226400ustar00rootroot00000000000000from __future__ import absolute_import import sys import types from sentry_sdk._functools import wraps from sentry_sdk.hub import Hub from sentry_sdk._compat import reraise from sentry_sdk.utils import capture_internal_exceptions, event_from_exception from sentry_sdk.integrations import Integration from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Iterator from typing import TypeVar from typing import Optional from typing import Callable from sentry_sdk.client import Client from sentry_sdk._types import ExcInfo T = TypeVar("T") F = TypeVar("F", bound=Callable[..., Any]) WRAPPED_FUNC = "_wrapped_{}_" INSPECT_FUNC = "_inspect_{}" # Required format per apache_beam/transforms/core.py USED_FUNC = "_sentry_used_" class BeamIntegration(Integration): identifier = "beam" @staticmethod def setup_once(): # type: () -> None from apache_beam.transforms.core import DoFn, ParDo # type: ignore ignore_logger("root") ignore_logger("bundle_processor.create") function_patches = ["process", "start_bundle", "finish_bundle", "setup"] for func_name in function_patches: setattr( DoFn, INSPECT_FUNC.format(func_name), _wrap_inspect_call(DoFn, func_name), ) old_init = ParDo.__init__ def sentry_init_pardo(self, fn, *args, **kwargs): # type: (ParDo, Any, *Any, **Any) -> Any # Do not monkey patch init twice if not getattr(self, "_sentry_is_patched", False): for func_name in function_patches: if not hasattr(fn, func_name): continue wrapped_func = WRAPPED_FUNC.format(func_name) # Check to see if inspect is set and process is not # to avoid monkey patching process twice. # Check to see if function is part of object for # backwards compatibility. 
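# Both attributes are checked for the USED_FUNC marker that the wrappers
# set on themselves, which keeps the monkey patching idempotent.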
process_func = getattr(fn, func_name) inspect_func = getattr(fn, INSPECT_FUNC.format(func_name)) if not getattr(inspect_func, USED_FUNC, False) and not getattr( process_func, USED_FUNC, False ): setattr(fn, wrapped_func, process_func) setattr(fn, func_name, _wrap_task_call(process_func)) self._sentry_is_patched = True old_init(self, fn, *args, **kwargs) ParDo.__init__ = sentry_init_pardo def _wrap_inspect_call(cls, func_name): # type: (Any, Any) -> Any if not hasattr(cls, func_name): return None def _inspect(self): # type: (Any) -> Any """ Inspect function overrides the way Beam gets argspec. """ wrapped_func = WRAPPED_FUNC.format(func_name) if hasattr(self, wrapped_func): process_func = getattr(self, wrapped_func) else: process_func = getattr(self, func_name) setattr(self, func_name, _wrap_task_call(process_func)) setattr(self, wrapped_func, process_func) # getfullargspec is deprecated in more recent beam versions and get_function_args_defaults # (which uses Signatures internally) should be used instead. try: from apache_beam.transforms.core import get_function_args_defaults return get_function_args_defaults(process_func) except ImportError: from apache_beam.typehints.decorators import getfullargspec # type: ignore return getfullargspec(process_func) setattr(_inspect, USED_FUNC, True) return _inspect def _wrap_task_call(func): # type: (F) -> F """ Wrap task call with a try catch to get exceptions. Pass the client on to raise_exception so it can get rebinded. """ client = Hub.current.client @wraps(func) def _inner(*args, **kwargs): # type: (*Any, **Any) -> Any try: gen = func(*args, **kwargs) except Exception: raise_exception(client) if not isinstance(gen, types.GeneratorType): return gen return _wrap_generator_call(gen, client) setattr(_inner, USED_FUNC, True) return _inner # type: ignore def _capture_exception(exc_info, hub): # type: (ExcInfo, Hub) -> None """ Send Beam exception to Sentry. """ integration = hub.get_integration(BeamIntegration) if integration is None: return client = hub.client if client is None: return event, hint = event_from_exception( exc_info, client_options=client.options, mechanism={"type": "beam", "handled": False}, ) hub.capture_event(event, hint=hint) def raise_exception(client): # type: (Optional[Client]) -> None """ Raise an exception. If the client is not in the hub, rebind it. """ hub = Hub.current if hub.client is None: hub.bind_client(client) exc_info = sys.exc_info() with capture_internal_exceptions(): _capture_exception(exc_info, hub) reraise(*exc_info) def _wrap_generator_call(gen, client): # type: (Iterator[T], Optional[Client]) -> Iterator[T] """ Wrap the generator to handle any failures. 
""" while True: try: yield next(gen) except StopIteration: break except Exception: raise_exception(client) sentry-python-1.39.2/sentry_sdk/integrations/boto3.py000066400000000000000000000106761454744723200227720ustar00rootroot00000000000000from __future__ import absolute_import from sentry_sdk import Hub from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.tracing import Span from sentry_sdk._functools import partial from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.utils import capture_internal_exceptions, parse_url, parse_version if TYPE_CHECKING: from typing import Any from typing import Dict from typing import Optional from typing import Type try: from botocore import __version__ as BOTOCORE_VERSION # type: ignore from botocore.client import BaseClient # type: ignore from botocore.response import StreamingBody # type: ignore from botocore.awsrequest import AWSRequest # type: ignore except ImportError: raise DidNotEnable("botocore is not installed") class Boto3Integration(Integration): identifier = "boto3" @staticmethod def setup_once(): # type: () -> None version = parse_version(BOTOCORE_VERSION) if version is None: raise DidNotEnable( "Unparsable botocore version: {}".format(BOTOCORE_VERSION) ) if version < (1, 12): raise DidNotEnable("Botocore 1.12 or newer is required.") orig_init = BaseClient.__init__ def sentry_patched_init(self, *args, **kwargs): # type: (Type[BaseClient], *Any, **Any) -> None orig_init(self, *args, **kwargs) meta = self.meta service_id = meta.service_model.service_id.hyphenize() meta.events.register( "request-created", partial(_sentry_request_created, service_id=service_id), ) meta.events.register("after-call", _sentry_after_call) meta.events.register("after-call-error", _sentry_after_call_error) BaseClient.__init__ = sentry_patched_init def _sentry_request_created(service_id, request, operation_name, **kwargs): # type: (str, AWSRequest, str, **Any) -> None hub = Hub.current if hub.get_integration(Boto3Integration) is None: return description = "aws.%s.%s" % (service_id, operation_name) span = hub.start_span( hub=hub, op=OP.HTTP_CLIENT, description=description, ) with capture_internal_exceptions(): parsed_url = parse_url(request.url, sanitize=False) span.set_data("aws.request.url", parsed_url.url) span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query) span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment) span.set_tag("aws.service_id", service_id) span.set_tag("aws.operation_name", operation_name) span.set_data(SPANDATA.HTTP_METHOD, request.method) # We do it in order for subsequent http calls/retries be # attached to this span. span.__enter__() # request.context is an open-ended data-structure # where we can add anything useful in request life cycle. request.context["_sentrysdk_span"] = span def _sentry_after_call(context, parsed, **kwargs): # type: (Dict[str, Any], Dict[str, Any], **Any) -> None span = context.pop("_sentrysdk_span", None) # type: Optional[Span] # Span could be absent if the integration is disabled. 
if span is None: return span.__exit__(None, None, None) body = parsed.get("Body") if not isinstance(body, StreamingBody): return streaming_span = span.start_child( op=OP.HTTP_CLIENT_STREAM, description=span.description, ) orig_read = body.read orig_close = body.close def sentry_streaming_body_read(*args, **kwargs): # type: (*Any, **Any) -> bytes try: ret = orig_read(*args, **kwargs) if not ret: streaming_span.finish() return ret except Exception: streaming_span.finish() raise body.read = sentry_streaming_body_read def sentry_streaming_body_close(*args, **kwargs): # type: (*Any, **Any) -> None streaming_span.finish() orig_close(*args, **kwargs) body.close = sentry_streaming_body_close def _sentry_after_call_error(context, exception, **kwargs): # type: (Dict[str, Any], Type[BaseException], **Any) -> None span = context.pop("_sentrysdk_span", None) # type: Optional[Span] # Span could be absent if the integration is disabled. if span is None: return span.__exit__(type(exception), exception, None) sentry-python-1.39.2/sentry_sdk/integrations/bottle.py000066400000000000000000000145071454744723200232320ustar00rootroot00000000000000from __future__ import absolute_import from sentry_sdk.hub import Hub from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, parse_version, transaction_from_function, ) from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware from sentry_sdk.integrations._wsgi_common import RequestExtractor from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from sentry_sdk.integrations.wsgi import _ScopedResponse from typing import Any from typing import Dict from typing import Callable from typing import Optional from bottle import FileUpload, FormsDict, LocalRequest # type: ignore from sentry_sdk._types import EventProcessor, Event try: from bottle import ( Bottle, Route, request as bottle_request, HTTPResponse, __version__ as BOTTLE_VERSION, ) except ImportError: raise DidNotEnable("Bottle not installed") TRANSACTION_STYLE_VALUES = ("endpoint", "url") class BottleIntegration(Integration): identifier = "bottle" transaction_style = "" def __init__(self, transaction_style="endpoint"): # type: (str) -> None if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" % (transaction_style, TRANSACTION_STYLE_VALUES) ) self.transaction_style = transaction_style @staticmethod def setup_once(): # type: () -> None version = parse_version(BOTTLE_VERSION) if version is None: raise DidNotEnable("Unparsable Bottle version: {}".format(BOTTLE_VERSION)) if version < (0, 12): raise DidNotEnable("Bottle 0.12 or newer required.") # monkey patch method Bottle.__call__ old_app = Bottle.__call__ def sentry_patched_wsgi_app(self, environ, start_response): # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse hub = Hub.current integration = hub.get_integration(BottleIntegration) if integration is None: return old_app(self, environ, start_response) return SentryWsgiMiddleware(lambda *a, **kw: old_app(self, *a, **kw))( environ, start_response ) Bottle.__call__ = sentry_patched_wsgi_app # monkey patch method Bottle._handle old_handle = Bottle._handle def _patched_handle(self, environ): # type: (Bottle, Dict[str, Any]) -> Any hub = Hub.current integration = hub.get_integration(BottleIntegration) if integration is None: return old_handle(self, environ) # create new scope 
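# (the pushed scope is popped automatically when the `with` block exits,
# so per-request event processors do not leak between requests)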
scope_manager = hub.push_scope() with scope_manager: app = self with hub.configure_scope() as scope: scope._name = "bottle" scope.add_event_processor( _make_request_event_processor(app, bottle_request, integration) ) res = old_handle(self, environ) # scope cleanup return res Bottle._handle = _patched_handle # monkey patch method Route._make_callback old_make_callback = Route._make_callback def patched_make_callback(self, *args, **kwargs): # type: (Route, *object, **object) -> Any hub = Hub.current integration = hub.get_integration(BottleIntegration) prepared_callback = old_make_callback(self, *args, **kwargs) if integration is None: return prepared_callback # If an integration is there, a client has to be there. client = hub.client # type: Any def wrapped_callback(*args, **kwargs): # type: (*object, **object) -> Any try: res = prepared_callback(*args, **kwargs) except HTTPResponse: raise except Exception as exception: event, hint = event_from_exception( exception, client_options=client.options, mechanism={"type": "bottle", "handled": False}, ) hub.capture_event(event, hint=hint) raise exception return res return wrapped_callback Route._make_callback = patched_make_callback class BottleRequestExtractor(RequestExtractor): def env(self): # type: () -> Dict[str, str] return self.request.environ def cookies(self): # type: () -> Dict[str, str] return self.request.cookies def raw_data(self): # type: () -> bytes return self.request.body.read() def form(self): # type: () -> FormsDict if self.is_json(): return None return self.request.forms.decode() def files(self): # type: () -> Optional[Dict[str, str]] if self.is_json(): return None return self.request.files def size_of_file(self, file): # type: (FileUpload) -> int return file.content_length def _set_transaction_name_and_source(event, transaction_style, request): # type: (Event, str, Any) -> None name = "" if transaction_style == "url": name = request.route.rule or "" elif transaction_style == "endpoint": name = ( request.route.name or transaction_from_function(request.route.callback) or "" ) event["transaction"] = name event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]} def _make_request_event_processor(app, request, integration): # type: (Bottle, LocalRequest, BottleIntegration) -> EventProcessor def event_processor(event, hint): # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] _set_transaction_name_and_source(event, integration.transaction_style, request) with capture_internal_exceptions(): BottleRequestExtractor(request).extract_into_event(event) return event return event_processor sentry-python-1.39.2/sentry_sdk/integrations/celery.py000066400000000000000000000465451454744723200232330ustar00rootroot00000000000000from __future__ import absolute_import import sys import time from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP from sentry_sdk._compat import reraise from sentry_sdk._functools import wraps from sentry_sdk.crons import capture_checkin, MonitorStatus from sentry_sdk.hub import Hub from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.tracing import BAGGAGE_HEADER_NAME, TRANSACTION_SOURCE_TASK from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, logger, match_regex_list, ) if TYPE_CHECKING: from typing import Any from typing import Callable from typing import Dict from typing import List from typing import Optional from 
typing import Tuple from typing import TypeVar from typing import Union from sentry_sdk.tracing import Span from sentry_sdk._types import EventProcessor, Event, Hint, ExcInfo F = TypeVar("F", bound=Callable[..., Any]) try: from celery import VERSION as CELERY_VERSION # type: ignore from celery import Task, Celery from celery.app.trace import task_has_custom from celery.beat import Scheduler # type: ignore from celery.exceptions import ( # type: ignore Ignore, Reject, Retry, SoftTimeLimitExceeded, ) from celery.schedules import crontab, schedule # type: ignore from celery.signals import ( # type: ignore task_failure, task_success, task_retry, ) except ImportError: raise DidNotEnable("Celery not installed") CELERY_CONTROL_FLOW_EXCEPTIONS = (Retry, Ignore, Reject) class CeleryIntegration(Integration): identifier = "celery" def __init__( self, propagate_traces=True, monitor_beat_tasks=False, exclude_beat_tasks=None, ): # type: (bool, bool, Optional[List[str]]) -> None self.propagate_traces = propagate_traces self.monitor_beat_tasks = monitor_beat_tasks self.exclude_beat_tasks = exclude_beat_tasks if monitor_beat_tasks: _patch_beat_apply_entry() _setup_celery_beat_signals() @staticmethod def setup_once(): # type: () -> None if CELERY_VERSION < (3,): raise DidNotEnable("Celery 3 or newer required.") import celery.app.trace as trace # type: ignore old_build_tracer = trace.build_tracer def sentry_build_tracer(name, task, *args, **kwargs): # type: (Any, Any, *Any, **Any) -> Any if not getattr(task, "_sentry_is_patched", False): # determine whether Celery will use __call__ or run and patch # accordingly if task_has_custom(task, "__call__"): type(task).__call__ = _wrap_task_call(task, type(task).__call__) else: task.run = _wrap_task_call(task, task.run) # `build_tracer` is apparently called for every task # invocation. Can't wrap every celery task for every invocation # or we will get infinitely nested wrapper functions. task._sentry_is_patched = True return _wrap_tracer(task, old_build_tracer(name, task, *args, **kwargs)) trace.build_tracer = sentry_build_tracer from celery.app.task import Task # type: ignore Task.apply_async = _wrap_apply_async(Task.apply_async) _patch_worker_exit() # This logger logs every status of every task that ran on the worker. # Meaning that every task's breadcrumbs are full of stuff like "Task # raised unexpected ". ignore_logger("celery.worker.job") ignore_logger("celery.app.trace") # This is stdout/err redirected to a logger, can't deal with this # (need event_level=logging.WARN to reproduce) ignore_logger("celery.redirected") def _now_seconds_since_epoch(): # type: () -> float # We cannot use `time.perf_counter()` when dealing with the duration # of a Celery task, because the start of a Celery task and # the end are recorded in different processes. # Start happens in the Celery Beat process, # the end in a Celery Worker process. return time.time() class NoOpMgr: def __enter__(self): # type: () -> None return None def __exit__(self, exc_type, exc_value, traceback): # type: (Any, Any, Any) -> None return None def _wrap_apply_async(f): # type: (F) -> F @wraps(f) def apply_async(*args, **kwargs): # type: (*Any, **Any) -> Any hub = Hub.current integration = hub.get_integration(CeleryIntegration) if integration is None: return f(*args, **kwargs) # Note: kwargs can contain headers=None, so no setdefault! # Unsure which backend though. 
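        # Trace propagation can also be disabled for a single call via a
        # Sentry-specific pseudo-header, which is popped below before the task
        # is enqueued, e.g. (a sketch; `my_task` stands in for any Celery task):
        #
        #     my_task.apply_async(args=(...,), headers={"sentry-propagate-traces": False})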
kwarg_headers = kwargs.get("headers") or {} propagate_traces = kwarg_headers.pop( "sentry-propagate-traces", integration.propagate_traces ) if not propagate_traces: return f(*args, **kwargs) try: task_started_from_beat = args[1][0] == "BEAT" except (IndexError, TypeError): task_started_from_beat = False task = args[0] span_mgr = ( hub.start_span(op=OP.QUEUE_SUBMIT_CELERY, description=task.name) if not task_started_from_beat else NoOpMgr() ) # type: Union[Span, NoOpMgr] with span_mgr as span: with capture_internal_exceptions(): headers = ( dict(hub.iter_trace_propagation_headers(span)) if span is not None else {} ) if integration.monitor_beat_tasks: headers.update( { "sentry-monitor-start-timestamp-s": "%.9f" % _now_seconds_since_epoch(), } ) if headers: existing_baggage = kwarg_headers.get(BAGGAGE_HEADER_NAME) sentry_baggage = headers.get(BAGGAGE_HEADER_NAME) combined_baggage = sentry_baggage or existing_baggage if sentry_baggage and existing_baggage: combined_baggage = "{},{}".format( existing_baggage, sentry_baggage, ) kwarg_headers.update(headers) if combined_baggage: kwarg_headers[BAGGAGE_HEADER_NAME] = combined_baggage # https://github.com/celery/celery/issues/4875 # # Need to setdefault the inner headers too since other # tracing tools (dd-trace-py) also employ this exact # workaround and we don't want to break them. kwarg_headers.setdefault("headers", {}).update(headers) if combined_baggage: kwarg_headers["headers"][BAGGAGE_HEADER_NAME] = combined_baggage # Add the Sentry options potentially added in `sentry_apply_entry` # to the headers (done when auto-instrumenting Celery Beat tasks) for key, value in kwarg_headers.items(): if key.startswith("sentry-"): kwarg_headers["headers"][key] = value kwargs["headers"] = kwarg_headers return f(*args, **kwargs) return apply_async # type: ignore def _wrap_tracer(task, f): # type: (Any, F) -> F # Need to wrap tracer for pushing the scope before prerun is sent, and # popping it after postrun is sent. # # This is the reason we don't use signals for hooking in the first place. # Also because in Celery 3, signal dispatch returns early if one handler # crashes. @wraps(f) def _inner(*args, **kwargs): # type: (*Any, **Any) -> Any hub = Hub.current if hub.get_integration(CeleryIntegration) is None: return f(*args, **kwargs) with hub.push_scope() as scope: scope._name = "celery" scope.clear_breadcrumbs() scope.add_event_processor(_make_event_processor(task, *args, **kwargs)) transaction = None # Celery task objects are not a thing to be trusted. Even # something such as attribute access can fail. with capture_internal_exceptions(): transaction = continue_trace( args[3].get("headers") or {}, op=OP.QUEUE_TASK_CELERY, name="unknown celery task", source=TRANSACTION_SOURCE_TASK, ) transaction.name = task.name transaction.set_status("ok") if transaction is None: return f(*args, **kwargs) with hub.start_transaction( transaction, custom_sampling_context={ "celery_job": { "task": task.name, # for some reason, args[1] is a list if non-empty but a # tuple if empty "args": list(args[1]), "kwargs": args[2], } }, ): return f(*args, **kwargs) return _inner # type: ignore def _wrap_task_call(task, f): # type: (Any, F) -> F # Need to wrap task call because the exception is caught before we get to # see it. Also celery's reported stacktrace is untrustworthy. # functools.wraps is important here because celery-once looks at this # method's name. 
# https://github.com/getsentry/sentry-python/issues/421 @wraps(f) def _inner(*args, **kwargs): # type: (*Any, **Any) -> Any try: return f(*args, **kwargs) except Exception: exc_info = sys.exc_info() with capture_internal_exceptions(): _capture_exception(task, exc_info) reraise(*exc_info) return _inner # type: ignore def _make_event_processor(task, uuid, args, kwargs, request=None): # type: (Any, Any, Any, Any, Optional[Any]) -> EventProcessor def event_processor(event, hint): # type: (Event, Hint) -> Optional[Event] with capture_internal_exceptions(): tags = event.setdefault("tags", {}) tags["celery_task_id"] = uuid extra = event.setdefault("extra", {}) extra["celery-job"] = { "task_name": task.name, "args": args, "kwargs": kwargs, } if "exc_info" in hint: with capture_internal_exceptions(): if issubclass(hint["exc_info"][0], SoftTimeLimitExceeded): event["fingerprint"] = [ "celery", "SoftTimeLimitExceeded", getattr(task, "name", task), ] return event return event_processor def _capture_exception(task, exc_info): # type: (Any, ExcInfo) -> None hub = Hub.current if hub.get_integration(CeleryIntegration) is None: return if isinstance(exc_info[1], CELERY_CONTROL_FLOW_EXCEPTIONS): # ??? Doesn't map to anything _set_status(hub, "aborted") return _set_status(hub, "internal_error") if hasattr(task, "throws") and isinstance(exc_info[1], task.throws): return # If an integration is there, a client has to be there. client = hub.client # type: Any event, hint = event_from_exception( exc_info, client_options=client.options, mechanism={"type": "celery", "handled": False}, ) hub.capture_event(event, hint=hint) def _set_status(hub, status): # type: (Hub, str) -> None with capture_internal_exceptions(): with hub.configure_scope() as scope: if scope.span is not None: scope.span.set_status(status) def _patch_worker_exit(): # type: () -> None # Need to flush queue before worker shutdown because a crashing worker will # call os._exit from billiard.pool import Worker # type: ignore old_workloop = Worker.workloop def sentry_workloop(*args, **kwargs): # type: (*Any, **Any) -> Any try: return old_workloop(*args, **kwargs) finally: with capture_internal_exceptions(): hub = Hub.current if hub.get_integration(CeleryIntegration) is not None: hub.flush() Worker.workloop = sentry_workloop def _get_headers(task): # type: (Task) -> Dict[str, Any] headers = task.request.get("headers") or {} # flatten nested headers if "headers" in headers: headers.update(headers["headers"]) del headers["headers"] headers.update(task.request.get("properties") or {}) return headers def _get_humanized_interval(seconds): # type: (float) -> Tuple[int, str] TIME_UNITS = ( # noqa: N806 ("day", 60 * 60 * 24.0), ("hour", 60 * 60.0), ("minute", 60.0), ) seconds = float(seconds) for unit, divider in TIME_UNITS: if seconds >= divider: interval = int(seconds / divider) return (interval, unit) return (int(seconds), "second") def _get_monitor_config(celery_schedule, app, monitor_name): # type: (Any, Celery, str) -> Dict[str, Any] monitor_config = {} # type: Dict[str, Any] schedule_type = None # type: Optional[str] schedule_value = None # type: Optional[Union[str, int]] schedule_unit = None # type: Optional[str] if isinstance(celery_schedule, crontab): schedule_type = "crontab" schedule_value = ( "{0._orig_minute} " "{0._orig_hour} " "{0._orig_day_of_month} " "{0._orig_month_of_year} " "{0._orig_day_of_week}".format(celery_schedule) ) elif isinstance(celery_schedule, schedule): schedule_type = "interval" (schedule_value, schedule_unit) = 
_get_humanized_interval( celery_schedule.seconds ) if schedule_unit == "second": logger.warning( "Intervals shorter than one minute are not supported by Sentry Crons. Monitor '%s' has an interval of %s seconds. Use the `exclude_beat_tasks` option in the celery integration to exclude it.", monitor_name, schedule_value, ) return {} else: logger.warning( "Celery schedule type '%s' not supported by Sentry Crons.", type(celery_schedule), ) return {} monitor_config["schedule"] = {} monitor_config["schedule"]["type"] = schedule_type monitor_config["schedule"]["value"] = schedule_value if schedule_unit is not None: monitor_config["schedule"]["unit"] = schedule_unit monitor_config["timezone"] = ( ( hasattr(celery_schedule, "tz") and celery_schedule.tz is not None and str(celery_schedule.tz) ) or app.timezone or "UTC" ) return monitor_config def _patch_beat_apply_entry(): # type: () -> None original_apply_entry = Scheduler.apply_entry def sentry_apply_entry(*args, **kwargs): # type: (*Any, **Any) -> None scheduler, schedule_entry = args app = scheduler.app celery_schedule = schedule_entry.schedule monitor_name = schedule_entry.name hub = Hub.current integration = hub.get_integration(CeleryIntegration) if integration is None: return original_apply_entry(*args, **kwargs) if match_regex_list(monitor_name, integration.exclude_beat_tasks): return original_apply_entry(*args, **kwargs) with hub.configure_scope() as scope: # When tasks are started from Celery Beat, make sure each task has its own trace. scope.set_new_propagation_context() monitor_config = _get_monitor_config(celery_schedule, app, monitor_name) is_supported_schedule = bool(monitor_config) if is_supported_schedule: headers = schedule_entry.options.pop("headers", {}) headers.update( { "sentry-monitor-slug": monitor_name, "sentry-monitor-config": monitor_config, } ) check_in_id = capture_checkin( monitor_slug=monitor_name, monitor_config=monitor_config, status=MonitorStatus.IN_PROGRESS, ) headers.update({"sentry-monitor-check-in-id": check_in_id}) # Set the Sentry configuration in the options of the ScheduleEntry. # Those will be picked up in `apply_async` and added to the headers. 
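                # At this point `headers` carries, roughly:
                #
                #     {
                #         "sentry-monitor-slug": monitor_name,
                #         "sentry-monitor-config": monitor_config,
                #         "sentry-monitor-check-in-id": check_in_id,
                #     }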
schedule_entry.options["headers"] = headers return original_apply_entry(*args, **kwargs) Scheduler.apply_entry = sentry_apply_entry def _setup_celery_beat_signals(): # type: () -> None task_success.connect(crons_task_success) task_failure.connect(crons_task_failure) task_retry.connect(crons_task_retry) def crons_task_success(sender, **kwargs): # type: (Task, Dict[Any, Any]) -> None logger.debug("celery_task_success %s", sender) headers = _get_headers(sender) if "sentry-monitor-slug" not in headers: return monitor_config = headers.get("sentry-monitor-config", {}) start_timestamp_s = float(headers["sentry-monitor-start-timestamp-s"]) capture_checkin( monitor_slug=headers["sentry-monitor-slug"], monitor_config=monitor_config, check_in_id=headers["sentry-monitor-check-in-id"], duration=_now_seconds_since_epoch() - start_timestamp_s, status=MonitorStatus.OK, ) def crons_task_failure(sender, **kwargs): # type: (Task, Dict[Any, Any]) -> None logger.debug("celery_task_failure %s", sender) headers = _get_headers(sender) if "sentry-monitor-slug" not in headers: return monitor_config = headers.get("sentry-monitor-config", {}) start_timestamp_s = float(headers["sentry-monitor-start-timestamp-s"]) capture_checkin( monitor_slug=headers["sentry-monitor-slug"], monitor_config=monitor_config, check_in_id=headers["sentry-monitor-check-in-id"], duration=_now_seconds_since_epoch() - start_timestamp_s, status=MonitorStatus.ERROR, ) def crons_task_retry(sender, **kwargs): # type: (Task, Dict[Any, Any]) -> None logger.debug("celery_task_retry %s", sender) headers = _get_headers(sender) if "sentry-monitor-slug" not in headers: return monitor_config = headers.get("sentry-monitor-config", {}) start_timestamp_s = float(headers["sentry-monitor-start-timestamp-s"]) capture_checkin( monitor_slug=headers["sentry-monitor-slug"], monitor_config=monitor_config, check_in_id=headers["sentry-monitor-check-in-id"], duration=_now_seconds_since_epoch() - start_timestamp_s, status=MonitorStatus.ERROR, ) sentry-python-1.39.2/sentry_sdk/integrations/chalice.py000066400000000000000000000112411454744723200233210ustar00rootroot00000000000000import sys from sentry_sdk._compat import reraise from sentry_sdk.hub import Hub from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations.aws_lambda import _make_request_event_processor from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, parse_version, ) from sentry_sdk._types import TYPE_CHECKING from sentry_sdk._functools import wraps import chalice # type: ignore from chalice import Chalice, ChaliceViewError from chalice.app import EventSourceHandler as ChaliceEventSourceHandler # type: ignore if TYPE_CHECKING: from typing import Any from typing import Dict from typing import TypeVar from typing import Callable F = TypeVar("F", bound=Callable[..., Any]) try: from chalice import __version__ as CHALICE_VERSION except ImportError: raise DidNotEnable("Chalice is not installed") class EventSourceHandler(ChaliceEventSourceHandler): # type: ignore def __call__(self, event, context): # type: (Any, Any) -> Any hub = Hub.current client = hub.client # type: Any with hub.push_scope() as scope: with capture_internal_exceptions(): configured_time = context.get_remaining_time_in_millis() scope.add_event_processor( _make_request_event_processor(event, context, configured_time) ) try: return ChaliceEventSourceHandler.__call__(self, event, context) except Exception: exc_info = sys.exc_info() 
event, hint = event_from_exception( exc_info, client_options=client.options, mechanism={"type": "chalice", "handled": False}, ) hub.capture_event(event, hint=hint) hub.flush() reraise(*exc_info) def _get_view_function_response(app, view_function, function_args): # type: (Any, F, Any) -> F @wraps(view_function) def wrapped_view_function(**function_args): # type: (**Any) -> Any hub = Hub.current client = hub.client # type: Any with hub.push_scope() as scope: with capture_internal_exceptions(): configured_time = app.lambda_context.get_remaining_time_in_millis() scope.set_transaction_name( app.lambda_context.function_name, source=TRANSACTION_SOURCE_COMPONENT, ) scope.add_event_processor( _make_request_event_processor( app.current_request.to_dict(), app.lambda_context, configured_time, ) ) try: return view_function(**function_args) except Exception as exc: if isinstance(exc, ChaliceViewError): raise exc_info = sys.exc_info() event, hint = event_from_exception( exc_info, client_options=client.options, mechanism={"type": "chalice", "handled": False}, ) hub.capture_event(event, hint=hint) hub.flush() raise return wrapped_view_function # type: ignore class ChaliceIntegration(Integration): identifier = "chalice" @staticmethod def setup_once(): # type: () -> None version = parse_version(CHALICE_VERSION) if version is None: raise DidNotEnable("Unparsable Chalice version: {}".format(CHALICE_VERSION)) if version < (1, 20): old_get_view_function_response = Chalice._get_view_function_response else: from chalice.app import RestAPIEventHandler old_get_view_function_response = ( RestAPIEventHandler._get_view_function_response ) def sentry_event_response(app, view_function, function_args): # type: (Any, F, Dict[str, Any]) -> Any wrapped_view_function = _get_view_function_response( app, view_function, function_args ) return old_get_view_function_response( app, wrapped_view_function, function_args ) if version < (1, 20): Chalice._get_view_function_response = sentry_event_response else: RestAPIEventHandler._get_view_function_response = sentry_event_response # for everything else (like events) chalice.app.EventSourceHandler = EventSourceHandler sentry-python-1.39.2/sentry_sdk/integrations/clickhouse_driver.py000066400000000000000000000112201454744723200254320ustar00rootroot00000000000000from sentry_sdk import Hub from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.hub import _should_send_default_pii from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.tracing import Span from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.utils import capture_internal_exceptions from typing import TypeVar # Hack to get new Python features working in older versions # without introducing a hard dependency on `typing_extensions` # from: https://stackoverflow.com/a/71944042/300572 if TYPE_CHECKING: from typing import ParamSpec, Callable else: # Fake ParamSpec class ParamSpec: def __init__(self, _): self.args = None self.kwargs = None # Callable[anything] will return None class _Callable: def __getitem__(self, _): return None # Make instances Callable = _Callable() try: import clickhouse_driver # type: ignore[import-not-found] except ImportError: raise DidNotEnable("clickhouse-driver not installed.") if clickhouse_driver.VERSION < (0, 2, 0): raise DidNotEnable("clickhouse-driver >= 0.2.0 required") class ClickhouseDriverIntegration(Integration): identifier = "clickhouse_driver" @staticmethod def setup_once() -> None: # Every query is done using the Connection's `send_query` function 
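        # Enabling the integration follows the usual pattern (a minimal
        # sketch):
        #
        #     import sentry_sdk
        #     from sentry_sdk.integrations.clickhouse_driver import (
        #         ClickhouseDriverIntegration,
        #     )
        #
        #     sentry_sdk.init(dsn="...", integrations=[ClickhouseDriverIntegration()])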
clickhouse_driver.connection.Connection.send_query = _wrap_start( clickhouse_driver.connection.Connection.send_query ) # If the query contains parameters then the send_data function is used to send those parameters to clickhouse clickhouse_driver.client.Client.send_data = _wrap_send_data( clickhouse_driver.client.Client.send_data ) # Every query ends either with the Client's `receive_end_of_query` (no result expected) # or its `receive_result` (result expected) clickhouse_driver.client.Client.receive_end_of_query = _wrap_end( clickhouse_driver.client.Client.receive_end_of_query ) clickhouse_driver.client.Client.receive_result = _wrap_end( clickhouse_driver.client.Client.receive_result ) P = ParamSpec("P") T = TypeVar("T") def _wrap_start(f: Callable[P, T]) -> Callable[P, T]: def _inner(*args: P.args, **kwargs: P.kwargs) -> T: hub = Hub.current if hub.get_integration(ClickhouseDriverIntegration) is None: return f(*args, **kwargs) connection = args[0] query = args[1] query_id = args[2] if len(args) > 2 else kwargs.get("query_id") params = args[3] if len(args) > 3 else kwargs.get("params") span = hub.start_span(op=OP.DB, description=query) connection._sentry_span = span # type: ignore[attr-defined] _set_db_data(span, connection) span.set_data("query", query) if query_id: span.set_data("db.query_id", query_id) if params and _should_send_default_pii(): span.set_data("db.params", params) # run the original code ret = f(*args, **kwargs) return ret return _inner def _wrap_end(f: Callable[P, T]) -> Callable[P, T]: def _inner_end(*args: P.args, **kwargs: P.kwargs) -> T: res = f(*args, **kwargs) instance = args[0] span = instance.connection._sentry_span # type: ignore[attr-defined] if span is not None: if res is not None and _should_send_default_pii(): span.set_data("db.result", res) with capture_internal_exceptions(): span.hub.add_breadcrumb( message=span._data.pop("query"), category="query", data=span._data ) span.finish() return res return _inner_end def _wrap_send_data(f: Callable[P, T]) -> Callable[P, T]: def _inner_send_data(*args: P.args, **kwargs: P.kwargs) -> T: instance = args[0] # type: clickhouse_driver.client.Client data = args[2] span = instance.connection._sentry_span _set_db_data(span, instance.connection) if _should_send_default_pii(): db_params = span._data.get("db.params", []) db_params.extend(data) span.set_data("db.params", db_params) return f(*args, **kwargs) return _inner_send_data def _set_db_data( span: Span, connection: clickhouse_driver.connection.Connection ) -> None: span.set_data(SPANDATA.DB_SYSTEM, "clickhouse") span.set_data(SPANDATA.SERVER_ADDRESS, connection.host) span.set_data(SPANDATA.SERVER_PORT, connection.port) span.set_data(SPANDATA.DB_NAME, connection.database) span.set_data(SPANDATA.DB_USER, connection.user) sentry-python-1.39.2/sentry_sdk/integrations/cloud_resource_context.py000066400000000000000000000151431454744723200265170ustar00rootroot00000000000000import json import urllib3 from sentry_sdk.integrations import Integration from sentry_sdk.api import set_context from sentry_sdk.utils import logger from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Dict CONTEXT_TYPE = "cloud_resource" AWS_METADATA_HOST = "169.254.169.254" AWS_TOKEN_URL = "http://{}/latest/api/token".format(AWS_METADATA_HOST) AWS_METADATA_URL = "http://{}/latest/dynamic/instance-identity/document".format( AWS_METADATA_HOST ) GCP_METADATA_HOST = "metadata.google.internal" GCP_METADATA_URL = "http://{}/computeMetadata/v1/?recursive=true".format( 
GCP_METADATA_HOST ) class CLOUD_PROVIDER: # noqa: N801 """ Name of the cloud provider. see https://opentelemetry.io/docs/reference/specification/resource/semantic_conventions/cloud/ """ ALIBABA = "alibaba_cloud" AWS = "aws" AZURE = "azure" GCP = "gcp" IBM = "ibm_cloud" TENCENT = "tencent_cloud" class CLOUD_PLATFORM: # noqa: N801 """ The cloud platform. see https://opentelemetry.io/docs/reference/specification/resource/semantic_conventions/cloud/ """ AWS_EC2 = "aws_ec2" GCP_COMPUTE_ENGINE = "gcp_compute_engine" class CloudResourceContextIntegration(Integration): """ Adds cloud resource context to the Senty scope """ identifier = "cloudresourcecontext" cloud_provider = "" aws_token = "" http = urllib3.PoolManager() gcp_metadata = None def __init__(self, cloud_provider=""): # type: (str) -> None CloudResourceContextIntegration.cloud_provider = cloud_provider @classmethod def _is_aws(cls): # type: () -> bool try: r = cls.http.request( "PUT", AWS_TOKEN_URL, headers={"X-aws-ec2-metadata-token-ttl-seconds": "60"}, ) if r.status != 200: return False cls.aws_token = r.data.decode() return True except Exception: return False @classmethod def _get_aws_context(cls): # type: () -> Dict[str, str] ctx = { "cloud.provider": CLOUD_PROVIDER.AWS, "cloud.platform": CLOUD_PLATFORM.AWS_EC2, } try: r = cls.http.request( "GET", AWS_METADATA_URL, headers={"X-aws-ec2-metadata-token": cls.aws_token}, ) if r.status != 200: return ctx data = json.loads(r.data.decode("utf-8")) try: ctx["cloud.account.id"] = data["accountId"] except Exception: pass try: ctx["cloud.availability_zone"] = data["availabilityZone"] except Exception: pass try: ctx["cloud.region"] = data["region"] except Exception: pass try: ctx["host.id"] = data["instanceId"] except Exception: pass try: ctx["host.type"] = data["instanceType"] except Exception: pass except Exception: pass return ctx @classmethod def _is_gcp(cls): # type: () -> bool try: r = cls.http.request( "GET", GCP_METADATA_URL, headers={"Metadata-Flavor": "Google"}, ) if r.status != 200: return False cls.gcp_metadata = json.loads(r.data.decode("utf-8")) return True except Exception: return False @classmethod def _get_gcp_context(cls): # type: () -> Dict[str, str] ctx = { "cloud.provider": CLOUD_PROVIDER.GCP, "cloud.platform": CLOUD_PLATFORM.GCP_COMPUTE_ENGINE, } try: if cls.gcp_metadata is None: r = cls.http.request( "GET", GCP_METADATA_URL, headers={"Metadata-Flavor": "Google"}, ) if r.status != 200: return ctx cls.gcp_metadata = json.loads(r.data.decode("utf-8")) try: ctx["cloud.account.id"] = cls.gcp_metadata["project"]["projectId"] except Exception: pass try: ctx["cloud.availability_zone"] = cls.gcp_metadata["instance"][ "zone" ].split("/")[-1] except Exception: pass try: # only populated in google cloud run ctx["cloud.region"] = cls.gcp_metadata["instance"]["region"].split("/")[ -1 ] except Exception: pass try: ctx["host.id"] = cls.gcp_metadata["instance"]["id"] except Exception: pass except Exception: pass return ctx @classmethod def _get_cloud_provider(cls): # type: () -> str if cls._is_aws(): return CLOUD_PROVIDER.AWS if cls._is_gcp(): return CLOUD_PROVIDER.GCP return "" @classmethod def _get_cloud_resource_context(cls): # type: () -> Dict[str, str] cloud_provider = ( cls.cloud_provider if cls.cloud_provider != "" else CloudResourceContextIntegration._get_cloud_provider() ) if cloud_provider in context_getters.keys(): return context_getters[cloud_provider]() return {} @staticmethod def setup_once(): # type: () -> None cloud_provider = CloudResourceContextIntegration.cloud_provider 
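        # Passing an explicit provider skips autodetection, e.g. (a sketch):
        #
        #     CloudResourceContextIntegration(cloud_provider="aws")
        #
        # Any value not present in `context_getters` falls back to the
        # autodetection below.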
unsupported_cloud_provider = ( cloud_provider != "" and cloud_provider not in context_getters.keys() ) if unsupported_cloud_provider: logger.warning( "Invalid value for cloud_provider: %s (must be in %s). Falling back to autodetection...", CloudResourceContextIntegration.cloud_provider, list(context_getters.keys()), ) context = CloudResourceContextIntegration._get_cloud_resource_context() if context != {}: set_context(CONTEXT_TYPE, context) # Map with the currently supported cloud providers # mapping to functions extracting the context context_getters = { CLOUD_PROVIDER.AWS: CloudResourceContextIntegration._get_aws_context, CLOUD_PROVIDER.GCP: CloudResourceContextIntegration._get_gcp_context, } sentry-python-1.39.2/sentry_sdk/integrations/dedupe.py000066400000000000000000000022401454744723200231760ustar00rootroot00000000000000from sentry_sdk.hub import Hub from sentry_sdk.utils import ContextVar from sentry_sdk.integrations import Integration from sentry_sdk.scope import add_global_event_processor from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Optional from sentry_sdk._types import Event, Hint class DedupeIntegration(Integration): identifier = "dedupe" def __init__(self): # type: () -> None self._last_seen = ContextVar("last-seen") @staticmethod def setup_once(): # type: () -> None @add_global_event_processor def processor(event, hint): # type: (Event, Optional[Hint]) -> Optional[Event] if hint is None: return event integration = Hub.current.get_integration(DedupeIntegration) if integration is None: return event exc_info = hint.get("exc_info", None) if exc_info is None: return event exc = exc_info[1] if integration._last_seen.get(None) is exc: return None integration._last_seen.set(exc) return event sentry-python-1.39.2/sentry_sdk/integrations/django/000077500000000000000000000000001454744723200226225ustar00rootroot00000000000000sentry-python-1.39.2/sentry_sdk/integrations/django/__init__.py000066400000000000000000000571421454744723200247440ustar00rootroot00000000000000# -*- coding: utf-8 -*- from __future__ import absolute_import import inspect import sys import threading import weakref from importlib import import_module from sentry_sdk._compat import string_types, text_type from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.db.explain_plan.django import attach_explain_plan_to_span from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.scope import add_global_event_processor from sentry_sdk.serializer import add_global_repr_processor from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_URL from sentry_sdk.tracing_utils import add_query_source, record_sql_queries from sentry_sdk.utils import ( AnnotatedValue, HAS_REAL_CONTEXTVARS, CONTEXTVARS_ERROR_MESSAGE, SENSITIVE_DATA_SUBSTITUTE, logger, capture_internal_exceptions, event_from_exception, transaction_from_function, walk_exception_chain, ) from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware from sentry_sdk.integrations._wsgi_common import RequestExtractor try: from django import VERSION as DJANGO_VERSION from django.conf import settings as django_settings from django.core import signals from django.conf import settings try: from django.urls import resolve except ImportError: from django.core.urlresolvers import resolve try: from django.urls import Resolver404 except ImportError: from 
django.core.urlresolvers import Resolver404 # Only available in Django 3.0+ try: from django.core.handlers.asgi import ASGIRequest except Exception: ASGIRequest = None except ImportError: raise DidNotEnable("Django not installed") from sentry_sdk.integrations.django.transactions import LEGACY_RESOLVER from sentry_sdk.integrations.django.templates import ( get_template_frame_from_exception, patch_templates, ) from sentry_sdk.integrations.django.middleware import patch_django_middlewares from sentry_sdk.integrations.django.signals_handlers import patch_signals from sentry_sdk.integrations.django.views import patch_views if DJANGO_VERSION[:2] > (1, 8): from sentry_sdk.integrations.django.caching import patch_caching else: patch_caching = None # type: ignore if TYPE_CHECKING: from typing import Any from typing import Callable from typing import Dict from typing import Optional from typing import Union from typing import List from django.core.handlers.wsgi import WSGIRequest from django.http.response import HttpResponse from django.http.request import QueryDict from django.utils.datastructures import MultiValueDict from sentry_sdk.tracing import Span from sentry_sdk.scope import Scope from sentry_sdk.integrations.wsgi import _ScopedResponse from sentry_sdk._types import Event, Hint, EventProcessor, NotImplementedType if DJANGO_VERSION < (1, 10): def is_authenticated(request_user): # type: (Any) -> bool return request_user.is_authenticated() else: def is_authenticated(request_user): # type: (Any) -> bool return request_user.is_authenticated TRANSACTION_STYLE_VALUES = ("function_name", "url") class DjangoIntegration(Integration): identifier = "django" transaction_style = "" middleware_spans = None signals_spans = None cache_spans = None def __init__( self, transaction_style="url", middleware_spans=True, signals_spans=True, cache_spans=False, ): # type: (str, bool, bool, bool) -> None if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" % (transaction_style, TRANSACTION_STYLE_VALUES) ) self.transaction_style = transaction_style self.middleware_spans = middleware_spans self.signals_spans = signals_spans self.cache_spans = cache_spans @staticmethod def setup_once(): # type: () -> None if DJANGO_VERSION < (1, 8): raise DidNotEnable("Django 1.8 or newer is required.") install_sql_hook() # Patch in our custom middleware. 
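        # The two `ignore_logger` calls below keep Django's own 5xx log
        # records from turning into duplicate Sentry events; the WSGI handler
        # is then wrapped in SentryWsgiMiddleware so every request is traced.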
# logs an error for every 500 ignore_logger("django.server") ignore_logger("django.request") from django.core.handlers.wsgi import WSGIHandler old_app = WSGIHandler.__call__ def sentry_patched_wsgi_handler(self, environ, start_response): # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse if Hub.current.get_integration(DjangoIntegration) is None: return old_app(self, environ, start_response) bound_old_app = old_app.__get__(self, WSGIHandler) from django.conf import settings use_x_forwarded_for = settings.USE_X_FORWARDED_HOST return SentryWsgiMiddleware(bound_old_app, use_x_forwarded_for)( environ, start_response ) WSGIHandler.__call__ = sentry_patched_wsgi_handler _patch_get_response() _patch_django_asgi_handler() signals.got_request_exception.connect(_got_request_exception) @add_global_event_processor def process_django_templates(event, hint): # type: (Event, Optional[Hint]) -> Optional[Event] if hint is None: return event exc_info = hint.get("exc_info", None) if exc_info is None: return event exception = event.get("exception", None) if exception is None: return event values = exception.get("values", None) if values is None: return event for exception, (_, exc_value, _) in zip( reversed(values), walk_exception_chain(exc_info) ): frame = get_template_frame_from_exception(exc_value) if frame is not None: frames = exception.get("stacktrace", {}).get("frames", []) for i in reversed(range(len(frames))): f = frames[i] if ( f.get("function") in ("Parser.parse", "parse", "render") and f.get("module") == "django.template.base" ): i += 1 break else: i = len(frames) frames.insert(i, frame) return event @add_global_repr_processor def _django_queryset_repr(value, hint): # type: (Any, Dict[str, Any]) -> Union[NotImplementedType, str] try: # Django 1.6 can fail to import `QuerySet` when Django settings # have not yet been initialized. # # If we fail to import, return `NotImplemented`. It's at least # unlikely that we have a query set in `value` when importing # `QuerySet` fails. from django.db.models.query import QuerySet except Exception: return NotImplemented if not isinstance(value, QuerySet) or value._result_cache: return NotImplemented # Do not call Hub.get_integration here. It is intentional that # running under a new hub does not suddenly start executing # querysets. This might be surprising to the user but it's likely # less annoying. return "<%s from %s at 0x%x>" % ( value.__class__.__name__, value.__module__, id(value), ) _patch_channels() patch_django_middlewares() patch_views() patch_templates() patch_signals() if patch_caching is not None: patch_caching() _DRF_PATCHED = False _DRF_PATCH_LOCK = threading.Lock() def _patch_drf(): # type: () -> None """ Patch Django Rest Framework for more/better request data. DRF's request type is a wrapper around Django's request type. The attribute we're interested in is `request.data`, which is a cached property containing a parsed request body. Reading a request body from that property is more reliable than reading from any of Django's own properties, as those don't hold payloads in memory and therefore can only be accessed once. We patch the Django request object to include a weak backreference to the DRF request object, such that we can later use either in `DjangoRequestExtractor`. This function is not called directly on SDK setup, because importing almost any part of Django Rest Framework will try to access Django settings (where `sentry_sdk.init()` might be called from in the first place). 
Instead we run this function on every request and do the patching on the first request. """ global _DRF_PATCHED if _DRF_PATCHED: # Double-checked locking return with _DRF_PATCH_LOCK: if _DRF_PATCHED: return # We set this regardless of whether the code below succeeds or fails. # There is no point in trying to patch again on the next request. _DRF_PATCHED = True with capture_internal_exceptions(): try: from rest_framework.views import APIView # type: ignore except ImportError: pass else: old_drf_initial = APIView.initial def sentry_patched_drf_initial(self, request, *args, **kwargs): # type: (APIView, Any, *Any, **Any) -> Any with capture_internal_exceptions(): request._request._sentry_drf_request_backref = weakref.ref( request ) pass return old_drf_initial(self, request, *args, **kwargs) APIView.initial = sentry_patched_drf_initial def _patch_channels(): # type: () -> None try: from channels.http import AsgiHandler # type: ignore except ImportError: return if not HAS_REAL_CONTEXTVARS: # We better have contextvars or we're going to leak state between # requests. # # We cannot hard-raise here because channels may not be used at all in # the current process. That is the case when running traditional WSGI # workers in gunicorn+gevent and the websocket stuff in a separate # process. logger.warning( "We detected that you are using Django channels 2.0." + CONTEXTVARS_ERROR_MESSAGE ) from sentry_sdk.integrations.django.asgi import patch_channels_asgi_handler_impl patch_channels_asgi_handler_impl(AsgiHandler) def _patch_django_asgi_handler(): # type: () -> None try: from django.core.handlers.asgi import ASGIHandler except ImportError: return if not HAS_REAL_CONTEXTVARS: # We better have contextvars or we're going to leak state between # requests. # # We cannot hard-raise here because Django's ASGI stuff may not be used # at all. logger.warning( "We detected that you are using Django 3." 
+ CONTEXTVARS_ERROR_MESSAGE ) from sentry_sdk.integrations.django.asgi import patch_django_asgi_handler_impl patch_django_asgi_handler_impl(ASGIHandler) def _set_transaction_name_and_source(scope, transaction_style, request): # type: (Scope, str, WSGIRequest) -> None try: transaction_name = None if transaction_style == "function_name": fn = resolve(request.path).func transaction_name = transaction_from_function(getattr(fn, "view_class", fn)) elif transaction_style == "url": if hasattr(request, "urlconf"): transaction_name = LEGACY_RESOLVER.resolve( request.path_info, urlconf=request.urlconf ) else: transaction_name = LEGACY_RESOLVER.resolve(request.path_info) if transaction_name is None: transaction_name = request.path_info source = TRANSACTION_SOURCE_URL else: source = SOURCE_FOR_STYLE[transaction_style] scope.set_transaction_name( transaction_name, source=source, ) except Resolver404: urlconf = import_module(settings.ROOT_URLCONF) # This exception only gets thrown when transaction_style is `function_name` # So we don't check here what style is configured if hasattr(urlconf, "handler404"): handler = urlconf.handler404 if isinstance(handler, string_types): scope.transaction = handler else: scope.transaction = transaction_from_function( getattr(handler, "view_class", handler) ) except Exception: pass def _before_get_response(request): # type: (WSGIRequest) -> None hub = Hub.current integration = hub.get_integration(DjangoIntegration) if integration is None: return _patch_drf() with hub.configure_scope() as scope: # Rely on WSGI middleware to start a trace _set_transaction_name_and_source(scope, integration.transaction_style, request) scope.add_event_processor( _make_wsgi_request_event_processor(weakref.ref(request), integration) ) def _attempt_resolve_again(request, scope, transaction_style): # type: (WSGIRequest, Scope, str) -> None """ Some django middlewares overwrite request.urlconf so we need to respect that contract, so we try to resolve the url again. """ if not hasattr(request, "urlconf"): return _set_transaction_name_and_source(scope, transaction_style, request) def _after_get_response(request): # type: (WSGIRequest) -> None hub = Hub.current integration = hub.get_integration(DjangoIntegration) if integration is None or integration.transaction_style != "url": return with hub.configure_scope() as scope: _attempt_resolve_again(request, scope, integration.transaction_style) def _patch_get_response(): # type: () -> None """ patch get_response, because at that point we have the Django request object """ from django.core.handlers.base import BaseHandler old_get_response = BaseHandler.get_response def sentry_patched_get_response(self, request): # type: (Any, WSGIRequest) -> Union[HttpResponse, BaseException] _before_get_response(request) rv = old_get_response(self, request) _after_get_response(request) return rv BaseHandler.get_response = sentry_patched_get_response if hasattr(BaseHandler, "get_response_async"): from sentry_sdk.integrations.django.asgi import patch_get_response_async patch_get_response_async(BaseHandler, _before_get_response) def _make_wsgi_request_event_processor(weak_request, integration): # type: (Callable[[], WSGIRequest], DjangoIntegration) -> EventProcessor def wsgi_request_event_processor(event, hint): # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] # if the request is gone we are fine not logging the data from # it. This might happen if the processor is pushed away to # another thread. 
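        # `weak_request` is a `weakref.ref`; calling it returns the request
        # object, or None once the request has been garbage collected.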
request = weak_request() if request is None: return event django_3 = ASGIRequest is not None if django_3 and type(request) == ASGIRequest: # We have a `asgi_request_event_processor` for this. return event try: drf_request = request._sentry_drf_request_backref() if drf_request is not None: request = drf_request except AttributeError: pass with capture_internal_exceptions(): DjangoRequestExtractor(request).extract_into_event(event) if _should_send_default_pii(): with capture_internal_exceptions(): _set_user_info(request, event) return event return wsgi_request_event_processor def _got_request_exception(request=None, **kwargs): # type: (WSGIRequest, **Any) -> None hub = Hub.current integration = hub.get_integration(DjangoIntegration) if integration is not None: if request is not None and integration.transaction_style == "url": with hub.configure_scope() as scope: _attempt_resolve_again(request, scope, integration.transaction_style) # If an integration is there, a client has to be there. client = hub.client # type: Any event, hint = event_from_exception( sys.exc_info(), client_options=client.options, mechanism={"type": "django", "handled": False}, ) hub.capture_event(event, hint=hint) class DjangoRequestExtractor(RequestExtractor): def env(self): # type: () -> Dict[str, str] return self.request.META def cookies(self): # type: () -> Dict[str, Union[str, AnnotatedValue]] privacy_cookies = [ django_settings.CSRF_COOKIE_NAME, django_settings.SESSION_COOKIE_NAME, ] clean_cookies = {} # type: Dict[str, Union[str, AnnotatedValue]] for key, val in self.request.COOKIES.items(): if key in privacy_cookies: clean_cookies[key] = SENSITIVE_DATA_SUBSTITUTE else: clean_cookies[key] = val return clean_cookies def raw_data(self): # type: () -> bytes return self.request.body def form(self): # type: () -> QueryDict return self.request.POST def files(self): # type: () -> MultiValueDict return self.request.FILES def size_of_file(self, file): # type: (Any) -> int return file.size def parsed_body(self): # type: () -> Optional[Dict[str, Any]] try: return self.request.data except AttributeError: return RequestExtractor.parsed_body(self) def _set_user_info(request, event): # type: (WSGIRequest, Dict[str, Any]) -> None user_info = event.setdefault("user", {}) user = getattr(request, "user", None) if user is None or not is_authenticated(user): return try: user_info.setdefault("id", str(user.pk)) except Exception: pass try: user_info.setdefault("email", user.email) except Exception: pass try: user_info.setdefault("username", user.get_username()) except Exception: pass def install_sql_hook(): # type: () -> None """If installed this causes Django's queries to be captured.""" try: from django.db.backends.utils import CursorWrapper except ImportError: from django.db.backends.util import CursorWrapper try: # django 1.6 and 1.7 compatability from django.db.backends import BaseDatabaseWrapper except ImportError: # django 1.8 or later from django.db.backends.base.base import BaseDatabaseWrapper try: real_execute = CursorWrapper.execute real_executemany = CursorWrapper.executemany real_connect = BaseDatabaseWrapper.connect except AttributeError: # This won't work on Django versions < 1.6 return def execute(self, sql, params=None): # type: (CursorWrapper, Any, Optional[Any]) -> Any hub = Hub.current if hub.get_integration(DjangoIntegration) is None: return real_execute(self, sql, params) with record_sql_queries( hub, self.cursor, sql, params, paramstyle="format", executemany=False ) as span: _set_db_data(span, self) if hub.client: 
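                # Explain plans are an opt-in experiment; the option is read
                # from `_experiments`, roughly (a sketch; the exact config
                # keys are defined by the explain-plan module, not shown here):
                #
                #     sentry_sdk.init(
                #         dsn="...",
                #         _experiments={"attach_explain_plans": {...}},
                #     )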
options = hub.client.options["_experiments"].get("attach_explain_plans") if options is not None: attach_explain_plan_to_span( span, self.cursor.connection, sql, params, self.mogrify, options, ) result = real_execute(self, sql, params) with capture_internal_exceptions(): add_query_source(hub, span) return result def executemany(self, sql, param_list): # type: (CursorWrapper, Any, List[Any]) -> Any hub = Hub.current if hub.get_integration(DjangoIntegration) is None: return real_executemany(self, sql, param_list) with record_sql_queries( hub, self.cursor, sql, param_list, paramstyle="format", executemany=True ) as span: _set_db_data(span, self) result = real_executemany(self, sql, param_list) with capture_internal_exceptions(): add_query_source(hub, span) return result def connect(self): # type: (BaseDatabaseWrapper) -> None hub = Hub.current if hub.get_integration(DjangoIntegration) is None: return real_connect(self) with capture_internal_exceptions(): hub.add_breadcrumb(message="connect", category="query") with hub.start_span(op=OP.DB, description="connect") as span: _set_db_data(span, self) return real_connect(self) CursorWrapper.execute = execute CursorWrapper.executemany = executemany BaseDatabaseWrapper.connect = connect ignore_logger("django.db.backends") def _set_db_data(span, cursor_or_db): # type: (Span, Any) -> None db = cursor_or_db.db if hasattr(cursor_or_db, "db") else cursor_or_db vendor = db.vendor span.set_data(SPANDATA.DB_SYSTEM, vendor) # Some custom backends override `__getattr__`, making it look like `cursor_or_db` # actually has a `connection` and the `connection` has a `get_dsn_parameters` # attribute, only to throw an error once you actually want to call it. # Hence the `inspect` check whether `get_dsn_parameters` is an actual callable # function. is_psycopg2 = ( hasattr(cursor_or_db, "connection") and hasattr(cursor_or_db.connection, "get_dsn_parameters") and inspect.isroutine(cursor_or_db.connection.get_dsn_parameters) ) if is_psycopg2: connection_params = cursor_or_db.connection.get_dsn_parameters() else: is_psycopg3 = ( hasattr(cursor_or_db, "connection") and hasattr(cursor_or_db.connection, "info") and hasattr(cursor_or_db.connection.info, "get_parameters") and inspect.isroutine(cursor_or_db.connection.info.get_parameters) ) if is_psycopg3: connection_params = cursor_or_db.connection.info.get_parameters() else: connection_params = db.get_connection_params() db_name = connection_params.get("dbname") or connection_params.get("database") if db_name is not None: span.set_data(SPANDATA.DB_NAME, db_name) server_address = connection_params.get("host") if server_address is not None: span.set_data(SPANDATA.SERVER_ADDRESS, server_address) server_port = connection_params.get("port") if server_port is not None: span.set_data(SPANDATA.SERVER_PORT, text_type(server_port)) server_socket_address = connection_params.get("unix_socket") if server_socket_address is not None: span.set_data(SPANDATA.SERVER_SOCKET_ADDRESS, server_socket_address) sentry-python-1.39.2/sentry_sdk/integrations/django/asgi.py000066400000000000000000000156331454744723200241270ustar00rootroot00000000000000""" Instrumentation for Django 3.0 Since this file contains `async def` it is conditionally imported in `sentry_sdk.integrations.django` (depending on the existence of `django.core.handlers.asgi`. 
""" import asyncio from django.core.handlers.wsgi import WSGIRequest from sentry_sdk import Hub, _functools from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.consts import OP from sentry_sdk.hub import _should_send_default_pii from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from sentry_sdk.utils import capture_internal_exceptions if TYPE_CHECKING: from collections.abc import Callable from typing import Any, Union from django.core.handlers.asgi import ASGIRequest from django.http.response import HttpResponse from sentry_sdk._types import EventProcessor def _make_asgi_request_event_processor(request): # type: (ASGIRequest) -> EventProcessor def asgi_request_event_processor(event, hint): # type: (dict[str, Any], dict[str, Any]) -> dict[str, Any] # if the request is gone we are fine not logging the data from # it. This might happen if the processor is pushed away to # another thread. from sentry_sdk.integrations.django import ( DjangoRequestExtractor, _set_user_info, ) if request is None: return event if type(request) == WSGIRequest: return event with capture_internal_exceptions(): DjangoRequestExtractor(request).extract_into_event(event) if _should_send_default_pii(): with capture_internal_exceptions(): _set_user_info(request, event) return event return asgi_request_event_processor def patch_django_asgi_handler_impl(cls): # type: (Any) -> None from sentry_sdk.integrations.django import DjangoIntegration old_app = cls.__call__ async def sentry_patched_asgi_handler(self, scope, receive, send): # type: (Any, Any, Any, Any) -> Any hub = Hub.current integration = hub.get_integration(DjangoIntegration) if integration is None: return await old_app(self, scope, receive, send) middleware = SentryAsgiMiddleware( old_app.__get__(self, cls), unsafe_context_data=True )._run_asgi3 return await middleware(scope, receive, send) cls.__call__ = sentry_patched_asgi_handler modern_django_asgi_support = hasattr(cls, "create_request") if modern_django_asgi_support: old_create_request = cls.create_request def sentry_patched_create_request(self, *args, **kwargs): # type: (Any, *Any, **Any) -> Any hub = Hub.current integration = hub.get_integration(DjangoIntegration) if integration is None: return old_create_request(self, *args, **kwargs) with hub.configure_scope() as scope: request, error_response = old_create_request(self, *args, **kwargs) scope.add_event_processor(_make_asgi_request_event_processor(request)) return request, error_response cls.create_request = sentry_patched_create_request def patch_get_response_async(cls, _before_get_response): # type: (Any, Any) -> None old_get_response_async = cls.get_response_async async def sentry_patched_get_response_async(self, request): # type: (Any, Any) -> Union[HttpResponse, BaseException] _before_get_response(request) return await old_get_response_async(self, request) cls.get_response_async = sentry_patched_get_response_async def patch_channels_asgi_handler_impl(cls): # type: (Any) -> None import channels # type: ignore from sentry_sdk.integrations.django import DjangoIntegration if channels.__version__ < "3.0.0": old_app = cls.__call__ async def sentry_patched_asgi_handler(self, receive, send): # type: (Any, Any, Any) -> Any if Hub.current.get_integration(DjangoIntegration) is None: return await old_app(self, receive, send) middleware = SentryAsgiMiddleware( lambda _scope: old_app.__get__(self, cls), unsafe_context_data=True ) return await middleware(self.scope)(receive, send) cls.__call__ = sentry_patched_asgi_handler else: # The ASGI handler 
in Channels >= 3 has the same signature as # the Django handler. patch_django_asgi_handler_impl(cls) def wrap_async_view(hub, callback): # type: (Hub, Any) -> Any @_functools.wraps(callback) async def sentry_wrapped_callback(request, *args, **kwargs): # type: (Any, *Any, **Any) -> Any with hub.configure_scope() as sentry_scope: if sentry_scope.profile is not None: sentry_scope.profile.update_active_thread_id() with hub.start_span( op=OP.VIEW_RENDER, description=request.resolver_match.view_name ): return await callback(request, *args, **kwargs) return sentry_wrapped_callback def _asgi_middleware_mixin_factory(_check_middleware_span): # type: (Callable[..., Any]) -> Any """ Mixin class factory that generates a middleware mixin for handling requests in async mode. """ class SentryASGIMixin: if TYPE_CHECKING: _inner = None def __init__(self, get_response): # type: (Callable[..., Any]) -> None self.get_response = get_response self._acall_method = None self._async_check() def _async_check(self): # type: () -> None """ If get_response is a coroutine function, turns us into async mode so a thread is not consumed during a whole request. Taken from django.utils.deprecation::MiddlewareMixin._async_check """ if asyncio.iscoroutinefunction(self.get_response): self._is_coroutine = asyncio.coroutines._is_coroutine # type: ignore def async_route_check(self): # type: () -> bool """ Function that checks if we are in async mode, and if we are forwards the handling of requests to __acall__ """ return asyncio.iscoroutinefunction(self.get_response) async def __acall__(self, *args, **kwargs): # type: (*Any, **Any) -> Any f = self._acall_method if f is None: if hasattr(self._inner, "__acall__"): self._acall_method = f = self._inner.__acall__ # type: ignore else: self._acall_method = f = self._inner middleware_span = _check_middleware_span(old_method=f) if middleware_span is None: return await f(*args, **kwargs) with middleware_span: return await f(*args, **kwargs) return SentryASGIMixin sentry-python-1.39.2/sentry_sdk/integrations/django/caching.py000066400000000000000000000071541454744723200245770ustar00rootroot00000000000000import functools from typing import TYPE_CHECKING from django import VERSION as DJANGO_VERSION from django.core.cache import CacheHandler from sentry_sdk import Hub from sentry_sdk.consts import OP, SPANDATA from sentry_sdk._compat import text_type if TYPE_CHECKING: from typing import Any from typing import Callable METHODS_TO_INSTRUMENT = [ "get", "get_many", ] def _get_span_description(method_name, args, kwargs): # type: (str, Any, Any) -> str description = "{} ".format(method_name) if args is not None and len(args) >= 1: description += text_type(args[0]) elif kwargs is not None and "key" in kwargs: description += text_type(kwargs["key"]) return description def _patch_cache_method(cache, method_name): # type: (CacheHandler, str) -> None from sentry_sdk.integrations.django import DjangoIntegration def _instrument_call(cache, method_name, original_method, args, kwargs): # type: (CacheHandler, str, Callable[..., Any], Any, Any) -> Any hub = Hub.current integration = hub.get_integration(DjangoIntegration) if integration is None or not integration.cache_spans: return original_method(*args, **kwargs) description = _get_span_description(method_name, args, kwargs) with hub.start_span(op=OP.CACHE_GET_ITEM, description=description) as span: value = original_method(*args, **kwargs) if value: span.set_data(SPANDATA.CACHE_HIT, True) size = len(text_type(value)) span.set_data(SPANDATA.CACHE_ITEM_SIZE, 
size) else: span.set_data(SPANDATA.CACHE_HIT, False) return value original_method = getattr(cache, method_name) @functools.wraps(original_method) def sentry_method(*args, **kwargs): # type: (*Any, **Any) -> Any return _instrument_call(cache, method_name, original_method, args, kwargs) setattr(cache, method_name, sentry_method) def _patch_cache(cache): # type: (CacheHandler) -> None if not hasattr(cache, "_sentry_patched"): for method_name in METHODS_TO_INSTRUMENT: _patch_cache_method(cache, method_name) cache._sentry_patched = True def patch_caching(): # type: () -> None from sentry_sdk.integrations.django import DjangoIntegration if not hasattr(CacheHandler, "_sentry_patched"): if DJANGO_VERSION < (3, 2): original_get_item = CacheHandler.__getitem__ @functools.wraps(original_get_item) def sentry_get_item(self, alias): # type: (CacheHandler, str) -> Any cache = original_get_item(self, alias) integration = Hub.current.get_integration(DjangoIntegration) if integration and integration.cache_spans: _patch_cache(cache) return cache CacheHandler.__getitem__ = sentry_get_item CacheHandler._sentry_patched = True else: original_create_connection = CacheHandler.create_connection @functools.wraps(original_create_connection) def sentry_create_connection(self, alias): # type: (CacheHandler, str) -> Any cache = original_create_connection(self, alias) integration = Hub.current.get_integration(DjangoIntegration) if integration and integration.cache_spans: _patch_cache(cache) return cache CacheHandler.create_connection = sentry_create_connection CacheHandler._sentry_patched = True sentry-python-1.39.2/sentry_sdk/integrations/django/middleware.py000066400000000000000000000135171454744723200253200ustar00rootroot00000000000000""" Create spans from Django middleware invocations """ from django import VERSION as DJANGO_VERSION from sentry_sdk import Hub from sentry_sdk._functools import wraps from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.consts import OP from sentry_sdk.utils import ( ContextVar, transaction_from_function, capture_internal_exceptions, ) if TYPE_CHECKING: from typing import Any from typing import Callable from typing import Optional from typing import TypeVar from sentry_sdk.tracing import Span F = TypeVar("F", bound=Callable[..., Any]) _import_string_should_wrap_middleware = ContextVar( "import_string_should_wrap_middleware" ) if DJANGO_VERSION < (1, 7): import_string_name = "import_by_path" else: import_string_name = "import_string" if DJANGO_VERSION < (3, 1): _asgi_middleware_mixin_factory = lambda _: object else: from .asgi import _asgi_middleware_mixin_factory def patch_django_middlewares(): # type: () -> None from django.core.handlers import base old_import_string = getattr(base, import_string_name) def sentry_patched_import_string(dotted_path): # type: (str) -> Any rv = old_import_string(dotted_path) if _import_string_should_wrap_middleware.get(None): rv = _wrap_middleware(rv, dotted_path) return rv setattr(base, import_string_name, sentry_patched_import_string) old_load_middleware = base.BaseHandler.load_middleware def sentry_patched_load_middleware(*args, **kwargs): # type: (Any, Any) -> Any _import_string_should_wrap_middleware.set(True) try: return old_load_middleware(*args, **kwargs) finally: _import_string_should_wrap_middleware.set(False) base.BaseHandler.load_middleware = sentry_patched_load_middleware def _wrap_middleware(middleware, middleware_name): # type: (Any, str) -> Any from sentry_sdk.integrations.django import DjangoIntegration def 
_check_middleware_span(old_method): # type: (Callable[..., Any]) -> Optional[Span] hub = Hub.current integration = hub.get_integration(DjangoIntegration) if integration is None or not integration.middleware_spans: return None function_name = transaction_from_function(old_method) description = middleware_name function_basename = getattr(old_method, "__name__", None) if function_basename: description = "{}.{}".format(description, function_basename) middleware_span = hub.start_span( op=OP.MIDDLEWARE_DJANGO, description=description ) middleware_span.set_tag("django.function_name", function_name) middleware_span.set_tag("django.middleware_name", middleware_name) return middleware_span def _get_wrapped_method(old_method): # type: (F) -> F with capture_internal_exceptions(): def sentry_wrapped_method(*args, **kwargs): # type: (*Any, **Any) -> Any middleware_span = _check_middleware_span(old_method) if middleware_span is None: return old_method(*args, **kwargs) with middleware_span: return old_method(*args, **kwargs) try: # fails for __call__ of function on Python 2 (see py2.7-django-1.11) sentry_wrapped_method = wraps(old_method)(sentry_wrapped_method) # Necessary for Django 3.1 sentry_wrapped_method.__self__ = old_method.__self__ # type: ignore except Exception: pass return sentry_wrapped_method # type: ignore return old_method class SentryWrappingMiddleware( _asgi_middleware_mixin_factory(_check_middleware_span) # type: ignore ): async_capable = getattr(middleware, "async_capable", False) def __init__(self, get_response=None, *args, **kwargs): # type: (Optional[Callable[..., Any]], *Any, **Any) -> None if get_response: self._inner = middleware(get_response, *args, **kwargs) else: self._inner = middleware(*args, **kwargs) self.get_response = get_response self._call_method = None if self.async_capable: super(SentryWrappingMiddleware, self).__init__(get_response) # We need correct behavior for `hasattr()`, which we can only determine # when we have an instance of the middleware we're wrapping. 
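# --- Illustrative sketch, not part of the original module ---
# The wrapping in this class lazily instruments each middleware hook
# (process_request, process_view, ...) in its own span. A minimal
# standalone version of that span-per-call pattern, assuming an already
# configured SDK; `old_method` and `middleware_name` are illustrative
# parameter names, not module API:
def _sketch_wrap_in_middleware_span(old_method, middleware_name):
    # type: (Callable[..., Any], str) -> Callable[..., Any]
    def wrapped(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        hub = Hub.current
        integration = hub.get_integration(DjangoIntegration)
        if integration is None or not integration.middleware_spans:
            # Tracing disabled: call straight through without a span.
            return old_method(*args, **kwargs)
        with hub.start_span(
            op=OP.MIDDLEWARE_DJANGO, description=middleware_name
        ) as span:
            span.set_tag("django.middleware_name", middleware_name)
            return old_method(*args, **kwargs)

    return wrapped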
def __getattr__(self, method_name): # type: (str) -> Any if method_name not in ( "process_request", "process_view", "process_template_response", "process_response", "process_exception", ): raise AttributeError() old_method = getattr(self._inner, method_name) rv = _get_wrapped_method(old_method) self.__dict__[method_name] = rv return rv def __call__(self, *args, **kwargs): # type: (*Any, **Any) -> Any if hasattr(self, "async_route_check") and self.async_route_check(): return self.__acall__(*args, **kwargs) f = self._call_method if f is None: self._call_method = f = self._inner.__call__ middleware_span = _check_middleware_span(old_method=f) if middleware_span is None: return f(*args, **kwargs) with middleware_span: return f(*args, **kwargs) for attr in ( "__name__", "__module__", "__qualname__", ): if hasattr(middleware, attr): setattr(SentryWrappingMiddleware, attr, getattr(middleware, attr)) return SentryWrappingMiddleware sentry-python-1.39.2/sentry_sdk/integrations/django/signals_handlers.py000066400000000000000000000057561454744723200265310ustar00rootroot00000000000000# -*- coding: utf-8 -*- from __future__ import absolute_import from django.dispatch import Signal from sentry_sdk import Hub from sentry_sdk._functools import wraps from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.consts import OP from sentry_sdk.integrations.django import DJANGO_VERSION if TYPE_CHECKING: from collections.abc import Callable from typing import Any, Union def _get_receiver_name(receiver): # type: (Callable[..., Any]) -> str name = "" if hasattr(receiver, "__qualname__"): name = receiver.__qualname__ elif hasattr(receiver, "__name__"): # Python 2.7 has no __qualname__ name = receiver.__name__ elif hasattr( receiver, "func" ): # certain functions (like partials) dont have a name if hasattr(receiver, "func") and hasattr(receiver.func, "__name__"): name = "partial()" if ( name == "" ): # In case nothing was found, return the string representation (this is the slowest case) return str(receiver) if hasattr(receiver, "__module__"): # prepend with module, if there is one name = receiver.__module__ + "." + name return name def patch_signals(): # type: () -> None """ Patch django signal receivers to create a span. This only wraps sync receivers. Django>=5.0 introduced async receivers, but since we don't create transactions for ASGI Django, we don't wrap them. 
""" from sentry_sdk.integrations.django import DjangoIntegration old_live_receivers = Signal._live_receivers def _sentry_live_receivers(self, sender): # type: (Signal, Any) -> Union[tuple[list[Callable[..., Any]], list[Callable[..., Any]]], list[Callable[..., Any]]] hub = Hub.current if DJANGO_VERSION >= (5, 0): sync_receivers, async_receivers = old_live_receivers(self, sender) else: sync_receivers = old_live_receivers(self, sender) async_receivers = [] def sentry_sync_receiver_wrapper(receiver): # type: (Callable[..., Any]) -> Callable[..., Any] @wraps(receiver) def wrapper(*args, **kwargs): # type: (Any, Any) -> Any signal_name = _get_receiver_name(receiver) with hub.start_span( op=OP.EVENT_DJANGO, description=signal_name, ) as span: span.set_data("signal", signal_name) return receiver(*args, **kwargs) return wrapper integration = hub.get_integration(DjangoIntegration) if integration and integration.signals_spans: for idx, receiver in enumerate(sync_receivers): sync_receivers[idx] = sentry_sync_receiver_wrapper(receiver) if DJANGO_VERSION >= (5, 0): return sync_receivers, async_receivers else: return sync_receivers Signal._live_receivers = _sentry_live_receivers sentry-python-1.39.2/sentry_sdk/integrations/django/templates.py000066400000000000000000000131011454744723200251660ustar00rootroot00000000000000from django.template import TemplateSyntaxError from django.utils.safestring import mark_safe from django import VERSION as DJANGO_VERSION from sentry_sdk import _functools, Hub from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.consts import OP if TYPE_CHECKING: from typing import Any from typing import Dict from typing import Optional from typing import Iterator from typing import Tuple try: # support Django 1.9 from django.template.base import Origin except ImportError: # backward compatibility from django.template.loader import LoaderOrigin as Origin def get_template_frame_from_exception(exc_value): # type: (Optional[BaseException]) -> Optional[Dict[str, Any]] # As of Django 1.9 or so the new template debug thing showed up. 
if hasattr(exc_value, "template_debug"): return _get_template_frame_from_debug(exc_value.template_debug) # type: ignore # As of r16833 (Django) all exceptions may contain a # ``django_template_source`` attribute (rather than the legacy # ``TemplateSyntaxError.source`` check) if hasattr(exc_value, "django_template_source"): return _get_template_frame_from_source( exc_value.django_template_source # type: ignore ) if isinstance(exc_value, TemplateSyntaxError) and hasattr(exc_value, "source"): source = exc_value.source if isinstance(source, (tuple, list)) and isinstance(source[0], Origin): return _get_template_frame_from_source(source) # type: ignore return None def _get_template_name_description(template_name): # type: (str) -> str if isinstance(template_name, (list, tuple)): if template_name: return "[{}, ...]".format(template_name[0]) else: return template_name def patch_templates(): # type: () -> None from django.template.response import SimpleTemplateResponse from sentry_sdk.integrations.django import DjangoIntegration real_rendered_content = SimpleTemplateResponse.rendered_content @property # type: ignore def rendered_content(self): # type: (SimpleTemplateResponse) -> str hub = Hub.current if hub.get_integration(DjangoIntegration) is None: return real_rendered_content.fget(self) with hub.start_span( op=OP.TEMPLATE_RENDER, description=_get_template_name_description(self.template_name), ) as span: span.set_data("context", self.context_data) return real_rendered_content.fget(self) SimpleTemplateResponse.rendered_content = rendered_content if DJANGO_VERSION < (1, 7): return import django.shortcuts real_render = django.shortcuts.render @_functools.wraps(real_render) def render(request, template_name, context=None, *args, **kwargs): # type: (django.http.HttpRequest, str, Optional[Dict[str, Any]], *Any, **Any) -> django.http.HttpResponse hub = Hub.current if hub.get_integration(DjangoIntegration) is None: return real_render(request, template_name, context, *args, **kwargs) # Inject trace meta tags into template context context = context or {} if "sentry_trace_meta" not in context: context["sentry_trace_meta"] = mark_safe(hub.trace_propagation_meta()) with hub.start_span( op=OP.TEMPLATE_RENDER, description=_get_template_name_description(template_name), ) as span: span.set_data("context", context) return real_render(request, template_name, context, *args, **kwargs) django.shortcuts.render = render def _get_template_frame_from_debug(debug): # type: (Dict[str, Any]) -> Dict[str, Any] if debug is None: return None lineno = debug["line"] filename = debug["name"] if filename is None: filename = "" pre_context = [] post_context = [] context_line = None for i, line in debug["source_lines"]: if i < lineno: pre_context.append(line) elif i > lineno: post_context.append(line) else: context_line = line return { "filename": filename, "lineno": lineno, "pre_context": pre_context[-5:], "post_context": post_context[:5], "context_line": context_line, "in_app": True, } def _linebreak_iter(template_source): # type: (str) -> Iterator[int] yield 0 p = template_source.find("\n") while p >= 0: yield p + 1 p = template_source.find("\n", p + 1) def _get_template_frame_from_source(source): # type: (Tuple[Origin, Tuple[int, int]]) -> Optional[Dict[str, Any]] if not source: return None origin, (start, end) = source filename = getattr(origin, "loadname", None) if filename is None: filename = "" template_source = origin.reload() lineno = None upto = 0 pre_context = [] post_context = [] context_line = None for num, next in 
enumerate(_linebreak_iter(template_source)): line = template_source[upto:next] if start >= upto and end <= next: lineno = num context_line = line elif lineno is None: pre_context.append(line) else: post_context.append(line) upto = next if context_line is None or lineno is None: return None return { "filename": filename, "lineno": lineno, "pre_context": pre_context[-5:], "post_context": post_context[:5], "context_line": context_line, } sentry-python-1.39.2/sentry_sdk/integrations/django/transactions.py000066400000000000000000000116151454744723200257100ustar00rootroot00000000000000""" Copied from raven-python. Despite being called "legacy" in some places this resolver is very much still in use. """ from __future__ import absolute_import import re from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from django.urls.resolvers import URLResolver from typing import Dict from typing import List from typing import Optional from django.urls.resolvers import URLPattern from typing import Tuple from typing import Union from re import Pattern from django import VERSION as DJANGO_VERSION if DJANGO_VERSION >= (2, 0): from django.urls.resolvers import RoutePattern else: RoutePattern = None try: from django.urls import get_resolver except ImportError: from django.core.urlresolvers import get_resolver def get_regex(resolver_or_pattern): # type: (Union[URLPattern, URLResolver]) -> Pattern[str] """Utility method for django's deprecated resolver.regex""" try: regex = resolver_or_pattern.regex except AttributeError: regex = resolver_or_pattern.pattern.regex return regex class RavenResolver(object): _new_style_group_matcher = re.compile( r"<(?:([^>:]+):)?([^>]+)>" ) # https://github.com/django/django/blob/21382e2743d06efbf5623e7c9b6dccf2a325669b/django/urls/resolvers.py#L245-L247 _optional_group_matcher = re.compile(r"\(\?\:([^\)]+)\)") _named_group_matcher = re.compile(r"\(\?P<(\w+)>[^\)]+\)+") _non_named_group_matcher = re.compile(r"\([^\)]+\)") # [foo|bar|baz] _either_option_matcher = re.compile(r"\[([^\]]+)\|([^\]]+)\]") _camel_re = re.compile(r"([A-Z]+)([a-z])") _cache = {} # type: Dict[URLPattern, str] def _simplify(self, pattern): # type: (Union[URLPattern, URLResolver]) -> str r""" Clean up urlpattern regexes into something readable by humans: From: > "^(?P\w+)/athletes/(?P\w+)/$" To: > "{sport_slug}/athletes/{athlete_slug}/" """ # "new-style" path patterns can be parsed directly without turning them # into regexes first if ( RoutePattern is not None and hasattr(pattern, "pattern") and isinstance(pattern.pattern, RoutePattern) ): return self._new_style_group_matcher.sub( lambda m: "{%s}" % m.group(2), pattern.pattern._route ) result = get_regex(pattern).pattern # remove optional params # TODO(dcramer): it'd be nice to change these into [%s] but it currently # conflicts with the other rules because we're doing regexp matches # rather than parsing tokens result = self._optional_group_matcher.sub(lambda m: "%s" % m.group(1), result) # handle named groups first result = self._named_group_matcher.sub(lambda m: "{%s}" % m.group(1), result) # handle non-named groups result = self._non_named_group_matcher.sub("{var}", result) # handle optional params result = self._either_option_matcher.sub(lambda m: m.group(1), result) # clean up any outstanding regex-y characters. 
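# --- Illustrative sketch, not part of the original module ---
# End to end, the substitutions in this method implement the docstring's
# example. A self-contained sketch of the named-group step plus the
# final character cleanup, using a made-up pattern:
def _sketch_simplify_named_groups():
    # type: () -> str
    raw = r"^(?P<sport_slug>\w+)/athletes/(?P<athlete_slug>\w+)/$"
    named_group = re.compile(r"\(\?P<(\w+)>[^\)]+\)+")
    simplified = named_group.sub(lambda m: "{%s}" % m.group(1), raw)
    # simplified == "^{sport_slug}/athletes/{athlete_slug}/$"
    return simplified.replace("^", "").replace("$", "")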
result = ( result.replace("^", "") .replace("$", "") .replace("?", "") .replace("\\A", "") .replace("\\Z", "") .replace("//", "/") .replace("\\", "") ) return result def _resolve(self, resolver, path, parents=None): # type: (URLResolver, str, Optional[List[URLResolver]]) -> Optional[str] match = get_regex(resolver).search(path) # Django < 2.0 if not match: return None if parents is None: parents = [resolver] elif resolver not in parents: parents = parents + [resolver] new_path = path[match.end() :] for pattern in resolver.url_patterns: # this is an include() if not pattern.callback: match_ = self._resolve(pattern, new_path, parents) if match_: return match_ continue elif not get_regex(pattern).search(new_path): continue try: return self._cache[pattern] except KeyError: pass prefix = "".join(self._simplify(p) for p in parents) result = prefix + self._simplify(pattern) if not result.startswith("/"): result = "/" + result self._cache[pattern] = result return result return None def resolve( self, path, # type: str urlconf=None, # type: Union[None, Tuple[URLPattern, URLPattern, URLResolver], Tuple[URLPattern]] ): # type: (...) -> Optional[str] resolver = get_resolver(urlconf) match = self._resolve(resolver, path) return match LEGACY_RESOLVER = RavenResolver() sentry-python-1.39.2/sentry_sdk/integrations/django/views.py000066400000000000000000000055351454744723200243410ustar00rootroot00000000000000from sentry_sdk.consts import OP from sentry_sdk.hub import Hub from sentry_sdk._types import TYPE_CHECKING from sentry_sdk import _functools if TYPE_CHECKING: from typing import Any try: from asyncio import iscoroutinefunction except ImportError: iscoroutinefunction = None # type: ignore try: from sentry_sdk.integrations.django.asgi import wrap_async_view except (ImportError, SyntaxError): wrap_async_view = None # type: ignore def patch_views(): # type: () -> None from django.core.handlers.base import BaseHandler from django.template.response import SimpleTemplateResponse from sentry_sdk.integrations.django import DjangoIntegration old_make_view_atomic = BaseHandler.make_view_atomic old_render = SimpleTemplateResponse.render def sentry_patched_render(self): # type: (SimpleTemplateResponse) -> Any hub = Hub.current with hub.start_span( op=OP.VIEW_RESPONSE_RENDER, description="serialize response" ): return old_render(self) @_functools.wraps(old_make_view_atomic) def sentry_patched_make_view_atomic(self, *args, **kwargs): # type: (Any, *Any, **Any) -> Any callback = old_make_view_atomic(self, *args, **kwargs) # XXX: The wrapper function is created for every request. Find more # efficient way to wrap views (or build a cache?) 
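# --- Hedged usage illustration, not part of the original module ---
# The RavenResolver from the transactions module above turns raw request
# paths into parameterized transaction names. Usage sketch (the path and
# the resulting route are made-up values):
#
#   from sentry_sdk.integrations.django.transactions import LEGACY_RESOLVER
#
#   LEGACY_RESOLVER.resolve("/sports/soccer/athletes/10/")
#   # -> e.g. "/sports/{sport_slug}/athletes/{athlete_slug}/",
#   #    or None when no urlpattern matches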
hub = Hub.current integration = hub.get_integration(DjangoIntegration) if integration is not None and integration.middleware_spans: is_async_view = ( iscoroutinefunction is not None and wrap_async_view is not None and iscoroutinefunction(callback) ) if is_async_view: sentry_wrapped_callback = wrap_async_view(hub, callback) else: sentry_wrapped_callback = _wrap_sync_view(hub, callback) else: sentry_wrapped_callback = callback return sentry_wrapped_callback SimpleTemplateResponse.render = sentry_patched_render BaseHandler.make_view_atomic = sentry_patched_make_view_atomic def _wrap_sync_view(hub, callback): # type: (Hub, Any) -> Any @_functools.wraps(callback) def sentry_wrapped_callback(request, *args, **kwargs): # type: (Any, *Any, **Any) -> Any with hub.configure_scope() as sentry_scope: # set the active thread id to the handler thread for sync views # this isn't necessary for async views since that runs on main if sentry_scope.profile is not None: sentry_scope.profile.update_active_thread_id() with hub.start_span( op=OP.VIEW_RENDER, description=request.resolver_match.view_name ): return callback(request, *args, **kwargs) return sentry_wrapped_callback sentry-python-1.39.2/sentry_sdk/integrations/excepthook.py000066400000000000000000000043241454744723200241060ustar00rootroot00000000000000import sys from sentry_sdk.hub import Hub from sentry_sdk.utils import capture_internal_exceptions, event_from_exception from sentry_sdk.integrations import Integration from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Callable from typing import Any from typing import Type from typing import Optional from types import TracebackType Excepthook = Callable[ [Type[BaseException], BaseException, Optional[TracebackType]], Any, ] class ExcepthookIntegration(Integration): identifier = "excepthook" always_run = False def __init__(self, always_run=False): # type: (bool) -> None if not isinstance(always_run, bool): raise ValueError( "Invalid value for always_run: %s (must be type boolean)" % (always_run,) ) self.always_run = always_run @staticmethod def setup_once(): # type: () -> None sys.excepthook = _make_excepthook(sys.excepthook) def _make_excepthook(old_excepthook): # type: (Excepthook) -> Excepthook def sentry_sdk_excepthook(type_, value, traceback): # type: (Type[BaseException], BaseException, Optional[TracebackType]) -> None hub = Hub.current integration = hub.get_integration(ExcepthookIntegration) if integration is not None and _should_send(integration.always_run): # If an integration is there, a client has to be there. client = hub.client # type: Any with capture_internal_exceptions(): event, hint = event_from_exception( (type_, value, traceback), client_options=client.options, mechanism={"type": "excepthook", "handled": False}, ) hub.capture_event(event, hint=hint) return old_excepthook(type_, value, traceback) return sentry_sdk_excepthook def _should_send(always_run=False): # type: (bool) -> bool if always_run: return True if hasattr(sys, "ps1"): # Disable the excepthook for interactive Python shells, otherwise # every typo gets sent to Sentry. 
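# --- Hedged usage illustration, not part of the original module ---
# Reporting from interactive shells is opt-in via the constructor flag
# validated above. The DSN below is a placeholder:
#
#   import sentry_sdk
#   from sentry_sdk.integrations.excepthook import ExcepthookIntegration
#
#   sentry_sdk.init(
#       dsn="https://<key>@<org-id>.ingest.sentry.io/<project-id>",
#       integrations=[ExcepthookIntegration(always_run=True)],
#   )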
return False return True sentry-python-1.39.2/sentry_sdk/integrations/executing.py000066400000000000000000000037711454744723200237350ustar00rootroot00000000000000from __future__ import absolute_import from sentry_sdk import Hub from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.scope import add_global_event_processor from sentry_sdk.utils import walk_exception_chain, iter_stacks if TYPE_CHECKING: from typing import Optional from sentry_sdk._types import Event, Hint try: import executing except ImportError: raise DidNotEnable("executing is not installed") class ExecutingIntegration(Integration): identifier = "executing" @staticmethod def setup_once(): # type: () -> None @add_global_event_processor def add_executing_info(event, hint): # type: (Event, Optional[Hint]) -> Optional[Event] if Hub.current.get_integration(ExecutingIntegration) is None: return event if hint is None: return event exc_info = hint.get("exc_info", None) if exc_info is None: return event exception = event.get("exception", None) if exception is None: return event values = exception.get("values", None) if values is None: return event for exception, (_exc_type, _exc_value, exc_tb) in zip( reversed(values), walk_exception_chain(exc_info) ): sentry_frames = [ frame for frame in exception.get("stacktrace", {}).get("frames", []) if frame.get("function") ] tbs = list(iter_stacks(exc_tb)) if len(sentry_frames) != len(tbs): continue for sentry_frame, tb in zip(sentry_frames, tbs): frame = tb.tb_frame source = executing.Source.for_frame(frame) sentry_frame["function"] = source.code_qualname(frame.f_code) return event sentry-python-1.39.2/sentry_sdk/integrations/falcon.py000066400000000000000000000224541454744723200232030ustar00rootroot00000000000000from __future__ import absolute_import from sentry_sdk.hub import Hub from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations._wsgi_common import RequestExtractor from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, parse_version, ) from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Dict from typing import Optional from sentry_sdk._types import EventProcessor # In Falcon 3.0 `falcon.api_helpers` is renamed to `falcon.app_helpers` # and `falcon.API` to `falcon.App` try: import falcon # type: ignore from falcon import __version__ as FALCON_VERSION except ImportError: raise DidNotEnable("Falcon not installed") try: import falcon.app_helpers # type: ignore falcon_helpers = falcon.app_helpers falcon_app_class = falcon.App FALCON3 = True except ImportError: import falcon.api_helpers # type: ignore falcon_helpers = falcon.api_helpers falcon_app_class = falcon.API FALCON3 = False class FalconRequestExtractor(RequestExtractor): def env(self): # type: () -> Dict[str, Any] return self.request.env def cookies(self): # type: () -> Dict[str, Any] return self.request.cookies def form(self): # type: () -> None return None # No such concept in Falcon def files(self): # type: () -> None return None # No such concept in Falcon def raw_data(self): # type: () -> Optional[str] # As request data can only be read once we won't make this available # to Sentry. Just send back a dummy string in case there was a # content length. 
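# --- Hedged usage illustration, not part of the original module ---
# The ExecutingIntegration defined earlier upgrades each frame's
# "function" to a qualified name (e.g. "MyView.get" rather than "get")
# via executing.Source.for_frame(...).code_qualname(...). Enabling it
# is explicit; the DSN below is a placeholder:
#
#   import sentry_sdk
#   from sentry_sdk.integrations.executing import ExecutingIntegration
#
#   sentry_sdk.init(
#       dsn="https://<key>@<org-id>.ingest.sentry.io/<project-id>",
#       integrations=[ExecutingIntegration()],
#   )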
# TODO(jmagnusson): Figure out if there's a way to support this content_length = self.content_length() if content_length > 0: return "[REQUEST_CONTAINING_RAW_DATA]" else: return None if FALCON3: def json(self): # type: () -> Optional[Dict[str, Any]] try: return self.request.media except falcon.errors.HTTPBadRequest: return None else: def json(self): # type: () -> Optional[Dict[str, Any]] try: return self.request.media except falcon.errors.HTTPBadRequest: # NOTE(jmagnusson): We return `falcon.Request._media` here because # falcon 1.4 doesn't do proper type checking in # `falcon.Request.media`. This has been fixed in 2.0. # Relevant code: https://github.com/falconry/falcon/blob/1.4.1/falcon/request.py#L953 return self.request._media class SentryFalconMiddleware(object): """Captures exceptions in Falcon requests and send to Sentry""" def process_request(self, req, resp, *args, **kwargs): # type: (Any, Any, *Any, **Any) -> None hub = Hub.current integration = hub.get_integration(FalconIntegration) if integration is None: return with hub.configure_scope() as scope: scope._name = "falcon" scope.add_event_processor(_make_request_event_processor(req, integration)) TRANSACTION_STYLE_VALUES = ("uri_template", "path") class FalconIntegration(Integration): identifier = "falcon" transaction_style = "" def __init__(self, transaction_style="uri_template"): # type: (str) -> None if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" % (transaction_style, TRANSACTION_STYLE_VALUES) ) self.transaction_style = transaction_style @staticmethod def setup_once(): # type: () -> None version = parse_version(FALCON_VERSION) if version is None: raise DidNotEnable("Unparsable Falcon version: {}".format(FALCON_VERSION)) if version < (1, 4): raise DidNotEnable("Falcon 1.4 or newer required.") _patch_wsgi_app() _patch_handle_exception() _patch_prepare_middleware() def _patch_wsgi_app(): # type: () -> None original_wsgi_app = falcon_app_class.__call__ def sentry_patched_wsgi_app(self, env, start_response): # type: (falcon.API, Any, Any) -> Any hub = Hub.current integration = hub.get_integration(FalconIntegration) if integration is None: return original_wsgi_app(self, env, start_response) sentry_wrapped = SentryWsgiMiddleware( lambda envi, start_resp: original_wsgi_app(self, envi, start_resp) ) return sentry_wrapped(env, start_response) falcon_app_class.__call__ = sentry_patched_wsgi_app def _patch_handle_exception(): # type: () -> None original_handle_exception = falcon_app_class._handle_exception def sentry_patched_handle_exception(self, *args): # type: (falcon.API, *Any) -> Any # NOTE(jmagnusson): falcon 2.0 changed falcon.API._handle_exception # method signature from `(ex, req, resp, params)` to # `(req, resp, ex, params)` ex = response = None with capture_internal_exceptions(): ex = next(argument for argument in args if isinstance(argument, Exception)) response = next( argument for argument in args if isinstance(argument, falcon.Response) ) was_handled = original_handle_exception(self, *args) if ex is None or response is None: # Both ex and response should have a non-None value at this point; otherwise, # there is an error with the SDK that will have been captured in the # capture_internal_exceptions block above. return was_handled hub = Hub.current integration = hub.get_integration(FalconIntegration) if integration is not None and _exception_leads_to_http_5xx(ex, response): # If an integration is there, a client has to be there. 
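# --- Illustrative sketch, not part of the original module ---
# The `next(...)` scans above keep the patch signature-agnostic: Falcon
# 1.x calls _handle_exception(ex, req, resp, params) while Falcon 2+
# calls _handle_exception(req, resp, ex, params). A minimal standalone
# version of that positional lookup (`target_type` is illustrative):
def _sketch_first_of_type(args, target_type):
    # type: (Any, Any) -> Any
    return next(
        (argument for argument in args if isinstance(argument, target_type)),
        None,
    )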
client = hub.client # type: Any event, hint = event_from_exception( ex, client_options=client.options, mechanism={"type": "falcon", "handled": False}, ) hub.capture_event(event, hint=hint) return was_handled falcon_app_class._handle_exception = sentry_patched_handle_exception def _patch_prepare_middleware(): # type: () -> None original_prepare_middleware = falcon_helpers.prepare_middleware def sentry_patched_prepare_middleware( middleware=None, independent_middleware=False, asgi=False ): # type: (Any, Any, bool) -> Any if asgi: # We don't support ASGI Falcon apps, so we don't patch anything here return original_prepare_middleware(middleware, independent_middleware, asgi) hub = Hub.current integration = hub.get_integration(FalconIntegration) if integration is not None: middleware = [SentryFalconMiddleware()] + (middleware or []) # We intentionally omit the asgi argument here, since the default is False anyways, # and this way, we remain backwards-compatible with pre-3.0.0 Falcon versions. return original_prepare_middleware(middleware, independent_middleware) falcon_helpers.prepare_middleware = sentry_patched_prepare_middleware def _exception_leads_to_http_5xx(ex, response): # type: (Exception, falcon.Response) -> bool is_server_error = isinstance(ex, falcon.HTTPError) and (ex.status or "").startswith( "5" ) is_unhandled_error = not isinstance( ex, (falcon.HTTPError, falcon.http_status.HTTPStatus) ) # We only check the HTTP status on Falcon 3 because in Falcon 2, the status on the response # at the stage where we capture it is listed as 200, even though we would expect to see a 500 # status. Since at the time of this change, Falcon 2 is ca. 4 years old, we have decided to # only perform this check on Falcon 3+, despite the risk that some handled errors might be # reported to Sentry as unhandled on Falcon 2. 
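# --- Hedged illustration, not part of the original module ---
# Decision sketch for the return below, in the Falcon 3 case; only
# genuine server failures are captured:
#
#   HTTPError with a 5xx status + 5xx response           -> captured
#   HTTPError with a 4xx status                          -> not captured (handled)
#   unhandled exception (e.g. ValueError) + 5xx response -> captured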
return (is_server_error or is_unhandled_error) and ( not FALCON3 or _has_http_5xx_status(response) ) def _has_http_5xx_status(response): # type: (falcon.Response) -> bool return response.status.startswith("5") def _set_transaction_name_and_source(event, transaction_style, request): # type: (Dict[str, Any], str, falcon.Request) -> None name_for_style = { "uri_template": request.uri_template, "path": request.path, } event["transaction"] = name_for_style[transaction_style] event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]} def _make_request_event_processor(req, integration): # type: (falcon.Request, FalconIntegration) -> EventProcessor def event_processor(event, hint): # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] _set_transaction_name_and_source(event, integration.transaction_style, req) with capture_internal_exceptions(): FalconRequestExtractor(req).extract_into_event(event) return event return event_processor sentry-python-1.39.2/sentry_sdk/integrations/fastapi.py000066400000000000000000000111111454744723200233540ustar00rootroot00000000000000import asyncio from copy import deepcopy from sentry_sdk._functools import wraps from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.integrations import DidNotEnable from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE from sentry_sdk.utils import transaction_from_function, logger if TYPE_CHECKING: from typing import Any, Callable, Dict from sentry_sdk.scope import Scope try: from sentry_sdk.integrations.starlette import ( StarletteIntegration, StarletteRequestExtractor, ) except DidNotEnable: raise DidNotEnable("Starlette is not installed") try: import fastapi # type: ignore except ImportError: raise DidNotEnable("FastAPI is not installed") _DEFAULT_TRANSACTION_NAME = "generic FastAPI request" class FastApiIntegration(StarletteIntegration): identifier = "fastapi" @staticmethod def setup_once(): # type: () -> None patch_get_request_handler() def _set_transaction_name_and_source(scope, transaction_style, request): # type: (Scope, str, Any) -> None name = "" if transaction_style == "endpoint": endpoint = request.scope.get("endpoint") if endpoint: name = transaction_from_function(endpoint) or "" elif transaction_style == "url": route = request.scope.get("route") if route: path = getattr(route, "path", None) if path is not None: name = path if not name: name = _DEFAULT_TRANSACTION_NAME source = TRANSACTION_SOURCE_ROUTE else: source = SOURCE_FOR_STYLE[transaction_style] scope.set_transaction_name(name, source=source) logger.debug( "[FastAPI] Set transaction name and source on scope: %s / %s", name, source ) def patch_get_request_handler(): # type: () -> None old_get_request_handler = fastapi.routing.get_request_handler def _sentry_get_request_handler(*args, **kwargs): # type: (*Any, **Any) -> Any dependant = kwargs.get("dependant") if ( dependant and dependant.call is not None and not asyncio.iscoroutinefunction(dependant.call) ): old_call = dependant.call @wraps(old_call) def _sentry_call(*args, **kwargs): # type: (*Any, **Any) -> Any hub = Hub.current with hub.configure_scope() as sentry_scope: if sentry_scope.profile is not None: sentry_scope.profile.update_active_thread_id() return old_call(*args, **kwargs) dependant.call = _sentry_call old_app = old_get_request_handler(*args, **kwargs) async def _sentry_app(*args, **kwargs): # type: (*Any, **Any) -> Any hub = Hub.current integration = hub.get_integration(FastApiIntegration) if 
integration is None: return await old_app(*args, **kwargs) with hub.configure_scope() as sentry_scope: request = args[0] _set_transaction_name_and_source( sentry_scope, integration.transaction_style, request ) extractor = StarletteRequestExtractor(request) info = await extractor.extract_request_info() def _make_request_event_processor(req, integration): # type: (Any, Any) -> Callable[[Dict[str, Any], Dict[str, Any]], Dict[str, Any]] def event_processor(event, hint): # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] # Extract information from request request_info = event.get("request", {}) if info: if "cookies" in info and _should_send_default_pii(): request_info["cookies"] = info["cookies"] if "data" in info: request_info["data"] = info["data"] event["request"] = deepcopy(request_info) return event return event_processor sentry_scope._name = FastApiIntegration.identifier sentry_scope.add_event_processor( _make_request_event_processor(request, integration) ) return await old_app(*args, **kwargs) return _sentry_app fastapi.routing.get_request_handler = _sentry_get_request_handler sentry-python-1.39.2/sentry_sdk/integrations/flask.py000066400000000000000000000172241454744723200230400ustar00rootroot00000000000000from __future__ import absolute_import from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations._wsgi_common import RequestExtractor from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware from sentry_sdk.scope import Scope from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, package_version, ) if TYPE_CHECKING: from typing import Any, Callable, Dict, Union from sentry_sdk._types import EventProcessor from sentry_sdk.integrations.wsgi import _ScopedResponse from werkzeug.datastructures import FileStorage, ImmutableMultiDict try: import flask_login # type: ignore except ImportError: flask_login = None try: from flask import Flask, Request # type: ignore from flask import request as flask_request from flask.signals import ( before_render_template, got_request_exception, request_started, ) from markupsafe import Markup except ImportError: raise DidNotEnable("Flask is not installed") try: import blinker # noqa except ImportError: raise DidNotEnable("blinker is not installed") TRANSACTION_STYLE_VALUES = ("endpoint", "url") class FlaskIntegration(Integration): identifier = "flask" transaction_style = "" def __init__(self, transaction_style="endpoint"): # type: (str) -> None if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" % (transaction_style, TRANSACTION_STYLE_VALUES) ) self.transaction_style = transaction_style @staticmethod def setup_once(): # type: () -> None version = package_version("flask") if version is None: raise DidNotEnable("Unparsable Flask version.") if version < (0, 10): raise DidNotEnable("Flask 0.10 or newer is required.") before_render_template.connect(_add_sentry_trace) request_started.connect(_request_started) got_request_exception.connect(_capture_exception) old_app = Flask.__call__ def sentry_patched_wsgi_app(self, environ, start_response): # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse if Hub.current.get_integration(FlaskIntegration) is None: return old_app(self, environ, start_response) return SentryWsgiMiddleware(lambda *a, **kw: old_app(self, 
*a, **kw))( environ, start_response ) Flask.__call__ = sentry_patched_wsgi_app def _add_sentry_trace(sender, template, context, **extra): # type: (Flask, Any, Dict[str, Any], **Any) -> None if "sentry_trace" in context: return hub = Hub.current trace_meta = Markup(hub.trace_propagation_meta()) context["sentry_trace"] = trace_meta # for backwards compatibility context["sentry_trace_meta"] = trace_meta def _set_transaction_name_and_source(scope, transaction_style, request): # type: (Scope, str, Request) -> None try: name_for_style = { "url": request.url_rule.rule, "endpoint": request.url_rule.endpoint, } scope.set_transaction_name( name_for_style[transaction_style], source=SOURCE_FOR_STYLE[transaction_style], ) except Exception: pass def _request_started(app, **kwargs): # type: (Flask, **Any) -> None hub = Hub.current integration = hub.get_integration(FlaskIntegration) if integration is None: return with hub.configure_scope() as scope: # Set the transaction name and source here, # but rely on WSGI middleware to actually start the transaction request = flask_request._get_current_object() _set_transaction_name_and_source(scope, integration.transaction_style, request) evt_processor = _make_request_event_processor(app, request, integration) scope.add_event_processor(evt_processor) class FlaskRequestExtractor(RequestExtractor): def env(self): # type: () -> Dict[str, str] return self.request.environ def cookies(self): # type: () -> Dict[Any, Any] return { k: v[0] if isinstance(v, list) and len(v) == 1 else v for k, v in self.request.cookies.items() } def raw_data(self): # type: () -> bytes return self.request.get_data() def form(self): # type: () -> ImmutableMultiDict[str, Any] return self.request.form def files(self): # type: () -> ImmutableMultiDict[str, Any] return self.request.files def is_json(self): # type: () -> bool return self.request.is_json def json(self): # type: () -> Any return self.request.get_json(silent=True) def size_of_file(self, file): # type: (FileStorage) -> int return file.content_length def _make_request_event_processor(app, request, integration): # type: (Flask, Callable[[], Request], FlaskIntegration) -> EventProcessor def inner(event, hint): # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] # if the request is gone we are fine not logging the data from # it. This might happen if the processor is pushed away to # another thread. if request is None: return event with capture_internal_exceptions(): FlaskRequestExtractor(request).extract_into_event(event) if _should_send_default_pii(): with capture_internal_exceptions(): _add_user_to_event(event) return event return inner def _capture_exception(sender, exception, **kwargs): # type: (Flask, Union[ValueError, BaseException], **Any) -> None hub = Hub.current if hub.get_integration(FlaskIntegration) is None: return # If an integration is there, a client has to be there. 
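# --- Hedged usage illustration, not part of the original module ---
# Because _add_sentry_trace above injects `sentry_trace` (and the
# preferred `sentry_trace_meta`) into every template context, pages can
# propagate tracing to the browser. Illustrative Jinja snippet (the
# markup is made up):
#
#   <head>
#     {{ sentry_trace_meta }}
#   </head>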
client = hub.client # type: Any event, hint = event_from_exception( exception, client_options=client.options, mechanism={"type": "flask", "handled": False}, ) hub.capture_event(event, hint=hint) def _add_user_to_event(event): # type: (Dict[str, Any]) -> None if flask_login is None: return user = flask_login.current_user if user is None: return with capture_internal_exceptions(): # Access this object as late as possible as accessing the user # is relatively costly user_info = event.setdefault("user", {}) try: user_info.setdefault("id", user.get_id()) # TODO: more configurable user attrs here except AttributeError: # might happen if: # - flask_login could not be imported # - flask_login is not configured # - no user is logged in pass # The following attribute accesses are ineffective for the general # Flask-Login case, because the User interface of Flask-Login does not # care about anything but the ID. However, Flask-User (based on # Flask-Login) documents a few optional extra attributes. # # https://github.com/lingthio/Flask-User/blob/a379fa0a281789618c484b459cb41236779b95b1/docs/source/data_models.rst#fixed-data-model-property-names try: user_info.setdefault("email", user.email) except Exception: pass try: user_info.setdefault("username", user.username) except Exception: pass sentry-python-1.39.2/sentry_sdk/integrations/gcp.py000066400000000000000000000200761454744723200225100ustar00rootroot00000000000000import sys from copy import deepcopy from datetime import timedelta from os import environ from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT from sentry_sdk._compat import datetime_utcnow, reraise from sentry_sdk.utils import ( AnnotatedValue, capture_internal_exceptions, event_from_exception, logger, TimeoutThread, ) from sentry_sdk.integrations import Integration from sentry_sdk.integrations._wsgi_common import _filter_headers from sentry_sdk._types import TYPE_CHECKING # Constants TIMEOUT_WARNING_BUFFER = 1.5 # Buffer time required to send timeout warning to Sentry MILLIS_TO_SECONDS = 1000.0 if TYPE_CHECKING: from datetime import datetime from typing import Any from typing import TypeVar from typing import Callable from typing import Optional from sentry_sdk._types import EventProcessor, Event, Hint F = TypeVar("F", bound=Callable[..., Any]) def _wrap_func(func): # type: (F) -> F def sentry_func(functionhandler, gcp_event, *args, **kwargs): # type: (Any, Any, *Any, **Any) -> Any hub = Hub.current integration = hub.get_integration(GcpIntegration) if integration is None: return func(functionhandler, gcp_event, *args, **kwargs) # If an integration is there, a client has to be there. client = hub.client # type: Any configured_time = environ.get("FUNCTION_TIMEOUT_SEC") if not configured_time: logger.debug( "The configured timeout could not be fetched from Cloud Functions configuration." 
) return func(functionhandler, gcp_event, *args, **kwargs) configured_time = int(configured_time) initial_time = datetime_utcnow() with hub.push_scope() as scope: with capture_internal_exceptions(): scope.clear_breadcrumbs() scope.add_event_processor( _make_request_event_processor( gcp_event, configured_time, initial_time ) ) scope.set_tag("gcp_region", environ.get("FUNCTION_REGION")) timeout_thread = None if ( integration.timeout_warning and configured_time > TIMEOUT_WARNING_BUFFER ): waiting_time = configured_time - TIMEOUT_WARNING_BUFFER timeout_thread = TimeoutThread(waiting_time, configured_time) # Starting the thread to raise timeout warning exception timeout_thread.start() headers = {} if hasattr(gcp_event, "headers"): headers = gcp_event.headers transaction = continue_trace( headers, op=OP.FUNCTION_GCP, name=environ.get("FUNCTION_NAME", ""), source=TRANSACTION_SOURCE_COMPONENT, ) sampling_context = { "gcp_env": { "function_name": environ.get("FUNCTION_NAME"), "function_entry_point": environ.get("ENTRY_POINT"), "function_identity": environ.get("FUNCTION_IDENTITY"), "function_region": environ.get("FUNCTION_REGION"), "function_project": environ.get("GCP_PROJECT"), }, "gcp_event": gcp_event, } with hub.start_transaction( transaction, custom_sampling_context=sampling_context ): try: return func(functionhandler, gcp_event, *args, **kwargs) except Exception: exc_info = sys.exc_info() sentry_event, hint = event_from_exception( exc_info, client_options=client.options, mechanism={"type": "gcp", "handled": False}, ) hub.capture_event(sentry_event, hint=hint) reraise(*exc_info) finally: if timeout_thread: timeout_thread.stop() # Flush out the event queue hub.flush() return sentry_func # type: ignore class GcpIntegration(Integration): identifier = "gcp" def __init__(self, timeout_warning=False): # type: (bool) -> None self.timeout_warning = timeout_warning @staticmethod def setup_once(): # type: () -> None import __main__ as gcp_functions if not hasattr(gcp_functions, "worker_v1"): logger.warning( "GcpIntegration currently supports only Python 3.7 runtime environment." 
) return worker1 = gcp_functions.worker_v1 worker1.FunctionHandler.invoke_user_function = _wrap_func( worker1.FunctionHandler.invoke_user_function ) def _make_request_event_processor(gcp_event, configured_timeout, initial_time): # type: (Any, Any, Any) -> EventProcessor def event_processor(event, hint): # type: (Event, Hint) -> Optional[Event] final_time = datetime_utcnow() time_diff = final_time - initial_time execution_duration_in_millis = time_diff.microseconds / MILLIS_TO_SECONDS extra = event.setdefault("extra", {}) extra["google cloud functions"] = { "function_name": environ.get("FUNCTION_NAME"), "function_entry_point": environ.get("ENTRY_POINT"), "function_identity": environ.get("FUNCTION_IDENTITY"), "function_region": environ.get("FUNCTION_REGION"), "function_project": environ.get("GCP_PROJECT"), "execution_duration_in_millis": execution_duration_in_millis, "configured_timeout_in_seconds": configured_timeout, } extra["google cloud logs"] = { "url": _get_google_cloud_logs_url(final_time), } request = event.get("request", {}) request["url"] = "gcp:///{}".format(environ.get("FUNCTION_NAME")) if hasattr(gcp_event, "method"): request["method"] = gcp_event.method if hasattr(gcp_event, "query_string"): request["query_string"] = gcp_event.query_string.decode("utf-8") if hasattr(gcp_event, "headers"): request["headers"] = _filter_headers(gcp_event.headers) if _should_send_default_pii(): if hasattr(gcp_event, "data"): request["data"] = gcp_event.data else: if hasattr(gcp_event, "data"): # Unfortunately couldn't find a way to get structured body from GCP # event. Meaning every body is unstructured to us. request["data"] = AnnotatedValue.removed_because_raw_data() event["request"] = deepcopy(request) return event return event_processor def _get_google_cloud_logs_url(final_time): # type: (datetime) -> str """ Generates a Google Cloud Logs console URL based on the environment variables Arguments: final_time {datetime} -- Final time Returns: str -- Google Cloud Logs Console URL to logs. """ hour_ago = final_time - timedelta(hours=1) formatstring = "%Y-%m-%dT%H:%M:%SZ" url = ( "https://console.cloud.google.com/logs/viewer?project={project}&resource=cloud_function" "%2Ffunction_name%2F{function_name}%2Fregion%2F{region}&minLogLevel=0&expandAll=false" "×tamp={timestamp_end}&customFacets=&limitCustomFacetWidth=true" "&dateRangeStart={timestamp_start}&dateRangeEnd={timestamp_end}" "&interval=PT1H&scrollTimestamp={timestamp_end}" ).format( project=environ.get("GCP_PROJECT"), function_name=environ.get("FUNCTION_NAME"), region=environ.get("FUNCTION_REGION"), timestamp_end=final_time.strftime(formatstring), timestamp_start=hour_ago.strftime(formatstring), ) return url sentry-python-1.39.2/sentry_sdk/integrations/gnu_backtrace.py000066400000000000000000000055621454744723200245320ustar00rootroot00000000000000import re from sentry_sdk.hub import Hub from sentry_sdk.integrations import Integration from sentry_sdk.scope import add_global_event_processor from sentry_sdk.utils import capture_internal_exceptions from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Dict MODULE_RE = r"[a-zA-Z0-9/._:\\-]+" TYPE_RE = r"[a-zA-Z0-9._:<>,-]+" HEXVAL_RE = r"[A-Fa-f0-9]+" FRAME_RE = r""" ^(?P\d+)\.\s (?P{MODULE_RE})\( (?P{TYPE_RE}\ )? ((?P{TYPE_RE}) (?P\(.*\))? )? ((?P\ const)?\+0x(?P{HEXVAL_RE}))? 
\)\s \[0x(?P{HEXVAL_RE})\]$ """.format( MODULE_RE=MODULE_RE, HEXVAL_RE=HEXVAL_RE, TYPE_RE=TYPE_RE ) FRAME_RE = re.compile(FRAME_RE, re.MULTILINE | re.VERBOSE) class GnuBacktraceIntegration(Integration): identifier = "gnu_backtrace" @staticmethod def setup_once(): # type: () -> None @add_global_event_processor def process_gnu_backtrace(event, hint): # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] with capture_internal_exceptions(): return _process_gnu_backtrace(event, hint) def _process_gnu_backtrace(event, hint): # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] if Hub.current.get_integration(GnuBacktraceIntegration) is None: return event exc_info = hint.get("exc_info", None) if exc_info is None: return event exception = event.get("exception", None) if exception is None: return event values = exception.get("values", None) if values is None: return event for exception in values: frames = exception.get("stacktrace", {}).get("frames", []) if not frames: continue msg = exception.get("value", None) if not msg: continue additional_frames = [] new_msg = [] for line in msg.splitlines(): match = FRAME_RE.match(line) if match: additional_frames.append( ( int(match.group("index")), { "package": match.group("package") or None, "function": match.group("function") or None, "platform": "native", }, ) ) else: # Put garbage lines back into message, not sure what to do with them. new_msg.append(line) if additional_frames: additional_frames.sort(key=lambda x: -x[0]) for _, frame in additional_frames: frames.append(frame) new_msg.append("") exception["value"] = "\n".join(new_msg) return event sentry-python-1.39.2/sentry_sdk/integrations/gql.py000066400000000000000000000105361454744723200225220ustar00rootroot00000000000000from sentry_sdk.utils import event_from_exception, parse_version from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.integrations import DidNotEnable, Integration try: import gql # type: ignore[import-not-found] from graphql import print_ast, get_operation_ast, DocumentNode, VariableDefinitionNode # type: ignore[import-not-found] from gql.transport import Transport, AsyncTransport # type: ignore[import-not-found] from gql.transport.exceptions import TransportQueryError # type: ignore[import-not-found] except ImportError: raise DidNotEnable("gql is not installed") from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Any, Dict, Tuple, Union from sentry_sdk._types import EventProcessor EventDataType = Dict[str, Union[str, Tuple[VariableDefinitionNode, ...]]] MIN_GQL_VERSION = (3, 4, 1) class GQLIntegration(Integration): identifier = "gql" @staticmethod def setup_once(): # type: () -> None gql_version = parse_version(gql.__version__) if gql_version is None or gql_version < MIN_GQL_VERSION: raise DidNotEnable( "GQLIntegration is only supported for GQL versions %s and above." % ".".join(str(num) for num in MIN_GQL_VERSION) ) _patch_execute() def _data_from_document(document): # type: (DocumentNode) -> EventDataType try: operation_ast = get_operation_ast(document) data = {"query": print_ast(document)} # type: EventDataType if operation_ast is not None: data["variables"] = operation_ast.variable_definitions if operation_ast.name is not None: data["operationName"] = operation_ast.name.value return data except (AttributeError, TypeError): return dict() def _transport_method(transport): # type: (Union[Transport, AsyncTransport]) -> str """ The RequestsHTTPTransport allows defining the HTTP method; all other transports use POST. 
""" try: return transport.method except AttributeError: return "POST" def _request_info_from_transport(transport): # type: (Union[Transport, AsyncTransport, None]) -> Dict[str, str] if transport is None: return {} request_info = { "method": _transport_method(transport), } try: request_info["url"] = transport.url except AttributeError: pass return request_info def _patch_execute(): # type: () -> None real_execute = gql.Client.execute def sentry_patched_execute(self, document, *args, **kwargs): # type: (gql.Client, DocumentNode, Any, Any) -> Any hub = Hub.current if hub.get_integration(GQLIntegration) is None: return real_execute(self, document, *args, **kwargs) with Hub.current.configure_scope() as scope: scope.add_event_processor(_make_gql_event_processor(self, document)) try: return real_execute(self, document, *args, **kwargs) except TransportQueryError as e: event, hint = event_from_exception( e, client_options=hub.client.options if hub.client is not None else None, mechanism={"type": "gql", "handled": False}, ) hub.capture_event(event, hint) raise e gql.Client.execute = sentry_patched_execute def _make_gql_event_processor(client, document): # type: (gql.Client, DocumentNode) -> EventProcessor def processor(event, hint): # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] try: errors = hint["exc_info"][1].errors except (AttributeError, KeyError): errors = None request = event.setdefault("request", {}) request.update( { "api_target": "graphql", **_request_info_from_transport(client.transport), } ) if _should_send_default_pii(): request["data"] = _data_from_document(document) contexts = event.setdefault("contexts", {}) response = contexts.setdefault("response", {}) response.update( { "data": {"errors": errors}, "type": response, } ) return event return processor sentry-python-1.39.2/sentry_sdk/integrations/graphene.py000066400000000000000000000072141454744723200235270ustar00rootroot00000000000000from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, package_version, ) from sentry_sdk._types import TYPE_CHECKING try: from graphene.types import schema as graphene_schema # type: ignore except ImportError: raise DidNotEnable("graphene is not installed") if TYPE_CHECKING: from typing import Any, Dict, Union from graphene.language.source import Source # type: ignore from graphql.execution import ExecutionResult # type: ignore from graphql.type import GraphQLSchema # type: ignore class GrapheneIntegration(Integration): identifier = "graphene" @staticmethod def setup_once(): # type: () -> None version = package_version("graphene") if version is None: raise DidNotEnable("Unparsable graphene version.") if version < (3, 3): raise DidNotEnable("graphene 3.3 or newer required.") _patch_graphql() def _patch_graphql(): # type: () -> None old_graphql_sync = graphene_schema.graphql_sync old_graphql_async = graphene_schema.graphql def _sentry_patched_graphql_sync(schema, source, *args, **kwargs): # type: (GraphQLSchema, Union[str, Source], Any, Any) -> ExecutionResult hub = Hub.current integration = hub.get_integration(GrapheneIntegration) if integration is None: return old_graphql_sync(schema, source, *args, **kwargs) with hub.configure_scope() as scope: scope.add_event_processor(_event_processor) result = old_graphql_sync(schema, source, *args, **kwargs) with capture_internal_exceptions(): for error in result.errors or []: event, hint = 
event_from_exception( error, client_options=hub.client.options if hub.client else None, mechanism={ "type": integration.identifier, "handled": False, }, ) hub.capture_event(event, hint=hint) return result async def _sentry_patched_graphql_async(schema, source, *args, **kwargs): # type: (GraphQLSchema, Union[str, Source], Any, Any) -> ExecutionResult hub = Hub.current integration = hub.get_integration(GrapheneIntegration) if integration is None: return await old_graphql_async(schema, source, *args, **kwargs) with hub.configure_scope() as scope: scope.add_event_processor(_event_processor) result = await old_graphql_async(schema, source, *args, **kwargs) with capture_internal_exceptions(): for error in result.errors or []: event, hint = event_from_exception( error, client_options=hub.client.options if hub.client else None, mechanism={ "type": integration.identifier, "handled": False, }, ) hub.capture_event(event, hint=hint) return result graphene_schema.graphql_sync = _sentry_patched_graphql_sync graphene_schema.graphql = _sentry_patched_graphql_async def _event_processor(event, hint): # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] if _should_send_default_pii(): request_info = event.setdefault("request", {}) request_info["api_target"] = "graphql" elif event.get("request", {}).get("data"): del event["request"]["data"] return event sentry-python-1.39.2/sentry_sdk/integrations/grpc/000077500000000000000000000000001454744723200223135ustar00rootroot00000000000000sentry-python-1.39.2/sentry_sdk/integrations/grpc/__init__.py000066400000000000000000000115631454744723200244320ustar00rootroot00000000000000from functools import wraps import grpc from grpc import Channel, Server, intercept_channel from grpc.aio import Channel as AsyncChannel from grpc.aio import Server as AsyncServer from sentry_sdk.integrations import Integration from sentry_sdk._types import TYPE_CHECKING from .client import ClientInterceptor from .server import ServerInterceptor from .aio.server import ServerInterceptor as AsyncServerInterceptor from .aio.client import ( SentryUnaryUnaryClientInterceptor as AsyncUnaryUnaryClientInterceptor, ) from .aio.client import ( SentryUnaryStreamClientInterceptor as AsyncUnaryStreamClientInterceptor, ) from typing import Any, Optional, Sequence # Hack to get new Python features working in older versions # without introducing a hard dependency on `typing_extensions` # from: https://stackoverflow.com/a/71944042/300572 if TYPE_CHECKING: from typing import ParamSpec, Callable else: # Fake ParamSpec class ParamSpec: def __init__(self, _): self.args = None self.kwargs = None # Callable[anything] will return None class _Callable: def __getitem__(self, _): return None # Make instances Callable = _Callable() P = ParamSpec("P") def _wrap_channel_sync(func: Callable[P, Channel]) -> Callable[P, Channel]: "Wrapper for synchronous secure and insecure channel."
@wraps(func) def patched_channel(*args: Any, **kwargs: Any) -> Channel: channel = func(*args, **kwargs) if not ClientInterceptor._is_intercepted: ClientInterceptor._is_intercepted = True return intercept_channel(channel, ClientInterceptor()) else: return channel return patched_channel def _wrap_intercept_channel(func: Callable[P, Channel]) -> Callable[P, Channel]: @wraps(func) def patched_intercept_channel( channel: Channel, *interceptors: grpc.ServerInterceptor ) -> Channel: if ClientInterceptor._is_intercepted: interceptors = tuple( [ interceptor for interceptor in interceptors if not isinstance(interceptor, ClientInterceptor) ] ) else: interceptors = interceptors return intercept_channel(channel, *interceptors) return patched_intercept_channel # type: ignore def _wrap_channel_async(func: Callable[P, AsyncChannel]) -> Callable[P, AsyncChannel]: "Wrapper for asynchronous secure and insecure channel." @wraps(func) def patched_channel( *args: P.args, interceptors: Optional[Sequence[grpc.aio.ClientInterceptor]] = None, **kwargs: P.kwargs, ) -> Channel: sentry_interceptors = [ AsyncUnaryUnaryClientInterceptor(), AsyncUnaryStreamClientInterceptor(), ] interceptors = [*sentry_interceptors, *(interceptors or [])] return func(*args, interceptors=interceptors, **kwargs) # type: ignore return patched_channel # type: ignore def _wrap_sync_server(func: Callable[P, Server]) -> Callable[P, Server]: """Wrapper for synchronous server.""" @wraps(func) def patched_server( *args: P.args, interceptors: Optional[Sequence[grpc.ServerInterceptor]] = None, **kwargs: P.kwargs, ) -> Server: interceptors = [ interceptor for interceptor in interceptors or [] if not isinstance(interceptor, ServerInterceptor) ] server_interceptor = ServerInterceptor() interceptors = [server_interceptor, *(interceptors or [])] return func(*args, interceptors=interceptors, **kwargs) # type: ignore return patched_server # type: ignore def _wrap_async_server(func: Callable[P, AsyncServer]) -> Callable[P, AsyncServer]: """Wrapper for asynchronous server.""" @wraps(func) def patched_aio_server( *args: P.args, interceptors: Optional[Sequence[grpc.ServerInterceptor]] = None, **kwargs: P.kwargs, ) -> Server: server_interceptor = AsyncServerInterceptor() interceptors = [server_interceptor, *(interceptors or [])] return func(*args, interceptors=interceptors, **kwargs) # type: ignore return patched_aio_server # type: ignore class GRPCIntegration(Integration): identifier = "grpc" @staticmethod def setup_once() -> None: import grpc grpc.insecure_channel = _wrap_channel_sync(grpc.insecure_channel) grpc.secure_channel = _wrap_channel_sync(grpc.secure_channel) grpc.intercept_channel = _wrap_intercept_channel(grpc.intercept_channel) grpc.aio.insecure_channel = _wrap_channel_async(grpc.aio.insecure_channel) grpc.aio.secure_channel = _wrap_channel_async(grpc.aio.secure_channel) grpc.server = _wrap_sync_server(grpc.server) grpc.aio.server = _wrap_async_server(grpc.aio.server) sentry-python-1.39.2/sentry_sdk/integrations/grpc/aio/000077500000000000000000000000001454744723200230635ustar00rootroot00000000000000sentry-python-1.39.2/sentry_sdk/integrations/grpc/aio/__init__.py000066400000000000000000000001501454744723200251700ustar00rootroot00000000000000from .server import ServerInterceptor # noqa: F401 from .client import ClientInterceptor # noqa: F401 sentry-python-1.39.2/sentry_sdk/integrations/grpc/aio/client.py000066400000000000000000000060061454744723200247150ustar00rootroot00000000000000from typing import Callable, Union, AsyncIterable, Any from
grpc.aio import ( UnaryUnaryClientInterceptor, UnaryStreamClientInterceptor, ClientCallDetails, UnaryUnaryCall, UnaryStreamCall, ) from google.protobuf.message import Message from sentry_sdk import Hub from sentry_sdk.consts import OP class ClientInterceptor: @staticmethod def _update_client_call_details_metadata_from_hub( client_call_details: ClientCallDetails, hub: Hub ) -> ClientCallDetails: metadata = ( list(client_call_details.metadata) if client_call_details.metadata else [] ) for key, value in hub.iter_trace_propagation_headers(): metadata.append((key, value)) client_call_details = ClientCallDetails( method=client_call_details.method, timeout=client_call_details.timeout, metadata=metadata, credentials=client_call_details.credentials, wait_for_ready=client_call_details.wait_for_ready, ) return client_call_details class SentryUnaryUnaryClientInterceptor(ClientInterceptor, UnaryUnaryClientInterceptor): # type: ignore async def intercept_unary_unary( self, continuation: Callable[[ClientCallDetails, Message], UnaryUnaryCall], client_call_details: ClientCallDetails, request: Message, ) -> Union[UnaryUnaryCall, Message]: hub = Hub.current method = client_call_details.method with hub.start_span( op=OP.GRPC_CLIENT, description="unary unary call to %s" % method.decode() ) as span: span.set_data("type", "unary unary") span.set_data("method", method) client_call_details = self._update_client_call_details_metadata_from_hub( client_call_details, hub ) response = await continuation(client_call_details, request) status_code = await response.code() span.set_data("code", status_code.name) return response class SentryUnaryStreamClientInterceptor( ClientInterceptor, UnaryStreamClientInterceptor # type: ignore ): async def intercept_unary_stream( self, continuation: Callable[[ClientCallDetails, Message], UnaryStreamCall], client_call_details: ClientCallDetails, request: Message, ) -> Union[AsyncIterable[Any], UnaryStreamCall]: hub = Hub.current method = client_call_details.method with hub.start_span( op=OP.GRPC_CLIENT, description="unary stream call to %s" % method.decode() ) as span: span.set_data("type", "unary stream") span.set_data("method", method) client_call_details = self._update_client_call_details_metadata_from_hub( client_call_details, hub ) response = await continuation(client_call_details, request) # status_code = await response.code() # span.set_data("code", status_code) return response sentry-python-1.39.2/sentry_sdk/integrations/grpc/aio/server.py000066400000000000000000000074751454744723200247600ustar00rootroot00000000000000from sentry_sdk import Hub from sentry_sdk._types import MYPY from sentry_sdk.consts import OP from sentry_sdk.integrations import DidNotEnable from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_CUSTOM from sentry_sdk.utils import event_from_exception if MYPY: from collections.abc import Awaitable, Callable from typing import Any try: import grpc from grpc import HandlerCallDetails, RpcMethodHandler from grpc.aio import AbortError, ServicerContext except ImportError: raise DidNotEnable("grpcio is not installed") class ServerInterceptor(grpc.aio.ServerInterceptor): # type: ignore def __init__(self, find_name=None): # type: (ServerInterceptor, Callable[[ServicerContext], str] | None) -> None self._find_method_name = find_name or self._find_name super(ServerInterceptor, self).__init__() async def intercept_service(self, continuation, handler_call_details): # type: (ServerInterceptor, Callable[[HandlerCallDetails], Awaitable[RpcMethodHandler]], 
HandlerCallDetails) -> Awaitable[RpcMethodHandler] self._handler_call_details = handler_call_details handler = await continuation(handler_call_details) if not handler.request_streaming and not handler.response_streaming: handler_factory = grpc.unary_unary_rpc_method_handler async def wrapped(request, context): # type: (Any, ServicerContext) -> Any name = self._find_method_name(context) if not name: return await handler(request, context) hub = Hub.current # What if the headers are empty? transaction = Transaction.continue_from_headers( dict(context.invocation_metadata()), op=OP.GRPC_SERVER, name=name, source=TRANSACTION_SOURCE_CUSTOM, ) with hub.start_transaction(transaction=transaction): try: return await handler.unary_unary(request, context) except AbortError: raise except Exception as exc: event, hint = event_from_exception( exc, mechanism={"type": "grpc", "handled": False}, ) hub.capture_event(event, hint=hint) raise elif not handler.request_streaming and handler.response_streaming: handler_factory = grpc.unary_stream_rpc_method_handler async def wrapped(request, context): # type: ignore # type: (Any, ServicerContext) -> Any async for r in handler.unary_stream(request, context): yield r elif handler.request_streaming and not handler.response_streaming: handler_factory = grpc.stream_unary_rpc_method_handler async def wrapped(request, context): # type: (Any, ServicerContext) -> Any response = handler.stream_unary(request, context) return await response elif handler.request_streaming and handler.response_streaming: handler_factory = grpc.stream_stream_rpc_method_handler async def wrapped(request, context): # type: ignore # type: (Any, ServicerContext) -> Any async for r in handler.stream_stream(request, context): yield r return handler_factory( wrapped, request_deserializer=handler.request_deserializer, response_serializer=handler.response_serializer, ) def _find_name(self, context): # type: (ServicerContext) -> str return self._handler_call_details.method sentry-python-1.39.2/sentry_sdk/integrations/grpc/client.py000066400000000000000000000062431454744723200241500ustar00rootroot00000000000000from sentry_sdk import Hub from sentry_sdk._types import MYPY from sentry_sdk.consts import OP from sentry_sdk.integrations import DidNotEnable if MYPY: from typing import Any, Callable, Iterator, Iterable, Union try: import grpc from grpc import ClientCallDetails, Call from grpc._interceptor import _UnaryOutcome from grpc.aio._interceptor import UnaryStreamCall from google.protobuf.message import Message except ImportError: raise DidNotEnable("grpcio is not installed") class ClientInterceptor( grpc.UnaryUnaryClientInterceptor, grpc.UnaryStreamClientInterceptor # type: ignore ): _is_intercepted = False def intercept_unary_unary(self, continuation, client_call_details, request): # type: (ClientInterceptor, Callable[[ClientCallDetails, Message], _UnaryOutcome], ClientCallDetails, Message) -> _UnaryOutcome hub = Hub.current method = client_call_details.method with hub.start_span( op=OP.GRPC_CLIENT, description="unary unary call to %s" % method ) as span: span.set_data("type", "unary unary") span.set_data("method", method) client_call_details = self._update_client_call_details_metadata_from_hub( client_call_details, hub ) response = continuation(client_call_details, request) span.set_data("code", response.code().name) return response def intercept_unary_stream(self, continuation, client_call_details, request): # type: (ClientInterceptor, Callable[[ClientCallDetails, Message], Union[Iterable[Any], 
UnaryStreamCall]], ClientCallDetails, Message) -> Union[Iterator[Message], Call] hub = Hub.current method = client_call_details.method with hub.start_span( op=OP.GRPC_CLIENT, description="unary stream call to %s" % method ) as span: span.set_data("type", "unary stream") span.set_data("method", method) client_call_details = self._update_client_call_details_metadata_from_hub( client_call_details, hub ) response = continuation( client_call_details, request ) # type: UnaryStreamCall # Setting code on unary-stream leads to execution getting stuck # span.set_data("code", response.code().name) return response @staticmethod def _update_client_call_details_metadata_from_hub(client_call_details, hub): # type: (ClientCallDetails, Hub) -> ClientCallDetails metadata = ( list(client_call_details.metadata) if client_call_details.metadata else [] ) for key, value in hub.iter_trace_propagation_headers(): metadata.append((key, value)) client_call_details = grpc._interceptor._ClientCallDetails( method=client_call_details.method, timeout=client_call_details.timeout, metadata=metadata, credentials=client_call_details.credentials, wait_for_ready=client_call_details.wait_for_ready, compression=client_call_details.compression, ) return client_call_details sentry-python-1.39.2/sentry_sdk/integrations/grpc/server.py000066400000000000000000000044211454744723200241740ustar00rootroot00000000000000from sentry_sdk import Hub from sentry_sdk._types import MYPY from sentry_sdk.consts import OP from sentry_sdk.integrations import DidNotEnable from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_CUSTOM if MYPY: from typing import Callable, Optional from google.protobuf.message import Message try: import grpc from grpc import ServicerContext, HandlerCallDetails, RpcMethodHandler except ImportError: raise DidNotEnable("grpcio is not installed") class ServerInterceptor(grpc.ServerInterceptor): # type: ignore def __init__(self, find_name=None): # type: (ServerInterceptor, Optional[Callable[[ServicerContext], str]]) -> None self._find_method_name = find_name or ServerInterceptor._find_name super(ServerInterceptor, self).__init__() def intercept_service(self, continuation, handler_call_details): # type: (ServerInterceptor, Callable[[HandlerCallDetails], RpcMethodHandler], HandlerCallDetails) -> RpcMethodHandler handler = continuation(handler_call_details) if not handler or not handler.unary_unary: return handler def behavior(request, context): # type: (Message, ServicerContext) -> Message hub = Hub(Hub.current) name = self._find_method_name(context) if name: metadata = dict(context.invocation_metadata()) transaction = Transaction.continue_from_headers( metadata, op=OP.GRPC_SERVER, name=name, source=TRANSACTION_SOURCE_CUSTOM, ) with hub.start_transaction(transaction=transaction): try: return handler.unary_unary(request, context) except BaseException as e: raise e else: return handler.unary_unary(request, context) return grpc.unary_unary_rpc_method_handler( behavior, request_deserializer=handler.request_deserializer, response_serializer=handler.response_serializer, ) @staticmethod def _find_name(context): # type: (ServicerContext) -> str return context._rpc_event.call_details.method.decode() sentry-python-1.39.2/sentry_sdk/integrations/httpx.py000066400000000000000000000116151454744723200231050ustar00rootroot00000000000000from sentry_sdk import Hub from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.tracing import BAGGAGE_HEADER_NAME from 
sentry_sdk.tracing_utils import should_propagate_trace from sentry_sdk.utils import ( SENSITIVE_DATA_SUBSTITUTE, capture_internal_exceptions, logger, parse_url, ) from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Any try: from httpx import AsyncClient, Client, Request, Response # type: ignore except ImportError: raise DidNotEnable("httpx is not installed") __all__ = ["HttpxIntegration"] class HttpxIntegration(Integration): identifier = "httpx" @staticmethod def setup_once(): # type: () -> None """ httpx has its own transport layer and can be customized when needed, so patch Client.send and AsyncClient.send to support both synchronous and async interfaces. """ _install_httpx_client() _install_httpx_async_client() def _install_httpx_client(): # type: () -> None real_send = Client.send def send(self, request, **kwargs): # type: (Client, Request, **Any) -> Response hub = Hub.current if hub.get_integration(HttpxIntegration) is None: return real_send(self, request, **kwargs) parsed_url = None with capture_internal_exceptions(): parsed_url = parse_url(str(request.url), sanitize=False) with hub.start_span( op=OP.HTTP_CLIENT, description="%s %s" % ( request.method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE, ), ) as span: span.set_data(SPANDATA.HTTP_METHOD, request.method) if parsed_url is not None: span.set_data("url", parsed_url.url) span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query) span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment) if should_propagate_trace(hub, str(request.url)): for key, value in hub.iter_trace_propagation_headers(): logger.debug( "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format( key=key, value=value, url=request.url ) ) if key == BAGGAGE_HEADER_NAME and request.headers.get( BAGGAGE_HEADER_NAME ): # do not overwrite any existing baggage, just append to it request.headers[key] += "," + value else: request.headers[key] = value rv = real_send(self, request, **kwargs) span.set_http_status(rv.status_code) span.set_data("reason", rv.reason_phrase) return rv Client.send = send def _install_httpx_async_client(): # type: () -> None real_send = AsyncClient.send async def send(self, request, **kwargs): # type: (AsyncClient, Request, **Any) -> Response hub = Hub.current if hub.get_integration(HttpxIntegration) is None: return await real_send(self, request, **kwargs) parsed_url = None with capture_internal_exceptions(): parsed_url = parse_url(str(request.url), sanitize=False) with hub.start_span( op=OP.HTTP_CLIENT, description="%s %s" % ( request.method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE, ), ) as span: span.set_data(SPANDATA.HTTP_METHOD, request.method) if parsed_url is not None: span.set_data("url", parsed_url.url) span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query) span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment) if should_propagate_trace(hub, str(request.url)): for key, value in hub.iter_trace_propagation_headers(): logger.debug( "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format( key=key, value=value, url=request.url ) ) if key == BAGGAGE_HEADER_NAME and request.headers.get( BAGGAGE_HEADER_NAME ): # do not overwrite any existing baggage, just append to it request.headers[key] += "," + value else: request.headers[key] = value rv = await real_send(self, request, **kwargs) span.set_http_status(rv.status_code) span.set_data("reason", rv.reason_phrase) return rv AsyncClient.send = send 
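# --- Editor's usage sketch (not part of the upstream module) -----------------
# The patching above only takes effect once the integration is registered with
# the SDK, so a minimal, hedged example follows. The DSN and target URL are
# placeholders; `HttpxIntegration` is defined in this module, and
# `sentry_sdk.init` / `sentry_sdk.start_transaction` are the standard public API.
if __name__ == "__main__":
    import httpx
    import sentry_sdk

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        traces_sample_rate=1.0,
        integrations=[HttpxIntegration()],
    )

    # Any request made inside a transaction is recorded as an `http.client`
    # span and carries the sentry-trace/baggage headers added by the patch.
    with sentry_sdk.start_transaction(op="task", name="httpx-demo"):
        httpx.Client().get("https://example.com")  # placeholder URL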
sentry-python-1.39.2/sentry_sdk/integrations/huey.py000066400000000000000000000111341454744723200227040ustar00rootroot00000000000000from __future__ import absolute_import import sys from datetime import datetime from sentry_sdk._compat import reraise from sentry_sdk._types import TYPE_CHECKING from sentry_sdk import Hub from sentry_sdk.consts import OP from sentry_sdk.hub import _should_send_default_pii from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_TASK from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, SENSITIVE_DATA_SUBSTITUTE, ) if TYPE_CHECKING: from typing import Any, Callable, Optional, Union, TypeVar from sentry_sdk._types import EventProcessor, Event, Hint from sentry_sdk.utils import ExcInfo F = TypeVar("F", bound=Callable[..., Any]) try: from huey.api import Huey, Result, ResultGroup, Task from huey.exceptions import CancelExecution, RetryTask, TaskLockedException except ImportError: raise DidNotEnable("Huey is not installed") HUEY_CONTROL_FLOW_EXCEPTIONS = (CancelExecution, RetryTask, TaskLockedException) class HueyIntegration(Integration): identifier = "huey" @staticmethod def setup_once(): # type: () -> None patch_enqueue() patch_execute() def patch_enqueue(): # type: () -> None old_enqueue = Huey.enqueue def _sentry_enqueue(self, task): # type: (Huey, Task) -> Optional[Union[Result, ResultGroup]] hub = Hub.current if hub.get_integration(HueyIntegration) is None: return old_enqueue(self, task) with hub.start_span(op=OP.QUEUE_SUBMIT_HUEY, description=task.name): return old_enqueue(self, task) Huey.enqueue = _sentry_enqueue def _make_event_processor(task): # type: (Any) -> EventProcessor def event_processor(event, hint): # type: (Event, Hint) -> Optional[Event] with capture_internal_exceptions(): tags = event.setdefault("tags", {}) tags["huey_task_id"] = task.id tags["huey_task_retry"] = task.default_retries > task.retries extra = event.setdefault("extra", {}) extra["huey-job"] = { "task": task.name, "args": task.args if _should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE, "kwargs": task.kwargs if _should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE, "retry": (task.default_retries or 0) - task.retries, } return event return event_processor def _capture_exception(exc_info): # type: (ExcInfo) -> None hub = Hub.current if exc_info[0] in HUEY_CONTROL_FLOW_EXCEPTIONS: hub.scope.transaction.set_status("aborted") return hub.scope.transaction.set_status("internal_error") event, hint = event_from_exception( exc_info, client_options=hub.client.options if hub.client else None, mechanism={"type": HueyIntegration.identifier, "handled": False}, ) hub.capture_event(event, hint=hint) def _wrap_task_execute(func): # type: (F) -> F def _sentry_execute(*args, **kwargs): # type: (*Any, **Any) -> Any hub = Hub.current if hub.get_integration(HueyIntegration) is None: return func(*args, **kwargs) try: result = func(*args, **kwargs) except Exception: exc_info = sys.exc_info() _capture_exception(exc_info) reraise(*exc_info) return result return _sentry_execute # type: ignore def patch_execute(): # type: () -> None old_execute = Huey._execute def _sentry_execute(self, task, timestamp=None): # type: (Huey, Task, Optional[datetime]) -> Any hub = Hub.current if hub.get_integration(HueyIntegration) is None: return old_execute(self, task, timestamp) with hub.push_scope() as scope: with capture_internal_exceptions(): scope._name = "huey" scope.clear_breadcrumbs() 
scope.add_event_processor(_make_event_processor(task)) transaction = Transaction( name=task.name, status="ok", op=OP.QUEUE_TASK_HUEY, source=TRANSACTION_SOURCE_TASK, ) if not getattr(task, "_sentry_is_patched", False): task.execute = _wrap_task_execute(task.execute) task._sentry_is_patched = True with hub.start_transaction(transaction): return old_execute(self, task, timestamp) Huey._execute = _sentry_execute sentry-python-1.39.2/sentry_sdk/integrations/logging.py000066400000000000000000000222401454744723200233600ustar00rootroot00000000000000from __future__ import absolute_import import logging from fnmatch import fnmatch from sentry_sdk.hub import Hub from sentry_sdk.utils import ( to_string, event_from_exception, current_stacktrace, capture_internal_exceptions, ) from sentry_sdk.integrations import Integration from sentry_sdk._compat import iteritems, utc_from_timestamp from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from logging import LogRecord from typing import Any from typing import Dict from typing import Optional DEFAULT_LEVEL = logging.INFO DEFAULT_EVENT_LEVEL = logging.ERROR LOGGING_TO_EVENT_LEVEL = { logging.NOTSET: "notset", logging.DEBUG: "debug", logging.INFO: "info", logging.WARN: "warning", # WARN is the same as WARNING logging.WARNING: "warning", logging.ERROR: "error", logging.FATAL: "fatal", logging.CRITICAL: "fatal", # CRITICAL is the same as FATAL } # Capturing events from those loggers causes recursion errors. We cannot allow # the user to unconditionally create events from those loggers under any # circumstances. # # Note: Ignoring by logger name here is better than mucking with thread-locals. # We do not necessarily know whether thread-locals work 100% correctly in the user's environment. _IGNORED_LOGGERS = set( ["sentry_sdk.errors", "urllib3.connectionpool", "urllib3.connection"] ) def ignore_logger( name, # type: str ): # type: (...) -> None """This disables recording (both in breadcrumbs and as events) calls to a logger of a specific name. Among other uses, many of our integrations use this to prevent their actions being recorded as breadcrumbs. Exposed to users as a way to quiet spammy loggers. :param name: The name of the logger to ignore (same string you would pass to ``logging.getLogger``). """ _IGNORED_LOGGERS.add(name) class LoggingIntegration(Integration): identifier = "logging" def __init__(self, level=DEFAULT_LEVEL, event_level=DEFAULT_EVENT_LEVEL): # type: (Optional[int], Optional[int]) -> None self._handler = None self._breadcrumb_handler = None if level is not None: self._breadcrumb_handler = BreadcrumbHandler(level=level) if event_level is not None: self._handler = EventHandler(level=event_level) def _handle_record(self, record): # type: (LogRecord) -> None if self._handler is not None and record.levelno >= self._handler.level: self._handler.handle(record) if ( self._breadcrumb_handler is not None and record.levelno >= self._breadcrumb_handler.level ): self._breadcrumb_handler.handle(record) @staticmethod def setup_once(): # type: () -> None old_callhandlers = logging.Logger.callHandlers def sentry_patched_callhandlers(self, record): # type: (Any, LogRecord) -> Any # keeping a local reference because the # global might be discarded on shutdown ignored_loggers = _IGNORED_LOGGERS try: return old_callhandlers(self, record) finally: # This check is done twice, once also here before we even get # the integration. Otherwise we have a high chance of getting # into a recursion error when the integration is resolved # (this also is slower).
if ignored_loggers is not None and record.name not in ignored_loggers: integration = Hub.current.get_integration(LoggingIntegration) if integration is not None: integration._handle_record(record) logging.Logger.callHandlers = sentry_patched_callhandlers # type: ignore class _BaseHandler(logging.Handler, object): COMMON_RECORD_ATTRS = frozenset( ( "args", "created", "exc_info", "exc_text", "filename", "funcName", "levelname", "levelno", "linenno", "lineno", "message", "module", "msecs", "msg", "name", "pathname", "process", "processName", "relativeCreated", "stack", "tags", "taskName", "thread", "threadName", "stack_info", ) ) def _can_record(self, record): # type: (LogRecord) -> bool """Prevents ignored loggers from recording""" for logger in _IGNORED_LOGGERS: if fnmatch(record.name, logger): return False return True def _logging_to_event_level(self, record): # type: (LogRecord) -> str return LOGGING_TO_EVENT_LEVEL.get( record.levelno, record.levelname.lower() if record.levelname else "" ) def _extra_from_record(self, record): # type: (LogRecord) -> Dict[str, Any] return { k: v for k, v in iteritems(vars(record)) if k not in self.COMMON_RECORD_ATTRS and (not isinstance(k, str) or not k.startswith("_")) } class EventHandler(_BaseHandler): """ A logging handler that emits Sentry events for each log record Note that you do not have to use this class if the logging integration is enabled, which it is by default. """ def emit(self, record): # type: (LogRecord) -> Any with capture_internal_exceptions(): self.format(record) return self._emit(record) def _emit(self, record): # type: (LogRecord) -> None if not self._can_record(record): return hub = Hub.current if hub.client is None: return client_options = hub.client.options # exc_info might be None or (None, None, None) # # exc_info may also be any falsy value due to Python stdlib being # liberal with what it receives and Celery's billiard being "liberal" # with what it sends. See # https://github.com/getsentry/sentry-python/issues/904 if record.exc_info and record.exc_info[0] is not None: event, hint = event_from_exception( record.exc_info, client_options=client_options, mechanism={"type": "logging", "handled": True}, ) elif record.exc_info and record.exc_info[0] is None: event = {} hint = {} with capture_internal_exceptions(): event["threads"] = { "values": [ { "stacktrace": current_stacktrace( include_local_variables=client_options[ "include_local_variables" ], max_value_length=client_options["max_value_length"], ), "crashed": False, "current": True, } ] } else: event = {} hint = {} hint["log_record"] = record event["level"] = self._logging_to_event_level(record) event["logger"] = record.name # Log records from `warnings` module as separate issues record_captured_from_warnings_module = ( record.name == "py.warnings" and record.msg == "%s" ) if record_captured_from_warnings_module: # use the actual message and not "%s" as the message # this prevents grouping all warnings under one "%s" issue msg = record.args[0] # type: ignore event["logentry"] = { "message": msg, "params": (), } else: event["logentry"] = { "message": to_string(record.msg), "params": record.args, } event["extra"] = self._extra_from_record(record) hub.capture_event(event, hint=hint) # Legacy name SentryHandler = EventHandler class BreadcrumbHandler(_BaseHandler): """ A logging handler that records breadcrumbs for each log record. Note that you do not have to use this class if the logging integration is enabled, which it is by default.
""" def emit(self, record): # type: (LogRecord) -> Any with capture_internal_exceptions(): self.format(record) return self._emit(record) def _emit(self, record): # type: (LogRecord) -> None if not self._can_record(record): return Hub.current.add_breadcrumb( self._breadcrumb_from_record(record), hint={"log_record": record} ) def _breadcrumb_from_record(self, record): # type: (LogRecord) -> Dict[str, Any] return { "type": "log", "level": self._logging_to_event_level(record), "category": record.name, "message": record.message, "timestamp": utc_from_timestamp(record.created), "data": self._extra_from_record(record), } sentry-python-1.39.2/sentry_sdk/integrations/loguru.py000066400000000000000000000057531454744723200232610ustar00rootroot00000000000000from __future__ import absolute_import import enum from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations.logging import ( BreadcrumbHandler, EventHandler, _BaseHandler, ) if TYPE_CHECKING: from logging import LogRecord from typing import Optional, Tuple try: import loguru from loguru import logger from loguru._defaults import LOGURU_FORMAT as DEFAULT_FORMAT except ImportError: raise DidNotEnable("LOGURU is not installed") class LoggingLevels(enum.IntEnum): TRACE = 5 DEBUG = 10 INFO = 20 SUCCESS = 25 WARNING = 30 ERROR = 40 CRITICAL = 50 DEFAULT_LEVEL = LoggingLevels.INFO.value DEFAULT_EVENT_LEVEL = LoggingLevels.ERROR.value # We need to save the handlers to be able to remove them later # in tests (they call `LoguruIntegration.__init__` multiple times, # and we can't use `setup_once` because it's called before # than we get configuration). _ADDED_HANDLERS = (None, None) # type: Tuple[Optional[int], Optional[int]] class LoguruIntegration(Integration): identifier = "loguru" def __init__( self, level=DEFAULT_LEVEL, event_level=DEFAULT_EVENT_LEVEL, breadcrumb_format=DEFAULT_FORMAT, event_format=DEFAULT_FORMAT, ): # type: (Optional[int], Optional[int], str | loguru.FormatFunction, str | loguru.FormatFunction) -> None global _ADDED_HANDLERS breadcrumb_handler, event_handler = _ADDED_HANDLERS if breadcrumb_handler is not None: logger.remove(breadcrumb_handler) breadcrumb_handler = None if event_handler is not None: logger.remove(event_handler) event_handler = None if level is not None: breadcrumb_handler = logger.add( LoguruBreadcrumbHandler(level=level), level=level, format=breadcrumb_format, ) if event_level is not None: event_handler = logger.add( LoguruEventHandler(level=event_level), level=event_level, format=event_format, ) _ADDED_HANDLERS = (breadcrumb_handler, event_handler) @staticmethod def setup_once(): # type: () -> None pass # we do everything in __init__ class _LoguruBaseHandler(_BaseHandler): def _logging_to_event_level(self, record): # type: (LogRecord) -> str try: return LoggingLevels(record.levelno).name.lower() except ValueError: return record.levelname.lower() if record.levelname else "" class LoguruEventHandler(_LoguruBaseHandler, EventHandler): """Modified version of :class:`sentry_sdk.integrations.logging.EventHandler` to use loguru's level names.""" class LoguruBreadcrumbHandler(_LoguruBaseHandler, BreadcrumbHandler): """Modified version of :class:`sentry_sdk.integrations.logging.BreadcrumbHandler` to use loguru's level names.""" sentry-python-1.39.2/sentry_sdk/integrations/modules.py000066400000000000000000000016161454744723200234060ustar00rootroot00000000000000from __future__ import absolute_import from sentry_sdk.hub import Hub from 
sentry_sdk.integrations import Integration from sentry_sdk.scope import add_global_event_processor from sentry_sdk.utils import _get_installed_modules from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Dict from sentry_sdk._types import Event class ModulesIntegration(Integration): identifier = "modules" @staticmethod def setup_once(): # type: () -> None @add_global_event_processor def processor(event, hint): # type: (Event, Any) -> Dict[str, Any] if event.get("type") == "transaction": return event if Hub.current.get_integration(ModulesIntegration) is None: return event event["modules"] = _get_installed_modules() return event sentry-python-1.39.2/sentry_sdk/integrations/opentelemetry/000077500000000000000000000000001454744723200242545ustar00rootroot00000000000000sentry-python-1.39.2/sentry_sdk/integrations/opentelemetry/__init__.py000066400000000000000000000003221454744723200263620ustar00rootroot00000000000000from sentry_sdk.integrations.opentelemetry.span_processor import ( # noqa: F401 SentrySpanProcessor, ) from sentry_sdk.integrations.opentelemetry.propagator import ( # noqa: F401 SentryPropagator, ) sentry-python-1.39.2/sentry_sdk/integrations/opentelemetry/consts.py000066400000000000000000000002471454744723200261420ustar00rootroot00000000000000from opentelemetry.context import ( # type: ignore create_key, ) SENTRY_TRACE_KEY = create_key("sentry-trace") SENTRY_BAGGAGE_KEY = create_key("sentry-baggage") sentry-python-1.39.2/sentry_sdk/integrations/opentelemetry/integration.py000066400000000000000000000133051454744723200271530ustar00rootroot00000000000000""" IMPORTANT: The contents of this file are part of a proof of concept and as such are experimental and not suitable for production use. They may be changed or removed at any time without prior notice. """ import sys from importlib import import_module from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator from sentry_sdk.utils import logger, _get_installed_modules from sentry_sdk._types import TYPE_CHECKING try: from opentelemetry import trace # type: ignore from opentelemetry.instrumentation.auto_instrumentation._load import ( # type: ignore _load_distro, _load_instrumentors, ) from opentelemetry.propagate import set_global_textmap # type: ignore from opentelemetry.sdk.trace import TracerProvider # type: ignore except ImportError: raise DidNotEnable("opentelemetry not installed") if TYPE_CHECKING: from typing import Dict CLASSES_TO_INSTRUMENT = { # A mapping of packages to their entry point class that will be instrumented. # This is used to post-instrument any classes that were imported before OTel # instrumentation took place. "fastapi": "fastapi.FastAPI", "flask": "flask.Flask", } class OpenTelemetryIntegration(Integration): identifier = "opentelemetry" @staticmethod def setup_once(): # type: () -> None logger.warning( "[OTel] Initializing highly experimental OpenTelemetry support. " "Use at your own risk." ) original_classes = _record_unpatched_classes() try: distro = _load_distro() distro.configure() _load_instrumentors(distro) except Exception: logger.exception("[OTel] Failed to auto-initialize OpenTelemetry") try: _patch_remaining_classes(original_classes) except Exception: logger.exception( "[OTel] Failed to post-patch instrumented classes. 
" "You might have to make sure sentry_sdk.init() is called before importing anything else." ) _setup_sentry_tracing() logger.debug("[OTel] Finished setting up OpenTelemetry integration") def _record_unpatched_classes(): # type: () -> Dict[str, type] """ Keep references to classes that are about to be instrumented. Used to search for unpatched classes after the instrumentation has run so that they can be patched manually. """ installed_packages = _get_installed_modules() original_classes = {} for package, orig_path in CLASSES_TO_INSTRUMENT.items(): if package in installed_packages: try: original_cls = _import_by_path(orig_path) except (AttributeError, ImportError): logger.debug("[OTel] Failed to import %s", orig_path) continue original_classes[package] = original_cls return original_classes def _patch_remaining_classes(original_classes): # type: (Dict[str, type]) -> None """ Best-effort attempt to patch any uninstrumented classes in sys.modules. This enables us to not care about the order of imports and sentry_sdk.init() in user code. If e.g. the Flask class had been imported before sentry_sdk was init()ed (and therefore before the OTel instrumentation ran), it would not be instrumented. This function goes over remaining uninstrumented occurrences of the class in sys.modules and replaces them with the instrumented class. Since this is looking for exact matches, it will not work in some scenarios (e.g. if someone is not using the specific class explicitly, but rather inheriting from it). In those cases it's still necessary to sentry_sdk.init() before importing anything that's supposed to be instrumented. """ # check which classes have actually been instrumented instrumented_classes = {} for package in list(original_classes.keys()): original_path = CLASSES_TO_INSTRUMENT[package] try: cls = _import_by_path(original_path) except (AttributeError, ImportError): logger.debug( "[OTel] Failed to check if class has been instrumented: %s", original_path, ) del original_classes[package] continue if not cls.__module__.startswith("opentelemetry."): del original_classes[package] continue instrumented_classes[package] = cls if not instrumented_classes: return # replace occurrences of the original unpatched class in sys.modules for module_name, module in sys.modules.copy().items(): if ( module_name.startswith("sentry_sdk") or module_name in sys.builtin_module_names ): continue for package, original_cls in original_classes.items(): for var_name, var in vars(module).copy().items(): if var == original_cls: logger.debug( "[OTel] Additionally patching %s from %s", original_cls, module_name, ) setattr(module, var_name, instrumented_classes[package]) def _import_by_path(path): # type: (str) -> type parts = path.rsplit(".", maxsplit=1) return getattr(import_module(parts[0]), parts[-1]) def _setup_sentry_tracing(): # type: () -> None provider = TracerProvider() provider.add_span_processor(SentrySpanProcessor()) trace.set_tracer_provider(provider) set_global_textmap(SentryPropagator()) sentry-python-1.39.2/sentry_sdk/integrations/opentelemetry/propagator.py000066400000000000000000000072231454744723200270100ustar00rootroot00000000000000from opentelemetry import trace # type: ignore from opentelemetry.context import ( # type: ignore Context, get_current, set_value, ) from opentelemetry.propagators.textmap import ( # type: ignore CarrierT, Getter, Setter, TextMapPropagator, default_getter, default_setter, ) from opentelemetry.trace import ( # type: ignore NonRecordingSpan, SpanContext, TraceFlags, ) from 
sentry_sdk.integrations.opentelemetry.consts import ( SENTRY_BAGGAGE_KEY, SENTRY_TRACE_KEY, ) from sentry_sdk.integrations.opentelemetry.span_processor import ( SentrySpanProcessor, ) from sentry_sdk.tracing import ( BAGGAGE_HEADER_NAME, SENTRY_TRACE_HEADER_NAME, ) from sentry_sdk.tracing_utils import Baggage, extract_sentrytrace_data from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Optional from typing import Set class SentryPropagator(TextMapPropagator): # type: ignore """ Propagates tracing headers for Sentry's tracing system in a way OTel understands. """ def extract(self, carrier, context=None, getter=default_getter): # type: (CarrierT, Optional[Context], Getter) -> Context if context is None: context = get_current() sentry_trace = getter.get(carrier, SENTRY_TRACE_HEADER_NAME) if not sentry_trace: return context sentrytrace = extract_sentrytrace_data(sentry_trace[0]) if not sentrytrace: return context context = set_value(SENTRY_TRACE_KEY, sentrytrace, context) trace_id, span_id = sentrytrace["trace_id"], sentrytrace["parent_span_id"] span_context = SpanContext( trace_id=int(trace_id, 16), # type: ignore span_id=int(span_id, 16), # type: ignore # we simulate a sampled trace on the otel side and leave the sampling to sentry trace_flags=TraceFlags(TraceFlags.SAMPLED), is_remote=True, ) baggage_header = getter.get(carrier, BAGGAGE_HEADER_NAME) if baggage_header: baggage = Baggage.from_incoming_header(baggage_header[0]) else: # If there's an incoming sentry-trace but no incoming baggage header, # for instance in traces coming from older SDKs, # baggage will be empty and frozen and won't be populated as head SDK. baggage = Baggage(sentry_items={}) baggage.freeze() context = set_value(SENTRY_BAGGAGE_KEY, baggage, context) span = NonRecordingSpan(span_context) modified_context = trace.set_span_in_context(span, context) return modified_context def inject(self, carrier, context=None, setter=default_setter): # type: (CarrierT, Optional[Context], Setter) -> None if context is None: context = get_current() current_span = trace.get_current_span(context) current_span_context = current_span.get_span_context() if not current_span_context.is_valid: return span_id = trace.format_span_id(current_span_context.span_id) span_map = SentrySpanProcessor().otel_span_map sentry_span = span_map.get(span_id, None) if not sentry_span: return setter.set(carrier, SENTRY_TRACE_HEADER_NAME, sentry_span.to_traceparent()) if sentry_span.containing_transaction: baggage = sentry_span.containing_transaction.get_baggage() if baggage: setter.set(carrier, BAGGAGE_HEADER_NAME, baggage.serialize()) @property def fields(self): # type: () -> Set[str] return {SENTRY_TRACE_HEADER_NAME, BAGGAGE_HEADER_NAME} sentry-python-1.39.2/sentry_sdk/integrations/opentelemetry/span_processor.py000066400000000000000000000246531454744723200277000ustar00rootroot00000000000000from opentelemetry.context import get_value # type: ignore from opentelemetry.sdk.trace import SpanProcessor # type: ignore from opentelemetry.semconv.trace import SpanAttributes # type: ignore from opentelemetry.trace import ( # type: ignore format_span_id, format_trace_id, get_current_span, SpanContext, Span as OTelSpan, SpanKind, ) from opentelemetry.trace.span import ( # type: ignore INVALID_SPAN_ID, INVALID_TRACE_ID, ) from sentry_sdk._compat import utc_from_timestamp from sentry_sdk.consts import INSTRUMENTER from sentry_sdk.hub import Hub from sentry_sdk.integrations.opentelemetry.consts import ( SENTRY_BAGGAGE_KEY, SENTRY_TRACE_KEY, ) 
from sentry_sdk.scope import add_global_event_processor from sentry_sdk.tracing import Transaction, Span as SentrySpan from sentry_sdk.utils import Dsn from sentry_sdk._types import TYPE_CHECKING from urllib3.util import parse_url as urlparse if TYPE_CHECKING: from typing import Any, Dict, Optional, Union from sentry_sdk._types import Event, Hint OPEN_TELEMETRY_CONTEXT = "otel" def link_trace_context_to_error_event(event, otel_span_map): # type: (Event, Dict[str, Union[Transaction, SentrySpan]]) -> Event hub = Hub.current if not hub: return event if hub.client and hub.client.options["instrumenter"] != INSTRUMENTER.OTEL: return event if event.get("type") == "transaction": return event otel_span = get_current_span() if not otel_span: return event ctx = otel_span.get_span_context() trace_id = format_trace_id(ctx.trace_id) span_id = format_span_id(ctx.span_id) if trace_id == INVALID_TRACE_ID or span_id == INVALID_SPAN_ID: return event sentry_span = otel_span_map.get(span_id, None) if not sentry_span: return event contexts = event.setdefault("contexts", {}) contexts.setdefault("trace", {}).update(sentry_span.get_trace_context()) return event class SentrySpanProcessor(SpanProcessor): # type: ignore """ Converts OTel spans into Sentry spans so they can be sent to the Sentry backend. """ # The mapping from otel span ids to sentry spans otel_span_map = {} # type: Dict[str, Union[Transaction, SentrySpan]] def __new__(cls): # type: () -> SentrySpanProcessor if not hasattr(cls, "instance"): cls.instance = super(SentrySpanProcessor, cls).__new__(cls) return cls.instance def __init__(self): # type: () -> None @add_global_event_processor def global_event_processor(event, hint): # type: (Event, Hint) -> Event return link_trace_context_to_error_event(event, self.otel_span_map) def on_start(self, otel_span, parent_context=None): # type: (OTelSpan, Optional[SpanContext]) -> None hub = Hub.current if not hub: return if not hub.client or (hub.client and not hub.client.dsn): return try: _ = Dsn(hub.client.dsn or "") except Exception: return if hub.client and hub.client.options["instrumenter"] != INSTRUMENTER.OTEL: return if not otel_span.get_span_context().is_valid: return if self._is_sentry_span(hub, otel_span): return trace_data = self._get_trace_data(otel_span, parent_context) parent_span_id = trace_data["parent_span_id"] sentry_parent_span = ( self.otel_span_map.get(parent_span_id, None) if parent_span_id else None ) sentry_span = None if sentry_parent_span: sentry_span = sentry_parent_span.start_child( span_id=trace_data["span_id"], description=otel_span.name, start_timestamp=utc_from_timestamp(otel_span.start_time / 1e9), instrumenter=INSTRUMENTER.OTEL, ) else: sentry_span = hub.start_transaction( name=otel_span.name, span_id=trace_data["span_id"], parent_span_id=parent_span_id, trace_id=trace_data["trace_id"], baggage=trace_data["baggage"], start_timestamp=utc_from_timestamp(otel_span.start_time / 1e9), instrumenter=INSTRUMENTER.OTEL, ) self.otel_span_map[trace_data["span_id"]] = sentry_span def on_end(self, otel_span): # type: (OTelSpan) -> None hub = Hub.current if not hub: return if hub.client and hub.client.options["instrumenter"] != INSTRUMENTER.OTEL: return span_context = otel_span.get_span_context() if not span_context.is_valid: return span_id = format_span_id(span_context.span_id) sentry_span = self.otel_span_map.pop(span_id, None) if not sentry_span: return sentry_span.op = otel_span.name self._update_span_with_otel_status(sentry_span, otel_span) if isinstance(sentry_span,
Transaction): sentry_span.name = otel_span.name sentry_span.set_context( OPEN_TELEMETRY_CONTEXT, self._get_otel_context(otel_span) ) self._update_transaction_with_otel_data(sentry_span, otel_span) else: self._update_span_with_otel_data(sentry_span, otel_span) sentry_span.finish(end_timestamp=utc_from_timestamp(otel_span.end_time / 1e9)) def _is_sentry_span(self, hub, otel_span): # type: (Hub, OTelSpan) -> bool """ Break infinite loop: HTTP requests to Sentry are caught by OTel and send again to Sentry. """ otel_span_url = otel_span.attributes.get(SpanAttributes.HTTP_URL, None) dsn_url = hub.client and Dsn(hub.client.dsn or "").netloc if otel_span_url and dsn_url in otel_span_url: return True return False def _get_otel_context(self, otel_span): # type: (OTelSpan) -> Dict[str, Any] """ Returns the OTel context for Sentry. See: https://develop.sentry.dev/sdk/performance/opentelemetry/#step-5-add-opentelemetry-context """ ctx = {} if otel_span.attributes: ctx["attributes"] = dict(otel_span.attributes) if otel_span.resource.attributes: ctx["resource"] = dict(otel_span.resource.attributes) return ctx def _get_trace_data(self, otel_span, parent_context): # type: (OTelSpan, SpanContext) -> Dict[str, Any] """ Extracts tracing information from one OTel span and its parent OTel context. """ trace_data = {} span_context = otel_span.get_span_context() span_id = format_span_id(span_context.span_id) trace_data["span_id"] = span_id trace_id = format_trace_id(span_context.trace_id) trace_data["trace_id"] = trace_id parent_span_id = ( format_span_id(otel_span.parent.span_id) if otel_span.parent else None ) trace_data["parent_span_id"] = parent_span_id sentry_trace_data = get_value(SENTRY_TRACE_KEY, parent_context) trace_data["parent_sampled"] = ( sentry_trace_data["parent_sampled"] if sentry_trace_data else None ) baggage = get_value(SENTRY_BAGGAGE_KEY, parent_context) trace_data["baggage"] = baggage return trace_data def _update_span_with_otel_status(self, sentry_span, otel_span): # type: (SentrySpan, OTelSpan) -> None """ Set the Sentry span status from the OTel span """ if otel_span.status.is_unset: return if otel_span.status.is_ok: sentry_span.set_status("ok") return sentry_span.set_status("internal_error") def _update_span_with_otel_data(self, sentry_span, otel_span): # type: (SentrySpan, OTelSpan) -> None """ Convert OTel span data and update the Sentry span with it. This should eventually happen on the server when ingesting the spans. 
""" for key, val in otel_span.attributes.items(): sentry_span.set_data(key, val) sentry_span.set_data("otel.kind", otel_span.kind) op = otel_span.name description = otel_span.name http_method = otel_span.attributes.get(SpanAttributes.HTTP_METHOD, None) db_query = otel_span.attributes.get(SpanAttributes.DB_SYSTEM, None) if http_method: op = "http" if otel_span.kind == SpanKind.SERVER: op += ".server" elif otel_span.kind == SpanKind.CLIENT: op += ".client" description = http_method peer_name = otel_span.attributes.get(SpanAttributes.NET_PEER_NAME, None) if peer_name: description += " {}".format(peer_name) target = otel_span.attributes.get(SpanAttributes.HTTP_TARGET, None) if target: description += " {}".format(target) if not peer_name and not target: url = otel_span.attributes.get(SpanAttributes.HTTP_URL, None) if url: parsed_url = urlparse(url) url = "{}://{}{}".format( parsed_url.scheme, parsed_url.netloc, parsed_url.path ) description += " {}".format(url) status_code = otel_span.attributes.get( SpanAttributes.HTTP_STATUS_CODE, None ) if status_code: sentry_span.set_http_status(status_code) elif db_query: op = "db" statement = otel_span.attributes.get(SpanAttributes.DB_STATEMENT, None) if statement: description = statement sentry_span.op = op sentry_span.description = description def _update_transaction_with_otel_data(self, sentry_span, otel_span): # type: (SentrySpan, OTelSpan) -> None http_method = otel_span.attributes.get(SpanAttributes.HTTP_METHOD) if http_method: status_code = otel_span.attributes.get(SpanAttributes.HTTP_STATUS_CODE) if status_code: sentry_span.set_http_status(status_code) op = "http" if otel_span.kind == SpanKind.SERVER: op += ".server" elif otel_span.kind == SpanKind.CLIENT: op += ".client" sentry_span.op = op sentry-python-1.39.2/sentry_sdk/integrations/pure_eval.py000066400000000000000000000107121454744723200237150ustar00rootroot00000000000000from __future__ import absolute_import import ast from sentry_sdk import Hub, serializer from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.scope import add_global_event_processor from sentry_sdk.utils import walk_exception_chain, iter_stacks if TYPE_CHECKING: from typing import Optional, Dict, Any, Tuple, List from types import FrameType from sentry_sdk._types import Event, Hint try: import executing except ImportError: raise DidNotEnable("executing is not installed") try: import pure_eval except ImportError: raise DidNotEnable("pure_eval is not installed") try: # Used implicitly, just testing it's available import asttokens # noqa except ImportError: raise DidNotEnable("asttokens is not installed") class PureEvalIntegration(Integration): identifier = "pure_eval" @staticmethod def setup_once(): # type: () -> None @add_global_event_processor def add_executing_info(event, hint): # type: (Event, Optional[Hint]) -> Optional[Event] if Hub.current.get_integration(PureEvalIntegration) is None: return event if hint is None: return event exc_info = hint.get("exc_info", None) if exc_info is None: return event exception = event.get("exception", None) if exception is None: return event values = exception.get("values", None) if values is None: return event for exception, (_exc_type, _exc_value, exc_tb) in zip( reversed(values), walk_exception_chain(exc_info) ): sentry_frames = [ frame for frame in exception.get("stacktrace", {}).get("frames", []) if frame.get("function") ] tbs = list(iter_stacks(exc_tb)) if len(sentry_frames) != len(tbs): continue for sentry_frame, tb 
in zip(sentry_frames, tbs): sentry_frame["vars"] = ( pure_eval_frame(tb.tb_frame) or sentry_frame["vars"] ) return event def pure_eval_frame(frame): # type: (FrameType) -> Dict[str, Any] source = executing.Source.for_frame(frame) if not source.tree: return {} statements = source.statements_at_line(frame.f_lineno) if not statements: return {} scope = stmt = list(statements)[0] while True: # Get the parent first in case the original statement is already # a function definition, e.g. if we're calling a decorator # In that case we still want the surrounding scope, not that function scope = scope.parent if isinstance(scope, (ast.FunctionDef, ast.ClassDef, ast.Module)): break evaluator = pure_eval.Evaluator.from_frame(frame) expressions = evaluator.interesting_expressions_grouped(scope) def closeness(expression): # type: (Tuple[List[Any], Any]) -> Tuple[int, int] # Prioritise expressions with a node closer to the statement executed # without being after that statement # A higher return value is better - the expression will appear # earlier in the list of values and is less likely to be trimmed nodes, _value = expression def start(n): # type: (ast.expr) -> Tuple[int, int] return (n.lineno, n.col_offset) nodes_before_stmt = [ node for node in nodes if start(node) < stmt.last_token.end # type: ignore ] if nodes_before_stmt: # The position of the last node before or in the statement return max(start(node) for node in nodes_before_stmt) else: # The position of the first node after the statement # Negative means it's always lower priority than nodes that come before # Less negative means closer to the statement and higher priority lineno, col_offset = min(start(node) for node in nodes) return (-lineno, -col_offset) # This adds the first_token and last_token attributes to nodes atok = source.asttokens() expressions.sort(key=closeness, reverse=True) return { atok.get_text(nodes[0]): value for nodes, value in expressions[: serializer.MAX_DATABAG_BREADTH] } sentry-python-1.39.2/sentry_sdk/integrations/pymongo.py000066400000000000000000000135671454744723200234320ustar00rootroot00000000000000from __future__ import absolute_import import copy from sentry_sdk import Hub from sentry_sdk.consts import SPANDATA from sentry_sdk.hub import _should_send_default_pii from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.tracing import Span from sentry_sdk.utils import capture_internal_exceptions from sentry_sdk._types import TYPE_CHECKING try: from pymongo import monitoring except ImportError: raise DidNotEnable("Pymongo not installed") if TYPE_CHECKING: from typing import Any, Dict, Union from pymongo.monitoring import ( CommandFailedEvent, CommandStartedEvent, CommandSucceededEvent, ) SAFE_COMMAND_ATTRIBUTES = [ "insert", "ordered", "find", "limit", "singleBatch", "aggregate", "createIndexes", "indexes", "delete", "findAndModify", "renameCollection", "to", "drop", ] def _strip_pii(command): # type: (Dict[str, Any]) -> Dict[str, Any] for key in command: is_safe_field = key in SAFE_COMMAND_ATTRIBUTES if is_safe_field: # Skip if safe key continue update_db_command = key == "update" and "findAndModify" not in command if update_db_command: # Also skip "update" db command because it is safe. # There is also an "update" key in the "findAndModify" command, which is NOT safe!
continue # Special stripping for documents is_document = key == "documents" if is_document: for doc in command[key]: for doc_key in doc: doc[doc_key] = "%s" continue # Special stripping for dict style fields is_dict_field = key in ["filter", "query", "update"] if is_dict_field: for item_key in command[key]: command[key][item_key] = "%s" continue # For pipeline fields strip the `$match` dict is_pipeline_field = key == "pipeline" if is_pipeline_field: for pipeline in command[key]: for match_key in pipeline["$match"] if "$match" in pipeline else []: pipeline["$match"][match_key] = "%s" continue # Default stripping command[key] = "%s" return command def _get_db_data(event): # type: (Any) -> Dict[str, Any] data = {} data[SPANDATA.DB_SYSTEM] = "mongodb" db_name = event.database_name if db_name is not None: data[SPANDATA.DB_NAME] = db_name server_address = event.connection_id[0] if server_address is not None: data[SPANDATA.SERVER_ADDRESS] = server_address server_port = event.connection_id[1] if server_port is not None: data[SPANDATA.SERVER_PORT] = server_port return data class CommandTracer(monitoring.CommandListener): def __init__(self): # type: () -> None self._ongoing_operations = {} # type: Dict[int, Span] def _operation_key(self, event): # type: (Union[CommandFailedEvent, CommandStartedEvent, CommandSucceededEvent]) -> int return event.request_id def started(self, event): # type: (CommandStartedEvent) -> None hub = Hub.current if hub.get_integration(PyMongoIntegration) is None: return with capture_internal_exceptions(): command = dict(copy.deepcopy(event.command)) command.pop("$db", None) command.pop("$clusterTime", None) command.pop("$signature", None) op = "db.query" tags = { "db.name": event.database_name, SPANDATA.DB_SYSTEM: "mongodb", SPANDATA.DB_OPERATION: event.command_name, } try: tags["net.peer.name"] = event.connection_id[0] tags["net.peer.port"] = str(event.connection_id[1]) except TypeError: pass data = {"operation_ids": {}} # type: Dict[str, Any] data["operation_ids"]["operation"] = event.operation_id data["operation_ids"]["request"] = event.request_id data.update(_get_db_data(event)) try: lsid = command.pop("lsid")["id"] data["operation_ids"]["session"] = str(lsid) except KeyError: pass if not _should_send_default_pii(): command = _strip_pii(command) query = "{} {}".format(event.command_name, command) span = hub.start_span(op=op, description=query) for tag, value in tags.items(): span.set_tag(tag, value) for key, value in data.items(): span.set_data(key, value) with capture_internal_exceptions(): hub.add_breadcrumb(message=query, category="query", type=op, data=tags) self._ongoing_operations[self._operation_key(event)] = span.__enter__() def failed(self, event): # type: (CommandFailedEvent) -> None hub = Hub.current if hub.get_integration(PyMongoIntegration) is None: return try: span = self._ongoing_operations.pop(self._operation_key(event)) span.set_status("internal_error") span.__exit__(None, None, None) except KeyError: return def succeeded(self, event): # type: (CommandSucceededEvent) -> None hub = Hub.current if hub.get_integration(PyMongoIntegration) is None: return try: span = self._ongoing_operations.pop(self._operation_key(event)) span.set_status("ok") span.__exit__(None, None, None) except KeyError: pass class PyMongoIntegration(Integration): identifier = "pymongo" @staticmethod def setup_once(): # type: () -> None monitoring.register(CommandTracer()) 
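# Example usage (a minimal sketch, not part of the original module): the DSN,
# the local MongoDB server, and the database/collection names below are
# assumptions made for illustration only.
if __name__ == "__main__":
    import sentry_sdk
    from pymongo import MongoClient
    from sentry_sdk.integrations.pymongo import PyMongoIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # hypothetical DSN
        integrations=[PyMongoIntegration()],
        traces_sample_rate=1.0,  # sample every transaction so the spans show up
    )

    # Each MongoDB command issued inside the transaction becomes a "db.query"
    # span; with send_default_pii disabled (the default), _strip_pii() replaces
    # document values with "%s" placeholders.
    with sentry_sdk.start_transaction(op="task", name="pymongo-example"):
        client = MongoClient("mongodb://localhost:27017")  # assumed local server
        client.example_db.example_collection.insert_one({"hello": "world"})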
sentry-python-1.39.2/sentry_sdk/integrations/pyramid.py000066400000000000000000000164221454744723200234040ustar00rootroot00000000000000from __future__ import absolute_import import os import sys import weakref from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.scope import Scope from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, ) from sentry_sdk._compat import reraise, iteritems from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations._wsgi_common import RequestExtractor from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware try: from pyramid.httpexceptions import HTTPException from pyramid.request import Request except ImportError: raise DidNotEnable("Pyramid not installed") from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from pyramid.response import Response from typing import Any from sentry_sdk.integrations.wsgi import _ScopedResponse from typing import Callable from typing import Dict from typing import Optional from webob.cookies import RequestCookies # type: ignore from webob.compat import cgi_FieldStorage # type: ignore from sentry_sdk.utils import ExcInfo from sentry_sdk._types import EventProcessor if getattr(Request, "authenticated_userid", None): def authenticated_userid(request): # type: (Request) -> Optional[Any] return request.authenticated_userid else: # bw-compat for pyramid < 1.5 from pyramid.security import authenticated_userid # type: ignore TRANSACTION_STYLE_VALUES = ("route_name", "route_pattern") class PyramidIntegration(Integration): identifier = "pyramid" transaction_style = "" def __init__(self, transaction_style="route_name"): # type: (str) -> None if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" % (transaction_style, TRANSACTION_STYLE_VALUES) ) self.transaction_style = transaction_style @staticmethod def setup_once(): # type: () -> None from pyramid import router old_call_view = router._call_view def sentry_patched_call_view(registry, request, *args, **kwargs): # type: (Any, Request, *Any, **Any) -> Response hub = Hub.current integration = hub.get_integration(PyramidIntegration) if integration is not None: with hub.configure_scope() as scope: _set_transaction_name_and_source( scope, integration.transaction_style, request ) scope.add_event_processor( _make_event_processor(weakref.ref(request), integration) ) return old_call_view(registry, request, *args, **kwargs) router._call_view = sentry_patched_call_view if hasattr(Request, "invoke_exception_view"): old_invoke_exception_view = Request.invoke_exception_view def sentry_patched_invoke_exception_view(self, *args, **kwargs): # type: (Request, *Any, **Any) -> Any rv = old_invoke_exception_view(self, *args, **kwargs) if ( self.exc_info and all(self.exc_info) and rv.status_int == 500 and Hub.current.get_integration(PyramidIntegration) is not None ): _capture_exception(self.exc_info) return rv Request.invoke_exception_view = sentry_patched_invoke_exception_view old_wsgi_call = router.Router.__call__ def sentry_patched_wsgi_call(self, environ, start_response): # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse hub = Hub.current integration = hub.get_integration(PyramidIntegration) if integration is None: return old_wsgi_call(self, environ, start_response) def sentry_patched_inner_wsgi_call(environ, start_response): # type: (Dict[str, Any], Callable[..., Any]) -> Any 
try: return old_wsgi_call(self, environ, start_response) except Exception: einfo = sys.exc_info() _capture_exception(einfo) reraise(*einfo) return SentryWsgiMiddleware(sentry_patched_inner_wsgi_call)( environ, start_response ) router.Router.__call__ = sentry_patched_wsgi_call def _capture_exception(exc_info): # type: (ExcInfo) -> None if exc_info[0] is None or issubclass(exc_info[0], HTTPException): return hub = Hub.current if hub.get_integration(PyramidIntegration) is None: return # If an integration is there, a client has to be there. client = hub.client # type: Any event, hint = event_from_exception( exc_info, client_options=client.options, mechanism={"type": "pyramid", "handled": False}, ) hub.capture_event(event, hint=hint) def _set_transaction_name_and_source(scope, transaction_style, request): # type: (Scope, str, Request) -> None try: name_for_style = { "route_name": request.matched_route.name, "route_pattern": request.matched_route.pattern, } scope.set_transaction_name( name_for_style[transaction_style], source=SOURCE_FOR_STYLE[transaction_style], ) except Exception: pass class PyramidRequestExtractor(RequestExtractor): def url(self): # type: () -> str return self.request.path_url def env(self): # type: () -> Dict[str, str] return self.request.environ def cookies(self): # type: () -> RequestCookies return self.request.cookies def raw_data(self): # type: () -> str return self.request.text def form(self): # type: () -> Dict[str, str] return { key: value for key, value in iteritems(self.request.POST) if not getattr(value, "filename", None) } def files(self): # type: () -> Dict[str, cgi_FieldStorage] return { key: value for key, value in iteritems(self.request.POST) if getattr(value, "filename", None) } def size_of_file(self, postdata): # type: (cgi_FieldStorage) -> int file = postdata.file try: return os.fstat(file.fileno()).st_size except Exception: return 0 def _make_event_processor(weak_request, integration): # type: (Callable[[], Request], PyramidIntegration) -> EventProcessor def event_processor(event, hint): # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] request = weak_request() if request is None: return event with capture_internal_exceptions(): PyramidRequestExtractor(request).extract_into_event(event) if _should_send_default_pii(): with capture_internal_exceptions(): user_info = event.setdefault("user", {}) user_info.setdefault("id", authenticated_userid(request)) return event return event_processor sentry-python-1.39.2/sentry_sdk/integrations/quart.py000066400000000000000000000165661454744723200231040ustar00rootroot00000000000000from __future__ import absolute_import import asyncio import inspect import threading from sentry_sdk.hub import _should_send_default_pii, Hub from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations._wsgi_common import _filter_headers from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from sentry_sdk.scope import Scope from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, ) from sentry_sdk._functools import wraps from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Dict from typing import Union from sentry_sdk._types import EventProcessor try: import quart_auth # type: ignore except ImportError: quart_auth = None try: from quart import ( # type: ignore has_request_context, has_websocket_context, Request, Quart, request, websocket, ) from quart.signals import ( # type: 
ignore got_background_exception, got_request_exception, got_websocket_exception, request_started, websocket_started, ) except ImportError: raise DidNotEnable("Quart is not installed") else: # Quart 0.19 is based on Flask and hence no longer has a Scaffold try: from quart.scaffold import Scaffold # type: ignore except ImportError: from flask.sansio.scaffold import Scaffold # type: ignore TRANSACTION_STYLE_VALUES = ("endpoint", "url") class QuartIntegration(Integration): identifier = "quart" transaction_style = "" def __init__(self, transaction_style="endpoint"): # type: (str) -> None if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" % (transaction_style, TRANSACTION_STYLE_VALUES) ) self.transaction_style = transaction_style @staticmethod def setup_once(): # type: () -> None request_started.connect(_request_websocket_started) websocket_started.connect(_request_websocket_started) got_background_exception.connect(_capture_exception) got_request_exception.connect(_capture_exception) got_websocket_exception.connect(_capture_exception) patch_asgi_app() patch_scaffold_route() def patch_asgi_app(): # type: () -> None old_app = Quart.__call__ async def sentry_patched_asgi_app(self, scope, receive, send): # type: (Any, Any, Any, Any) -> Any if Hub.current.get_integration(QuartIntegration) is None: return await old_app(self, scope, receive, send) middleware = SentryAsgiMiddleware(lambda *a, **kw: old_app(self, *a, **kw)) middleware.__call__ = middleware._run_asgi3 return await middleware(scope, receive, send) Quart.__call__ = sentry_patched_asgi_app def patch_scaffold_route(): # type: () -> None old_route = Scaffold.route def _sentry_route(*args, **kwargs): # type: (*Any, **Any) -> Any old_decorator = old_route(*args, **kwargs) def decorator(old_func): # type: (Any) -> Any if inspect.isfunction(old_func) and not asyncio.iscoroutinefunction( old_func ): @wraps(old_func) def _sentry_func(*args, **kwargs): # type: (*Any, **Any) -> Any hub = Hub.current integration = hub.get_integration(QuartIntegration) if integration is None: return old_func(*args, **kwargs) with hub.configure_scope() as sentry_scope: if sentry_scope.profile is not None: sentry_scope.profile.active_thread_id = ( threading.current_thread().ident ) return old_func(*args, **kwargs) return old_decorator(_sentry_func) return old_decorator(old_func) return decorator Scaffold.route = _sentry_route def _set_transaction_name_and_source(scope, transaction_style, request): # type: (Scope, str, Request) -> None try: name_for_style = { "url": request.url_rule.rule, "endpoint": request.url_rule.endpoint, } scope.set_transaction_name( name_for_style[transaction_style], source=SOURCE_FOR_STYLE[transaction_style], ) except Exception: pass async def _request_websocket_started(app, **kwargs): # type: (Quart, **Any) -> None hub = Hub.current integration = hub.get_integration(QuartIntegration) if integration is None: return with hub.configure_scope() as scope: if has_request_context(): request_websocket = request._get_current_object() if has_websocket_context(): request_websocket = websocket._get_current_object() # Set the transaction name here, but rely on ASGI middleware # to actually start the transaction _set_transaction_name_and_source( scope, integration.transaction_style, request_websocket ) evt_processor = _make_request_event_processor( app, request_websocket, integration ) scope.add_event_processor(evt_processor) def _make_request_event_processor(app, request, 
integration): # type: (Quart, Request, QuartIntegration) -> EventProcessor def inner(event, hint): # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] # if the request is gone we are fine not logging the data from # it. This might happen if the processor is pushed away to # another thread. if request is None: return event with capture_internal_exceptions(): # TODO: Figure out what to do with request body. Methods on request # are async, but event processors are not. request_info = event.setdefault("request", {}) request_info["url"] = request.url request_info["query_string"] = request.query_string request_info["method"] = request.method request_info["headers"] = _filter_headers(dict(request.headers)) if _should_send_default_pii(): request_info["env"] = {"REMOTE_ADDR": request.access_route[0]} _add_user_to_event(event) return event return inner async def _capture_exception(sender, exception, **kwargs): # type: (Quart, Union[ValueError, BaseException], **Any) -> None hub = Hub.current if hub.get_integration(QuartIntegration) is None: return # If an integration is there, a client has to be there. client = hub.client # type: Any event, hint = event_from_exception( exception, client_options=client.options, mechanism={"type": "quart", "handled": False}, ) hub.capture_event(event, hint=hint) def _add_user_to_event(event): # type: (Dict[str, Any]) -> None if quart_auth is None: return user = quart_auth.current_user if user is None: return with capture_internal_exceptions(): user_info = event.setdefault("user", {}) user_info["id"] = quart_auth.current_user._auth_id sentry-python-1.39.2/sentry_sdk/integrations/redis/000077500000000000000000000000001454744723200224665ustar00rootroot00000000000000sentry-python-1.39.2/sentry_sdk/integrations/redis/__init__.py000066400000000000000000000273361454744723200246120ustar00rootroot00000000000000from __future__ import absolute_import from sentry_sdk import Hub from sentry_sdk.consts import OP, SPANDATA from sentry_sdk._compat import text_type from sentry_sdk.hub import _should_send_default_pii from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.utils import ( SENSITIVE_DATA_SUBSTITUTE, capture_internal_exceptions, logger, ) if TYPE_CHECKING: from collections.abc import Callable from typing import Any, Dict, Sequence from redis import Redis, RedisCluster from redis.asyncio.cluster import ( RedisCluster as AsyncRedisCluster, ClusterPipeline as AsyncClusterPipeline, ) from sentry_sdk.tracing import Span _SINGLE_KEY_COMMANDS = frozenset( ["decr", "decrby", "get", "incr", "incrby", "pttl", "set", "setex", "setnx", "ttl"], ) _MULTI_KEY_COMMANDS = frozenset( ["del", "touch", "unlink"], ) _COMMANDS_INCLUDING_SENSITIVE_DATA = [ "auth", ] _MAX_NUM_ARGS = 10 # Trim argument lists to this many values _MAX_NUM_COMMANDS = 10 # Trim command lists to this many values _DEFAULT_MAX_DATA_SIZE = 1024 def _get_safe_command(name, args): # type: (str, Sequence[Any]) -> str command_parts = [name] for i, arg in enumerate(args): if i > _MAX_NUM_ARGS: break name_low = name.lower() if name_low in _COMMANDS_INCLUDING_SENSITIVE_DATA: command_parts.append(SENSITIVE_DATA_SUBSTITUTE) continue arg_is_the_key = i == 0 if arg_is_the_key: command_parts.append(repr(arg)) else: if _should_send_default_pii(): command_parts.append(repr(arg)) else: command_parts.append(SENSITIVE_DATA_SUBSTITUTE) command = " ".join(command_parts) return command def _get_span_description(name, *args): # type: (str, *Any) -> str description = name 
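    # Fall back to the bare command name: _get_safe_command() below may raise,
    # in which case capture_internal_exceptions() logs the error internally
    # without propagating it, and the description stays as just the name.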
with capture_internal_exceptions(): description = _get_safe_command(name, args) return description def _get_redis_command_args(command): # type: (Any) -> Sequence[Any] return command[0] def _parse_rediscluster_command(command): # type: (Any) -> Sequence[Any] return command.args def _set_pipeline_data( span, is_cluster, get_command_args_fn, is_transaction, command_stack ): # type: (Span, bool, Any, bool, Sequence[Any]) -> None span.set_tag("redis.is_cluster", is_cluster) span.set_tag("redis.transaction", is_transaction) commands = [] for i, arg in enumerate(command_stack): if i >= _MAX_NUM_COMMANDS: break command = get_command_args_fn(arg) commands.append(_get_safe_command(command[0], command[1:])) span.set_data( "redis.commands", { "count": len(command_stack), "first_ten": commands, }, ) def _set_client_data(span, is_cluster, name, *args): # type: (Span, bool, str, *Any) -> None span.set_tag("redis.is_cluster", is_cluster) if name: span.set_tag("redis.command", name) span.set_tag(SPANDATA.DB_OPERATION, name) if name and args: name_low = name.lower() if (name_low in _SINGLE_KEY_COMMANDS) or ( name_low in _MULTI_KEY_COMMANDS and len(args) == 1 ): span.set_tag("redis.key", args[0]) def _set_db_data_on_span(span, connection_params): # type: (Span, Dict[str, Any]) -> None span.set_data(SPANDATA.DB_SYSTEM, "redis") db = connection_params.get("db") if db is not None: span.set_data(SPANDATA.DB_NAME, text_type(db)) host = connection_params.get("host") if host is not None: span.set_data(SPANDATA.SERVER_ADDRESS, host) port = connection_params.get("port") if port is not None: span.set_data(SPANDATA.SERVER_PORT, port) def _set_db_data(span, redis_instance): # type: (Span, Redis[Any]) -> None try: _set_db_data_on_span(span, redis_instance.connection_pool.connection_kwargs) except AttributeError: pass # connections_kwargs may be missing in some cases def _set_cluster_db_data(span, redis_cluster_instance): # type: (Span, RedisCluster[Any]) -> None default_node = redis_cluster_instance.get_default_node() if default_node is not None: _set_db_data_on_span( span, {"host": default_node.host, "port": default_node.port} ) def _set_async_cluster_db_data(span, async_redis_cluster_instance): # type: (Span, AsyncRedisCluster[Any]) -> None default_node = async_redis_cluster_instance.get_default_node() if default_node is not None and default_node.connection_kwargs is not None: _set_db_data_on_span(span, default_node.connection_kwargs) def _set_async_cluster_pipeline_db_data(span, async_redis_cluster_pipeline_instance): # type: (Span, AsyncClusterPipeline[Any]) -> None with capture_internal_exceptions(): _set_async_cluster_db_data( span, # the AsyncClusterPipeline has always had a `_client` attr but it is private so potentially problematic and mypy # does not recognize it - see https://github.com/redis/redis-py/blame/v5.0.0/redis/asyncio/cluster.py#L1386 async_redis_cluster_pipeline_instance._client, # type: ignore[attr-defined] ) def patch_redis_pipeline(pipeline_cls, is_cluster, get_command_args_fn, set_db_data_fn): # type: (Any, bool, Any, Callable[[Span, Any], None]) -> None old_execute = pipeline_cls.execute def sentry_patched_execute(self, *args, **kwargs): # type: (Any, *Any, **Any) -> Any hub = Hub.current if hub.get_integration(RedisIntegration) is None: return old_execute(self, *args, **kwargs) with hub.start_span( op=OP.DB_REDIS, description="redis.pipeline.execute" ) as span: with capture_internal_exceptions(): set_db_data_fn(span, self) _set_pipeline_data( span, is_cluster, get_command_args_fn, False if 
is_cluster else self.transaction, self.command_stack, ) return old_execute(self, *args, **kwargs) pipeline_cls.execute = sentry_patched_execute def patch_redis_client(cls, is_cluster, set_db_data_fn): # type: (Any, bool, Callable[[Span, Any], None]) -> None """ This function can be used to instrument custom redis client classes or subclasses. """ old_execute_command = cls.execute_command def sentry_patched_execute_command(self, name, *args, **kwargs): # type: (Any, str, *Any, **Any) -> Any hub = Hub.current integration = hub.get_integration(RedisIntegration) if integration is None: return old_execute_command(self, name, *args, **kwargs) description = _get_span_description(name, *args) data_should_be_truncated = ( integration.max_data_size and len(description) > integration.max_data_size ) if data_should_be_truncated: description = description[: integration.max_data_size - len("...")] + "..." with hub.start_span(op=OP.DB_REDIS, description=description) as span: set_db_data_fn(span, self) _set_client_data(span, is_cluster, name, *args) return old_execute_command(self, name, *args, **kwargs) cls.execute_command = sentry_patched_execute_command def _patch_redis(StrictRedis, client): # noqa: N803 # type: (Any, Any) -> None patch_redis_client(StrictRedis, is_cluster=False, set_db_data_fn=_set_db_data) patch_redis_pipeline(client.Pipeline, False, _get_redis_command_args, _set_db_data) try: strict_pipeline = client.StrictPipeline except AttributeError: pass else: patch_redis_pipeline( strict_pipeline, False, _get_redis_command_args, _set_db_data ) try: import redis.asyncio except ImportError: pass else: from sentry_sdk.integrations.redis.asyncio import ( patch_redis_async_client, patch_redis_async_pipeline, ) patch_redis_async_client( redis.asyncio.client.StrictRedis, is_cluster=False, set_db_data_fn=_set_db_data, ) patch_redis_async_pipeline( redis.asyncio.client.Pipeline, False, _get_redis_command_args, set_db_data_fn=_set_db_data, ) def _patch_redis_cluster(): # type: () -> None """Patches the cluster module on redis SDK (as opposed to rediscluster library)""" try: from redis import RedisCluster, cluster except ImportError: pass else: patch_redis_client(RedisCluster, True, _set_cluster_db_data) patch_redis_pipeline( cluster.ClusterPipeline, True, _parse_rediscluster_command, _set_cluster_db_data, ) try: from redis.asyncio import cluster as async_cluster except ImportError: pass else: from sentry_sdk.integrations.redis.asyncio import ( patch_redis_async_client, patch_redis_async_pipeline, ) patch_redis_async_client( async_cluster.RedisCluster, is_cluster=True, set_db_data_fn=_set_async_cluster_db_data, ) patch_redis_async_pipeline( async_cluster.ClusterPipeline, True, _parse_rediscluster_command, set_db_data_fn=_set_async_cluster_pipeline_db_data, ) def _patch_rb(): # type: () -> None try: import rb.clients # type: ignore except ImportError: pass else: patch_redis_client( rb.clients.FanoutClient, is_cluster=False, set_db_data_fn=_set_db_data ) patch_redis_client( rb.clients.MappingClient, is_cluster=False, set_db_data_fn=_set_db_data ) patch_redis_client( rb.clients.RoutingClient, is_cluster=False, set_db_data_fn=_set_db_data ) def _patch_rediscluster(): # type: () -> None try: import rediscluster # type: ignore except ImportError: return patch_redis_client( rediscluster.RedisCluster, is_cluster=True, set_db_data_fn=_set_db_data ) # up to v1.3.6, __version__ attribute is a tuple # from v2.0.0, __version__ is a string and VERSION a tuple version = getattr(rediscluster, "VERSION", 
rediscluster.__version__) # StrictRedisCluster was introduced in v0.2.0 and removed in v2.0.0 # https://github.com/Grokzen/redis-py-cluster/blob/master/docs/release-notes.rst if (0, 2, 0) < version < (2, 0, 0): pipeline_cls = rediscluster.pipeline.StrictClusterPipeline patch_redis_client( rediscluster.StrictRedisCluster, is_cluster=True, set_db_data_fn=_set_db_data, ) else: pipeline_cls = rediscluster.pipeline.ClusterPipeline patch_redis_pipeline( pipeline_cls, True, _parse_rediscluster_command, set_db_data_fn=_set_db_data ) class RedisIntegration(Integration): identifier = "redis" def __init__(self, max_data_size=_DEFAULT_MAX_DATA_SIZE): # type: (int) -> None self.max_data_size = max_data_size @staticmethod def setup_once(): # type: () -> None try: from redis import StrictRedis, client except ImportError: raise DidNotEnable("Redis client not installed") _patch_redis(StrictRedis, client) _patch_redis_cluster() _patch_rb() try: _patch_rediscluster() except Exception: logger.exception("Error occurred while patching `rediscluster` library") sentry-python-1.39.2/sentry_sdk/integrations/redis/asyncio.py000066400000000000000000000051061454744723200245070ustar00rootroot00000000000000from __future__ import absolute_import from sentry_sdk import Hub from sentry_sdk.consts import OP from sentry_sdk.integrations.redis import ( RedisIntegration, _get_span_description, _set_client_data, _set_pipeline_data, ) from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.tracing import Span from sentry_sdk.utils import capture_internal_exceptions if TYPE_CHECKING: from collections.abc import Callable from typing import Any, Union from redis.asyncio.client import Pipeline, StrictRedis from redis.asyncio.cluster import ClusterPipeline, RedisCluster def patch_redis_async_pipeline( pipeline_cls, is_cluster, get_command_args_fn, set_db_data_fn ): # type: (Union[type[Pipeline[Any]], type[ClusterPipeline[Any]]], bool, Any, Callable[[Span, Any], None]) -> None old_execute = pipeline_cls.execute async def _sentry_execute(self, *args, **kwargs): # type: (Any, *Any, **Any) -> Any hub = Hub.current if hub.get_integration(RedisIntegration) is None: return await old_execute(self, *args, **kwargs) with hub.start_span( op=OP.DB_REDIS, description="redis.pipeline.execute" ) as span: with capture_internal_exceptions(): set_db_data_fn(span, self) _set_pipeline_data( span, is_cluster, get_command_args_fn, False if is_cluster else self.is_transaction, self._command_stack if is_cluster else self.command_stack, ) return await old_execute(self, *args, **kwargs) pipeline_cls.execute = _sentry_execute # type: ignore[method-assign] def patch_redis_async_client(cls, is_cluster, set_db_data_fn): # type: (Union[type[StrictRedis[Any]], type[RedisCluster[Any]]], bool, Callable[[Span, Any], None]) -> None old_execute_command = cls.execute_command async def _sentry_execute_command(self, name, *args, **kwargs): # type: (Any, str, *Any, **Any) -> Any hub = Hub.current if hub.get_integration(RedisIntegration) is None: return await old_execute_command(self, name, *args, **kwargs) description = _get_span_description(name, *args) with hub.start_span(op=OP.DB_REDIS, description=description) as span: set_db_data_fn(span, self) _set_client_data(span, is_cluster, name, *args) return await old_execute_command(self, name, *args, **kwargs) cls.execute_command = _sentry_execute_command # type: ignore[method-assign] sentry-python-1.39.2/sentry_sdk/integrations/rq.py000066400000000000000000000127311454744723200223600ustar00rootroot00000000000000from 
__future__ import absolute_import import weakref from sentry_sdk.consts import OP from sentry_sdk.api import continue_trace from sentry_sdk.hub import Hub from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.tracing import TRANSACTION_SOURCE_TASK from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, format_timestamp, parse_version, ) try: from rq.queue import Queue from rq.timeouts import JobTimeoutException from rq.version import VERSION as RQ_VERSION from rq.worker import Worker from rq.job import JobStatus except ImportError: raise DidNotEnable("RQ not installed") from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Any, Callable, Dict from sentry_sdk._types import EventProcessor from sentry_sdk.utils import ExcInfo from rq.job import Job class RqIntegration(Integration): identifier = "rq" @staticmethod def setup_once(): # type: () -> None version = parse_version(RQ_VERSION) if version is None: raise DidNotEnable("Unparsable RQ version: {}".format(RQ_VERSION)) if version < (0, 6): raise DidNotEnable("RQ 0.6 or newer is required.") old_perform_job = Worker.perform_job def sentry_patched_perform_job(self, job, *args, **kwargs): # type: (Any, Job, *Queue, **Any) -> bool hub = Hub.current integration = hub.get_integration(RqIntegration) if integration is None: return old_perform_job(self, job, *args, **kwargs) client = hub.client assert client is not None with hub.push_scope() as scope: scope.clear_breadcrumbs() scope.add_event_processor(_make_event_processor(weakref.ref(job))) transaction = continue_trace( job.meta.get("_sentry_trace_headers") or {}, op=OP.QUEUE_TASK_RQ, name="unknown RQ task", source=TRANSACTION_SOURCE_TASK, ) with capture_internal_exceptions(): transaction.name = job.func_name with hub.start_transaction( transaction, custom_sampling_context={"rq_job": job} ): rv = old_perform_job(self, job, *args, **kwargs) if self.is_horse: # We're inside of a forked process and RQ is # about to call `os._exit`. Make sure that our # events get sent out. client.flush() return rv Worker.perform_job = sentry_patched_perform_job old_handle_exception = Worker.handle_exception def sentry_patched_handle_exception(self, job, *exc_info, **kwargs): # type: (Worker, Any, *Any, **Any) -> Any # Note, the order of the `or` here is important, # because calling `job.is_failed` will change `_status`. 
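            # Checking the cached `_status` first therefore short-circuits the
            # `or`, so `is_failed` (and its side effect on `_status`) only runs
            # when the cached value is not already FAILED.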
if job._status == JobStatus.FAILED or job.is_failed: _capture_exception(exc_info) return old_handle_exception(self, job, *exc_info, **kwargs) Worker.handle_exception = sentry_patched_handle_exception old_enqueue_job = Queue.enqueue_job def sentry_patched_enqueue_job(self, job, **kwargs): # type: (Queue, Any, **Any) -> Any hub = Hub.current if hub.get_integration(RqIntegration) is not None: if hub.scope.span is not None: job.meta["_sentry_trace_headers"] = dict( hub.iter_trace_propagation_headers() ) return old_enqueue_job(self, job, **kwargs) Queue.enqueue_job = sentry_patched_enqueue_job ignore_logger("rq.worker") def _make_event_processor(weak_job): # type: (Callable[[], Job]) -> EventProcessor def event_processor(event, hint): # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] job = weak_job() if job is not None: with capture_internal_exceptions(): extra = event.setdefault("extra", {}) extra["rq-job"] = { "job_id": job.id, "func": job.func_name, "args": job.args, "kwargs": job.kwargs, "description": job.description, } if job.enqueued_at: extra["rq-job"]["enqueued_at"] = format_timestamp(job.enqueued_at) if job.started_at: extra["rq-job"]["started_at"] = format_timestamp(job.started_at) if "exc_info" in hint: with capture_internal_exceptions(): if issubclass(hint["exc_info"][0], JobTimeoutException): event["fingerprint"] = ["rq", "JobTimeoutException", job.func_name] return event return event_processor def _capture_exception(exc_info, **kwargs): # type: (ExcInfo, **Any) -> None hub = Hub.current if hub.get_integration(RqIntegration) is None: return # If an integration is there, a client has to be there. client = hub.client # type: Any event, hint = event_from_exception( exc_info, client_options=client.options, mechanism={"type": "rq", "handled": False}, ) hub.capture_event(event, hint=hint) sentry-python-1.39.2/sentry_sdk/integrations/sanic.py000066400000000000000000000320041454744723200230260ustar00rootroot00000000000000import sys import weakref from inspect import isawaitable from sentry_sdk import continue_trace from sentry_sdk._compat import urlparse, reraise from sentry_sdk.consts import OP from sentry_sdk.hub import Hub from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT, TRANSACTION_SOURCE_URL from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, HAS_REAL_CONTEXTVARS, CONTEXTVARS_ERROR_MESSAGE, parse_version, ) from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations._wsgi_common import RequestExtractor, _filter_headers from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from collections.abc import Container from typing import Any from typing import Callable from typing import Optional from typing import Union from typing import Tuple from typing import Dict from sanic.request import Request, RequestParameters from sanic.response import BaseHTTPResponse from sentry_sdk._types import Event, EventProcessor, Hint from sanic.router import Route try: from sanic import Sanic, __version__ as SANIC_VERSION from sanic.exceptions import SanicException from sanic.router import Router from sanic.handlers import ErrorHandler except ImportError: raise DidNotEnable("Sanic not installed") old_error_handler_lookup = ErrorHandler.lookup old_handle_request = Sanic.handle_request old_router_get = Router.get try: # This method was introduced in Sanic v21.9 old_startup = Sanic._startup except AttributeError: pass class SanicIntegration(Integration): 
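    """Capture errors and trace requests in Sanic applications.

    On Sanic 21.9+ the request lifecycle is hooked via signals (see
    _setup_sanic() below); older versions are patched through
    Sanic.handle_request and Router.get (_setup_legacy_sanic()).
    """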
identifier = "sanic" version = None def __init__(self, unsampled_statuses=frozenset({404})): # type: (Optional[Container[int]]) -> None """ The unsampled_statuses parameter can be used to specify for which HTTP statuses the transactions should not be sent to Sentry. By default, transactions are sent for all HTTP statuses, except 404. Set unsampled_statuses to None to send transactions for all HTTP statuses, including 404. """ self._unsampled_statuses = unsampled_statuses or set() @staticmethod def setup_once(): # type: () -> None SanicIntegration.version = parse_version(SANIC_VERSION) if SanicIntegration.version is None: raise DidNotEnable("Unparsable Sanic version: {}".format(SANIC_VERSION)) if SanicIntegration.version < (0, 8): raise DidNotEnable("Sanic 0.8 or newer required.") if not HAS_REAL_CONTEXTVARS: # We better have contextvars or we're going to leak state between # requests. raise DidNotEnable( "The sanic integration for Sentry requires Python 3.7+ " " or the aiocontextvars package." + CONTEXTVARS_ERROR_MESSAGE ) if SANIC_VERSION.startswith("0.8."): # Sanic 0.8 and older creates a logger named "root" and puts a # stringified version of every exception in there (without exc_info), # which our error deduplication can't detect. # # We explicitly check the version here because it is a very # invasive step to ignore this logger and not necessary in newer # versions at all. # # https://github.com/huge-success/sanic/issues/1332 ignore_logger("root") if SanicIntegration.version < (21, 9): _setup_legacy_sanic() return _setup_sanic() class SanicRequestExtractor(RequestExtractor): def content_length(self): # type: () -> int if self.request.body is None: return 0 return len(self.request.body) def cookies(self): # type: () -> Dict[str, str] return dict(self.request.cookies) def raw_data(self): # type: () -> bytes return self.request.body def form(self): # type: () -> RequestParameters return self.request.form def is_json(self): # type: () -> bool raise NotImplementedError() def json(self): # type: () -> Optional[Any] return self.request.json def files(self): # type: () -> RequestParameters return self.request.files def size_of_file(self, file): # type: (Any) -> int return len(file.body or ()) def _setup_sanic(): # type: () -> None Sanic._startup = _startup ErrorHandler.lookup = _sentry_error_handler_lookup def _setup_legacy_sanic(): # type: () -> None Sanic.handle_request = _legacy_handle_request Router.get = _legacy_router_get ErrorHandler.lookup = _sentry_error_handler_lookup async def _startup(self): # type: (Sanic) -> None # This happens about as early in the lifecycle as possible, just after the # Request object is created. The body has not yet been consumed. self.signal("http.lifecycle.request")(_hub_enter) # This happens after the handler is complete. In v21.9 this signal is not # dispatched when there is an exception. Therefore we need to close out # and call _hub_exit from the custom exception handler as well. # See https://github.com/sanic-org/sanic/issues/2297 self.signal("http.lifecycle.response")(_hub_exit) # This happens inside of request handling immediately after the route # has been identified by the router. self.signal("http.routing.after")(_set_transaction) # The above signals need to be declared before this can be called. 
await old_startup(self) async def _hub_enter(request): # type: (Request) -> None hub = Hub.current request.ctx._sentry_do_integration = ( hub.get_integration(SanicIntegration) is not None ) if not request.ctx._sentry_do_integration: return weak_request = weakref.ref(request) request.ctx._sentry_hub = Hub(hub) request.ctx._sentry_hub.__enter__() with request.ctx._sentry_hub.configure_scope() as scope: scope.clear_breadcrumbs() scope.add_event_processor(_make_request_processor(weak_request)) transaction = continue_trace( dict(request.headers), op=OP.HTTP_SERVER, # Unless the request results in a 404 error, the name and source will get overwritten in _set_transaction name=request.path, source=TRANSACTION_SOURCE_URL, ) request.ctx._sentry_transaction = request.ctx._sentry_hub.start_transaction( transaction ).__enter__() async def _hub_exit(request, response=None): # type: (Request, Optional[BaseHTTPResponse]) -> None with capture_internal_exceptions(): if not request.ctx._sentry_do_integration: return integration = Hub.current.get_integration(SanicIntegration) # type: Integration response_status = None if response is None else response.status # This capture_internal_exceptions block has been intentionally nested here, so that in case an exception # happens while trying to end the transaction, we still attempt to exit the hub. with capture_internal_exceptions(): request.ctx._sentry_transaction.set_http_status(response_status) request.ctx._sentry_transaction.sampled &= ( isinstance(integration, SanicIntegration) and response_status not in integration._unsampled_statuses ) request.ctx._sentry_transaction.__exit__(None, None, None) request.ctx._sentry_hub.__exit__(None, None, None) async def _set_transaction(request, route, **_): # type: (Request, Route, **Any) -> None hub = Hub.current if request.ctx._sentry_do_integration: with capture_internal_exceptions(): with hub.configure_scope() as scope: route_name = route.name.replace(request.app.name, "").strip(".") scope.set_transaction_name( route_name, source=TRANSACTION_SOURCE_COMPONENT ) def _sentry_error_handler_lookup(self, exception, *args, **kwargs): # type: (Any, Exception, *Any, **Any) -> Optional[object] _capture_exception(exception) old_error_handler = old_error_handler_lookup(self, exception, *args, **kwargs) if old_error_handler is None: return None if Hub.current.get_integration(SanicIntegration) is None: return old_error_handler async def sentry_wrapped_error_handler(request, exception): # type: (Request, Exception) -> Any try: response = old_error_handler(request, exception) if isawaitable(response): response = await response return response except Exception: # Report errors that occur in Sanic error handler. These # exceptions will not even show up in Sanic's # `sanic.exceptions` logger. 
exc_info = sys.exc_info() _capture_exception(exc_info) reraise(*exc_info) finally: # As mentioned in previous comment in _startup, this can be removed # after https://github.com/sanic-org/sanic/issues/2297 is resolved if SanicIntegration.version and SanicIntegration.version == (21, 9): await _hub_exit(request) return sentry_wrapped_error_handler async def _legacy_handle_request(self, request, *args, **kwargs): # type: (Any, Request, *Any, **Any) -> Any hub = Hub.current if hub.get_integration(SanicIntegration) is None: return old_handle_request(self, request, *args, **kwargs) weak_request = weakref.ref(request) with Hub(hub) as hub: with hub.configure_scope() as scope: scope.clear_breadcrumbs() scope.add_event_processor(_make_request_processor(weak_request)) response = old_handle_request(self, request, *args, **kwargs) if isawaitable(response): response = await response return response def _legacy_router_get(self, *args): # type: (Any, Union[Any, Request]) -> Any rv = old_router_get(self, *args) hub = Hub.current if hub.get_integration(SanicIntegration) is not None: with capture_internal_exceptions(): with hub.configure_scope() as scope: if SanicIntegration.version and SanicIntegration.version >= (21, 3): # Sanic versions above and including 21.3 append the app name to the # route name, and so we need to remove it from Route name so the # transaction name is consistent across all versions sanic_app_name = self.ctx.app.name sanic_route = rv[0].name if sanic_route.startswith("%s." % sanic_app_name): # We add a 1 to the len of the sanic_app_name because there is a dot # that joins app name and the route name # Format: app_name.route_name sanic_route = sanic_route[len(sanic_app_name) + 1 :] scope.set_transaction_name( sanic_route, source=TRANSACTION_SOURCE_COMPONENT ) else: scope.set_transaction_name( rv[0].__name__, source=TRANSACTION_SOURCE_COMPONENT ) return rv def _capture_exception(exception): # type: (Union[Tuple[Optional[type], Optional[BaseException], Any], BaseException]) -> None hub = Hub.current integration = hub.get_integration(SanicIntegration) if integration is None: return # If an integration is there, a client has to be there. 
client = hub.client # type: Any with capture_internal_exceptions(): event, hint = event_from_exception( exception, client_options=client.options, mechanism={"type": "sanic", "handled": False}, ) hub.capture_event(event, hint=hint) def _make_request_processor(weak_request): # type: (Callable[[], Request]) -> EventProcessor def sanic_processor(event, hint): # type: (Event, Optional[Hint]) -> Optional[Event] try: if hint and issubclass(hint["exc_info"][0], SanicException): return None except KeyError: pass request = weak_request() if request is None: return event with capture_internal_exceptions(): extractor = SanicRequestExtractor(request) extractor.extract_into_event(event) request_info = event["request"] urlparts = urlparse.urlsplit(request.url) request_info["url"] = "%s://%s%s" % ( urlparts.scheme, urlparts.netloc, urlparts.path, ) request_info["query_string"] = urlparts.query request_info["method"] = request.method request_info["env"] = {"REMOTE_ADDR": request.remote_addr} request_info["headers"] = _filter_headers(dict(request.headers)) return event return sanic_processor sentry-python-1.39.2/sentry_sdk/integrations/serverless.py000066400000000000000000000036671454744723200241430ustar00rootroot00000000000000import sys from sentry_sdk.hub import Hub from sentry_sdk.utils import event_from_exception from sentry_sdk._compat import reraise from sentry_sdk._functools import wraps from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Callable from typing import TypeVar from typing import Union from typing import Optional from typing import overload F = TypeVar("F", bound=Callable[..., Any]) else: def overload(x): # type: (F) -> F return x @overload def serverless_function(f, flush=True): # type: (F, bool) -> F pass @overload def serverless_function(f=None, flush=True): # noqa: F811 # type: (None, bool) -> Callable[[F], F] pass def serverless_function(f=None, flush=True): # noqa # type: (Optional[F], bool) -> Union[F, Callable[[F], F]] def wrapper(f): # type: (F) -> F @wraps(f) def inner(*args, **kwargs): # type: (*Any, **Any) -> Any with Hub(Hub.current) as hub: with hub.configure_scope() as scope: scope.clear_breadcrumbs() try: return f(*args, **kwargs) except Exception: _capture_and_reraise() finally: if flush: _flush_client() return inner # type: ignore if f is None: return wrapper else: return wrapper(f) def _capture_and_reraise(): # type: () -> None exc_info = sys.exc_info() hub = Hub.current if hub.client is not None: event, hint = event_from_exception( exc_info, client_options=hub.client.options, mechanism={"type": "serverless", "handled": False}, ) hub.capture_event(event, hint=hint) reraise(*exc_info) def _flush_client(): # type: () -> None return Hub.current.flush() sentry-python-1.39.2/sentry_sdk/integrations/socket.py000066400000000000000000000056011454744723200232240ustar00rootroot00000000000000from __future__ import absolute_import import socket from sentry_sdk import Hub from sentry_sdk._types import MYPY from sentry_sdk.consts import OP from sentry_sdk.integrations import Integration if MYPY: from socket import AddressFamily, SocketKind from typing import Tuple, Optional, Union, List __all__ = ["SocketIntegration"] class SocketIntegration(Integration): identifier = "socket" @staticmethod def setup_once(): # type: () -> None """ patches two of the most used functions of socket: create_connection and getaddrinfo(dns resolver) """ _patch_create_connection() _patch_getaddrinfo() def _get_span_description(host, port): # type: 
(Union[bytes, str, None], Union[str, int, None]) -> str try: host = host.decode() # type: ignore except (UnicodeDecodeError, AttributeError): pass description = "%s:%s" % (host, port) # type: ignore return description def _patch_create_connection(): # type: () -> None real_create_connection = socket.create_connection def create_connection( address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, # type: ignore source_address=None, ): # type: (Tuple[Optional[str], int], Optional[float], Optional[Tuple[Union[bytearray, bytes, str], int]])-> socket.socket hub = Hub.current if hub.get_integration(SocketIntegration) is None: return real_create_connection( address=address, timeout=timeout, source_address=source_address ) with hub.start_span( op=OP.SOCKET_CONNECTION, description=_get_span_description(address[0], address[1]), ) as span: span.set_data("address", address) span.set_data("timeout", timeout) span.set_data("source_address", source_address) return real_create_connection( address=address, timeout=timeout, source_address=source_address ) socket.create_connection = create_connection # type: ignore def _patch_getaddrinfo(): # type: () -> None real_getaddrinfo = socket.getaddrinfo def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): # type: (Union[bytes, str, None], Union[str, int, None], int, int, int, int) -> List[Tuple[AddressFamily, SocketKind, int, str, Union[Tuple[str, int], Tuple[str, int, int, int]]]] hub = Hub.current if hub.get_integration(SocketIntegration) is None: return real_getaddrinfo(host, port, family, type, proto, flags) with hub.start_span( op=OP.SOCKET_DNS, description=_get_span_description(host, port) ) as span: span.set_data("host", host) span.set_data("port", port) return real_getaddrinfo(host, port, family, type, proto, flags) socket.getaddrinfo = getaddrinfo # type: ignore sentry-python-1.39.2/sentry_sdk/integrations/spark/000077500000000000000000000000001454744723200225005ustar00rootroot00000000000000sentry-python-1.39.2/sentry_sdk/integrations/spark/__init__.py000066400000000000000000000003201454744723200246040ustar00rootroot00000000000000from sentry_sdk.integrations.spark.spark_driver import SparkIntegration from sentry_sdk.integrations.spark.spark_worker import SparkWorkerIntegration __all__ = ["SparkIntegration", "SparkWorkerIntegration"] sentry-python-1.39.2/sentry_sdk/integrations/spark/spark_driver.py000066400000000000000000000204431454744723200255500ustar00rootroot00000000000000from sentry_sdk import configure_scope from sentry_sdk.hub import Hub from sentry_sdk.integrations import Integration from sentry_sdk.utils import capture_internal_exceptions from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Optional from sentry_sdk._types import Event, Hint class SparkIntegration(Integration): identifier = "spark" @staticmethod def setup_once(): # type: () -> None patch_spark_context_init() def _set_app_properties(): # type: () -> None """ Set properties in driver that propagate to worker processes, allowing for workers to have access to those properties. This allows worker integration to have access to app_name and application_id. 
""" from pyspark import SparkContext spark_context = SparkContext._active_spark_context if spark_context: spark_context.setLocalProperty("sentry_app_name", spark_context.appName) spark_context.setLocalProperty( "sentry_application_id", spark_context.applicationId ) def _start_sentry_listener(sc): # type: (Any) -> None """ Start java gateway server to add custom `SparkListener` """ from pyspark.java_gateway import ensure_callback_server_started gw = sc._gateway ensure_callback_server_started(gw) listener = SentryListener() sc._jsc.sc().addSparkListener(listener) def patch_spark_context_init(): # type: () -> None from pyspark import SparkContext spark_context_init = SparkContext._do_init def _sentry_patched_spark_context_init(self, *args, **kwargs): # type: (SparkContext, *Any, **Any) -> Optional[Any] init = spark_context_init(self, *args, **kwargs) if Hub.current.get_integration(SparkIntegration) is None: return init _start_sentry_listener(self) _set_app_properties() with configure_scope() as scope: @scope.add_event_processor def process_event(event, hint): # type: (Event, Hint) -> Optional[Event] with capture_internal_exceptions(): if Hub.current.get_integration(SparkIntegration) is None: return event event.setdefault("user", {}).setdefault("id", self.sparkUser()) event.setdefault("tags", {}).setdefault( "executor.id", self._conf.get("spark.executor.id") ) event["tags"].setdefault( "spark-submit.deployMode", self._conf.get("spark.submit.deployMode"), ) event["tags"].setdefault( "driver.host", self._conf.get("spark.driver.host") ) event["tags"].setdefault( "driver.port", self._conf.get("spark.driver.port") ) event["tags"].setdefault("spark_version", self.version) event["tags"].setdefault("app_name", self.appName) event["tags"].setdefault("application_id", self.applicationId) event["tags"].setdefault("master", self.master) event["tags"].setdefault("spark_home", self.sparkHome) event.setdefault("extra", {}).setdefault("web_url", self.uiWebUrl) return event return init SparkContext._do_init = _sentry_patched_spark_context_init class SparkListener(object): def onApplicationEnd(self, applicationEnd): # noqa: N802,N803 # type: (Any) -> None pass def onApplicationStart(self, applicationStart): # noqa: N802,N803 # type: (Any) -> None pass def onBlockManagerAdded(self, blockManagerAdded): # noqa: N802,N803 # type: (Any) -> None pass def onBlockManagerRemoved(self, blockManagerRemoved): # noqa: N802,N803 # type: (Any) -> None pass def onBlockUpdated(self, blockUpdated): # noqa: N802,N803 # type: (Any) -> None pass def onEnvironmentUpdate(self, environmentUpdate): # noqa: N802,N803 # type: (Any) -> None pass def onExecutorAdded(self, executorAdded): # noqa: N802,N803 # type: (Any) -> None pass def onExecutorBlacklisted(self, executorBlacklisted): # noqa: N802,N803 # type: (Any) -> None pass def onExecutorBlacklistedForStage( # noqa: N802 self, executorBlacklistedForStage # noqa: N803 ): # type: (Any) -> None pass def onExecutorMetricsUpdate(self, executorMetricsUpdate): # noqa: N802,N803 # type: (Any) -> None pass def onExecutorRemoved(self, executorRemoved): # noqa: N802,N803 # type: (Any) -> None pass def onJobEnd(self, jobEnd): # noqa: N802,N803 # type: (Any) -> None pass def onJobStart(self, jobStart): # noqa: N802,N803 # type: (Any) -> None pass def onNodeBlacklisted(self, nodeBlacklisted): # noqa: N802,N803 # type: (Any) -> None pass def onNodeBlacklistedForStage(self, nodeBlacklistedForStage): # noqa: N802,N803 # type: (Any) -> None pass def onNodeUnblacklisted(self, nodeUnblacklisted): # noqa: 
N802,N803 # type: (Any) -> None pass def onOtherEvent(self, event): # noqa: N802,N803 # type: (Any) -> None pass def onSpeculativeTaskSubmitted(self, speculativeTask): # noqa: N802,N803 # type: (Any) -> None pass def onStageCompleted(self, stageCompleted): # noqa: N802,N803 # type: (Any) -> None pass def onStageSubmitted(self, stageSubmitted): # noqa: N802,N803 # type: (Any) -> None pass def onTaskEnd(self, taskEnd): # noqa: N802,N803 # type: (Any) -> None pass def onTaskGettingResult(self, taskGettingResult): # noqa: N802,N803 # type: (Any) -> None pass def onTaskStart(self, taskStart): # noqa: N802,N803 # type: (Any) -> None pass def onUnpersistRDD(self, unpersistRDD): # noqa: N802,N803 # type: (Any) -> None pass class Java: implements = ["org.apache.spark.scheduler.SparkListenerInterface"] class SentryListener(SparkListener): def __init__(self): # type: () -> None self.hub = Hub.current def onJobStart(self, jobStart): # noqa: N802,N803 # type: (Any) -> None message = "Job {} Started".format(jobStart.jobId()) self.hub.add_breadcrumb(level="info", message=message) _set_app_properties() def onJobEnd(self, jobEnd): # noqa: N802,N803 # type: (Any) -> None level = "" message = "" data = {"result": jobEnd.jobResult().toString()} if jobEnd.jobResult().toString() == "JobSucceeded": level = "info" message = "Job {} Ended".format(jobEnd.jobId()) else: level = "warning" message = "Job {} Failed".format(jobEnd.jobId()) self.hub.add_breadcrumb(level=level, message=message, data=data) def onStageSubmitted(self, stageSubmitted): # noqa: N802,N803 # type: (Any) -> None stage_info = stageSubmitted.stageInfo() message = "Stage {} Submitted".format(stage_info.stageId()) data = {"attemptId": stage_info.attemptId(), "name": stage_info.name()} self.hub.add_breadcrumb(level="info", message=message, data=data) _set_app_properties() def onStageCompleted(self, stageCompleted): # noqa: N802,N803 # type: (Any) -> None from py4j.protocol import Py4JJavaError # type: ignore stage_info = stageCompleted.stageInfo() message = "" level = "" data = {"attemptId": stage_info.attemptId(), "name": stage_info.name()} # Have to Try Except because stageInfo.failureReason() is typed with Scala Option try: data["reason"] = stage_info.failureReason().get() message = "Stage {} Failed".format(stage_info.stageId()) level = "warning" except Py4JJavaError: message = "Stage {} Completed".format(stage_info.stageId()) level = "info" self.hub.add_breadcrumb(level=level, message=message, data=data) sentry-python-1.39.2/sentry_sdk/integrations/spark/spark_worker.py000066400000000000000000000076361454744723200255770ustar00rootroot00000000000000from __future__ import absolute_import import sys from sentry_sdk import configure_scope from sentry_sdk.hub import Hub from sentry_sdk.integrations import Integration from sentry_sdk.utils import ( capture_internal_exceptions, exc_info_from_error, single_exception_from_error_tuple, walk_exception_chain, event_hint_with_exc_info, ) from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Optional from sentry_sdk._types import ExcInfo, Event, Hint class SparkWorkerIntegration(Integration): identifier = "spark_worker" @staticmethod def setup_once(): # type: () -> None import pyspark.daemon as original_daemon original_daemon.worker_main = _sentry_worker_main def _capture_exception(exc_info, hub): # type: (ExcInfo, Hub) -> None client = hub.client client_options = client.options # type: ignore mechanism = {"type": "spark", "handled": False} exc_info = 
exc_info_from_error(exc_info) exc_type, exc_value, tb = exc_info rv = [] # On Exception worker will call sys.exit(-1), so we can ignore SystemExit and similar errors for exc_type, exc_value, tb in walk_exception_chain(exc_info): if exc_type not in (SystemExit, EOFError, ConnectionResetError): rv.append( single_exception_from_error_tuple( exc_type, exc_value, tb, client_options, mechanism ) ) if rv: rv.reverse() hint = event_hint_with_exc_info(exc_info) event = {"level": "error", "exception": {"values": rv}} _tag_task_context() hub.capture_event(event, hint=hint) def _tag_task_context(): # type: () -> None from pyspark.taskcontext import TaskContext with configure_scope() as scope: @scope.add_event_processor def process_event(event, hint): # type: (Event, Hint) -> Optional[Event] with capture_internal_exceptions(): integration = Hub.current.get_integration(SparkWorkerIntegration) task_context = TaskContext.get() if integration is None or task_context is None: return event event.setdefault("tags", {}).setdefault( "stageId", str(task_context.stageId()) ) event["tags"].setdefault("partitionId", str(task_context.partitionId())) event["tags"].setdefault( "attemptNumber", str(task_context.attemptNumber()) ) event["tags"].setdefault( "taskAttemptId", str(task_context.taskAttemptId()) ) if task_context._localProperties: if "sentry_app_name" in task_context._localProperties: event["tags"].setdefault( "app_name", task_context._localProperties["sentry_app_name"] ) event["tags"].setdefault( "application_id", task_context._localProperties["sentry_application_id"], ) if "callSite.short" in task_context._localProperties: event.setdefault("extra", {}).setdefault( "callSite", task_context._localProperties["callSite.short"] ) return event def _sentry_worker_main(*args, **kwargs): # type: (*Optional[Any], **Optional[Any]) -> None import pyspark.worker as original_worker try: original_worker.main(*args, **kwargs) except SystemExit: if Hub.current.get_integration(SparkWorkerIntegration) is not None: hub = Hub.current exc_info = sys.exc_info() with capture_internal_exceptions(): _capture_exception(exc_info, hub) sentry-python-1.39.2/sentry_sdk/integrations/sqlalchemy.py000066400000000000000000000115421454744723200240770ustar00rootroot00000000000000from __future__ import absolute_import from sentry_sdk._compat import text_type from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.consts import SPANDATA from sentry_sdk.db.explain_plan.sqlalchemy import attach_explain_plan_to_span from sentry_sdk.hub import Hub from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.tracing_utils import add_query_source, record_sql_queries from sentry_sdk.utils import capture_internal_exceptions, parse_version try: from sqlalchemy.engine import Engine # type: ignore from sqlalchemy.event import listen # type: ignore from sqlalchemy import __version__ as SQLALCHEMY_VERSION # type: ignore except ImportError: raise DidNotEnable("SQLAlchemy not installed.") if TYPE_CHECKING: from typing import Any from typing import ContextManager from typing import Optional from sentry_sdk.tracing import Span class SqlalchemyIntegration(Integration): identifier = "sqlalchemy" @staticmethod def setup_once(): # type: () -> None version = parse_version(SQLALCHEMY_VERSION) if version is None: raise DidNotEnable( "Unparsable SQLAlchemy version: {}".format(SQLALCHEMY_VERSION) ) if version < (1, 2): raise DidNotEnable("SQLAlchemy 1.2 or newer required.") listen(Engine, "before_cursor_execute", _before_cursor_execute) 
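        # The two hooks below close the span opened in _before_cursor_execute
        # and mark statements that raised an error, respectively.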
listen(Engine, "after_cursor_execute", _after_cursor_execute) listen(Engine, "handle_error", _handle_error) def _before_cursor_execute( conn, cursor, statement, parameters, context, executemany, *args ): # type: (Any, Any, Any, Any, Any, bool, *Any) -> None hub = Hub.current if hub.get_integration(SqlalchemyIntegration) is None: return ctx_mgr = record_sql_queries( hub, cursor, statement, parameters, paramstyle=context and context.dialect and context.dialect.paramstyle or None, executemany=executemany, ) context._sentry_sql_span_manager = ctx_mgr span = ctx_mgr.__enter__() if span is not None: _set_db_data(span, conn) if hub.client: options = hub.client.options["_experiments"].get("attach_explain_plans") if options is not None: attach_explain_plan_to_span( span, conn, statement, parameters, options, ) context._sentry_sql_span = span def _after_cursor_execute(conn, cursor, statement, parameters, context, *args): # type: (Any, Any, Any, Any, Any, *Any) -> None hub = Hub.current if hub.get_integration(SqlalchemyIntegration) is None: return ctx_mgr = getattr( context, "_sentry_sql_span_manager", None ) # type: Optional[ContextManager[Any]] if ctx_mgr is not None: context._sentry_sql_span_manager = None ctx_mgr.__exit__(None, None, None) span = context._sentry_sql_span if span is not None: with capture_internal_exceptions(): add_query_source(hub, span) def _handle_error(context, *args): # type: (Any, *Any) -> None execution_context = context.execution_context if execution_context is None: return span = getattr(execution_context, "_sentry_sql_span", None) # type: Optional[Span] if span is not None: span.set_status("internal_error") # _after_cursor_execute does not get called for crashing SQL stmts. Judging # from SQLAlchemy codebase it does seem like any error coming into this # handler is going to be fatal. 
ctx_mgr = getattr( execution_context, "_sentry_sql_span_manager", None ) # type: Optional[ContextManager[Any]] if ctx_mgr is not None: execution_context._sentry_sql_span_manager = None ctx_mgr.__exit__(None, None, None) # See: https://docs.sqlalchemy.org/en/20/dialects/index.html def _get_db_system(name): # type: (str) -> Optional[str] name = text_type(name) if "sqlite" in name: return "sqlite" if "postgres" in name: return "postgresql" if "mariadb" in name: return "mariadb" if "mysql" in name: return "mysql" if "oracle" in name: return "oracle" return None def _set_db_data(span, conn): # type: (Span, Any) -> None db_system = _get_db_system(conn.engine.name) if db_system is not None: span.set_data(SPANDATA.DB_SYSTEM, db_system) db_name = conn.engine.url.database if db_name is not None: span.set_data(SPANDATA.DB_NAME, db_name) server_address = conn.engine.url.host if server_address is not None: span.set_data(SPANDATA.SERVER_ADDRESS, server_address) server_port = conn.engine.url.port if server_port is not None: span.set_data(SPANDATA.SERVER_PORT, server_port) sentry-python-1.39.2/sentry_sdk/integrations/starlette.py000066400000000000000000000574071454744723200237560ustar00rootroot00000000000000from __future__ import absolute_import import asyncio import functools from copy import deepcopy from sentry_sdk._compat import iteritems from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.consts import OP from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations._wsgi_common import ( _is_json_content_type, request_body_within_bounds, ) from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from sentry_sdk.tracing import ( SOURCE_FOR_STYLE, TRANSACTION_SOURCE_COMPONENT, TRANSACTION_SOURCE_ROUTE, ) from sentry_sdk.utils import ( AnnotatedValue, capture_internal_exceptions, event_from_exception, logger, parse_version, transaction_from_function, ) if TYPE_CHECKING: from typing import Any, Awaitable, Callable, Dict, Optional, Tuple from sentry_sdk.scope import Scope as SentryScope try: import starlette # type: ignore from starlette import __version__ as STARLETTE_VERSION from starlette.applications import Starlette # type: ignore from starlette.datastructures import UploadFile # type: ignore from starlette.middleware import Middleware # type: ignore from starlette.middleware.authentication import ( # type: ignore AuthenticationMiddleware, ) from starlette.requests import Request # type: ignore from starlette.routing import Match # type: ignore from starlette.types import ASGIApp, Receive, Scope as StarletteScope, Send # type: ignore except ImportError: raise DidNotEnable("Starlette is not installed") try: # Starlette 0.20 from starlette.middleware.exceptions import ExceptionMiddleware # type: ignore except ImportError: # Startlette 0.19.1 from starlette.exceptions import ExceptionMiddleware # type: ignore try: # Optional dependency of Starlette to parse form data. 
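# (This is the `python-multipart` package; when it is missing, form data is
# skipped and `StarletteRequestExtractor.form()` below returns None.)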
import multipart # type: ignore except ImportError: multipart = None _DEFAULT_TRANSACTION_NAME = "generic Starlette request" TRANSACTION_STYLE_VALUES = ("endpoint", "url") class StarletteIntegration(Integration): identifier = "starlette" transaction_style = "" def __init__(self, transaction_style="url"): # type: (str) -> None if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" % (transaction_style, TRANSACTION_STYLE_VALUES) ) self.transaction_style = transaction_style @staticmethod def setup_once(): # type: () -> None version = parse_version(STARLETTE_VERSION) if version is None: raise DidNotEnable( "Unparsable Starlette version: {}".format(STARLETTE_VERSION) ) patch_middlewares() patch_asgi_app() patch_request_response() if version >= (0, 24): patch_templates() def _enable_span_for_middleware(middleware_class): # type: (Any) -> type old_call = middleware_class.__call__ async def _create_span_call(app, scope, receive, send, **kwargs): # type: (Any, Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]], Any) -> None hub = Hub.current integration = hub.get_integration(StarletteIntegration) if integration is not None: middleware_name = app.__class__.__name__ # Update transaction name with middleware name with hub.configure_scope() as sentry_scope: name, source = _get_transaction_from_middleware(app, scope, integration) if name is not None: sentry_scope.set_transaction_name( name, source=source, ) with hub.start_span( op=OP.MIDDLEWARE_STARLETTE, description=middleware_name ) as middleware_span: middleware_span.set_tag("starlette.middleware_name", middleware_name) # Creating spans for the "receive" callback async def _sentry_receive(*args, **kwargs): # type: (*Any, **Any) -> Any hub = Hub.current with hub.start_span( op=OP.MIDDLEWARE_STARLETTE_RECEIVE, description=getattr(receive, "__qualname__", str(receive)), ) as span: span.set_tag("starlette.middleware_name", middleware_name) return await receive(*args, **kwargs) receive_name = getattr(receive, "__name__", str(receive)) receive_patched = receive_name == "_sentry_receive" new_receive = _sentry_receive if not receive_patched else receive # Creating spans for the "send" callback async def _sentry_send(*args, **kwargs): # type: (*Any, **Any) -> Any hub = Hub.current with hub.start_span( op=OP.MIDDLEWARE_STARLETTE_SEND, description=getattr(send, "__qualname__", str(send)), ) as span: span.set_tag("starlette.middleware_name", middleware_name) return await send(*args, **kwargs) send_name = getattr(send, "__name__", str(send)) send_patched = send_name == "_sentry_send" new_send = _sentry_send if not send_patched else send return await old_call(app, scope, new_receive, new_send, **kwargs) else: return await old_call(app, scope, receive, send, **kwargs) not_yet_patched = old_call.__name__ not in [ "_create_span_call", "_sentry_authenticationmiddleware_call", "_sentry_exceptionmiddleware_call", ] if not_yet_patched: middleware_class.__call__ = _create_span_call return middleware_class def _capture_exception(exception, handled=False): # type: (BaseException, **Any) -> None hub = Hub.current if hub.get_integration(StarletteIntegration) is None: return event, hint = event_from_exception( exception, client_options=hub.client.options if hub.client else None, mechanism={"type": StarletteIntegration.identifier, "handled": handled}, ) hub.capture_event(event, hint=hint) def patch_exception_middleware(middleware_class): # type: (Any) -> 
None
    """
    Capture all exceptions in Starlette app and
    also extract user information.
    """
    old_middleware_init = middleware_class.__init__

    not_yet_patched = "_sentry_middleware_init" not in str(old_middleware_init)

    if not_yet_patched:

        def _sentry_middleware_init(self, *args, **kwargs):
            # type: (Any, Any, Any) -> None
            old_middleware_init(self, *args, **kwargs)

            # Patch existing exception handlers
            old_handlers = self._exception_handlers.copy()

            async def _sentry_patched_exception_handler(self, *args, **kwargs):
                # type: (Any, Any, Any) -> None
                exp = args[0]

                is_http_server_error = (
                    hasattr(exp, "status_code")
                    and isinstance(exp.status_code, int)
                    and exp.status_code >= 500
                )
                if is_http_server_error:
                    _capture_exception(exp, handled=True)

                # Find a matching handler
                old_handler = None
                for cls in type(exp).__mro__:
                    if cls in old_handlers:
                        old_handler = old_handlers[cls]
                        break

                if old_handler is None:
                    return

                if _is_async_callable(old_handler):
                    return await old_handler(self, *args, **kwargs)
                else:
                    return old_handler(self, *args, **kwargs)

            for key in self._exception_handlers.keys():
                self._exception_handlers[key] = _sentry_patched_exception_handler

        middleware_class.__init__ = _sentry_middleware_init

    old_call = middleware_class.__call__

    async def _sentry_exceptionmiddleware_call(self, scope, receive, send):
        # type: (Dict[str, Any], Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None
        # Also add the user (that was eventually set by the Authentication
        # middleware that ran before this one). This is done because the
        # authentication middleware sets the user on the scope and then, in the
        # same function, calls this exception middleware. If there is no
        # exception (or no handler for the type of exception occurring), the
        # exception bubbles up, the user information is set on the Sentry scope
        # by the auth middleware, and the ASGI middleware sends everything to
        # Sentry; that is fine. But if an exception occurs that this exception
        # middleware has a handler for, it sends the exception directly to
        # Sentry, so we need the user information right now.
        # This is why we do it here.
        _add_user_to_sentry_scope(scope)
        await old_call(self, scope, receive, send)

    middleware_class.__call__ = _sentry_exceptionmiddleware_call


def _add_user_to_sentry_scope(scope):
    # type: (Dict[str, Any]) -> None
    """
    Extracts user information from the ASGI scope and
    adds it to Sentry's scope.
    """
    if "user" not in scope:
        return

    if not _should_send_default_pii():
        return

    hub = Hub.current
    if hub.get_integration(StarletteIntegration) is None:
        return

    with hub.configure_scope() as sentry_scope:
        user_info = {}  # type: Dict[str, Any]
        starlette_user = scope["user"]

        username = getattr(starlette_user, "username", None)
        if username:
            user_info.setdefault("username", starlette_user.username)

        user_id = getattr(starlette_user, "id", None)
        if user_id:
            user_info.setdefault("id", starlette_user.id)

        email = getattr(starlette_user, "email", None)
        if email:
            user_info.setdefault("email", starlette_user.email)

        sentry_scope.user = user_info


def patch_authentication_middleware(middleware_class):
    # type: (Any) -> None
    """
    Add user information to Sentry scope.
""" old_call = middleware_class.__call__ not_yet_patched = "_sentry_authenticationmiddleware_call" not in str(old_call) if not_yet_patched: async def _sentry_authenticationmiddleware_call(self, scope, receive, send): # type: (Dict[str, Any], Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None await old_call(self, scope, receive, send) _add_user_to_sentry_scope(scope) middleware_class.__call__ = _sentry_authenticationmiddleware_call def patch_middlewares(): # type: () -> None """ Patches Starlettes `Middleware` class to record spans for every middleware invoked. """ old_middleware_init = Middleware.__init__ not_yet_patched = "_sentry_middleware_init" not in str(old_middleware_init) if not_yet_patched: def _sentry_middleware_init(self, cls, **options): # type: (Any, Any, Any) -> None if cls == SentryAsgiMiddleware: return old_middleware_init(self, cls, **options) span_enabled_cls = _enable_span_for_middleware(cls) old_middleware_init(self, span_enabled_cls, **options) if cls == AuthenticationMiddleware: patch_authentication_middleware(cls) if cls == ExceptionMiddleware: patch_exception_middleware(cls) Middleware.__init__ = _sentry_middleware_init def patch_asgi_app(): # type: () -> None """ Instrument Starlette ASGI app using the SentryAsgiMiddleware. """ old_app = Starlette.__call__ async def _sentry_patched_asgi_app(self, scope, receive, send): # type: (Starlette, StarletteScope, Receive, Send) -> None integration = Hub.current.get_integration(StarletteIntegration) if integration is None: return await old_app(self, scope, receive, send) middleware = SentryAsgiMiddleware( lambda *a, **kw: old_app(self, *a, **kw), mechanism_type=StarletteIntegration.identifier, transaction_style=integration.transaction_style, ) middleware.__call__ = middleware._run_asgi3 return await middleware(scope, receive, send) Starlette.__call__ = _sentry_patched_asgi_app # This was vendored in from Starlette to support Starlette 0.19.1 because # this function was only introduced in 0.20.x def _is_async_callable(obj): # type: (Any) -> bool while isinstance(obj, functools.partial): obj = obj.func return asyncio.iscoroutinefunction(obj) or ( callable(obj) and asyncio.iscoroutinefunction(obj.__call__) ) def patch_request_response(): # type: () -> None old_request_response = starlette.routing.request_response def _sentry_request_response(func): # type: (Callable[[Any], Any]) -> ASGIApp old_func = func is_coroutine = _is_async_callable(old_func) if is_coroutine: async def _sentry_async_func(*args, **kwargs): # type: (*Any, **Any) -> Any hub = Hub.current integration = hub.get_integration(StarletteIntegration) if integration is None: return await old_func(*args, **kwargs) with hub.configure_scope() as sentry_scope: request = args[0] _set_transaction_name_and_source( sentry_scope, integration.transaction_style, request ) extractor = StarletteRequestExtractor(request) info = await extractor.extract_request_info() def _make_request_event_processor(req, integration): # type: (Any, Any) -> Callable[[Dict[str, Any], Dict[str, Any]], Dict[str, Any]] def event_processor(event, hint): # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] # Add info from request to event request_info = event.get("request", {}) if info: if "cookies" in info: request_info["cookies"] = info["cookies"] if "data" in info: request_info["data"] = info["data"] event["request"] = deepcopy(request_info) return event return event_processor sentry_scope._name = StarletteIntegration.identifier 
sentry_scope.add_event_processor( _make_request_event_processor(request, integration) ) return await old_func(*args, **kwargs) func = _sentry_async_func else: def _sentry_sync_func(*args, **kwargs): # type: (*Any, **Any) -> Any hub = Hub.current integration = hub.get_integration(StarletteIntegration) if integration is None: return old_func(*args, **kwargs) with hub.configure_scope() as sentry_scope: if sentry_scope.profile is not None: sentry_scope.profile.update_active_thread_id() request = args[0] _set_transaction_name_and_source( sentry_scope, integration.transaction_style, request ) extractor = StarletteRequestExtractor(request) cookies = extractor.extract_cookies_from_request() def _make_request_event_processor(req, integration): # type: (Any, Any) -> Callable[[Dict[str, Any], Dict[str, Any]], Dict[str, Any]] def event_processor(event, hint): # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] # Extract information from request request_info = event.get("request", {}) if cookies: request_info["cookies"] = cookies event["request"] = deepcopy(request_info) return event return event_processor sentry_scope._name = StarletteIntegration.identifier sentry_scope.add_event_processor( _make_request_event_processor(request, integration) ) return old_func(*args, **kwargs) func = _sentry_sync_func return old_request_response(func) starlette.routing.request_response = _sentry_request_response def patch_templates(): # type: () -> None # If markupsafe is not installed, then Jinja2 is not installed # (markupsafe is a dependency of Jinja2) # In this case we do not need to patch the Jinja2Templates class try: from markupsafe import Markup except ImportError: return # Nothing to do from starlette.templating import Jinja2Templates # type: ignore old_jinja2templates_init = Jinja2Templates.__init__ not_yet_patched = "_sentry_jinja2templates_init" not in str( old_jinja2templates_init ) if not_yet_patched: def _sentry_jinja2templates_init(self, *args, **kwargs): # type: (Jinja2Templates, *Any, **Any) -> None def add_sentry_trace_meta(request): # type: (Request) -> Dict[str, Any] hub = Hub.current trace_meta = Markup(hub.trace_propagation_meta()) return { "sentry_trace_meta": trace_meta, } kwargs.setdefault("context_processors", []) if add_sentry_trace_meta not in kwargs["context_processors"]: kwargs["context_processors"].append(add_sentry_trace_meta) return old_jinja2templates_init(self, *args, **kwargs) Jinja2Templates.__init__ = _sentry_jinja2templates_init class StarletteRequestExtractor: """ Extracts useful information from the Starlette request (like form data or cookies) and adds it to the Sentry event. 
""" request = None # type: Request def __init__(self, request): # type: (StarletteRequestExtractor, Request) -> None self.request = request def extract_cookies_from_request(self): # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]] client = Hub.current.client if client is None: return None cookies = None # type: Optional[Dict[str, Any]] if _should_send_default_pii(): cookies = self.cookies() return cookies async def extract_request_info(self): # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]] client = Hub.current.client if client is None: return None request_info = {} # type: Dict[str, Any] with capture_internal_exceptions(): # Add cookies if _should_send_default_pii(): request_info["cookies"] = self.cookies() # If there is no body, just return the cookies content_length = await self.content_length() if not content_length: return request_info # Add annotation if body is too big if content_length and not request_body_within_bounds( client, content_length ): request_info["data"] = AnnotatedValue.removed_because_over_size_limit() return request_info # Add JSON body, if it is a JSON request json = await self.json() if json: request_info["data"] = json return request_info # Add form as key/value pairs, if request has form data form = await self.form() if form: form_data = {} for key, val in iteritems(form): is_file = isinstance(val, UploadFile) form_data[key] = ( val if not is_file else AnnotatedValue.removed_because_raw_data() ) request_info["data"] = form_data return request_info # Raw data, do not add body just an annotation request_info["data"] = AnnotatedValue.removed_because_raw_data() return request_info async def content_length(self): # type: (StarletteRequestExtractor) -> Optional[int] if "content-length" in self.request.headers: return int(self.request.headers["content-length"]) return None def cookies(self): # type: (StarletteRequestExtractor) -> Dict[str, Any] return self.request.cookies async def form(self): # type: (StarletteRequestExtractor) -> Any if multipart is None: return None # Parse the body first to get it cached, as Starlette does not cache form() as it # does with body() and json() https://github.com/encode/starlette/discussions/1933 # Calling `.form()` without calling `.body()` first will # potentially break the users project. 
await self.request.body() return await self.request.form() def is_json(self): # type: (StarletteRequestExtractor) -> bool return _is_json_content_type(self.request.headers.get("content-type")) async def json(self): # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]] if not self.is_json(): return None return await self.request.json() def _transaction_name_from_router(scope): # type: (StarletteScope) -> Optional[str] router = scope.get("router") if not router: return None for route in router.routes: match = route.matches(scope) if match[0] == Match.FULL: return route.path return None def _set_transaction_name_and_source(scope, transaction_style, request): # type: (SentryScope, str, Any) -> None name = None source = SOURCE_FOR_STYLE[transaction_style] if transaction_style == "endpoint": endpoint = request.scope.get("endpoint") if endpoint: name = transaction_from_function(endpoint) or None elif transaction_style == "url": name = _transaction_name_from_router(request.scope) if name is None: name = _DEFAULT_TRANSACTION_NAME source = TRANSACTION_SOURCE_ROUTE scope.set_transaction_name(name, source=source) logger.debug( "[Starlette] Set transaction name and source on scope: %s / %s", name, source ) def _get_transaction_from_middleware(app, asgi_scope, integration): # type: (Any, Dict[str, Any], StarletteIntegration) -> Tuple[Optional[str], Optional[str]] name = None source = None if integration.transaction_style == "endpoint": name = transaction_from_function(app.__class__) source = TRANSACTION_SOURCE_COMPONENT elif integration.transaction_style == "url": name = _transaction_name_from_router(asgi_scope) source = TRANSACTION_SOURCE_ROUTE return name, source sentry-python-1.39.2/sentry_sdk/integrations/starlite.py000066400000000000000000000235711454744723200235710ustar00rootroot00000000000000from typing import TYPE_CHECKING from pydantic import BaseModel # type: ignore from sentry_sdk.consts import OP from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE from sentry_sdk.utils import event_from_exception, transaction_from_function try: from starlite import Request, Starlite, State # type: ignore from starlite.handlers.base import BaseRouteHandler # type: ignore from starlite.middleware import DefineMiddleware # type: ignore from starlite.plugins.base import get_plugin_for_value # type: ignore from starlite.routes.http import HTTPRoute # type: ignore from starlite.utils import ConnectionDataExtractor, is_async_callable, Ref # type: ignore if TYPE_CHECKING: from typing import Any, Dict, List, Optional, Union from starlite.types import ( # type: ignore ASGIApp, HTTPReceiveMessage, HTTPScope, Message, Middleware, Receive, Scope, Send, WebSocketReceiveMessage, ) from starlite import MiddlewareProtocol from sentry_sdk._types import Event except ImportError: raise DidNotEnable("Starlite is not installed") _DEFAULT_TRANSACTION_NAME = "generic Starlite request" class SentryStarliteASGIMiddleware(SentryAsgiMiddleware): def __init__(self, app: "ASGIApp"): super().__init__( app=app, unsafe_context_data=False, transaction_style="endpoint", mechanism_type="asgi", ) class StarliteIntegration(Integration): identifier = "starlite" @staticmethod def setup_once() -> None: patch_app_init() patch_middlewares() patch_http_route_handle() def patch_app_init() -> None: """ Replaces the Starlite class's `__init__` 
function in order to inject `after_exception` handlers and set the `SentryStarliteASGIMiddleware` as the outmost middleware in the stack. See: - https://starlite-api.github.io/starlite/usage/0-the-starlite-app/5-application-hooks/#after-exception - https://starlite-api.github.io/starlite/usage/7-middleware/0-middleware-intro/ """ old__init__ = Starlite.__init__ def injection_wrapper(self: "Starlite", *args: "Any", **kwargs: "Any") -> None: after_exception = kwargs.pop("after_exception", []) kwargs.update( after_exception=[ exception_handler, *( after_exception if isinstance(after_exception, list) else [after_exception] ), ] ) SentryStarliteASGIMiddleware.__call__ = SentryStarliteASGIMiddleware._run_asgi3 # type: ignore middleware = kwargs.pop("middleware", None) or [] kwargs["middleware"] = [SentryStarliteASGIMiddleware, *middleware] old__init__(self, *args, **kwargs) Starlite.__init__ = injection_wrapper def patch_middlewares() -> None: old__resolve_middleware_stack = BaseRouteHandler.resolve_middleware def resolve_middleware_wrapper(self: "Any") -> "List[Middleware]": return [ enable_span_for_middleware(middleware) for middleware in old__resolve_middleware_stack(self) ] BaseRouteHandler.resolve_middleware = resolve_middleware_wrapper def enable_span_for_middleware(middleware: "Middleware") -> "Middleware": if ( not hasattr(middleware, "__call__") # noqa: B004 or middleware is SentryStarliteASGIMiddleware ): return middleware if isinstance(middleware, DefineMiddleware): old_call: "ASGIApp" = middleware.middleware.__call__ else: old_call = middleware.__call__ async def _create_span_call( self: "MiddlewareProtocol", scope: "Scope", receive: "Receive", send: "Send" ) -> None: hub = Hub.current integration = hub.get_integration(StarliteIntegration) if integration is not None: middleware_name = self.__class__.__name__ with hub.start_span( op=OP.MIDDLEWARE_STARLITE, description=middleware_name ) as middleware_span: middleware_span.set_tag("starlite.middleware_name", middleware_name) # Creating spans for the "receive" callback async def _sentry_receive( *args: "Any", **kwargs: "Any" ) -> "Union[HTTPReceiveMessage, WebSocketReceiveMessage]": hub = Hub.current with hub.start_span( op=OP.MIDDLEWARE_STARLITE_RECEIVE, description=getattr(receive, "__qualname__", str(receive)), ) as span: span.set_tag("starlite.middleware_name", middleware_name) return await receive(*args, **kwargs) receive_name = getattr(receive, "__name__", str(receive)) receive_patched = receive_name == "_sentry_receive" new_receive = _sentry_receive if not receive_patched else receive # Creating spans for the "send" callback async def _sentry_send(message: "Message") -> None: hub = Hub.current with hub.start_span( op=OP.MIDDLEWARE_STARLITE_SEND, description=getattr(send, "__qualname__", str(send)), ) as span: span.set_tag("starlite.middleware_name", middleware_name) return await send(message) send_name = getattr(send, "__name__", str(send)) send_patched = send_name == "_sentry_send" new_send = _sentry_send if not send_patched else send return await old_call(self, scope, new_receive, new_send) else: return await old_call(self, scope, receive, send) not_yet_patched = old_call.__name__ not in ["_create_span_call"] if not_yet_patched: if isinstance(middleware, DefineMiddleware): middleware.middleware.__call__ = _create_span_call else: middleware.__call__ = _create_span_call return middleware def patch_http_route_handle() -> None: old_handle = HTTPRoute.handle async def handle_wrapper( self: "HTTPRoute", scope: "HTTPScope", receive: 
"Receive", send: "Send" ) -> None: hub = Hub.current integration: StarliteIntegration = hub.get_integration(StarliteIntegration) if integration is None: return await old_handle(self, scope, receive, send) with hub.configure_scope() as sentry_scope: request: "Request[Any, Any]" = scope["app"].request_class( scope=scope, receive=receive, send=send ) extracted_request_data = ConnectionDataExtractor( parse_body=True, parse_query=True )(request) body = extracted_request_data.pop("body") request_data = await body def event_processor(event: "Event", _: "Dict[str, Any]") -> "Event": route_handler = scope.get("route_handler") request_info = event.get("request", {}) request_info["content_length"] = len(scope.get("_body", b"")) if _should_send_default_pii(): request_info["cookies"] = extracted_request_data["cookies"] if request_data is not None: request_info["data"] = request_data func = None if route_handler.name is not None: tx_name = route_handler.name elif isinstance(route_handler.fn, Ref): func = route_handler.fn.value else: func = route_handler.fn if func is not None: tx_name = transaction_from_function(func) tx_info = {"source": SOURCE_FOR_STYLE["endpoint"]} if not tx_name: tx_name = _DEFAULT_TRANSACTION_NAME tx_info = {"source": TRANSACTION_SOURCE_ROUTE} event.update( request=request_info, transaction=tx_name, transaction_info=tx_info ) return event sentry_scope._name = StarliteIntegration.identifier sentry_scope.add_event_processor(event_processor) return await old_handle(self, scope, receive, send) HTTPRoute.handle = handle_wrapper def retrieve_user_from_scope(scope: "Scope") -> "Optional[Dict[str, Any]]": scope_user = scope.get("user", {}) if not scope_user: return None if isinstance(scope_user, dict): return scope_user if isinstance(scope_user, BaseModel): return scope_user.dict() if hasattr(scope_user, "asdict"): # dataclasses return scope_user.asdict() plugin = get_plugin_for_value(scope_user) if plugin and not is_async_callable(plugin.to_dict): return plugin.to_dict(scope_user) return None def exception_handler(exc: Exception, scope: "Scope", _: "State") -> None: hub = Hub.current if hub.get_integration(StarliteIntegration) is None: return user_info: "Optional[Dict[str, Any]]" = None if _should_send_default_pii(): user_info = retrieve_user_from_scope(scope) if user_info and isinstance(user_info, dict): with hub.configure_scope() as sentry_scope: sentry_scope.set_user(user_info) event, hint = event_from_exception( exc, client_options=hub.client.options if hub.client else None, mechanism={"type": StarliteIntegration.identifier, "handled": False}, ) hub.capture_event(event, hint=hint) sentry-python-1.39.2/sentry_sdk/integrations/stdlib.py000066400000000000000000000201531454744723200232140ustar00rootroot00000000000000import os import subprocess import sys import platform from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.hub import Hub from sentry_sdk.integrations import Integration from sentry_sdk.scope import add_global_event_processor from sentry_sdk.tracing_utils import EnvironHeaders, should_propagate_trace from sentry_sdk.utils import ( SENSITIVE_DATA_SUBSTITUTE, capture_internal_exceptions, is_sentry_url, logger, safe_repr, parse_url, ) from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Callable from typing import Dict from typing import Optional from typing import List from sentry_sdk._types import Event, Hint try: from httplib import HTTPConnection # type: ignore except ImportError: from http.client import HTTPConnection 
_RUNTIME_CONTEXT = { "name": platform.python_implementation(), "version": "%s.%s.%s" % (sys.version_info[:3]), "build": sys.version, } class StdlibIntegration(Integration): identifier = "stdlib" @staticmethod def setup_once(): # type: () -> None _install_httplib() _install_subprocess() @add_global_event_processor def add_python_runtime_context(event, hint): # type: (Event, Hint) -> Optional[Event] if Hub.current.get_integration(StdlibIntegration) is not None: contexts = event.setdefault("contexts", {}) if isinstance(contexts, dict) and "runtime" not in contexts: contexts["runtime"] = _RUNTIME_CONTEXT return event def _install_httplib(): # type: () -> None real_putrequest = HTTPConnection.putrequest real_getresponse = HTTPConnection.getresponse def putrequest(self, method, url, *args, **kwargs): # type: (HTTPConnection, str, str, *Any, **Any) -> Any hub = Hub.current host = self.host port = self.port default_port = self.default_port if hub.get_integration(StdlibIntegration) is None or is_sentry_url(hub, host): return real_putrequest(self, method, url, *args, **kwargs) real_url = url if real_url is None or not real_url.startswith(("http://", "https://")): real_url = "%s://%s%s%s" % ( default_port == 443 and "https" or "http", host, port != default_port and ":%s" % port or "", url, ) parsed_url = None with capture_internal_exceptions(): parsed_url = parse_url(real_url, sanitize=False) span = hub.start_span( op=OP.HTTP_CLIENT, description="%s %s" % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE), ) span.set_data(SPANDATA.HTTP_METHOD, method) if parsed_url is not None: span.set_data("url", parsed_url.url) span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query) span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment) rv = real_putrequest(self, method, url, *args, **kwargs) if should_propagate_trace(hub, real_url): for key, value in hub.iter_trace_propagation_headers(span): logger.debug( "[Tracing] Adding `{key}` header {value} to outgoing request to {real_url}.".format( key=key, value=value, real_url=real_url ) ) self.putheader(key, value) self._sentrysdk_span = span return rv def getresponse(self, *args, **kwargs): # type: (HTTPConnection, *Any, **Any) -> Any span = getattr(self, "_sentrysdk_span", None) if span is None: return real_getresponse(self, *args, **kwargs) rv = real_getresponse(self, *args, **kwargs) span.set_http_status(int(rv.status)) span.set_data("reason", rv.reason) span.finish() return rv HTTPConnection.putrequest = putrequest HTTPConnection.getresponse = getresponse def _init_argument(args, kwargs, name, position, setdefault_callback=None): # type: (List[Any], Dict[Any, Any], str, int, Optional[Callable[[Any], Any]]) -> Any """ given (*args, **kwargs) of a function call, retrieve (and optionally set a default for) an argument by either name or position. This is useful for wrapping functions with complex type signatures and extracting a few arguments without needing to redefine that function's entire type signature. 
""" if name in kwargs: rv = kwargs[name] if setdefault_callback is not None: rv = setdefault_callback(rv) if rv is not None: kwargs[name] = rv elif position < len(args): rv = args[position] if setdefault_callback is not None: rv = setdefault_callback(rv) if rv is not None: args[position] = rv else: rv = setdefault_callback and setdefault_callback(None) if rv is not None: kwargs[name] = rv return rv def _install_subprocess(): # type: () -> None old_popen_init = subprocess.Popen.__init__ def sentry_patched_popen_init(self, *a, **kw): # type: (subprocess.Popen[Any], *Any, **Any) -> None hub = Hub.current if hub.get_integration(StdlibIntegration) is None: return old_popen_init(self, *a, **kw) # Convert from tuple to list to be able to set values. a = list(a) args = _init_argument(a, kw, "args", 0) or [] cwd = _init_argument(a, kw, "cwd", 9) # if args is not a list or tuple (and e.g. some iterator instead), # let's not use it at all. There are too many things that can go wrong # when trying to collect an iterator into a list and setting that list # into `a` again. # # Also invocations where `args` is not a sequence are not actually # legal. They just happen to work under CPython. description = None if isinstance(args, (list, tuple)) and len(args) < 100: with capture_internal_exceptions(): description = " ".join(map(str, args)) if description is None: description = safe_repr(args) env = None with hub.start_span(op=OP.SUBPROCESS, description=description) as span: for k, v in hub.iter_trace_propagation_headers(span): if env is None: env = _init_argument( a, kw, "env", 10, lambda x: dict(x or os.environ) ) env["SUBPROCESS_" + k.upper().replace("-", "_")] = v if cwd: span.set_data("subprocess.cwd", cwd) rv = old_popen_init(self, *a, **kw) span.set_tag("subprocess.pid", self.pid) return rv subprocess.Popen.__init__ = sentry_patched_popen_init # type: ignore old_popen_wait = subprocess.Popen.wait def sentry_patched_popen_wait(self, *a, **kw): # type: (subprocess.Popen[Any], *Any, **Any) -> Any hub = Hub.current if hub.get_integration(StdlibIntegration) is None: return old_popen_wait(self, *a, **kw) with hub.start_span(op=OP.SUBPROCESS_WAIT) as span: span.set_tag("subprocess.pid", self.pid) return old_popen_wait(self, *a, **kw) subprocess.Popen.wait = sentry_patched_popen_wait # type: ignore old_popen_communicate = subprocess.Popen.communicate def sentry_patched_popen_communicate(self, *a, **kw): # type: (subprocess.Popen[Any], *Any, **Any) -> Any hub = Hub.current if hub.get_integration(StdlibIntegration) is None: return old_popen_communicate(self, *a, **kw) with hub.start_span(op=OP.SUBPROCESS_COMMUNICATE) as span: span.set_tag("subprocess.pid", self.pid) return old_popen_communicate(self, *a, **kw) subprocess.Popen.communicate = sentry_patched_popen_communicate # type: ignore def get_subprocess_traceparent_headers(): # type: () -> EnvironHeaders return EnvironHeaders(os.environ, prefix="SUBPROCESS_") sentry-python-1.39.2/sentry_sdk/integrations/strawberry.py000066400000000000000000000343321454744723200241430ustar00rootroot00000000000000import hashlib from functools import cached_property from inspect import isawaitable from sentry_sdk import configure_scope, start_span from sentry_sdk.consts import OP from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, logger, package_version, 
_get_installed_modules, ) from sentry_sdk._types import TYPE_CHECKING try: import strawberry.schema.schema as strawberry_schema # type: ignore from strawberry import Schema from strawberry.extensions import SchemaExtension # type: ignore from strawberry.extensions.tracing.utils import should_skip_tracing as strawberry_should_skip_tracing # type: ignore from strawberry.extensions.tracing import ( # type: ignore SentryTracingExtension as StrawberrySentryAsyncExtension, SentryTracingExtensionSync as StrawberrySentrySyncExtension, ) from strawberry.http import async_base_view, sync_base_view # type: ignore except ImportError: raise DidNotEnable("strawberry-graphql is not installed") if TYPE_CHECKING: from typing import Any, Callable, Dict, Generator, List, Optional from graphql import GraphQLError, GraphQLResolveInfo # type: ignore from strawberry.http import GraphQLHTTPResponse from strawberry.types import ExecutionContext, ExecutionResult # type: ignore from sentry_sdk._types import EventProcessor ignore_logger("strawberry.execution") class StrawberryIntegration(Integration): identifier = "strawberry" def __init__(self, async_execution=None): # type: (Optional[bool]) -> None if async_execution not in (None, False, True): raise ValueError( 'Invalid value for async_execution: "{}" (must be bool)'.format( async_execution ) ) self.async_execution = async_execution @staticmethod def setup_once(): # type: () -> None version = package_version("strawberry-graphql") if version is None: raise DidNotEnable( "Unparsable strawberry-graphql version: {}".format(version) ) if version < (0, 209, 5): raise DidNotEnable("strawberry-graphql 0.209.5 or newer required.") _patch_schema_init() _patch_execute() _patch_views() def _patch_schema_init(): # type: () -> None old_schema_init = Schema.__init__ def _sentry_patched_schema_init(self, *args, **kwargs): # type: (Schema, Any, Any) -> None integration = Hub.current.get_integration(StrawberryIntegration) if integration is None: return old_schema_init(self, *args, **kwargs) extensions = kwargs.get("extensions") or [] if integration.async_execution is not None: should_use_async_extension = integration.async_execution else: # try to figure it out ourselves should_use_async_extension = _guess_if_using_async(extensions) logger.info( "Assuming strawberry is running %s. 
If not, initialize it as StrawberryIntegration(async_execution=%s).", "async" if should_use_async_extension else "sync", "False" if should_use_async_extension else "True", ) # remove the built in strawberry sentry extension, if present extensions = [ extension for extension in extensions if extension not in (StrawberrySentryAsyncExtension, StrawberrySentrySyncExtension) ] # add our extension extensions.append( SentryAsyncExtension if should_use_async_extension else SentrySyncExtension ) kwargs["extensions"] = extensions return old_schema_init(self, *args, **kwargs) Schema.__init__ = _sentry_patched_schema_init class SentryAsyncExtension(SchemaExtension): # type: ignore def __init__( self, *, execution_context=None, ): # type: (Any, Optional[ExecutionContext]) -> None if execution_context: self.execution_context = execution_context @cached_property def _resource_name(self): # type: () -> str query_hash = self.hash_query(self.execution_context.query) if self.execution_context.operation_name: return "{}:{}".format(self.execution_context.operation_name, query_hash) return query_hash def hash_query(self, query): # type: (str) -> str return hashlib.md5(query.encode("utf-8")).hexdigest() def on_operation(self): # type: () -> Generator[None, None, None] self._operation_name = self.execution_context.operation_name operation_type = "query" op = OP.GRAPHQL_QUERY if self.execution_context.query.strip().startswith("mutation"): operation_type = "mutation" op = OP.GRAPHQL_MUTATION elif self.execution_context.query.strip().startswith("subscription"): operation_type = "subscription" op = OP.GRAPHQL_SUBSCRIPTION description = operation_type if self._operation_name: description += " {}".format(self._operation_name) Hub.current.add_breadcrumb( category="graphql.operation", data={ "operation_name": self._operation_name, "operation_type": operation_type, }, ) with configure_scope() as scope: if scope.span: self.graphql_span = scope.span.start_child( op=op, description=description ) else: self.graphql_span = start_span(op=op, description=description) self.graphql_span.set_data("graphql.operation.type", operation_type) self.graphql_span.set_data("graphql.operation.name", self._operation_name) self.graphql_span.set_data("graphql.document", self.execution_context.query) self.graphql_span.set_data("graphql.resource_name", self._resource_name) yield self.graphql_span.finish() def on_validate(self): # type: () -> Generator[None, None, None] self.validation_span = self.graphql_span.start_child( op=OP.GRAPHQL_VALIDATE, description="validation" ) yield self.validation_span.finish() def on_parse(self): # type: () -> Generator[None, None, None] self.parsing_span = self.graphql_span.start_child( op=OP.GRAPHQL_PARSE, description="parsing" ) yield self.parsing_span.finish() def should_skip_tracing(self, _next, info): # type: (Callable[[Any, GraphQLResolveInfo, Any, Any], Any], GraphQLResolveInfo) -> bool return strawberry_should_skip_tracing(_next, info) async def _resolve(self, _next, root, info, *args, **kwargs): # type: (Callable[[Any, GraphQLResolveInfo, Any, Any], Any], Any, GraphQLResolveInfo, str, Any) -> Any result = _next(root, info, *args, **kwargs) if isawaitable(result): result = await result return result async def resolve(self, _next, root, info, *args, **kwargs): # type: (Callable[[Any, GraphQLResolveInfo, Any, Any], Any], Any, GraphQLResolveInfo, str, Any) -> Any if self.should_skip_tracing(_next, info): return await self._resolve(_next, root, info, *args, **kwargs) field_path = 
"{}.{}".format(info.parent_type, info.field_name) with self.graphql_span.start_child( op=OP.GRAPHQL_RESOLVE, description="resolving {}".format(field_path) ) as span: span.set_data("graphql.field_name", info.field_name) span.set_data("graphql.parent_type", info.parent_type.name) span.set_data("graphql.field_path", field_path) span.set_data("graphql.path", ".".join(map(str, info.path.as_list()))) return await self._resolve(_next, root, info, *args, **kwargs) class SentrySyncExtension(SentryAsyncExtension): def resolve(self, _next, root, info, *args, **kwargs): # type: (Callable[[Any, Any, Any, Any], Any], Any, GraphQLResolveInfo, str, Any) -> Any if self.should_skip_tracing(_next, info): return _next(root, info, *args, **kwargs) field_path = "{}.{}".format(info.parent_type, info.field_name) with self.graphql_span.start_child( op=OP.GRAPHQL_RESOLVE, description="resolving {}".format(field_path) ) as span: span.set_data("graphql.field_name", info.field_name) span.set_data("graphql.parent_type", info.parent_type.name) span.set_data("graphql.field_path", field_path) span.set_data("graphql.path", ".".join(map(str, info.path.as_list()))) return _next(root, info, *args, **kwargs) def _patch_execute(): # type: () -> None old_execute_async = strawberry_schema.execute old_execute_sync = strawberry_schema.execute_sync async def _sentry_patched_execute_async(*args, **kwargs): # type: (Any, Any) -> ExecutionResult hub = Hub.current integration = hub.get_integration(StrawberryIntegration) if integration is None: return await old_execute_async(*args, **kwargs) result = await old_execute_async(*args, **kwargs) if "execution_context" in kwargs and result.errors: with hub.configure_scope() as scope: event_processor = _make_request_event_processor( kwargs["execution_context"] ) scope.add_event_processor(event_processor) return result def _sentry_patched_execute_sync(*args, **kwargs): # type: (Any, Any) -> ExecutionResult hub = Hub.current integration = hub.get_integration(StrawberryIntegration) if integration is None: return old_execute_sync(*args, **kwargs) result = old_execute_sync(*args, **kwargs) if "execution_context" in kwargs and result.errors: with hub.configure_scope() as scope: event_processor = _make_request_event_processor( kwargs["execution_context"] ) scope.add_event_processor(event_processor) return result strawberry_schema.execute = _sentry_patched_execute_async strawberry_schema.execute_sync = _sentry_patched_execute_sync def _patch_views(): # type: () -> None old_async_view_handle_errors = async_base_view.AsyncBaseHTTPView._handle_errors old_sync_view_handle_errors = sync_base_view.SyncBaseHTTPView._handle_errors def _sentry_patched_async_view_handle_errors(self, errors, response_data): # type: (Any, List[GraphQLError], GraphQLHTTPResponse) -> None old_async_view_handle_errors(self, errors, response_data) _sentry_patched_handle_errors(self, errors, response_data) def _sentry_patched_sync_view_handle_errors(self, errors, response_data): # type: (Any, List[GraphQLError], GraphQLHTTPResponse) -> None old_sync_view_handle_errors(self, errors, response_data) _sentry_patched_handle_errors(self, errors, response_data) def _sentry_patched_handle_errors(self, errors, response_data): # type: (Any, List[GraphQLError], GraphQLHTTPResponse) -> None hub = Hub.current integration = hub.get_integration(StrawberryIntegration) if integration is None: return if not errors: return with hub.configure_scope() as scope: event_processor = _make_response_event_processor(response_data) 
scope.add_event_processor(event_processor) with capture_internal_exceptions(): for error in errors: event, hint = event_from_exception( error, client_options=hub.client.options if hub.client else None, mechanism={ "type": integration.identifier, "handled": False, }, ) hub.capture_event(event, hint=hint) async_base_view.AsyncBaseHTTPView._handle_errors = ( _sentry_patched_async_view_handle_errors ) sync_base_view.SyncBaseHTTPView._handle_errors = ( _sentry_patched_sync_view_handle_errors ) def _make_request_event_processor(execution_context): # type: (ExecutionContext) -> EventProcessor def inner(event, hint): # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] with capture_internal_exceptions(): if _should_send_default_pii(): request_data = event.setdefault("request", {}) request_data["api_target"] = "graphql" if not request_data.get("data"): request_data["data"] = {"query": execution_context.query} if execution_context.variables: request_data["data"]["variables"] = execution_context.variables if execution_context.operation_name: request_data["data"][ "operationName" ] = execution_context.operation_name else: try: del event["request"]["data"] except (KeyError, TypeError): pass return event return inner def _make_response_event_processor(response_data): # type: (GraphQLHTTPResponse) -> EventProcessor def inner(event, hint): # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] with capture_internal_exceptions(): if _should_send_default_pii(): contexts = event.setdefault("contexts", {}) contexts["response"] = {"data": response_data} return event return inner def _guess_if_using_async(extensions): # type: (List[SchemaExtension]) -> bool if StrawberrySentryAsyncExtension in extensions: return True elif StrawberrySentrySyncExtension in extensions: return False return bool( {"starlette", "starlite", "litestar", "fastapi"} & set(_get_installed_modules()) ) sentry-python-1.39.2/sentry_sdk/integrations/threading.py000066400000000000000000000055711454744723200237070ustar00rootroot00000000000000from __future__ import absolute_import import sys from functools import wraps from threading import Thread, current_thread from sentry_sdk import Hub from sentry_sdk._compat import reraise from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.integrations import Integration from sentry_sdk.utils import event_from_exception, capture_internal_exceptions if TYPE_CHECKING: from typing import Any from typing import TypeVar from typing import Callable from typing import Optional from sentry_sdk._types import ExcInfo F = TypeVar("F", bound=Callable[..., Any]) class ThreadingIntegration(Integration): identifier = "threading" def __init__(self, propagate_hub=False): # type: (bool) -> None self.propagate_hub = propagate_hub @staticmethod def setup_once(): # type: () -> None old_start = Thread.start @wraps(old_start) def sentry_start(self, *a, **kw): # type: (Thread, *Any, **Any) -> Any hub = Hub.current integration = hub.get_integration(ThreadingIntegration) if integration is not None: if not integration.propagate_hub: hub_ = None else: hub_ = Hub(hub) # Patching instance methods in `start()` creates a reference cycle if # done in a naive way. See # https://github.com/getsentry/sentry-python/pull/434 # # In threading module, using current_thread API will access current thread instance # without holding it to avoid a reference cycle in an easier way. 
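            # A short usage sketch (illustrative, not part of the SDK): with
            # `ThreadingIntegration(propagate_hub=True)`, breadcrumbs and tags
            # set on the spawning hub are visible to events captured in the
            # child thread (`worker` here is any user-defined function):
            #
            #     sentry_sdk.init(dsn="...", integrations=[ThreadingIntegration(propagate_hub=True)])
            #     Thread(target=worker).start()  # uncaught exceptions in worker() are reported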
with capture_internal_exceptions(): new_run = _wrap_run(hub_, getattr(self.run, "__func__", self.run)) self.run = new_run # type: ignore return old_start(self, *a, **kw) Thread.start = sentry_start # type: ignore def _wrap_run(parent_hub, old_run_func): # type: (Optional[Hub], F) -> F @wraps(old_run_func) def run(*a, **kw): # type: (*Any, **Any) -> Any hub = parent_hub or Hub.current with hub: try: self = current_thread() return old_run_func(self, *a, **kw) except Exception: reraise(*_capture_exception()) return run # type: ignore def _capture_exception(): # type: () -> ExcInfo hub = Hub.current exc_info = sys.exc_info() if hub.get_integration(ThreadingIntegration) is not None: # If an integration is there, a client has to be there. client = hub.client # type: Any event, hint = event_from_exception( exc_info, client_options=client.options, mechanism={"type": "threading", "handled": False}, ) hub.capture_event(event, hint=hint) return exc_info sentry-python-1.39.2/sentry_sdk/integrations/tornado.py000066400000000000000000000162561454744723200234120ustar00rootroot00000000000000import weakref import contextlib from inspect import iscoroutinefunction from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.tracing import ( TRANSACTION_SOURCE_COMPONENT, TRANSACTION_SOURCE_ROUTE, ) from sentry_sdk.utils import ( HAS_REAL_CONTEXTVARS, CONTEXTVARS_ERROR_MESSAGE, event_from_exception, capture_internal_exceptions, transaction_from_function, ) from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations._wsgi_common import ( RequestExtractor, _filter_headers, _is_json_content_type, ) from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk._compat import iteritems try: from tornado import version_info as TORNADO_VERSION from tornado.web import RequestHandler, HTTPError from tornado.gen import coroutine except ImportError: raise DidNotEnable("Tornado not installed") from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Optional from typing import Dict from typing import Callable from typing import Generator from sentry_sdk._types import EventProcessor class TornadoIntegration(Integration): identifier = "tornado" @staticmethod def setup_once(): # type: () -> None if TORNADO_VERSION < (5, 0): raise DidNotEnable("Tornado 5+ required") if not HAS_REAL_CONTEXTVARS: # Tornado is async. We better have contextvars or we're going to leak # state between requests. 
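            # Without contextvars, a single Hub would be shared across
            # concurrently handled requests, so tags and breadcrumbs from one
            # request could end up on another request's events. Hence we
            # refuse to enable the integration.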
raise DidNotEnable( "The tornado integration for Sentry requires Python 3.7+ or the aiocontextvars package" + CONTEXTVARS_ERROR_MESSAGE ) ignore_logger("tornado.access") old_execute = RequestHandler._execute awaitable = iscoroutinefunction(old_execute) if awaitable: # Starting Tornado 6 RequestHandler._execute method is a standard Python coroutine (async/await) # In that case our method should be a coroutine function too async def sentry_execute_request_handler(self, *args, **kwargs): # type: (RequestHandler, *Any, **Any) -> Any with _handle_request_impl(self): return await old_execute(self, *args, **kwargs) else: @coroutine # type: ignore def sentry_execute_request_handler(self, *args, **kwargs): # type: (RequestHandler, *Any, **Any) -> Any with _handle_request_impl(self): result = yield from old_execute(self, *args, **kwargs) return result RequestHandler._execute = sentry_execute_request_handler old_log_exception = RequestHandler.log_exception def sentry_log_exception(self, ty, value, tb, *args, **kwargs): # type: (Any, type, BaseException, Any, *Any, **Any) -> Optional[Any] _capture_exception(ty, value, tb) return old_log_exception(self, ty, value, tb, *args, **kwargs) RequestHandler.log_exception = sentry_log_exception @contextlib.contextmanager def _handle_request_impl(self): # type: (RequestHandler) -> Generator[None, None, None] hub = Hub.current integration = hub.get_integration(TornadoIntegration) if integration is None: yield weak_handler = weakref.ref(self) with Hub(hub) as hub: headers = self.request.headers with hub.configure_scope() as scope: scope.clear_breadcrumbs() processor = _make_event_processor(weak_handler) scope.add_event_processor(processor) transaction = continue_trace( headers, op=OP.HTTP_SERVER, # Like with all other integrations, this is our # fallback transaction in case there is no route. # sentry_urldispatcher_resolve is responsible for # setting a transaction name later. name="generic Tornado request", source=TRANSACTION_SOURCE_ROUTE, ) with hub.start_transaction( transaction, custom_sampling_context={"tornado_request": self.request} ): yield def _capture_exception(ty, value, tb): # type: (type, BaseException, Any) -> None hub = Hub.current if hub.get_integration(TornadoIntegration) is None: return if isinstance(value, HTTPError): return # If an integration is there, a client has to be there. 
client = hub.client # type: Any event, hint = event_from_exception( (ty, value, tb), client_options=client.options, mechanism={"type": "tornado", "handled": False}, ) hub.capture_event(event, hint=hint) def _make_event_processor(weak_handler): # type: (Callable[[], RequestHandler]) -> EventProcessor def tornado_processor(event, hint): # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] handler = weak_handler() if handler is None: return event request = handler.request with capture_internal_exceptions(): method = getattr(handler, handler.request.method.lower()) event["transaction"] = transaction_from_function(method) event["transaction_info"] = {"source": TRANSACTION_SOURCE_COMPONENT} with capture_internal_exceptions(): extractor = TornadoRequestExtractor(request) extractor.extract_into_event(event) request_info = event["request"] request_info["url"] = "%s://%s%s" % ( request.protocol, request.host, request.path, ) request_info["query_string"] = request.query request_info["method"] = request.method request_info["env"] = {"REMOTE_ADDR": request.remote_ip} request_info["headers"] = _filter_headers(dict(request.headers)) with capture_internal_exceptions(): if handler.current_user and _should_send_default_pii(): event.setdefault("user", {}).setdefault("is_authenticated", True) return event return tornado_processor class TornadoRequestExtractor(RequestExtractor): def content_length(self): # type: () -> int if self.request.body is None: return 0 return len(self.request.body) def cookies(self): # type: () -> Dict[str, str] return {k: v.value for k, v in iteritems(self.request.cookies)} def raw_data(self): # type: () -> bytes return self.request.body def form(self): # type: () -> Dict[str, Any] return { k: [v.decode("latin1", "replace") for v in vs] for k, vs in iteritems(self.request.body_arguments) } def is_json(self): # type: () -> bool return _is_json_content_type(self.request.headers.get("content-type")) def files(self): # type: () -> Dict[str, Any] return {k: v[0] for k, v in iteritems(self.request.files) if v} def size_of_file(self, file): # type: (Any) -> int return len(file.body or ()) sentry-python-1.39.2/sentry_sdk/integrations/trytond.py000066400000000000000000000033211454744723200234340ustar00rootroot00000000000000import sentry_sdk.hub import sentry_sdk.utils import sentry_sdk.integrations import sentry_sdk.integrations.wsgi from sentry_sdk._types import TYPE_CHECKING from trytond.exceptions import TrytonException # type: ignore from trytond.wsgi import app # type: ignore if TYPE_CHECKING: from typing import Any # TODO: trytond-worker, trytond-cron and trytond-admin intergations class TrytondWSGIIntegration(sentry_sdk.integrations.Integration): identifier = "trytond_wsgi" def __init__(self): # type: () -> None pass @staticmethod def setup_once(): # type: () -> None app.wsgi_app = sentry_sdk.integrations.wsgi.SentryWsgiMiddleware(app.wsgi_app) def error_handler(e): # type: (Exception) -> None hub = sentry_sdk.hub.Hub.current if hub.get_integration(TrytondWSGIIntegration) is None: return elif isinstance(e, TrytonException): return else: # If an integration is there, a client has to be there. 
client = hub.client # type: Any event, hint = sentry_sdk.utils.event_from_exception( e, client_options=client.options, mechanism={"type": "trytond", "handled": False}, ) hub.capture_event(event, hint=hint) # Expected error handlers signature was changed # when the error_handler decorator was introduced # in Tryton-5.4 if hasattr(app, "error_handler"): @app.error_handler def _(app, request, e): # type: ignore error_handler(e) else: app.error_handlers.append(error_handler) sentry-python-1.39.2/sentry_sdk/integrations/wsgi.py000066400000000000000000000225551454744723200227140ustar00rootroot00000000000000import sys from sentry_sdk._compat import PY2, reraise from sentry_sdk._functools import partial from sentry_sdk._types import TYPE_CHECKING from sentry_sdk._werkzeug import get_host, _get_headers from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.utils import ( ContextVar, capture_internal_exceptions, event_from_exception, ) from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_ROUTE from sentry_sdk.sessions import auto_session_tracking from sentry_sdk.integrations._wsgi_common import _filter_headers if TYPE_CHECKING: from typing import Callable from typing import Dict from typing import Iterator from typing import Any from typing import Tuple from typing import Optional from typing import TypeVar from typing import Protocol from sentry_sdk.utils import ExcInfo from sentry_sdk._types import EventProcessor WsgiResponseIter = TypeVar("WsgiResponseIter") WsgiResponseHeaders = TypeVar("WsgiResponseHeaders") WsgiExcInfo = TypeVar("WsgiExcInfo") class StartResponse(Protocol): def __call__(self, status, response_headers, exc_info=None): # type: ignore # type: (str, WsgiResponseHeaders, Optional[WsgiExcInfo]) -> WsgiResponseIter pass _wsgi_middleware_applied = ContextVar("sentry_wsgi_middleware_applied") if PY2: def wsgi_decoding_dance(s, charset="utf-8", errors="replace"): # type: (str, str, str) -> str return s.decode(charset, errors) else: def wsgi_decoding_dance(s, charset="utf-8", errors="replace"): # type: (str, str, str) -> str return s.encode("latin1").decode(charset, errors) def get_request_url(environ, use_x_forwarded_for=False): # type: (Dict[str, str], bool) -> str """Return the absolute URL without query string for the given WSGI environment.""" return "%s://%s/%s" % ( environ.get("wsgi.url_scheme"), get_host(environ, use_x_forwarded_for), wsgi_decoding_dance(environ.get("PATH_INFO") or "").lstrip("/"), ) class SentryWsgiMiddleware(object): __slots__ = ("app", "use_x_forwarded_for") def __init__(self, app, use_x_forwarded_for=False): # type: (Callable[[Dict[str, str], Callable[..., Any]], Any], bool) -> None self.app = app self.use_x_forwarded_for = use_x_forwarded_for def __call__(self, environ, start_response): # type: (Dict[str, str], Callable[..., Any]) -> _ScopedResponse if _wsgi_middleware_applied.get(False): return self.app(environ, start_response) _wsgi_middleware_applied.set(True) try: hub = Hub(Hub.current) with auto_session_tracking(hub, session_mode="request"): with hub: with capture_internal_exceptions(): with hub.configure_scope() as scope: scope.clear_breadcrumbs() scope._name = "wsgi" scope.add_event_processor( _make_wsgi_event_processor( environ, self.use_x_forwarded_for ) ) transaction = continue_trace( environ, op=OP.HTTP_SERVER, name="generic WSGI request", source=TRANSACTION_SOURCE_ROUTE, ) with hub.start_transaction( transaction, 
custom_sampling_context={"wsgi_environ": environ} ): try: rv = self.app( environ, partial( _sentry_start_response, start_response, transaction ), ) except BaseException: reraise(*_capture_exception(hub)) finally: _wsgi_middleware_applied.set(False) return _ScopedResponse(hub, rv) def _sentry_start_response( # type: ignore old_start_response, # type: StartResponse transaction, # type: Transaction status, # type: str response_headers, # type: WsgiResponseHeaders exc_info=None, # type: Optional[WsgiExcInfo] ): # type: (...) -> WsgiResponseIter with capture_internal_exceptions(): status_int = int(status.split(" ", 1)[0]) transaction.set_http_status(status_int) if exc_info is None: # The Django Rest Framework WSGI test client, and likely other # (incorrect) implementations, cannot deal with the exc_info argument # if one is present. Avoid providing a third argument if not necessary. return old_start_response(status, response_headers) else: return old_start_response(status, response_headers, exc_info) def _get_environ(environ): # type: (Dict[str, str]) -> Iterator[Tuple[str, str]] """ Returns our explicitly included environment variables we want to capture (server name, port and remote addr if pii is enabled). """ keys = ["SERVER_NAME", "SERVER_PORT"] if _should_send_default_pii(): # make debugging of proxy setup easier. Proxy headers are # in headers. keys += ["REMOTE_ADDR"] for key in keys: if key in environ: yield key, environ[key] def get_client_ip(environ): # type: (Dict[str, str]) -> Optional[Any] """ Infer the user IP address from various headers. This cannot be used in security sensitive situations since the value may be forged from a client, but it's good enough for the event payload. """ try: return environ["HTTP_X_FORWARDED_FOR"].split(",")[0].strip() except (KeyError, IndexError): pass try: return environ["HTTP_X_REAL_IP"] except KeyError: pass return environ.get("REMOTE_ADDR") def _capture_exception(hub): # type: (Hub) -> ExcInfo exc_info = sys.exc_info() # Check client here as it might have been unset while streaming response if hub.client is not None: e = exc_info[1] # SystemExit(0) is the only uncaught exception that is expected behavior should_skip_capture = isinstance(e, SystemExit) and e.code in (0, None) if not should_skip_capture: event, hint = event_from_exception( exc_info, client_options=hub.client.options, mechanism={"type": "wsgi", "handled": False}, ) hub.capture_event(event, hint=hint) return exc_info class _ScopedResponse(object): __slots__ = ("_response", "_hub") def __init__(self, hub, response): # type: (Hub, Iterator[bytes]) -> None self._hub = hub self._response = response def __iter__(self): # type: () -> Iterator[bytes] iterator = iter(self._response) while True: with self._hub: try: chunk = next(iterator) except StopIteration: break except BaseException: reraise(*_capture_exception(self._hub)) yield chunk def close(self): # type: () -> None with self._hub: try: self._response.close() # type: ignore except AttributeError: pass except BaseException: reraise(*_capture_exception(self._hub)) def _make_wsgi_event_processor(environ, use_x_forwarded_for): # type: (Dict[str, str], bool) -> EventProcessor # It's a bit unfortunate that we have to extract and parse the request data # from the environ so eagerly, but there are a few good reasons for this. # # We might be in a situation where the scope/hub never gets torn down # properly. 
In that case we will have an unnecessary strong reference to # all objects in the environ (some of which may take a lot of memory) when # we're really just interested in a few of them. # # Keeping the environment around for longer than the request lifecycle is # also not necessarily something uWSGI can deal with: # https://github.com/unbit/uwsgi/issues/1950 client_ip = get_client_ip(environ) request_url = get_request_url(environ, use_x_forwarded_for) query_string = environ.get("QUERY_STRING") method = environ.get("REQUEST_METHOD") env = dict(_get_environ(environ)) headers = _filter_headers(dict(_get_headers(environ))) def event_processor(event, hint): # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] with capture_internal_exceptions(): # if the code below fails halfway through we at least have some data request_info = event.setdefault("request", {}) if _should_send_default_pii(): user_info = event.setdefault("user", {}) if client_ip: user_info.setdefault("ip_address", client_ip) request_info["url"] = request_url request_info["query_string"] = query_string request_info["method"] = method request_info["env"] = env request_info["headers"] = headers return event return event_processor sentry-python-1.39.2/sentry_sdk/metrics.py000066400000000000000000000704331454744723200207010ustar00rootroot00000000000000import os import io import re import sys import threading import random import time import zlib from datetime import datetime from functools import wraps, partial from threading import Event, Lock, Thread from contextlib import contextmanager import sentry_sdk from sentry_sdk._compat import text_type, utc_from_timestamp, iteritems from sentry_sdk.utils import ( now, nanosecond_time, to_timestamp, serialize_frame, json_dumps, ) from sentry_sdk.envelope import Envelope, Item from sentry_sdk.tracing import ( TRANSACTION_SOURCE_ROUTE, TRANSACTION_SOURCE_VIEW, TRANSACTION_SOURCE_COMPONENT, TRANSACTION_SOURCE_TASK, ) from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Callable from typing import Dict from typing import Generator from typing import Iterable from typing import List from typing import Optional from typing import Set from typing import Tuple from typing import Union from sentry_sdk._types import BucketKey from sentry_sdk._types import DurationUnit from sentry_sdk._types import FlushedMetricValue from sentry_sdk._types import MeasurementUnit from sentry_sdk._types import MetricMetaKey from sentry_sdk._types import MetricTagValue from sentry_sdk._types import MetricTags from sentry_sdk._types import MetricTagsInternal from sentry_sdk._types import MetricType from sentry_sdk._types import MetricValue _thread_local = threading.local() _sanitize_key = partial(re.compile(r"[^a-zA-Z0-9_/.-]+").sub, "_") _sanitize_value = partial(re.compile(r"[^\w\d_:/@\.{}\[\]$-]+", re.UNICODE).sub, "_") _set = set # set is shadowed below GOOD_TRANSACTION_SOURCES = frozenset( [ TRANSACTION_SOURCE_ROUTE, TRANSACTION_SOURCE_VIEW, TRANSACTION_SOURCE_COMPONENT, TRANSACTION_SOURCE_TASK, ] ) def get_code_location(stacklevel): # type: (int) -> Optional[Dict[str, Any]] try: frm = sys._getframe(stacklevel) except Exception: return None return serialize_frame( frm, include_local_variables=False, include_source_context=True ) @contextmanager def recursion_protection(): # type: () -> Generator[bool, None, None] """Enters recursion protection and returns the old flag.""" try: in_metrics = _thread_local.in_metrics except AttributeError: in_metrics = False 
    _thread_local.in_metrics = True
    try:
        yield in_metrics
    finally:
        _thread_local.in_metrics = in_metrics


def metrics_noop(func):
    # type: (Any) -> Any
    """Convenient decorator that uses `recursion_protection` to
    make a function a noop.
    """

    @wraps(func)
    def new_func(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        with recursion_protection() as in_metrics:
            if not in_metrics:
                return func(*args, **kwargs)

    return new_func


class Metric(object):
    __slots__ = ()

    @property
    def weight(self):
        # type: (...) -> int
        raise NotImplementedError()

    def add(
        self, value  # type: MetricValue
    ):
        # type: (...) -> None
        raise NotImplementedError()

    def serialize_value(self):
        # type: (...) -> Iterable[FlushedMetricValue]
        raise NotImplementedError()


class CounterMetric(Metric):
    __slots__ = ("value",)

    def __init__(
        self, first  # type: MetricValue
    ):
        # type: (...) -> None
        self.value = float(first)

    @property
    def weight(self):
        # type: (...) -> int
        return 1

    def add(
        self, value  # type: MetricValue
    ):
        # type: (...) -> None
        self.value += float(value)

    def serialize_value(self):
        # type: (...) -> Iterable[FlushedMetricValue]
        return (self.value,)


class GaugeMetric(Metric):
    __slots__ = (
        "last",
        "min",
        "max",
        "sum",
        "count",
    )

    def __init__(
        self, first  # type: MetricValue
    ):
        # type: (...) -> None
        first = float(first)
        self.last = first
        self.min = first
        self.max = first
        self.sum = first
        self.count = 1

    @property
    def weight(self):
        # type: (...) -> int
        # Number of elements.
        return 5

    def add(
        self, value  # type: MetricValue
    ):
        # type: (...) -> None
        value = float(value)
        self.last = value
        self.min = min(self.min, value)
        self.max = max(self.max, value)
        self.sum += value
        self.count += 1

    def serialize_value(self):
        # type: (...) -> Iterable[FlushedMetricValue]
        return (
            self.last,
            self.min,
            self.max,
            self.sum,
            self.count,
        )


class DistributionMetric(Metric):
    __slots__ = ("value",)

    def __init__(
        self, first  # type: MetricValue
    ):
        # type: (...) -> None
        self.value = [float(first)]

    @property
    def weight(self):
        # type: (...) -> int
        return len(self.value)

    def add(
        self, value  # type: MetricValue
    ):
        # type: (...) -> None
        self.value.append(float(value))

    def serialize_value(self):
        # type: (...) -> Iterable[FlushedMetricValue]
        return self.value


class SetMetric(Metric):
    __slots__ = ("value",)

    def __init__(
        self, first  # type: MetricValue
    ):
        # type: (...) -> None
        self.value = {first}

    @property
    def weight(self):
        # type: (...) -> int
        return len(self.value)

    def add(
        self, value  # type: MetricValue
    ):
        # type: (...) -> None
        self.value.add(value)

    def serialize_value(self):
        # type: (...) -> Iterable[FlushedMetricValue]
        def _hash(x):
            # type: (MetricValue) -> int
            if isinstance(x, str):
                return zlib.crc32(x.encode("utf-8")) & 0xFFFFFFFF
            return int(x)

        return (_hash(value) for value in self.value)


def _encode_metrics(flushable_buckets):
    # type: (Iterable[Tuple[int, Dict[BucketKey, Metric]]]) -> bytes
    out = io.BytesIO()
    _write = out.write

    # Note on sanitization: we intentionally sanitize in emission (serialization)
    # and not during aggregation for performance reasons. This means that the
    # envelope can in fact have duplicate buckets stored. This is acceptable for
    # relay side emission and should not happen commonly.
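# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch using the classes defined above (an
# editorial addition, not part of the SDK). It shows how a gauge folds many
# observations into five numbers (last/min/max/sum/count) instead of keeping
# every value the way a distribution does.
def _example_gauge_aggregation():
    # type: () -> None
    gauge = GaugeMetric(10.0)
    gauge.add(5)
    gauge.add(20)
    # (last, min, max, sum, count) after three observations:
    assert tuple(gauge.serialize_value()) == (20.0, 5.0, 20.0, 35.0, 3)
# ---------------------------------------------------------------------------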
for timestamp, buckets in flushable_buckets: for bucket_key, metric in iteritems(buckets): metric_type, metric_name, metric_unit, metric_tags = bucket_key metric_name = _sanitize_key(metric_name) _write(metric_name.encode("utf-8")) _write(b"@") _write(metric_unit.encode("utf-8")) for serialized_value in metric.serialize_value(): _write(b":") _write(str(serialized_value).encode("utf-8")) _write(b"|") _write(metric_type.encode("ascii")) if metric_tags: _write(b"|#") first = True for tag_key, tag_value in metric_tags: tag_key = _sanitize_key(tag_key) if not tag_key: continue if first: first = False else: _write(b",") _write(tag_key.encode("utf-8")) _write(b":") _write(_sanitize_value(tag_value).encode("utf-8")) _write(b"|T") _write(str(timestamp).encode("ascii")) _write(b"\n") return out.getvalue() def _encode_locations(timestamp, code_locations): # type: (int, Iterable[Tuple[MetricMetaKey, Dict[str, Any]]]) -> bytes mapping = {} # type: Dict[str, List[Any]] for key, loc in code_locations: metric_type, name, unit = key mri = "{}:{}@{}".format(metric_type, _sanitize_key(name), unit) loc["type"] = "location" mapping.setdefault(mri, []).append(loc) return json_dumps({"timestamp": timestamp, "mapping": mapping}) METRIC_TYPES = { "c": CounterMetric, "g": GaugeMetric, "d": DistributionMetric, "s": SetMetric, } # some of these are dumb TIMING_FUNCTIONS = { "nanosecond": nanosecond_time, "microsecond": lambda: nanosecond_time() / 1000.0, "millisecond": lambda: nanosecond_time() / 1000000.0, "second": now, "minute": lambda: now() / 60.0, "hour": lambda: now() / 3600.0, "day": lambda: now() / 3600.0 / 24.0, "week": lambda: now() / 3600.0 / 24.0 / 7.0, } class LocalAggregator(object): __slots__ = ("_measurements",) def __init__(self): # type: (...) -> None self._measurements = ( {} ) # type: Dict[Tuple[str, MetricTagsInternal], Tuple[float, float, int, float]] def add( self, ty, # type: MetricType key, # type: str value, # type: float unit, # type: MeasurementUnit tags, # type: MetricTagsInternal ): # type: (...) -> None export_key = "%s:%s@%s" % (ty, key, unit) bucket_key = (export_key, tags) old = self._measurements.get(bucket_key) if old is not None: v_min, v_max, v_count, v_sum = old v_min = min(v_min, value) v_max = max(v_max, value) v_count += 1 v_sum += value else: v_min = v_max = v_sum = value v_count = 1 self._measurements[bucket_key] = (v_min, v_max, v_count, v_sum) def to_json(self): # type: (...) -> Dict[str, Any] rv = {} # type: Any for (export_key, tags), ( v_min, v_max, v_count, v_sum, ) in self._measurements.items(): rv.setdefault(export_key, []).append( { "tags": _tags_to_dict(tags), "min": v_min, "max": v_max, "count": v_count, "sum": v_sum, } ) return rv class MetricsAggregator(object): ROLLUP_IN_SECONDS = 10.0 MAX_WEIGHT = 100000 FLUSHER_SLEEP_TIME = 5.0 def __init__( self, capture_func, # type: Callable[[Envelope], None] enable_code_locations=False, # type: bool ): # type: (...) -> None self.buckets = {} # type: Dict[int, Any] self._enable_code_locations = enable_code_locations self._seen_locations = _set() # type: Set[Tuple[int, MetricMetaKey]] self._pending_locations = {} # type: Dict[int, List[Tuple[MetricMetaKey, Any]]] self._buckets_total_weight = 0 self._capture_func = capture_func self._lock = Lock() self._running = True self._flush_event = Event() self._force_flush = False # The aggregator shifts it's flushing by up to an entire rollup window to # avoid multiple clients trampling on end of a 10 second window as all the # buckets are anchored to multiples of ROLLUP seconds. 
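# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch using `_encode_metrics` above (an
# editorial addition, not part of the SDK). One counter bucket serializes to
# a single statsd-style line of the form
#   <key>@<unit>:<value>|c|#<tag>:<value>|T<bucket timestamp>
def _example_encode_counter_bucket():
    # type: () -> bytes
    metric = CounterMetric(1.0)
    metric.add(2)
    bucket_key = ("c", "button_click", "none", (("browser", "chrome"),))
    encoded = _encode_metrics([(1700000000, {bucket_key: metric})])
    assert encoded == b"button_click@none:3.0|c|#browser:chrome|T1700000000\n"
    return encoded
# ---------------------------------------------------------------------------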
We randomize this # number once per aggregator boot to achieve some level of offsetting # across a fleet of deployed SDKs. Relay itself will also apply independent # jittering. self._flush_shift = random.random() * self.ROLLUP_IN_SECONDS self._flusher = None # type: Optional[Thread] self._flusher_pid = None # type: Optional[int] self._ensure_thread() def _ensure_thread(self): # type: (...) -> bool """For forking processes we might need to restart this thread. This ensures that our process actually has that thread running. """ if not self._running: return False pid = os.getpid() if self._flusher_pid == pid: return True with self._lock: self._flusher_pid = pid self._flusher = Thread(target=self._flush_loop) self._flusher.daemon = True try: self._flusher.start() except RuntimeError: # Unfortunately at this point the interpreter is in a state that no # longer allows us to spawn a thread and we have to bail. self._running = False return False return True def _flush_loop(self): # type: (...) -> None _thread_local.in_metrics = True while self._running or self._force_flush: self._flush() if self._running: self._flush_event.wait(self.FLUSHER_SLEEP_TIME) def _flush(self): # type: (...) -> None self._emit(self._flushable_buckets(), self._flushable_locations()) def _flushable_buckets(self): # type: (...) -> (Iterable[Tuple[int, Dict[BucketKey, Metric]]]) with self._lock: force_flush = self._force_flush cutoff = time.time() - self.ROLLUP_IN_SECONDS - self._flush_shift flushable_buckets = () # type: Iterable[Tuple[int, Dict[BucketKey, Metric]]] weight_to_remove = 0 if force_flush: flushable_buckets = self.buckets.items() self.buckets = {} self._buckets_total_weight = 0 self._force_flush = False else: flushable_buckets = [] for buckets_timestamp, buckets in iteritems(self.buckets): # If the timestamp of the bucket is newer that the rollup we want to skip it. if buckets_timestamp <= cutoff: flushable_buckets.append((buckets_timestamp, buckets)) # We will clear the elements while holding the lock, in order to avoid requesting it downstream again. for buckets_timestamp, buckets in flushable_buckets: for _, metric in iteritems(buckets): weight_to_remove += metric.weight del self.buckets[buckets_timestamp] self._buckets_total_weight -= weight_to_remove return flushable_buckets def _flushable_locations(self): # type: (...) -> Dict[int, List[Tuple[MetricMetaKey, Dict[str, Any]]]] with self._lock: locations = self._pending_locations self._pending_locations = {} return locations @metrics_noop def add( self, ty, # type: MetricType key, # type: str value, # type: MetricValue unit, # type: MeasurementUnit tags, # type: Optional[MetricTags] timestamp=None, # type: Optional[Union[float, datetime]] local_aggregator=None, # type: Optional[LocalAggregator] stacklevel=0, # type: Optional[int] ): # type: (...) 
-> None if not self._ensure_thread() or self._flusher is None: return None if timestamp is None: timestamp = time.time() elif isinstance(timestamp, datetime): timestamp = to_timestamp(timestamp) bucket_timestamp = int( (timestamp // self.ROLLUP_IN_SECONDS) * self.ROLLUP_IN_SECONDS ) serialized_tags = _serialize_tags(tags) bucket_key = ( ty, key, unit, serialized_tags, ) with self._lock: local_buckets = self.buckets.setdefault(bucket_timestamp, {}) metric = local_buckets.get(bucket_key) if metric is not None: previous_weight = metric.weight metric.add(value) else: metric = local_buckets[bucket_key] = METRIC_TYPES[ty](value) previous_weight = 0 added = metric.weight - previous_weight if stacklevel is not None: self.record_code_location(ty, key, unit, stacklevel + 2, timestamp) # Given the new weight we consider whether we want to force flush. self._consider_force_flush() if local_aggregator is not None: local_value = float(added if ty == "s" else value) local_aggregator.add(ty, key, local_value, unit, serialized_tags) def record_code_location( self, ty, # type: MetricType key, # type: str unit, # type: MeasurementUnit stacklevel, # type: int timestamp=None, # type: Optional[float] ): # type: (...) -> None if not self._enable_code_locations: return if timestamp is None: timestamp = time.time() meta_key = (ty, key, unit) start_of_day = utc_from_timestamp(timestamp).replace( hour=0, minute=0, second=0, microsecond=0, tzinfo=None ) start_of_day = int(to_timestamp(start_of_day)) if (start_of_day, meta_key) not in self._seen_locations: self._seen_locations.add((start_of_day, meta_key)) loc = get_code_location(stacklevel + 3) if loc is not None: # Group metadata by day to make flushing more efficient. # There needs to be one envelope item per timestamp. self._pending_locations.setdefault(start_of_day, []).append( (meta_key, loc) ) @metrics_noop def need_code_loation( self, ty, # type: MetricType key, # type: str unit, # type: MeasurementUnit timestamp, # type: float ): # type: (...) -> bool if self._enable_code_locations: return False meta_key = (ty, key, unit) start_of_day = utc_from_timestamp(timestamp).replace( hour=0, minute=0, second=0, microsecond=0, tzinfo=None ) start_of_day = int(to_timestamp(start_of_day)) return (start_of_day, meta_key) not in self._seen_locations def kill(self): # type: (...) -> None if self._flusher is None: return self._running = False self._flush_event.set() self._flusher.join() self._flusher = None @metrics_noop def flush(self): # type: (...) -> None self._force_flush = True self._flush() def _consider_force_flush(self): # type: (...) -> None # It's important to acquire a lock around this method, since it will touch shared data structures. total_weight = len(self.buckets) + self._buckets_total_weight if total_weight >= self.MAX_WEIGHT: self._force_flush = True self._flush_event.set() def _emit( self, flushable_buckets, # type: (Iterable[Tuple[int, Dict[BucketKey, Metric]]]) code_locations, # type: Dict[int, List[Tuple[MetricMetaKey, Dict[str, Any]]]] ): # type: (...) 
-> Optional[Envelope] envelope = Envelope() if flushable_buckets: encoded_metrics = _encode_metrics(flushable_buckets) envelope.add_item(Item(payload=encoded_metrics, type="statsd")) for timestamp, locations in iteritems(code_locations): encoded_locations = _encode_locations(timestamp, locations) envelope.add_item(Item(payload=encoded_locations, type="metric_meta")) if envelope.items: self._capture_func(envelope) return envelope return None def _serialize_tags( tags, # type: Optional[MetricTags] ): # type: (...) -> MetricTagsInternal if not tags: return () rv = [] for key, value in iteritems(tags): # If the value is a collection, we want to flatten it. if isinstance(value, (list, tuple)): for inner_value in value: if inner_value is not None: rv.append((key, text_type(inner_value))) elif value is not None: rv.append((key, text_type(value))) # It's very important to sort the tags in order to obtain the # same bucket key. return tuple(sorted(rv)) def _tags_to_dict(tags): # type: (MetricTagsInternal) -> Dict[str, Any] rv = {} # type: Dict[str, Any] for tag_name, tag_value in tags: old_value = rv.get(tag_name) if old_value is not None: if isinstance(old_value, list): old_value.append(tag_value) else: rv[tag_name] = [old_value, tag_value] else: rv[tag_name] = tag_value return rv def _get_aggregator(): # type: () -> Optional[MetricsAggregator] hub = sentry_sdk.Hub.current client = hub.client return ( client.metrics_aggregator if client is not None and client.metrics_aggregator is not None else None ) def _get_aggregator_and_update_tags(key, tags): # type: (str, Optional[MetricTags]) -> Tuple[Optional[MetricsAggregator], Optional[LocalAggregator], Optional[MetricTags]] hub = sentry_sdk.Hub.current client = hub.client if client is None or client.metrics_aggregator is None: return None, None, tags experiments = client.options.get("_experiments", {}) updated_tags = dict(tags or ()) # type: Dict[str, MetricTagValue] updated_tags.setdefault("release", client.options["release"]) updated_tags.setdefault("environment", client.options["environment"]) scope = hub.scope local_aggregator = None # We go with the low-level API here to access transaction information as # this one is the same between just errors and errors + performance transaction_source = scope._transaction_info.get("source") if transaction_source in GOOD_TRANSACTION_SOURCES: transaction_name = scope._transaction if transaction_name: updated_tags.setdefault("transaction", transaction_name) if scope._span is not None: sample_rate = experiments.get("metrics_summary_sample_rate") or 0.0 should_summarize_metric_callback = experiments.get( "should_summarize_metric" ) if random.random() < sample_rate and ( should_summarize_metric_callback is None or should_summarize_metric_callback(key, updated_tags) ): local_aggregator = scope._span._get_local_aggregator() before_emit_callback = experiments.get("before_emit_metric") if before_emit_callback is not None: with recursion_protection() as in_metrics: if not in_metrics: if not before_emit_callback(key, updated_tags): return None, None, updated_tags return client.metrics_aggregator, local_aggregator, updated_tags def incr( key, # type: str value=1.0, # type: float unit="none", # type: MeasurementUnit tags=None, # type: Optional[MetricTags] timestamp=None, # type: Optional[Union[float, datetime]] stacklevel=0, # type: int ): # type: (...) 
-> None """Increments a counter.""" aggregator, local_aggregator, tags = _get_aggregator_and_update_tags(key, tags) if aggregator is not None: aggregator.add( "c", key, value, unit, tags, timestamp, local_aggregator, stacklevel ) class _Timing(object): def __init__( self, key, # type: str tags, # type: Optional[MetricTags] timestamp, # type: Optional[Union[float, datetime]] value, # type: Optional[float] unit, # type: DurationUnit stacklevel, # type: int ): # type: (...) -> None self.key = key self.tags = tags self.timestamp = timestamp self.value = value self.unit = unit self.entered = None # type: Optional[float] self._span = None # type: Optional[sentry_sdk.tracing.Span] self.stacklevel = stacklevel def _validate_invocation(self, context): # type: (str) -> None if self.value is not None: raise TypeError( "cannot use timing as %s when a value is provided" % context ) def __enter__(self): # type: (...) -> _Timing self.entered = TIMING_FUNCTIONS[self.unit]() self._validate_invocation("context-manager") self._span = sentry_sdk.start_span(op="metric.timing", description=self.key) if self.tags: for key, value in self.tags.items(): if isinstance(value, (tuple, list)): value = ",".join(sorted(map(str, value))) self._span.set_tag(key, value) self._span.__enter__() # report code locations here for better accuracy aggregator = _get_aggregator() if aggregator is not None: aggregator.record_code_location("d", self.key, self.unit, self.stacklevel) return self def __exit__(self, exc_type, exc_value, tb): # type: (Any, Any, Any) -> None assert self._span, "did not enter" aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( self.key, self.tags ) if aggregator is not None: elapsed = TIMING_FUNCTIONS[self.unit]() - self.entered # type: ignore aggregator.add( "d", self.key, elapsed, self.unit, tags, self.timestamp, local_aggregator, None, # code locations are reported in __enter__ ) self._span.__exit__(exc_type, exc_value, tb) self._span = None def __call__(self, f): # type: (Any) -> Any self._validate_invocation("decorator") @wraps(f) def timed_func(*args, **kwargs): # type: (*Any, **Any) -> Any with timing( key=self.key, tags=self.tags, timestamp=self.timestamp, unit=self.unit, stacklevel=self.stacklevel + 1, ): return f(*args, **kwargs) return timed_func def timing( key, # type: str value=None, # type: Optional[float] unit="second", # type: DurationUnit tags=None, # type: Optional[MetricTags] timestamp=None, # type: Optional[Union[float, datetime]] stacklevel=0, # type: int ): # type: (...) -> _Timing """Emits a distribution with the time it takes to run the given code block. This method supports three forms of invocation: - when a `value` is provided, it functions similar to `distribution` but with - it can be used as a context manager - it can be used as a decorator """ if value is not None: aggregator, local_aggregator, tags = _get_aggregator_and_update_tags(key, tags) if aggregator is not None: aggregator.add( "d", key, value, unit, tags, timestamp, local_aggregator, stacklevel ) return _Timing(key, tags, timestamp, value, unit, stacklevel) def distribution( key, # type: str value, # type: float unit="none", # type: MeasurementUnit tags=None, # type: Optional[MetricTags] timestamp=None, # type: Optional[Union[float, datetime]] stacklevel=0, # type: int ): # type: (...) 
-> None """Emits a distribution.""" aggregator, local_aggregator, tags = _get_aggregator_and_update_tags(key, tags) if aggregator is not None: aggregator.add( "d", key, value, unit, tags, timestamp, local_aggregator, stacklevel ) def set( key, # type: str value, # type: MetricValue unit="none", # type: MeasurementUnit tags=None, # type: Optional[MetricTags] timestamp=None, # type: Optional[Union[float, datetime]] stacklevel=0, # type: int ): # type: (...) -> None """Emits a set.""" aggregator, local_aggregator, tags = _get_aggregator_and_update_tags(key, tags) if aggregator is not None: aggregator.add( "s", key, value, unit, tags, timestamp, local_aggregator, stacklevel ) def gauge( key, # type: str value, # type: float unit="none", # type: MeasurementUnit tags=None, # type: Optional[MetricTags] timestamp=None, # type: Optional[Union[float, datetime]] stacklevel=0, # type: int ): # type: (...) -> None """Emits a gauge.""" aggregator, local_aggregator, tags = _get_aggregator_and_update_tags(key, tags) if aggregator is not None: aggregator.add( "g", key, value, unit, tags, timestamp, local_aggregator, stacklevel ) sentry-python-1.39.2/sentry_sdk/monitor.py000066400000000000000000000072201454744723200207140ustar00rootroot00000000000000import os import time from threading import Thread, Lock import sentry_sdk from sentry_sdk.utils import logger from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Optional MAX_DOWNSAMPLE_FACTOR = 10 class Monitor(object): """ Performs health checks in a separate thread once every interval seconds and updates the internal state. Other parts of the SDK only read this state and act accordingly. """ name = "sentry.monitor" def __init__(self, transport, interval=10): # type: (sentry_sdk.transport.Transport, float) -> None self.transport = transport # type: sentry_sdk.transport.Transport self.interval = interval # type: float self._healthy = True self._downsample_factor = 0 # type: int self._thread = None # type: Optional[Thread] self._thread_lock = Lock() self._thread_for_pid = None # type: Optional[int] self._running = True def _ensure_running(self): # type: () -> None """ Check that the monitor has an active thread to run in, or create one if not. Note that this might fail (e.g. in Python 3.12 it's not possible to spawn new threads at interpreter shutdown). In that case self._running will be False after running this function. """ if self._thread_for_pid == os.getpid() and self._thread is not None: return None with self._thread_lock: if self._thread_for_pid == os.getpid() and self._thread is not None: return None def _thread(): # type: (...) -> None while self._running: time.sleep(self.interval) if self._running: self.run() thread = Thread(name=self.name, target=_thread) thread.daemon = True try: thread.start() except RuntimeError: # Unfortunately at this point the interpreter is in a state that no # longer allows us to spawn a thread and we have to bail. 
self._running = False return None self._thread = thread self._thread_for_pid = os.getpid() return None def run(self): # type: () -> None self.check_health() self.set_downsample_factor() def set_downsample_factor(self): # type: () -> None if self._healthy: if self._downsample_factor > 0: logger.debug( "[Monitor] health check positive, reverting to normal sampling" ) self._downsample_factor = 0 else: if self.downsample_factor < MAX_DOWNSAMPLE_FACTOR: self._downsample_factor += 1 logger.debug( "[Monitor] health check negative, downsampling with a factor of %d", self._downsample_factor, ) def check_health(self): # type: () -> None """ Perform the actual health checks, currently only checks if the transport is rate-limited. TODO: augment in the future with more checks. """ self._healthy = self.transport.is_healthy() def is_healthy(self): # type: () -> bool self._ensure_running() return self._healthy @property def downsample_factor(self): # type: () -> int self._ensure_running() return self._downsample_factor def kill(self): # type: () -> None self._running = False def __del__(self): # type: () -> None self.kill() sentry-python-1.39.2/sentry_sdk/profiler.py000066400000000000000000001041341454744723200210510ustar00rootroot00000000000000""" This file is originally based on code from https://github.com/nylas/nylas-perftools, which is published under the following license: The MIT License (MIT) Copyright (c) 2014 Nylas Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
""" import atexit import os import platform import random import sys import threading import time import uuid from collections import deque import sentry_sdk from sentry_sdk._compat import PY33, PY311 from sentry_sdk._lru_cache import LRUCache from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.utils import ( capture_internal_exception, filename_for_module, is_valid_sample_rate, logger, nanosecond_time, set_in_app_in_frames, ) if TYPE_CHECKING: from types import FrameType from typing import Any from typing import Callable from typing import Deque from typing import Dict from typing import List from typing import Optional from typing import Set from typing import Sequence from typing import Tuple from typing_extensions import TypedDict import sentry_sdk.tracing from sentry_sdk._types import SamplingContext, ProfilerMode ThreadId = str ProcessedSample = TypedDict( "ProcessedSample", { "elapsed_since_start_ns": str, "thread_id": ThreadId, "stack_id": int, }, ) ProcessedStack = List[int] ProcessedFrame = TypedDict( "ProcessedFrame", { "abs_path": str, "filename": Optional[str], "function": str, "lineno": int, "module": Optional[str], }, ) ProcessedThreadMetadata = TypedDict( "ProcessedThreadMetadata", {"name": str}, ) ProcessedProfile = TypedDict( "ProcessedProfile", { "frames": List[ProcessedFrame], "stacks": List[ProcessedStack], "samples": List[ProcessedSample], "thread_metadata": Dict[ThreadId, ProcessedThreadMetadata], }, ) ProfileContext = TypedDict( "ProfileContext", {"profile_id": str}, ) FrameId = Tuple[ str, # abs_path int, # lineno str, # function ] FrameIds = Tuple[FrameId, ...] # The exact value of this id is not very meaningful. The purpose # of this id is to give us a compact and unique identifier for a # raw stack that can be used as a key to a dictionary so that it # can be used during the sampled format generation. StackId = Tuple[int, int] ExtractedStack = Tuple[StackId, FrameIds, List[ProcessedFrame]] ExtractedSample = Sequence[Tuple[ThreadId, ExtractedStack]] try: from gevent import get_hub as get_gevent_hub # type: ignore from gevent.monkey import get_original, is_module_patched # type: ignore from gevent.threadpool import ThreadPool # type: ignore thread_sleep = get_original("time", "sleep") except ImportError: def get_gevent_hub(): # type: () -> Any return None thread_sleep = time.sleep def is_module_patched(*args, **kwargs): # type: (*Any, **Any) -> bool # unable to import from gevent means no modules have been patched return False ThreadPool = None def is_gevent(): # type: () -> bool return is_module_patched("threading") or is_module_patched("_thread") _scheduler = None # type: Optional[Scheduler] # The default sampling frequency to use. This is set at 101 in order to # mitigate the effects of lockstep sampling. DEFAULT_SAMPLING_FREQUENCY = 101 # The minimum number of unique samples that must exist in a profile to be # considered valid. 
PROFILE_MINIMUM_SAMPLES = 2 def has_profiling_enabled(options): # type: (Dict[str, Any]) -> bool profiles_sampler = options["profiles_sampler"] if profiles_sampler is not None: return True profiles_sample_rate = options["profiles_sample_rate"] if profiles_sample_rate is not None and profiles_sample_rate > 0: return True profiles_sample_rate = options["_experiments"].get("profiles_sample_rate") if profiles_sample_rate is not None and profiles_sample_rate > 0: return True return False def setup_profiler(options): # type: (Dict[str, Any]) -> bool global _scheduler if _scheduler is not None: logger.debug("[Profiling] Profiler is already setup") return False if not PY33: logger.warn("[Profiling] Profiler requires Python >= 3.3") return False frequency = DEFAULT_SAMPLING_FREQUENCY if is_gevent(): # If gevent has patched the threading modules then we cannot rely on # them to spawn a native thread for sampling. # Instead we default to the GeventScheduler which is capable of # spawning native threads within gevent. default_profiler_mode = GeventScheduler.mode else: default_profiler_mode = ThreadScheduler.mode if options.get("profiler_mode") is not None: profiler_mode = options["profiler_mode"] else: profiler_mode = ( options.get("_experiments", {}).get("profiler_mode") or default_profiler_mode ) if ( profiler_mode == ThreadScheduler.mode # for legacy reasons, we'll keep supporting sleep mode for this scheduler or profiler_mode == "sleep" ): _scheduler = ThreadScheduler(frequency=frequency) elif profiler_mode == GeventScheduler.mode: _scheduler = GeventScheduler(frequency=frequency) else: raise ValueError("Unknown profiler mode: {}".format(profiler_mode)) logger.debug( "[Profiling] Setting up profiler in {mode} mode".format(mode=_scheduler.mode) ) _scheduler.setup() atexit.register(teardown_profiler) return True def teardown_profiler(): # type: () -> None global _scheduler if _scheduler is not None: _scheduler.teardown() _scheduler = None # We want to impose a stack depth limit so that samples aren't too large. MAX_STACK_DEPTH = 128 def extract_stack( raw_frame, # type: Optional[FrameType] cache, # type: LRUCache cwd, # type: str max_stack_depth=MAX_STACK_DEPTH, # type: int ): # type: (...) -> ExtractedStack """ Extracts the stack starting the specified frame. The extracted stack assumes the specified frame is the top of the stack, and works back to the bottom of the stack. In the event that the stack is more than `MAX_STACK_DEPTH` frames deep, only the first `MAX_STACK_DEPTH` frames will be returned. """ raw_frames = deque(maxlen=max_stack_depth) # type: Deque[FrameType] while raw_frame is not None: f_back = raw_frame.f_back raw_frames.append(raw_frame) raw_frame = f_back frame_ids = tuple(frame_id(raw_frame) for raw_frame in raw_frames) frames = [] for i, fid in enumerate(frame_ids): frame = cache.get(fid) if frame is None: frame = extract_frame(fid, raw_frames[i], cwd) cache.set(fid, frame) frames.append(frame) # Instead of mapping the stack into frame ids and hashing # that as a tuple, we can directly hash the stack. # This saves us from having to generate yet another list. # Additionally, using the stack as the key directly is # costly because the stack can be large, so we pre-hash # the stack, and use the hash as the key as this will be # needed a few times to improve performance. # # To Reduce the likelihood of hash collisions, we include # the stack depth. This means that only stacks of the same # depth can suffer from hash collisions. 
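# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch of the (depth, hash) stack-id scheme
# described above (an editorial addition, not part of the SDK). Two stacks
# only share an id if they have the same depth AND their frame-id tuples
# hash identically; the frame-id tuples below are hypothetical.
def _example_stack_ids():
    # type: () -> None
    frames_a = (("app.py", 1, "main"), ("app.py", 9, "handler"))
    frames_b = (("app.py", 1, "main"),)
    stack_a = (len(frames_a), hash(frames_a))
    stack_b = (len(frames_b), hash(frames_b))
    assert stack_a != stack_b
# ---------------------------------------------------------------------------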
    stack_id = len(raw_frames), hash(frame_ids)

    return stack_id, frame_ids, frames


def frame_id(raw_frame):
    # type: (FrameType) -> FrameId
    return (raw_frame.f_code.co_filename, raw_frame.f_lineno, get_frame_name(raw_frame))


def extract_frame(fid, raw_frame, cwd):
    # type: (FrameId, FrameType, str) -> ProcessedFrame
    abs_path = raw_frame.f_code.co_filename

    try:
        module = raw_frame.f_globals["__name__"]
    except Exception:
        module = None

    # namedtuples can be many times slower when initializing
    # and accessing attributes, so we opt to use a tuple here instead
    return {
        # This originally was `os.path.abspath(abs_path)` but that had
        # a large performance overhead.
        #
        # According to docs, this is equivalent to
        # `os.path.normpath(os.path.join(os.getcwd(), path))`.
        # The `os.getcwd()` call is slow here, so we precompute it.
        #
        # Additionally, since we are using normalized path already,
        # we skip calling `os.path.normpath` entirely.
        "abs_path": os.path.join(cwd, abs_path),
        "module": module,
        "filename": filename_for_module(module, abs_path) or None,
        "function": fid[2],
        "lineno": raw_frame.f_lineno,
    }


if PY311:

    def get_frame_name(frame):
        # type: (FrameType) -> str
        return frame.f_code.co_qualname

else:

    def get_frame_name(frame):
        # type: (FrameType) -> str

        f_code = frame.f_code
        co_varnames = f_code.co_varnames

        # co_name only contains the frame name. If the frame was a method,
        # the class name will NOT be included.
        name = f_code.co_name

        # if it was a method, we can get the class name by inspecting
        # the f_locals for the `self` argument
        try:
            if (
                # the co_varnames start with the frame's positional arguments
                # and we expect the first to be `self` if it's an instance method
                co_varnames
                and co_varnames[0] == "self"
                and "self" in frame.f_locals
            ):
                for cls in frame.f_locals["self"].__class__.__mro__:
                    if name in cls.__dict__:
                        return "{}.{}".format(cls.__name__, name)
        except AttributeError:
            pass

        # if it was a class method (decorated with `@classmethod`)
        # we can get the class name by inspecting the f_locals for the `cls` argument
        try:
            if (
                # the co_varnames start with the frame's positional arguments
                # and we expect the first to be `cls` if it's a class method
                co_varnames
                and co_varnames[0] == "cls"
                and "cls" in frame.f_locals
            ):
                for cls in frame.f_locals["cls"].__mro__:
                    if name in cls.__dict__:
                        return "{}.{}".format(cls.__name__, name)
        except AttributeError:
            pass

        # nothing we can do if it is a staticmethod (decorated with @staticmethod)

        # we've done all we can, time to give up and return what we have
        return name


MAX_PROFILE_DURATION_NS = int(3e10)  # 30 seconds


def get_current_thread_id(thread=None):
    # type: (Optional[threading.Thread]) -> Optional[int]
    """
    Try to get the id of the current thread, with various fallbacks.
""" # if a thread is specified, that takes priority if thread is not None: try: thread_id = thread.ident if thread_id is not None: return thread_id except AttributeError: pass # if the app is using gevent, we should look at the gevent hub first # as the id there differs from what the threading module reports if is_gevent(): gevent_hub = get_gevent_hub() if gevent_hub is not None: try: # this is undocumented, so wrap it in try except to be safe return gevent_hub.thread_ident except AttributeError: pass # use the current thread's id if possible try: current_thread_id = threading.current_thread().ident if current_thread_id is not None: return current_thread_id except AttributeError: pass # if we can't get the current thread id, fall back to the main thread id try: main_thread_id = threading.main_thread().ident if main_thread_id is not None: return main_thread_id except AttributeError: pass # we've tried everything, time to give up return None class Profile(object): def __init__( self, transaction, # type: sentry_sdk.tracing.Transaction hub=None, # type: Optional[sentry_sdk.Hub] scheduler=None, # type: Optional[Scheduler] ): # type: (...) -> None self.scheduler = _scheduler if scheduler is None else scheduler self.hub = hub self.event_id = uuid.uuid4().hex # type: str # Here, we assume that the sampling decision on the transaction has been finalized. # # We cannot keep a reference to the transaction around here because it'll create # a reference cycle. So we opt to pull out just the necessary attributes. self.sampled = transaction.sampled # type: Optional[bool] # Various framework integrations are capable of overwriting the active thread id. # If it is set to `None` at the end of the profile, we fall back to the default. self._default_active_thread_id = get_current_thread_id() or 0 # type: int self.active_thread_id = None # type: Optional[int] try: self.start_ns = transaction._start_timestamp_monotonic_ns # type: int except AttributeError: self.start_ns = 0 self.stop_ns = 0 # type: int self.active = False # type: bool self.indexed_frames = {} # type: Dict[FrameId, int] self.indexed_stacks = {} # type: Dict[StackId, int] self.frames = [] # type: List[ProcessedFrame] self.stacks = [] # type: List[ProcessedStack] self.samples = [] # type: List[ProcessedSample] self.unique_samples = 0 transaction._profile = self def update_active_thread_id(self): # type: () -> None self.active_thread_id = get_current_thread_id() logger.debug( "[Profiling] updating active thread id to {tid}".format( tid=self.active_thread_id ) ) def _set_initial_sampling_decision(self, sampling_context): # type: (SamplingContext) -> None """ Sets the profile's sampling decision according to the following precdence rules: 1. If the transaction to be profiled is not sampled, that decision will be used, regardless of anything else. 2. Use `profiles_sample_rate` to decide. """ # The corresponding transaction was not sampled, # so don't generate a profile for it. if not self.sampled: logger.debug( "[Profiling] Discarding profile because transaction is discarded." ) self.sampled = False return # The profiler hasn't been properly initialized. if self.scheduler is None: logger.debug( "[Profiling] Discarding profile because profiler was not started." ) self.sampled = False return hub = self.hub or sentry_sdk.Hub.current client = hub.client # The client is None, so we can't get the sample rate. 
if client is None: self.sampled = False return options = client.options if callable(options.get("profiles_sampler")): sample_rate = options["profiles_sampler"](sampling_context) elif options["profiles_sample_rate"] is not None: sample_rate = options["profiles_sample_rate"] else: sample_rate = options["_experiments"].get("profiles_sample_rate") # The profiles_sample_rate option was not set, so profiling # was never enabled. if sample_rate is None: logger.debug( "[Profiling] Discarding profile because profiling was not enabled." ) self.sampled = False return if not is_valid_sample_rate(sample_rate, source="Profiling"): logger.warning( "[Profiling] Discarding profile because of invalid sample rate." ) self.sampled = False return # Now we roll the dice. random.random is inclusive of 0, but not of 1, # so strict < is safe here. In case sample_rate is a boolean, cast it # to a float (True becomes 1.0 and False becomes 0.0) self.sampled = random.random() < float(sample_rate) if self.sampled: logger.debug("[Profiling] Initializing profile") else: logger.debug( "[Profiling] Discarding profile because it's not included in the random sample (sample rate = {sample_rate})".format( sample_rate=float(sample_rate) ) ) def start(self): # type: () -> None if not self.sampled or self.active: return assert self.scheduler, "No scheduler specified" logger.debug("[Profiling] Starting profile") self.active = True if not self.start_ns: self.start_ns = nanosecond_time() self.scheduler.start_profiling(self) def stop(self): # type: () -> None if not self.sampled or not self.active: return assert self.scheduler, "No scheduler specified" logger.debug("[Profiling] Stopping profile") self.active = False self.scheduler.stop_profiling(self) self.stop_ns = nanosecond_time() def __enter__(self): # type: () -> Profile hub = self.hub or sentry_sdk.Hub.current _, scope = hub._stack[-1] old_profile = scope.profile scope.profile = self self._context_manager_state = (hub, scope, old_profile) self.start() return self def __exit__(self, ty, value, tb): # type: (Optional[Any], Optional[Any], Optional[Any]) -> None self.stop() _, scope, old_profile = self._context_manager_state del self._context_manager_state scope.profile = old_profile def write(self, ts, sample): # type: (int, ExtractedSample) -> None if not self.active: return if ts < self.start_ns: return offset = ts - self.start_ns if offset > MAX_PROFILE_DURATION_NS: self.stop() return self.unique_samples += 1 elapsed_since_start_ns = str(offset) for tid, (stack_id, frame_ids, frames) in sample: try: # Check if the stack is indexed first, this lets us skip # indexing frames if it's not necessary if stack_id not in self.indexed_stacks: for i, frame_id in enumerate(frame_ids): if frame_id not in self.indexed_frames: self.indexed_frames[frame_id] = len(self.indexed_frames) self.frames.append(frames[i]) self.indexed_stacks[stack_id] = len(self.indexed_stacks) self.stacks.append( [self.indexed_frames[frame_id] for frame_id in frame_ids] ) self.samples.append( { "elapsed_since_start_ns": elapsed_since_start_ns, "thread_id": tid, "stack_id": self.indexed_stacks[stack_id], } ) except AttributeError: # For some reason, the frame we get doesn't have certain attributes. # When this happens, we abandon the current sample as it's bad. capture_internal_exception(sys.exc_info()) def process(self): # type: () -> ProcessedProfile # This collects the thread metadata at the end of a profile. 
Doing it # this way means that any threads that terminate before the profile ends # will not have any metadata associated with it. thread_metadata = { str(thread.ident): { "name": str(thread.name), } for thread in threading.enumerate() } # type: Dict[str, ProcessedThreadMetadata] return { "frames": self.frames, "stacks": self.stacks, "samples": self.samples, "thread_metadata": thread_metadata, } def to_json(self, event_opt, options): # type: (Any, Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] profile = self.process() set_in_app_in_frames( profile["frames"], options["in_app_exclude"], options["in_app_include"], options["project_root"], ) return { "environment": event_opt.get("environment"), "event_id": self.event_id, "platform": "python", "profile": profile, "release": event_opt.get("release", ""), "timestamp": event_opt["start_timestamp"], "version": "1", "device": { "architecture": platform.machine(), }, "os": { "name": platform.system(), "version": platform.release(), }, "runtime": { "name": platform.python_implementation(), "version": platform.python_version(), }, "transactions": [ { "id": event_opt["event_id"], "name": event_opt["transaction"], # we start the transaction before the profile and this is # the transaction start time relative to the profile, so we # hardcode it to 0 until we can start the profile before "relative_start_ns": "0", # use the duration of the profile instead of the transaction # because we end the transaction after the profile "relative_end_ns": str(self.stop_ns - self.start_ns), "trace_id": event_opt["contexts"]["trace"]["trace_id"], "active_thread_id": str( self._default_active_thread_id if self.active_thread_id is None else self.active_thread_id ), } ], } def valid(self): # type: () -> bool hub = self.hub or sentry_sdk.Hub.current client = hub.client if client is None: return False if not has_profiling_enabled(client.options): return False if self.sampled is None or not self.sampled: if client.transport: client.transport.record_lost_event( "sample_rate", data_category="profile" ) return False if self.unique_samples < PROFILE_MINIMUM_SAMPLES: if client.transport: client.transport.record_lost_event( "insufficient_data", data_category="profile" ) logger.debug("[Profiling] Discarding profile because insufficient samples.") return False return True class Scheduler(object): mode = "unknown" # type: ProfilerMode def __init__(self, frequency): # type: (int) -> None self.interval = 1.0 / frequency self.sampler = self.make_sampler() # cap the number of new profiles at any time so it does not grow infinitely self.new_profiles = deque(maxlen=128) # type: Deque[Profile] self.active_profiles = set() # type: Set[Profile] def __enter__(self): # type: () -> Scheduler self.setup() return self def __exit__(self, ty, value, tb): # type: (Optional[Any], Optional[Any], Optional[Any]) -> None self.teardown() def setup(self): # type: () -> None raise NotImplementedError def teardown(self): # type: () -> None raise NotImplementedError def ensure_running(self): # type: () -> None raise NotImplementedError def start_profiling(self, profile): # type: (Profile) -> None self.ensure_running() self.new_profiles.append(profile) def stop_profiling(self, profile): # type: (Profile) -> None pass def make_sampler(self): # type: () -> Callable[..., None] cwd = os.getcwd() cache = LRUCache(max_size=256) def _sample_stack(*args, **kwargs): # type: (*Any, **Any) -> None """ Take a sample of the stack on all the threads in the process. 
This should be called at a regular interval to collect samples. """ # no profiles taking place, so we can stop early if not self.new_profiles and not self.active_profiles: # make sure to clear the cache if we're not profiling so we dont # keep a reference to the last stack of frames around return # This is the number of profiles we want to pop off. # It's possible another thread adds a new profile to # the list and we spend longer than we want inside # the loop below. # # Also make sure to set this value before extracting # frames so we do not write to any new profiles that # were started after this point. new_profiles = len(self.new_profiles) now = nanosecond_time() try: sample = [ (str(tid), extract_stack(frame, cache, cwd)) for tid, frame in sys._current_frames().items() ] except AttributeError: # For some reason, the frame we get doesn't have certain attributes. # When this happens, we abandon the current sample as it's bad. capture_internal_exception(sys.exc_info()) return # Move the new profiles into the active_profiles set. # # We cannot directly add the to active_profiles set # in `start_profiling` because it is called from other # threads which can cause a RuntimeError when it the # set sizes changes during iteration without a lock. # # We also want to avoid using a lock here so threads # that are starting profiles are not blocked until it # can acquire the lock. for _ in range(new_profiles): self.active_profiles.add(self.new_profiles.popleft()) inactive_profiles = [] for profile in self.active_profiles: if profile.active: profile.write(now, sample) else: # If a thread is marked inactive, we buffer it # to `inactive_profiles` so it can be removed. # We cannot remove it here as it would result # in a RuntimeError. inactive_profiles.append(profile) for profile in inactive_profiles: self.active_profiles.remove(profile) return _sample_stack class ThreadScheduler(Scheduler): """ This scheduler is based on running a daemon thread that will call the sampler at a regular interval. """ mode = "thread" # type: ProfilerMode name = "sentry.profiler.ThreadScheduler" def __init__(self, frequency): # type: (int) -> None super(ThreadScheduler, self).__init__(frequency=frequency) # used to signal to the thread that it should stop self.running = False self.thread = None # type: Optional[threading.Thread] self.pid = None # type: Optional[int] self.lock = threading.Lock() def setup(self): # type: () -> None pass def teardown(self): # type: () -> None if self.running: self.running = False if self.thread is not None: self.thread.join() def ensure_running(self): # type: () -> None """ Check that the profiler has an active thread to run in, and start one if that's not the case. Note that this might fail (e.g. in Python 3.12 it's not possible to spawn new threads at interpreter shutdown). In that case self.running will be False after running this function. 
""" pid = os.getpid() # is running on the right process if self.running and self.pid == pid: return with self.lock: # another thread may have tried to acquire the lock # at the same time so it may start another thread # make sure to check again before proceeding if self.running and self.pid == pid: return self.pid = pid self.running = True # make sure the thread is a daemon here otherwise this # can keep the application running after other threads # have exited self.thread = threading.Thread(name=self.name, target=self.run, daemon=True) try: self.thread.start() except RuntimeError: # Unfortunately at this point the interpreter is in a state that no # longer allows us to spawn a thread and we have to bail. self.running = False self.thread = None return def run(self): # type: () -> None last = time.perf_counter() while self.running: self.sampler() # some time may have elapsed since the last time # we sampled, so we need to account for that and # not sleep for too long elapsed = time.perf_counter() - last if elapsed < self.interval: thread_sleep(self.interval - elapsed) # after sleeping, make sure to take the current # timestamp so we can use it next iteration last = time.perf_counter() class GeventScheduler(Scheduler): """ This scheduler is based on the thread scheduler but adapted to work with gevent. When using gevent, it may monkey patch the threading modules (`threading` and `_thread`). This results in the use of greenlets instead of native threads. This is an issue because the sampler CANNOT run in a greenlet because 1. Other greenlets doing sync work will prevent the sampler from running 2. The greenlet runs in the same thread as other greenlets so when taking a sample, other greenlets will have been evicted from the thread. This results in a sample containing only the sampler's code. """ mode = "gevent" # type: ProfilerMode name = "sentry.profiler.GeventScheduler" def __init__(self, frequency): # type: (int) -> None if ThreadPool is None: raise ValueError("Profiler mode: {} is not available".format(self.mode)) super(GeventScheduler, self).__init__(frequency=frequency) # used to signal to the thread that it should stop self.running = False self.thread = None # type: Optional[ThreadPool] self.pid = None # type: Optional[int] # This intentionally uses the gevent patched threading.Lock. # The lock will be required when first trying to start profiles # as we need to spawn the profiler thread from the greenlets. self.lock = threading.Lock() def setup(self): # type: () -> None pass def teardown(self): # type: () -> None if self.running: self.running = False if self.thread is not None: self.thread.join() def ensure_running(self): # type: () -> None pid = os.getpid() # is running on the right process if self.running and self.pid == pid: return with self.lock: # another thread may have tried to acquire the lock # at the same time so it may start another thread # make sure to check again before proceeding if self.running and self.pid == pid: return self.pid = pid self.running = True self.thread = ThreadPool(1) try: self.thread.spawn(self.run) except RuntimeError: # Unfortunately at this point the interpreter is in a state that no # longer allows us to spawn a thread and we have to bail. 
self.running = False self.thread = None return def run(self): # type: () -> None last = time.perf_counter() while self.running: self.sampler() # some time may have elapsed since the last time # we sampled, so we need to account for that and # not sleep for too long elapsed = time.perf_counter() - last if elapsed < self.interval: thread_sleep(self.interval - elapsed) # after sleeping, make sure to take the current # timestamp so we can use it next iteration last = time.perf_counter() sentry-python-1.39.2/sentry_sdk/py.typed000066400000000000000000000000001454744723200203370ustar00rootroot00000000000000sentry-python-1.39.2/sentry_sdk/scope.py000066400000000000000000000652541454744723200203510ustar00rootroot00000000000000from copy import copy from collections import deque from itertools import chain import os import uuid from sentry_sdk.attachments import Attachment from sentry_sdk._functools import wraps from sentry_sdk.tracing_utils import ( Baggage, extract_sentrytrace_data, has_tracing_enabled, normalize_incoming_data, ) from sentry_sdk.tracing import ( BAGGAGE_HEADER_NAME, SENTRY_TRACE_HEADER_NAME, Transaction, ) from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.utils import logger, capture_internal_exceptions from sentry_sdk.consts import FALSE_VALUES if TYPE_CHECKING: from typing import Any from typing import Dict from typing import Iterator from typing import Optional from typing import Deque from typing import List from typing import Callable from typing import Tuple from typing import TypeVar from sentry_sdk._types import ( Breadcrumb, Event, EventProcessor, ErrorProcessor, ExcInfo, Hint, Type, ) from sentry_sdk.profiler import Profile from sentry_sdk.tracing import Span from sentry_sdk.session import Session F = TypeVar("F", bound=Callable[..., Any]) T = TypeVar("T") global_event_processors = [] # type: List[EventProcessor] def add_global_event_processor(processor): # type: (EventProcessor) -> None global_event_processors.append(processor) def _attr_setter(fn): # type: (Any) -> Any return property(fset=fn, doc=fn.__doc__) def _disable_capture(fn): # type: (F) -> F @wraps(fn) def wrapper(self, *args, **kwargs): # type: (Any, *Dict[str, Any], **Any) -> Any if not self._should_capture: return try: self._should_capture = False return fn(self, *args, **kwargs) finally: self._should_capture = True return wrapper # type: ignore class Scope(object): """The scope holds extra information that should be sent with all events that belong to it. """ # NOTE: Even though it should not happen, the scope needs to not crash when # accessed by multiple threads. It's fine if it's full of races, but those # races should never make the user application crash. # # The same needs to hold for any accesses of the scope the SDK makes. 
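    # A hedged usage sketch (illustrative only; not executed here): user code
    # normally mutates the scope through the public helpers rather than
    # constructing a Scope directly, e.g.
    #
    #     import sentry_sdk
    #
    #     with sentry_sdk.push_scope() as scope:
    #         scope.set_tag("section", "checkout")
    #         scope.set_user({"id": "42"})
    #         sentry_sdk.capture_message("payment failed")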
__slots__ = ( "_level", "_name", "_fingerprint", # note that for legacy reasons, _transaction is the transaction *name*, # not a Transaction object (the object is stored in _span) "_transaction", "_transaction_info", "_user", "_tags", "_contexts", "_extras", "_breadcrumbs", "_event_processors", "_error_processors", "_should_capture", "_span", "_session", "_attachments", "_force_auto_session_tracking", "_profile", "_propagation_context", ) def __init__(self): # type: () -> None self._event_processors = [] # type: List[EventProcessor] self._error_processors = [] # type: List[ErrorProcessor] self._name = None # type: Optional[str] self._propagation_context = None # type: Optional[Dict[str, Any]] self.clear() incoming_trace_information = self._load_trace_data_from_env() self.generate_propagation_context(incoming_data=incoming_trace_information) def _load_trace_data_from_env(self): # type: () -> Optional[Dict[str, str]] """ Load Sentry trace id and baggage from environment variables. Can be disabled by setting SENTRY_USE_ENVIRONMENT to "false". """ incoming_trace_information = None sentry_use_environment = ( os.environ.get("SENTRY_USE_ENVIRONMENT") or "" ).lower() use_environment = sentry_use_environment not in FALSE_VALUES if use_environment: incoming_trace_information = {} if os.environ.get("SENTRY_TRACE"): incoming_trace_information[SENTRY_TRACE_HEADER_NAME] = ( os.environ.get("SENTRY_TRACE") or "" ) if os.environ.get("SENTRY_BAGGAGE"): incoming_trace_information[BAGGAGE_HEADER_NAME] = ( os.environ.get("SENTRY_BAGGAGE") or "" ) return incoming_trace_information or None def _extract_propagation_context(self, data): # type: (Dict[str, Any]) -> Optional[Dict[str, Any]] context = {} # type: Dict[str, Any] normalized_data = normalize_incoming_data(data) baggage_header = normalized_data.get(BAGGAGE_HEADER_NAME) if baggage_header: context["dynamic_sampling_context"] = Baggage.from_incoming_header( baggage_header ).dynamic_sampling_context() sentry_trace_header = normalized_data.get(SENTRY_TRACE_HEADER_NAME) if sentry_trace_header: sentrytrace_data = extract_sentrytrace_data(sentry_trace_header) if sentrytrace_data is not None: context.update(sentrytrace_data) only_baggage_no_sentry_trace = ( "dynamic_sampling_context" in context and "trace_id" not in context ) if only_baggage_no_sentry_trace: context.update(self._create_new_propagation_context()) if context: if not context.get("span_id"): context["span_id"] = uuid.uuid4().hex[16:] return context return None def _create_new_propagation_context(self): # type: () -> Dict[str, Any] return { "trace_id": uuid.uuid4().hex, "span_id": uuid.uuid4().hex[16:], "parent_span_id": None, "dynamic_sampling_context": None, } def set_new_propagation_context(self): # type: () -> None """ Creates a new propagation context and sets it as `_propagation_context`. Overwriting existing one. """ self._propagation_context = self._create_new_propagation_context() logger.debug( "[Tracing] Create new propagation context: %s", self._propagation_context, ) def generate_propagation_context(self, incoming_data=None): # type: (Optional[Dict[str, str]]) -> None """ Makes sure `_propagation_context` is set. If there is `incoming_data` overwrite existing `_propagation_context`. if there is no `incoming_data` create new `_propagation_context`, but do NOT overwrite if already existing. 
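        An illustrative example of ``incoming_data`` (all values made up):

            {
                "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-1",
                "baggage": "sentry-trace_id=771a43a4192642f0b136d5159a501700,sentry-sample_rate=0.25",
            }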
""" if incoming_data: context = self._extract_propagation_context(incoming_data) if context is not None: self._propagation_context = context logger.debug( "[Tracing] Extracted propagation context from incoming data: %s", self._propagation_context, ) if self._propagation_context is None: self.set_new_propagation_context() def get_dynamic_sampling_context(self): # type: () -> Optional[Dict[str, str]] """ Returns the Dynamic Sampling Context from the Propagation Context. If not existing, creates a new one. """ if self._propagation_context is None: return None baggage = self.get_baggage() if baggage is not None: self._propagation_context[ "dynamic_sampling_context" ] = baggage.dynamic_sampling_context() return self._propagation_context["dynamic_sampling_context"] def get_traceparent(self): # type: () -> Optional[str] """ Returns the Sentry "sentry-trace" header (aka the traceparent) from the Propagation Context. """ if self._propagation_context is None: return None traceparent = "%s-%s" % ( self._propagation_context["trace_id"], self._propagation_context["span_id"], ) return traceparent def get_baggage(self): # type: () -> Optional[Baggage] if self._propagation_context is None: return None dynamic_sampling_context = self._propagation_context.get( "dynamic_sampling_context" ) if dynamic_sampling_context is None: return Baggage.from_options(self) else: return Baggage(dynamic_sampling_context) def get_trace_context(self): # type: () -> Any """ Returns the Sentry "trace" context from the Propagation Context. """ if self._propagation_context is None: return None trace_context = { "trace_id": self._propagation_context["trace_id"], "span_id": self._propagation_context["span_id"], "parent_span_id": self._propagation_context["parent_span_id"], "dynamic_sampling_context": self.get_dynamic_sampling_context(), } # type: Dict[str, Any] return trace_context def iter_headers(self): # type: () -> Iterator[Tuple[str, str]] """ Creates a generator which returns the `sentry-trace` and `baggage` headers from the Propagation Context. """ if self._propagation_context is not None: traceparent = self.get_traceparent() if traceparent is not None: yield SENTRY_TRACE_HEADER_NAME, traceparent dsc = self.get_dynamic_sampling_context() if dsc is not None: baggage = Baggage(dsc).serialize() yield BAGGAGE_HEADER_NAME, baggage def clear(self): # type: () -> None """Clears the entire scope.""" self._level = None # type: Optional[str] self._fingerprint = None # type: Optional[List[str]] self._transaction = None # type: Optional[str] self._transaction_info = {} # type: Dict[str, str] self._user = None # type: Optional[Dict[str, Any]] self._tags = {} # type: Dict[str, Any] self._contexts = {} # type: Dict[str, Dict[str, Any]] self._extras = {} # type: Dict[str, Any] self._attachments = [] # type: List[Attachment] self.clear_breadcrumbs() self._should_capture = True self._span = None # type: Optional[Span] self._session = None # type: Optional[Session] self._force_auto_session_tracking = None # type: Optional[bool] self._profile = None # type: Optional[Profile] self._propagation_context = None @_attr_setter def level(self, value): # type: (Optional[str]) -> None """When set this overrides the level. 
Deprecated in favor of set_level.""" self._level = value def set_level(self, value): # type: (Optional[str]) -> None """Sets the level for the scope.""" self._level = value @_attr_setter def fingerprint(self, value): # type: (Optional[List[str]]) -> None """When set this overrides the default fingerprint.""" self._fingerprint = value @property def transaction(self): # type: () -> Any # would be type: () -> Optional[Transaction], see https://github.com/python/mypy/issues/3004 """Return the transaction (root span) in the scope, if any.""" # there is no span/transaction on the scope if self._span is None: return None # there is an orphan span on the scope if self._span.containing_transaction is None: return None # there is either a transaction (which is its own containing # transaction) or a non-orphan span on the scope return self._span.containing_transaction @transaction.setter def transaction(self, value): # type: (Any) -> None # would be type: (Optional[str]) -> None, see https://github.com/python/mypy/issues/3004 """When set this forces a specific transaction name to be set. Deprecated: use set_transaction_name instead.""" # XXX: the docstring above is misleading. The implementation of # apply_to_event prefers an existing value of event.transaction over # anything set in the scope. # XXX: note that with the introduction of the Scope.transaction getter, # there is a semantic and type mismatch between getter and setter. The # getter returns a Transaction, the setter sets a transaction name. # Without breaking version compatibility, we could make the setter set a # transaction name or transaction (self._span) depending on the type of # the value argument. logger.warning( "Assigning to scope.transaction directly is deprecated: use scope.set_transaction_name() instead." ) self._transaction = value if self._span and self._span.containing_transaction: self._span.containing_transaction.name = value def set_transaction_name(self, name, source=None): # type: (str, Optional[str]) -> None """Set the transaction name and optionally the transaction source.""" self._transaction = name if self._span and self._span.containing_transaction: self._span.containing_transaction.name = name if source: self._span.containing_transaction.source = source if source: self._transaction_info["source"] = source @_attr_setter def user(self, value): # type: (Optional[Dict[str, Any]]) -> None """When set a specific user is bound to the scope. Deprecated in favor of set_user.""" self.set_user(value) def set_user(self, value): # type: (Optional[Dict[str, Any]]) -> None """Sets a user for the scope.""" self._user = value if self._session is not None: self._session.update(user=value) @property def span(self): # type: () -> Optional[Span] """Get/set current tracing span or transaction.""" return self._span @span.setter def span(self, span): # type: (Optional[Span]) -> None self._span = span # XXX: this differs from the implementation in JS, there Scope.setSpan # does not set Scope._transactionName. if isinstance(span, Transaction): transaction = span if transaction.name: self._transaction = transaction.name if transaction.source: self._transaction_info["source"] = transaction.source @property def profile(self): # type: () -> Optional[Profile] return self._profile @profile.setter def profile(self, profile): # type: (Optional[Profile]) -> None self._profile = profile def set_tag( self, key, # type: str value, # type: Any ): # type: (...) 
-> None """Sets a tag for a key to a specific value.""" self._tags[key] = value def remove_tag( self, key # type: str ): # type: (...) -> None """Removes a specific tag.""" self._tags.pop(key, None) def set_context( self, key, # type: str value, # type: Dict[str, Any] ): # type: (...) -> None """Binds a context at a certain key to a specific value.""" self._contexts[key] = value def remove_context( self, key # type: str ): # type: (...) -> None """Removes a context.""" self._contexts.pop(key, None) def set_extra( self, key, # type: str value, # type: Any ): # type: (...) -> None """Sets an extra key to a specific value.""" self._extras[key] = value def remove_extra( self, key # type: str ): # type: (...) -> None """Removes a specific extra key.""" self._extras.pop(key, None) def clear_breadcrumbs(self): # type: () -> None """Clears breadcrumb buffer.""" self._breadcrumbs = deque() # type: Deque[Breadcrumb] def add_attachment( self, bytes=None, # type: Optional[bytes] filename=None, # type: Optional[str] path=None, # type: Optional[str] content_type=None, # type: Optional[str] add_to_transactions=False, # type: bool ): # type: (...) -> None """Adds an attachment to future events sent.""" self._attachments.append( Attachment( bytes=bytes, path=path, filename=filename, content_type=content_type, add_to_transactions=add_to_transactions, ) ) def add_event_processor( self, func # type: EventProcessor ): # type: (...) -> None """Register a scope local event processor on the scope. :param func: This function behaves like `before_send.` """ if len(self._event_processors) > 20: logger.warning( "Too many event processors on scope! Clearing list to free up some memory: %r", self._event_processors, ) del self._event_processors[:] self._event_processors.append(func) def add_error_processor( self, func, # type: ErrorProcessor cls=None, # type: Optional[Type[BaseException]] ): # type: (...) -> None """Register a scope local error processor on the scope. :param func: A callback that works similar to an event processor but is invoked with the original exception info triple as second argument. :param cls: Optionally, only process exceptions of this type. """ if cls is not None: cls_ = cls # For mypy. 
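            # Wrap the user's callback so it only fires for the requested
            # exception type; keep a reference under a new name first because
            # ``func`` is shadowed by the wrapper defined below.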
            real_func = func

            def func(event, exc_info):
                # type: (Event, ExcInfo) -> Optional[Event]
                try:
                    is_inst = isinstance(exc_info[1], cls_)
                except Exception:
                    is_inst = False
                if is_inst:
                    return real_func(event, exc_info)
                return event

        self._error_processors.append(func)

    def _apply_level_to_event(self, event, hint, options):
        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
        if self._level is not None:
            event["level"] = self._level

    def _apply_breadcrumbs_to_event(self, event, hint, options):
        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
        event.setdefault("breadcrumbs", {}).setdefault("values", []).extend(
            self._breadcrumbs
        )

    def _apply_user_to_event(self, event, hint, options):
        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
        if event.get("user") is None and self._user is not None:
            event["user"] = self._user

    def _apply_transaction_name_to_event(self, event, hint, options):
        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
        if event.get("transaction") is None and self._transaction is not None:
            event["transaction"] = self._transaction

    def _apply_transaction_info_to_event(self, event, hint, options):
        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
        if event.get("transaction_info") is None and self._transaction_info is not None:
            event["transaction_info"] = self._transaction_info

    def _apply_fingerprint_to_event(self, event, hint, options):
        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
        if event.get("fingerprint") is None and self._fingerprint is not None:
            event["fingerprint"] = self._fingerprint

    def _apply_extra_to_event(self, event, hint, options):
        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
        if self._extras:
            event.setdefault("extra", {}).update(self._extras)

    def _apply_tags_to_event(self, event, hint, options):
        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
        if self._tags:
            event.setdefault("tags", {}).update(self._tags)

    def _apply_contexts_to_event(self, event, hint, options):
        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
        if self._contexts:
            event.setdefault("contexts", {}).update(self._contexts)

        contexts = event.setdefault("contexts", {})

        # Add "trace" context
        if contexts.get("trace") is None:
            if has_tracing_enabled(options) and self._span is not None:
                contexts["trace"] = self._span.get_trace_context()
            else:
                contexts["trace"] = self.get_trace_context()

        # Add "replay" context
        try:
            replay_id = contexts["trace"]["dynamic_sampling_context"]["replay_id"]
        except (KeyError, TypeError):
            replay_id = None
        if replay_id is not None:
            contexts["replay"] = {
                "replay_id": replay_id,
            }

    @_disable_capture
    def apply_to_event(
        self,
        event,  # type: Event
        hint,  # type: Hint
        options=None,  # type: Optional[Dict[str, Any]]
    ):
        # type: (...) -> Optional[Event]
        """Applies the information contained on the scope to the given event."""
        ty = event.get("type")
        is_transaction = ty == "transaction"
        is_check_in = ty == "check_in"

        # put all attachments into the hint. This lets callbacks play around
        # with attachments. We also later pull this out of the hint when we
        # create the envelope.
attachments_to_send = hint.get("attachments") or [] for attachment in self._attachments: if not is_transaction or attachment.add_to_transactions: attachments_to_send.append(attachment) hint["attachments"] = attachments_to_send self._apply_contexts_to_event(event, hint, options) if is_check_in: # Check-ins only support the trace context, strip all others event["contexts"] = { "trace": event.setdefault("contexts", {}).get("trace", {}) } if not is_check_in: self._apply_level_to_event(event, hint, options) self._apply_fingerprint_to_event(event, hint, options) self._apply_user_to_event(event, hint, options) self._apply_transaction_name_to_event(event, hint, options) self._apply_transaction_info_to_event(event, hint, options) self._apply_tags_to_event(event, hint, options) self._apply_extra_to_event(event, hint, options) if not is_transaction and not is_check_in: self._apply_breadcrumbs_to_event(event, hint, options) def _drop(cause, ty): # type: (Any, str) -> Optional[Any] logger.info("%s (%s) dropped event", ty, cause) return None # run error processors exc_info = hint.get("exc_info") if exc_info is not None: for error_processor in self._error_processors: new_event = error_processor(event, exc_info) if new_event is None: return _drop(error_processor, "error processor") event = new_event # run event processors if not is_check_in: for event_processor in chain( global_event_processors, self._event_processors ): new_event = event with capture_internal_exceptions(): new_event = event_processor(event, hint) if new_event is None: return _drop(event_processor, "event processor") event = new_event return event def update_from_scope(self, scope): # type: (Scope) -> None """Update the scope with another scope's data.""" if scope._level is not None: self._level = scope._level if scope._fingerprint is not None: self._fingerprint = scope._fingerprint if scope._transaction is not None: self._transaction = scope._transaction if scope._transaction_info is not None: self._transaction_info.update(scope._transaction_info) if scope._user is not None: self._user = scope._user if scope._tags: self._tags.update(scope._tags) if scope._contexts: self._contexts.update(scope._contexts) if scope._extras: self._extras.update(scope._extras) if scope._breadcrumbs: self._breadcrumbs.extend(scope._breadcrumbs) if scope._span: self._span = scope._span if scope._attachments: self._attachments.extend(scope._attachments) if scope._profile: self._profile = scope._profile if scope._propagation_context: self._propagation_context = scope._propagation_context def update_from_kwargs( self, user=None, # type: Optional[Any] level=None, # type: Optional[str] extras=None, # type: Optional[Dict[str, Any]] contexts=None, # type: Optional[Dict[str, Any]] tags=None, # type: Optional[Dict[str, str]] fingerprint=None, # type: Optional[List[str]] ): # type: (...) 
-> None """Update the scope's attributes.""" if level is not None: self._level = level if user is not None: self._user = user if extras is not None: self._extras.update(extras) if contexts is not None: self._contexts.update(contexts) if tags is not None: self._tags.update(tags) if fingerprint is not None: self._fingerprint = fingerprint def __copy__(self): # type: () -> Scope rv = object.__new__(self.__class__) # type: Scope rv._level = self._level rv._name = self._name rv._fingerprint = self._fingerprint rv._transaction = self._transaction rv._transaction_info = dict(self._transaction_info) rv._user = self._user rv._tags = dict(self._tags) rv._contexts = dict(self._contexts) rv._extras = dict(self._extras) rv._breadcrumbs = copy(self._breadcrumbs) rv._event_processors = list(self._event_processors) rv._error_processors = list(self._error_processors) rv._propagation_context = self._propagation_context rv._should_capture = self._should_capture rv._span = self._span rv._session = self._session rv._force_auto_session_tracking = self._force_auto_session_tracking rv._attachments = list(self._attachments) rv._profile = self._profile return rv def __repr__(self): # type: () -> str return "<%s id=%s name=%s>" % ( self.__class__.__name__, hex(id(self)), self._name, ) sentry-python-1.39.2/sentry_sdk/scrubber.py000066400000000000000000000074651454744723200210470ustar00rootroot00000000000000from sentry_sdk.utils import ( capture_internal_exceptions, AnnotatedValue, iter_event_frames, ) from sentry_sdk._compat import string_types from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from sentry_sdk._types import Event from typing import Any from typing import Dict from typing import List from typing import Optional DEFAULT_DENYLIST = [ # stolen from relay "password", "passwd", "secret", "api_key", "apikey", "auth", "credentials", "mysql_pwd", "privatekey", "private_key", "token", "ip_address", "session", # django "csrftoken", "sessionid", # wsgi "remote_addr", "x_csrftoken", "x_forwarded_for", "set_cookie", "cookie", "authorization", "x_api_key", "x_forwarded_for", "x_real_ip", # other common names used in the wild "aiohttp_session", # aiohttp "connect.sid", # Express "csrf_token", # Pyramid "csrf", # (this is a cookie name used in accepted answers on stack overflow) "_csrf", # Express "_csrf_token", # Bottle "PHPSESSID", # PHP "_session", # Sanic "symfony", # Symfony "user_session", # Vue "_xsrf", # Tornado "XSRF-TOKEN", # Angular, Laravel ] class EventScrubber(object): def __init__(self, denylist=None): # type: (Optional[List[str]]) -> None self.denylist = DEFAULT_DENYLIST if denylist is None else denylist self.denylist = [x.lower() for x in self.denylist] def scrub_dict(self, d): # type: (Dict[str, Any]) -> None if not isinstance(d, dict): return for k in d.keys(): if isinstance(k, string_types) and k.lower() in self.denylist: d[k] = AnnotatedValue.substituted_because_contains_sensitive_data() def scrub_request(self, event): # type: (Event) -> None with capture_internal_exceptions(): if "request" in event: if "headers" in event["request"]: self.scrub_dict(event["request"]["headers"]) if "cookies" in event["request"]: self.scrub_dict(event["request"]["cookies"]) if "data" in event["request"]: self.scrub_dict(event["request"]["data"]) def scrub_extra(self, event): # type: (Event) -> None with capture_internal_exceptions(): if "extra" in event: self.scrub_dict(event["extra"]) def scrub_user(self, event): # type: (Event) -> None with capture_internal_exceptions(): if "user" in event: 
self.scrub_dict(event["user"]) def scrub_breadcrumbs(self, event): # type: (Event) -> None with capture_internal_exceptions(): if "breadcrumbs" in event: if "values" in event["breadcrumbs"]: for value in event["breadcrumbs"]["values"]: if "data" in value: self.scrub_dict(value["data"]) def scrub_frames(self, event): # type: (Event) -> None with capture_internal_exceptions(): for frame in iter_event_frames(event): if "vars" in frame: self.scrub_dict(frame["vars"]) def scrub_spans(self, event): # type: (Event) -> None with capture_internal_exceptions(): if "spans" in event: for span in event["spans"]: if "data" in span: self.scrub_dict(span["data"]) def scrub_event(self, event): # type: (Event) -> None self.scrub_request(event) self.scrub_extra(event) self.scrub_user(event) self.scrub_breadcrumbs(event) self.scrub_frames(event) self.scrub_spans(event) sentry-python-1.39.2/sentry_sdk/serializer.py000066400000000000000000000317451454744723200214070ustar00rootroot00000000000000import sys import math from datetime import datetime from sentry_sdk.utils import ( AnnotatedValue, capture_internal_exception, disable_capture_event, format_timestamp, safe_repr, strip_string, ) from sentry_sdk._compat import ( text_type, PY2, string_types, number_types, iteritems, binary_sequence_types, ) from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from types import TracebackType from typing import Any from typing import Callable from typing import ContextManager from typing import Dict from typing import List from typing import Optional from typing import Type from typing import Union from sentry_sdk._types import NotImplementedType, Event Span = Dict[str, Any] ReprProcessor = Callable[[Any, Dict[str, Any]], Union[NotImplementedType, str]] Segment = Union[str, int] if PY2: # Importing ABCs from collections is deprecated, and will stop working in 3.8 # https://github.com/python/cpython/blob/master/Lib/collections/__init__.py#L49 from collections import Mapping, Sequence, Set serializable_str_types = string_types + binary_sequence_types else: # New in 3.3 # https://docs.python.org/3/library/collections.abc.html from collections.abc import Mapping, Sequence, Set # Bytes are technically not strings in Python 3, but we can serialize them serializable_str_types = string_types + binary_sequence_types # Maximum length of JSON-serialized event payloads that can be safely sent # before the server may reject the event due to its size. This is not intended # to reflect actual values defined server-side, but rather only be an upper # bound for events sent by the SDK. # # Can be overwritten if wanting to send more bytes, e.g. with a custom server. # When changing this, keep in mind that events may be a little bit larger than # this value due to attached metadata, so keep the number conservative. MAX_EVENT_BYTES = 10**6 # Maximum depth and breadth of databags. Excess data will be trimmed. If # max_request_body_size is "always", request bodies won't be trimmed. 
MAX_DATABAG_DEPTH = 5
MAX_DATABAG_BREADTH = 10
CYCLE_MARKER = "<cyclic>"


global_repr_processors = []  # type: List[ReprProcessor]


def add_global_repr_processor(processor):
    # type: (ReprProcessor) -> None
    global_repr_processors.append(processor)


class Memo(object):
    __slots__ = ("_ids", "_objs")

    def __init__(self):
        # type: () -> None
        self._ids = {}  # type: Dict[int, Any]
        self._objs = []  # type: List[Any]

    def memoize(self, obj):
        # type: (Any) -> ContextManager[bool]
        self._objs.append(obj)
        return self

    def __enter__(self):
        # type: () -> bool
        obj = self._objs[-1]
        if id(obj) in self._ids:
            return True
        else:
            self._ids[id(obj)] = obj
            return False

    def __exit__(
        self,
        ty,  # type: Optional[Type[BaseException]]
        value,  # type: Optional[BaseException]
        tb,  # type: Optional[TracebackType]
    ):
        # type: (...) -> None
        self._ids.pop(id(self._objs.pop()), None)


def serialize(event, **kwargs):
    # type: (Event, **Any) -> Event
    memo = Memo()
    path = []  # type: List[Segment]
    meta_stack = []  # type: List[Dict[str, Any]]

    keep_request_bodies = (
        kwargs.pop("max_request_body_size", None) == "always"
    )  # type: bool
    max_value_length = kwargs.pop("max_value_length", None)  # type: Optional[int]

    def _annotate(**meta):
        # type: (**Any) -> None
        while len(meta_stack) <= len(path):
            try:
                segment = path[len(meta_stack) - 1]
                node = meta_stack[-1].setdefault(text_type(segment), {})
            except IndexError:
                node = {}

            meta_stack.append(node)

        meta_stack[-1].setdefault("", {}).update(meta)

    def _should_repr_strings():
        # type: () -> Optional[bool]
        """
        By default non-serializable objects are going through
        safe_repr(). For certain places in the event (local vars) we
        want to repr() even things that are JSON-serializable to
        make their type more apparent. For example, it's useful to
        see the difference between a unicode-string and a bytestring
        when viewing a stacktrace.

        For container-types we still don't do anything different.
        Generally we just try to make the Sentry UI present exactly
        what a pretty-printed repr would look like.

        :returns: `True` if we are somewhere in frame variables, and `False`
            if we are in a position where we will never encounter frame
            variables when recursing (for example, we're in `event.extra`).
            `None` if we are not (yet) in frame variables, but might encounter
            them when recursing (e.g. we're in `event.exception`)
        """
        try:
            p0 = path[0]
            if p0 == "stacktrace" and path[1] == "frames" and path[3] == "vars":
                return True

            if (
                p0 in ("threads", "exception")
                and path[1] == "values"
                and path[3] == "stacktrace"
                and path[4] == "frames"
                and path[6] == "vars"
            ):
                return True
        except IndexError:
            return None

        return False

    def _is_databag():
        # type: () -> Optional[bool]
        """
        A databag is any value that we need to trim.

        :returns: Works like `_should_repr_strings()`. `True` for "yes",
            `False` for "no", `None` for "maybe soon".
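        For example, values under ``event["extra"]`` or under
        ``event["breadcrumbs"]["values"][i]["data"]`` are databags, while a
        value under ``event["exception"]`` is "maybe soon", since frame
        variables may still appear deeper in that subtree.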
""" try: rv = _should_repr_strings() if rv in (True, None): return rv is_request_body = _is_request_body() if is_request_body in (True, None): return is_request_body p0 = path[0] if p0 == "breadcrumbs" and path[1] == "values": path[2] return True if p0 == "extra": return True except IndexError: return None return False def _is_request_body(): # type: () -> Optional[bool] try: if path[0] == "request" and path[1] == "data": return True except IndexError: return None return False def _serialize_node( obj, # type: Any is_databag=None, # type: Optional[bool] is_request_body=None, # type: Optional[bool] should_repr_strings=None, # type: Optional[bool] segment=None, # type: Optional[Segment] remaining_breadth=None, # type: Optional[Union[int, float]] remaining_depth=None, # type: Optional[Union[int, float]] ): # type: (...) -> Any if segment is not None: path.append(segment) try: with memo.memoize(obj) as result: if result: return CYCLE_MARKER return _serialize_node_impl( obj, is_databag=is_databag, is_request_body=is_request_body, should_repr_strings=should_repr_strings, remaining_depth=remaining_depth, remaining_breadth=remaining_breadth, ) except BaseException: capture_internal_exception(sys.exc_info()) if is_databag: return "" return None finally: if segment is not None: path.pop() del meta_stack[len(path) + 1 :] def _flatten_annotated(obj): # type: (Any) -> Any if isinstance(obj, AnnotatedValue): _annotate(**obj.metadata) obj = obj.value return obj def _serialize_node_impl( obj, is_databag, is_request_body, should_repr_strings, remaining_depth, remaining_breadth, ): # type: (Any, Optional[bool], Optional[bool], Optional[bool], Optional[Union[float, int]], Optional[Union[float, int]]) -> Any if isinstance(obj, AnnotatedValue): should_repr_strings = False if should_repr_strings is None: should_repr_strings = _should_repr_strings() if is_databag is None: is_databag = _is_databag() if is_request_body is None: is_request_body = _is_request_body() if is_databag: if is_request_body and keep_request_bodies: remaining_depth = float("inf") remaining_breadth = float("inf") else: if remaining_depth is None: remaining_depth = MAX_DATABAG_DEPTH if remaining_breadth is None: remaining_breadth = MAX_DATABAG_BREADTH obj = _flatten_annotated(obj) if remaining_depth is not None and remaining_depth <= 0: _annotate(rem=[["!limit", "x"]]) if is_databag: return _flatten_annotated( strip_string(safe_repr(obj), max_length=max_value_length) ) return None if is_databag and global_repr_processors: hints = {"memo": memo, "remaining_depth": remaining_depth} for processor in global_repr_processors: result = processor(obj, hints) if result is not NotImplemented: return _flatten_annotated(result) sentry_repr = getattr(type(obj), "__sentry_repr__", None) if obj is None or isinstance(obj, (bool, number_types)): if should_repr_strings or ( isinstance(obj, float) and (math.isinf(obj) or math.isnan(obj)) ): return safe_repr(obj) else: return obj elif callable(sentry_repr): return sentry_repr(obj) elif isinstance(obj, datetime): return ( text_type(format_timestamp(obj)) if not should_repr_strings else safe_repr(obj) ) elif isinstance(obj, Mapping): # Create temporary copy here to avoid calling too much code that # might mutate our dictionary while we're still iterating over it. 
obj = dict(iteritems(obj)) rv_dict = {} # type: Dict[str, Any] i = 0 for k, v in iteritems(obj): if remaining_breadth is not None and i >= remaining_breadth: _annotate(len=len(obj)) break str_k = text_type(k) v = _serialize_node( v, segment=str_k, should_repr_strings=should_repr_strings, is_databag=is_databag, is_request_body=is_request_body, remaining_depth=remaining_depth - 1 if remaining_depth is not None else None, remaining_breadth=remaining_breadth, ) rv_dict[str_k] = v i += 1 return rv_dict elif not isinstance(obj, serializable_str_types) and isinstance( obj, (Set, Sequence) ): rv_list = [] for i, v in enumerate(obj): if remaining_breadth is not None and i >= remaining_breadth: _annotate(len=len(obj)) break rv_list.append( _serialize_node( v, segment=i, should_repr_strings=should_repr_strings, is_databag=is_databag, is_request_body=is_request_body, remaining_depth=remaining_depth - 1 if remaining_depth is not None else None, remaining_breadth=remaining_breadth, ) ) return rv_list if should_repr_strings: obj = safe_repr(obj) else: if isinstance(obj, bytes) or isinstance(obj, bytearray): obj = obj.decode("utf-8", "replace") if not isinstance(obj, string_types): obj = safe_repr(obj) is_span_description = ( len(path) == 3 and path[0] == "spans" and path[-1] == "description" ) if is_span_description: return obj return _flatten_annotated(strip_string(obj, max_length=max_value_length)) # # Start of serialize() function # disable_capture_event.set(True) try: serialized_event = _serialize_node(event, **kwargs) if meta_stack and isinstance(serialized_event, dict): serialized_event["_meta"] = meta_stack[0] return serialized_event finally: disable_capture_event.set(False) sentry-python-1.39.2/sentry_sdk/session.py000066400000000000000000000127541454744723200207200ustar00rootroot00000000000000import uuid from sentry_sdk._compat import datetime_utcnow from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.utils import format_timestamp if TYPE_CHECKING: from datetime import datetime from typing import Optional from typing import Union from typing import Any from typing import Dict from sentry_sdk._types import SessionStatus def _minute_trunc(ts): # type: (datetime) -> datetime return ts.replace(second=0, microsecond=0) def _make_uuid( val, # type: Union[str, uuid.UUID] ): # type: (...) -> uuid.UUID if isinstance(val, uuid.UUID): return val return uuid.UUID(val) class Session(object): def __init__( self, sid=None, # type: Optional[Union[str, uuid.UUID]] did=None, # type: Optional[str] timestamp=None, # type: Optional[datetime] started=None, # type: Optional[datetime] duration=None, # type: Optional[float] status=None, # type: Optional[SessionStatus] release=None, # type: Optional[str] environment=None, # type: Optional[str] user_agent=None, # type: Optional[str] ip_address=None, # type: Optional[str] errors=None, # type: Optional[int] user=None, # type: Optional[Any] session_mode="application", # type: str ): # type: (...) 
-> None if sid is None: sid = uuid.uuid4() if started is None: started = datetime_utcnow() if status is None: status = "ok" self.status = status self.did = None # type: Optional[str] self.started = started self.release = None # type: Optional[str] self.environment = None # type: Optional[str] self.duration = None # type: Optional[float] self.user_agent = None # type: Optional[str] self.ip_address = None # type: Optional[str] self.session_mode = session_mode # type: str self.errors = 0 self.update( sid=sid, did=did, timestamp=timestamp, duration=duration, release=release, environment=environment, user_agent=user_agent, ip_address=ip_address, errors=errors, user=user, ) @property def truncated_started(self): # type: (...) -> datetime return _minute_trunc(self.started) def update( self, sid=None, # type: Optional[Union[str, uuid.UUID]] did=None, # type: Optional[str] timestamp=None, # type: Optional[datetime] started=None, # type: Optional[datetime] duration=None, # type: Optional[float] status=None, # type: Optional[SessionStatus] release=None, # type: Optional[str] environment=None, # type: Optional[str] user_agent=None, # type: Optional[str] ip_address=None, # type: Optional[str] errors=None, # type: Optional[int] user=None, # type: Optional[Any] ): # type: (...) -> None # If a user is supplied we pull some data form it if user: if ip_address is None: ip_address = user.get("ip_address") if did is None: did = user.get("id") or user.get("email") or user.get("username") if sid is not None: self.sid = _make_uuid(sid) if did is not None: self.did = str(did) if timestamp is None: timestamp = datetime_utcnow() self.timestamp = timestamp if started is not None: self.started = started if duration is not None: self.duration = duration if release is not None: self.release = release if environment is not None: self.environment = environment if ip_address is not None: self.ip_address = ip_address if user_agent is not None: self.user_agent = user_agent if errors is not None: self.errors = errors if status is not None: self.status = status def close( self, status=None # type: Optional[SessionStatus] ): # type: (...) -> Any if status is None and self.status == "ok": status = "exited" if status is not None: self.update(status=status) def get_json_attrs( self, with_user_info=True # type: Optional[bool] ): # type: (...) -> Any attrs = {} if self.release is not None: attrs["release"] = self.release if self.environment is not None: attrs["environment"] = self.environment if with_user_info: if self.ip_address is not None: attrs["ip_address"] = self.ip_address if self.user_agent is not None: attrs["user_agent"] = self.user_agent return attrs def to_json(self): # type: (...) 
-> Any rv = { "sid": str(self.sid), "init": True, "started": format_timestamp(self.started), "timestamp": format_timestamp(self.timestamp), "status": self.status, } # type: Dict[str, Any] if self.errors: rv["errors"] = self.errors if self.did is not None: rv["did"] = self.did if self.duration is not None: rv["duration"] = self.duration attrs = self.get_json_attrs() if attrs: rv["attrs"] = attrs return rv sentry-python-1.39.2/sentry_sdk/sessions.py000066400000000000000000000145301454744723200210750ustar00rootroot00000000000000import os import time from threading import Thread, Lock from contextlib import contextmanager import sentry_sdk from sentry_sdk.envelope import Envelope from sentry_sdk.session import Session from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.utils import format_timestamp if TYPE_CHECKING: from typing import Any from typing import Callable from typing import Dict from typing import Generator from typing import List from typing import Optional from typing import Union def is_auto_session_tracking_enabled(hub=None): # type: (Optional[sentry_sdk.Hub]) -> Union[Any, bool, None] """Utility function to find out if session tracking is enabled.""" if hub is None: hub = sentry_sdk.Hub.current should_track = hub.scope._force_auto_session_tracking if should_track is None: client_options = hub.client.options if hub.client else {} should_track = client_options.get("auto_session_tracking", False) return should_track @contextmanager def auto_session_tracking(hub=None, session_mode="application"): # type: (Optional[sentry_sdk.Hub], str) -> Generator[None, None, None] """Starts and stops a session automatically around a block.""" if hub is None: hub = sentry_sdk.Hub.current should_track = is_auto_session_tracking_enabled(hub) if should_track: hub.start_session(session_mode=session_mode) try: yield finally: if should_track: hub.end_session() TERMINAL_SESSION_STATES = ("exited", "abnormal", "crashed") MAX_ENVELOPE_ITEMS = 100 def make_aggregate_envelope(aggregate_states, attrs): # type: (Any, Any) -> Any return {"attrs": dict(attrs), "aggregates": list(aggregate_states.values())} class SessionFlusher(object): def __init__( self, capture_func, # type: Callable[[Envelope], None] flush_interval=60, # type: int ): # type: (...) -> None self.capture_func = capture_func self.flush_interval = flush_interval self.pending_sessions = [] # type: List[Any] self.pending_aggregates = {} # type: Dict[Any, Any] self._thread = None # type: Optional[Thread] self._thread_lock = Lock() self._aggregate_lock = Lock() self._thread_for_pid = None # type: Optional[int] self._running = True def flush(self): # type: (...) -> None pending_sessions = self.pending_sessions self.pending_sessions = [] with self._aggregate_lock: pending_aggregates = self.pending_aggregates self.pending_aggregates = {} envelope = Envelope() for session in pending_sessions: if len(envelope.items) == MAX_ENVELOPE_ITEMS: self.capture_func(envelope) envelope = Envelope() envelope.add_session(session) for attrs, states in pending_aggregates.items(): if len(envelope.items) == MAX_ENVELOPE_ITEMS: self.capture_func(envelope) envelope = Envelope() envelope.add_sessions(make_aggregate_envelope(states, attrs)) if len(envelope.items) > 0: self.capture_func(envelope) def _ensure_running(self): # type: (...) -> None """ Check that we have an active thread to run in, or create one if not. Note that this might fail (e.g. in Python 3.12 it's not possible to spawn new threads at interpreter shutdown). 
In that case self._running will be False after running this function. """ if self._thread_for_pid == os.getpid() and self._thread is not None: return None with self._thread_lock: if self._thread_for_pid == os.getpid() and self._thread is not None: return None def _thread(): # type: (...) -> None while self._running: time.sleep(self.flush_interval) if self._running: self.flush() thread = Thread(target=_thread) thread.daemon = True try: thread.start() except RuntimeError: # Unfortunately at this point the interpreter is in a state that no # longer allows us to spawn a thread and we have to bail. self._running = False return None self._thread = thread self._thread_for_pid = os.getpid() return None def add_aggregate_session( self, session # type: Session ): # type: (...) -> None # NOTE on `session.did`: # the protocol can deal with buckets that have a distinct-id, however # in practice we expect the python SDK to have an extremely high cardinality # here, effectively making aggregation useless, therefore we do not # aggregate per-did. # For this part we can get away with using the global interpreter lock with self._aggregate_lock: attrs = session.get_json_attrs(with_user_info=False) primary_key = tuple(sorted(attrs.items())) secondary_key = session.truncated_started # (, session.did) states = self.pending_aggregates.setdefault(primary_key, {}) state = states.setdefault(secondary_key, {}) if "started" not in state: state["started"] = format_timestamp(session.truncated_started) # if session.did is not None: # state["did"] = session.did if session.status == "crashed": state["crashed"] = state.get("crashed", 0) + 1 elif session.status == "abnormal": state["abnormal"] = state.get("abnormal", 0) + 1 elif session.errors > 0: state["errored"] = state.get("errored", 0) + 1 else: state["exited"] = state.get("exited", 0) + 1 def add_session( self, session # type: Session ): # type: (...) -> None if session.session_mode == "request": self.add_aggregate_session(session) else: self.pending_sessions.append(session.to_json()) self._ensure_running() def kill(self): # type: (...) -> None self._running = False def __del__(self): # type: (...) -> None self.kill() sentry-python-1.39.2/sentry_sdk/spotlight.py000066400000000000000000000026431454744723200212460ustar00rootroot00000000000000import io import urllib3 from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Dict from typing import Optional from sentry_sdk.utils import logger from sentry_sdk.envelope import Envelope class SpotlightClient(object): def __init__(self, url): # type: (str) -> None self.url = url self.http = urllib3.PoolManager() self.tries = 0 def capture_envelope(self, envelope): # type: (Envelope) -> None if self.tries > 3: logger.warning( "Too many errors sending to Spotlight, stop sending events there." 
) return body = io.BytesIO() envelope.serialize_into(body) try: req = self.http.request( url=self.url, body=body.getvalue(), method="POST", headers={ "Content-Type": "application/x-sentry-envelope", }, ) req.close() except Exception as e: self.tries += 1 logger.warning(str(e)) def setup_spotlight(options): # type: (Dict[str, Any]) -> Optional[SpotlightClient] url = options.get("spotlight") if isinstance(url, str): pass elif url is True: url = "http://localhost:8969/stream" else: return None return SpotlightClient(url) sentry-python-1.39.2/sentry_sdk/tracing.py000066400000000000000000001050311454744723200206530ustar00rootroot00000000000000import uuid import random from datetime import datetime, timedelta import sentry_sdk from sentry_sdk.consts import INSTRUMENTER from sentry_sdk.utils import is_valid_sample_rate, logger, nanosecond_time from sentry_sdk._compat import datetime_utcnow, utc_from_timestamp, PY2 from sentry_sdk.consts import SPANDATA from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: import typing from typing import Any from typing import Dict from typing import Iterator from typing import List from typing import Optional from typing import Tuple from typing import Union import sentry_sdk.profiler from sentry_sdk._types import Event, MeasurementUnit, SamplingContext BAGGAGE_HEADER_NAME = "baggage" SENTRY_TRACE_HEADER_NAME = "sentry-trace" # Transaction source # see https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations TRANSACTION_SOURCE_CUSTOM = "custom" TRANSACTION_SOURCE_URL = "url" TRANSACTION_SOURCE_ROUTE = "route" TRANSACTION_SOURCE_VIEW = "view" TRANSACTION_SOURCE_COMPONENT = "component" TRANSACTION_SOURCE_TASK = "task" # These are typically high cardinality and the server hates them LOW_QUALITY_TRANSACTION_SOURCES = [ TRANSACTION_SOURCE_URL, ] SOURCE_FOR_STYLE = { "endpoint": TRANSACTION_SOURCE_COMPONENT, "function_name": TRANSACTION_SOURCE_COMPONENT, "handler_name": TRANSACTION_SOURCE_COMPONENT, "method_and_path_pattern": TRANSACTION_SOURCE_ROUTE, "path": TRANSACTION_SOURCE_URL, "route_name": TRANSACTION_SOURCE_COMPONENT, "route_pattern": TRANSACTION_SOURCE_ROUTE, "uri_template": TRANSACTION_SOURCE_ROUTE, "url": TRANSACTION_SOURCE_ROUTE, } class _SpanRecorder(object): """Limits the number of spans recorded in a transaction.""" __slots__ = ("maxlen", "spans") def __init__(self, maxlen): # type: (int) -> None # FIXME: this is `maxlen - 1` only to preserve historical behavior # enforced by tests. # Either this should be changed to `maxlen` or the JS SDK implementation # should be changed to match a consistent interpretation of what maxlen # limits: either transaction+spans or only child spans. self.maxlen = maxlen - 1 self.spans = [] # type: List[Span] def add(self, span): # type: (Span) -> None if len(self.spans) > self.maxlen: span._span_recorder = None else: self.spans.append(span) class Span(object): """A span holds timing information of a block of code. Spans can have multiple child spans thus forming a span tree.""" __slots__ = ( "trace_id", "span_id", "parent_span_id", "same_process_as_parent", "sampled", "op", "description", "start_timestamp", "_start_timestamp_monotonic_ns", "status", "timestamp", "_tags", "_data", "_span_recorder", "hub", "_context_manager_state", "_containing_transaction", "_local_aggregator", ) def __new__(cls, **kwargs): # type: (**Any) -> Any """ Backwards-compatible implementation of Span and Transaction creation. """ # TODO: consider removing this in a future release. 
# This is for backwards compatibility with releases before Transaction # existed, to allow for a smoother transition. if "transaction" in kwargs: return object.__new__(Transaction) return object.__new__(cls) def __init__( self, trace_id=None, # type: Optional[str] span_id=None, # type: Optional[str] parent_span_id=None, # type: Optional[str] same_process_as_parent=True, # type: bool sampled=None, # type: Optional[bool] op=None, # type: Optional[str] description=None, # type: Optional[str] hub=None, # type: Optional[sentry_sdk.Hub] status=None, # type: Optional[str] transaction=None, # type: Optional[str] # deprecated containing_transaction=None, # type: Optional[Transaction] start_timestamp=None, # type: Optional[Union[datetime, float]] ): # type: (...) -> None self.trace_id = trace_id or uuid.uuid4().hex self.span_id = span_id or uuid.uuid4().hex[16:] self.parent_span_id = parent_span_id self.same_process_as_parent = same_process_as_parent self.sampled = sampled self.op = op self.description = description self.status = status self.hub = hub self._tags = {} # type: Dict[str, str] self._data = {} # type: Dict[str, Any] self._containing_transaction = containing_transaction if start_timestamp is None: start_timestamp = datetime_utcnow() elif isinstance(start_timestamp, float): start_timestamp = utc_from_timestamp(start_timestamp) self.start_timestamp = start_timestamp try: # profiling depends on this value and requires that # it is measured in nanoseconds self._start_timestamp_monotonic_ns = nanosecond_time() except AttributeError: pass #: End timestamp of span self.timestamp = None # type: Optional[datetime] self._span_recorder = None # type: Optional[_SpanRecorder] self._local_aggregator = None # type: Optional[LocalAggregator] # TODO this should really live on the Transaction class rather than the Span # class def init_span_recorder(self, maxlen): # type: (int) -> None if self._span_recorder is None: self._span_recorder = _SpanRecorder(maxlen) def _get_local_aggregator(self): # type: (...) -> LocalAggregator rv = self._local_aggregator if rv is None: rv = self._local_aggregator = LocalAggregator() return rv def __repr__(self): # type: () -> str return ( "<%s(op=%r, description:%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r)>" % ( self.__class__.__name__, self.op, self.description, self.trace_id, self.span_id, self.parent_span_id, self.sampled, ) ) def __enter__(self): # type: () -> Span hub = self.hub or sentry_sdk.Hub.current _, scope = hub._stack[-1] old_span = scope.span scope.span = self self._context_manager_state = (hub, scope, old_span) return self def __exit__(self, ty, value, tb): # type: (Optional[Any], Optional[Any], Optional[Any]) -> None if value is not None: self.set_status("internal_error") hub, scope, old_span = self._context_manager_state del self._context_manager_state self.finish(hub) scope.span = old_span @property def containing_transaction(self): # type: () -> Optional[Transaction] """The ``Transaction`` that this span belongs to. The ``Transaction`` is the root of the span tree, so one could also think of this ``Transaction`` as the "root span".""" # this is a getter rather than a regular attribute so that transactions # can return `self` here instead (as a way to prevent them circularly # referencing themselves) return self._containing_transaction def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): # type: (str, **Any) -> Span """ Start a sub-span from the current span or transaction. 
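        A hedged usage sketch (the ``op`` and ``description`` values are
        illustrative):

            with transaction.start_child(
                op="db.query", description="SELECT * FROM users"
            ) as span:
                span.set_data("db.rows_returned", 42)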
Takes the same arguments as the initializer of :py:class:`Span`. The trace id, sampling decision, transaction pointer, and span recorder are inherited from the current span/transaction. """ hub = self.hub or sentry_sdk.Hub.current client = hub.client configuration_instrumenter = client and client.options["instrumenter"] if instrumenter != configuration_instrumenter: return NoOpSpan() kwargs.setdefault("sampled", self.sampled) child = Span( trace_id=self.trace_id, parent_span_id=self.span_id, containing_transaction=self.containing_transaction, **kwargs ) span_recorder = ( self.containing_transaction and self.containing_transaction._span_recorder ) if span_recorder: span_recorder.add(child) return child def new_span(self, **kwargs): # type: (**Any) -> Span """DEPRECATED: use :py:meth:`sentry_sdk.tracing.Span.start_child` instead.""" logger.warning( "Deprecated: use Span.start_child instead of Span.new_span. This will be removed in the future." ) return self.start_child(**kwargs) @classmethod def continue_from_environ( cls, environ, # type: typing.Mapping[str, str] **kwargs # type: Any ): # type: (...) -> Transaction """ Create a Transaction with the given params, then add in data pulled from the ``sentry-trace`` and ``baggage`` headers from the environ (if any) before returning the Transaction. This is different from :py:meth:`~sentry_sdk.tracing.Span.continue_from_headers` in that it assumes header names in the form ``HTTP_HEADER_NAME`` - such as you would get from a WSGI/ASGI environ - rather than the form ``header-name``. :param environ: The ASGI/WSGI environ to pull information from. """ if cls is Span: logger.warning( "Deprecated: use Transaction.continue_from_environ " "instead of Span.continue_from_environ." ) return Transaction.continue_from_headers(EnvironHeaders(environ), **kwargs) @classmethod def continue_from_headers( cls, headers, # type: typing.Mapping[str, str] **kwargs # type: Any ): # type: (...) -> Transaction """ Create a transaction with the given params (including any data pulled from the ``sentry-trace`` and ``baggage`` headers). :param headers: The dictionary with the HTTP headers to pull information from. """ # TODO move this to the Transaction class if cls is Span: logger.warning( "Deprecated: use Transaction.continue_from_headers " "instead of Span.continue_from_headers." ) # TODO-neel move away from this kwargs stuff, it's confusing and opaque # make more explicit baggage = Baggage.from_incoming_header(headers.get(BAGGAGE_HEADER_NAME)) kwargs.update({BAGGAGE_HEADER_NAME: baggage}) sentrytrace_kwargs = extract_sentrytrace_data( headers.get(SENTRY_TRACE_HEADER_NAME) ) if sentrytrace_kwargs is not None: kwargs.update(sentrytrace_kwargs) # If there's an incoming sentry-trace but no incoming baggage header, # for instance in traces coming from older SDKs, # baggage will be empty and immutable and won't be populated as head SDK. baggage.freeze() transaction = Transaction(**kwargs) transaction.same_process_as_parent = False return transaction def iter_headers(self): # type: () -> Iterator[Tuple[str, str]] """ Creates a generator which returns the span's ``sentry-trace`` and ``baggage`` headers. If the span's containing transaction doesn't yet have a ``baggage`` value, this will cause one to be generated and stored. 
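        A hedged propagation sketch (the outgoing-request step is illustrative;
        any HTTP client works):

            headers = dict(span.iter_headers())
            # attach ``headers`` to the outgoing HTTP request so the
            # downstream service can pick the trace up again via
            # ``Transaction.continue_from_headers``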
""" yield SENTRY_TRACE_HEADER_NAME, self.to_traceparent() if self.containing_transaction: baggage = self.containing_transaction.get_baggage().serialize() if baggage: yield BAGGAGE_HEADER_NAME, baggage @classmethod def from_traceparent( cls, traceparent, # type: Optional[str] **kwargs # type: Any ): # type: (...) -> Optional[Transaction] """ DEPRECATED: Use :py:meth:`sentry_sdk.tracing.Span.continue_from_headers`. Create a ``Transaction`` with the given params, then add in data pulled from the given ``sentry-trace`` header value before returning the ``Transaction``. """ logger.warning( "Deprecated: Use Transaction.continue_from_headers(headers, **kwargs) " "instead of from_traceparent(traceparent, **kwargs)" ) if not traceparent: return None return cls.continue_from_headers( {SENTRY_TRACE_HEADER_NAME: traceparent}, **kwargs ) def to_traceparent(self): # type: () -> str if self.sampled is True: sampled = "1" elif self.sampled is False: sampled = "0" else: sampled = None traceparent = "%s-%s" % (self.trace_id, self.span_id) if sampled is not None: traceparent += "-%s" % (sampled,) return traceparent def to_baggage(self): # type: () -> Optional[Baggage] """Returns the :py:class:`~sentry_sdk.tracing_utils.Baggage` associated with this ``Span``, if any. (Taken from the root of the span tree.) """ if self.containing_transaction: return self.containing_transaction.get_baggage() return None def set_tag(self, key, value): # type: (str, Any) -> None self._tags[key] = value def set_data(self, key, value): # type: (str, Any) -> None self._data[key] = value def set_status(self, value): # type: (str) -> None self.status = value def set_http_status(self, http_status): # type: (int) -> None self.set_tag( "http.status_code", str(http_status) ) # we keep this for backwards compatability self.set_data(SPANDATA.HTTP_STATUS_CODE, http_status) if http_status < 400: self.set_status("ok") elif 400 <= http_status < 500: if http_status == 403: self.set_status("permission_denied") elif http_status == 404: self.set_status("not_found") elif http_status == 429: self.set_status("resource_exhausted") elif http_status == 413: self.set_status("failed_precondition") elif http_status == 401: self.set_status("unauthenticated") elif http_status == 409: self.set_status("already_exists") else: self.set_status("invalid_argument") elif 500 <= http_status < 600: if http_status == 504: self.set_status("deadline_exceeded") elif http_status == 501: self.set_status("unimplemented") elif http_status == 503: self.set_status("unavailable") else: self.set_status("internal_error") else: self.set_status("unknown_error") def is_success(self): # type: () -> bool return self.status == "ok" def finish(self, hub=None, end_timestamp=None): # type: (Optional[sentry_sdk.Hub], Optional[Union[float, datetime]]) -> Optional[str] # Note: would be type: (Optional[sentry_sdk.Hub]) -> None, but that leads # to incompatible return types for Span.finish and Transaction.finish. """Sets the end timestamp of the span. Additionally it also creates a breadcrumb from the span, if the span represents a database or HTTP request. :param hub: The hub to use for this transaction. If not provided, the current hub will be used. :param end_timestamp: Optional timestamp that should be used as timestamp instead of the current time. :return: Always ``None``. The type is ``Optional[str]`` to match the return value of :py:meth:`sentry_sdk.tracing.Transaction.finish`. """ if self.timestamp is not None: # This span is already finished, ignore. 
return None hub = hub or self.hub or sentry_sdk.Hub.current try: if end_timestamp: if isinstance(end_timestamp, float): end_timestamp = utc_from_timestamp(end_timestamp) self.timestamp = end_timestamp else: elapsed = nanosecond_time() - self._start_timestamp_monotonic_ns self.timestamp = self.start_timestamp + timedelta( microseconds=elapsed / 1000 ) except AttributeError: self.timestamp = datetime_utcnow() maybe_create_breadcrumbs_from_span(hub, self) return None def to_json(self): # type: () -> Dict[str, Any] """Returns a JSON-compatible representation of the span.""" rv = { "trace_id": self.trace_id, "span_id": self.span_id, "parent_span_id": self.parent_span_id, "same_process_as_parent": self.same_process_as_parent, "op": self.op, "description": self.description, "start_timestamp": self.start_timestamp, "timestamp": self.timestamp, } # type: Dict[str, Any] if self.status: self._tags["status"] = self.status if self._local_aggregator is not None: metrics_summary = self._local_aggregator.to_json() if metrics_summary: rv["_metrics_summary"] = metrics_summary tags = self._tags if tags: rv["tags"] = tags data = self._data if data: rv["data"] = data return rv def get_trace_context(self): # type: () -> Any rv = { "trace_id": self.trace_id, "span_id": self.span_id, "parent_span_id": self.parent_span_id, "op": self.op, "description": self.description, } # type: Dict[str, Any] if self.status: rv["status"] = self.status if self.containing_transaction: rv[ "dynamic_sampling_context" ] = self.containing_transaction.get_baggage().dynamic_sampling_context() return rv class Transaction(Span): """The Transaction is the root element that holds all the spans for Sentry performance instrumentation.""" __slots__ = ( "name", "source", "parent_sampled", # used to create baggage value for head SDKs in dynamic sampling "sample_rate", "_measurements", "_contexts", "_profile", "_baggage", ) def __init__( self, name="", # type: str parent_sampled=None, # type: Optional[bool] baggage=None, # type: Optional[Baggage] source=TRANSACTION_SOURCE_CUSTOM, # type: str **kwargs # type: Any ): # type: (...) -> None """Constructs a new Transaction. :param name: Identifier of the transaction. Will show up in the Sentry UI. :param parent_sampled: Whether the parent transaction was sampled. If True this transaction will be kept, if False it will be discarded. :param baggage: The W3C baggage header value. (see https://www.w3.org/TR/baggage/) :param source: A string describing the source of the transaction name. This will be used to determine the transaction's type. See https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations for more information. Default "custom". """ # TODO: consider removing this in a future release. # This is for backwards compatibility with releases before Transaction # existed, to allow for a smoother transition. if not name and "transaction" in kwargs: logger.warning( "Deprecated: use Transaction(name=...) to create transactions " "instead of Span(transaction=...)." 
            )
            name = kwargs.pop("transaction")

        super(Transaction, self).__init__(**kwargs)

        self.name = name
        self.source = source
        self.sample_rate = None  # type: Optional[float]
        self.parent_sampled = parent_sampled
        self._measurements = {}  # type: Dict[str, Any]
        self._contexts = {}  # type: Dict[str, Any]
        self._profile = None  # type: Optional[sentry_sdk.profiler.Profile]
        self._baggage = baggage

    def __repr__(self):
        # type: () -> str
        return (
            "<%s(name=%r, op=%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r, source=%r)>"
            % (
                self.__class__.__name__,
                self.name,
                self.op,
                self.trace_id,
                self.span_id,
                self.parent_span_id,
                self.sampled,
                self.source,
            )
        )

    def __enter__(self):
        # type: () -> Transaction
        super(Transaction, self).__enter__()

        if self._profile is not None:
            self._profile.__enter__()

        return self

    def __exit__(self, ty, value, tb):
        # type: (Optional[Any], Optional[Any], Optional[Any]) -> None
        if self._profile is not None:
            self._profile.__exit__(ty, value, tb)

        super(Transaction, self).__exit__(ty, value, tb)

    @property
    def containing_transaction(self):
        # type: () -> Transaction
        """The root element of the span tree.
        In the case of a transaction it is the transaction itself.
        """
        # Transactions (as spans) belong to themselves (as transactions). This
        # is a getter rather than a regular attribute to avoid having a
        # circular reference.
        return self

    def finish(self, hub=None, end_timestamp=None):
        # type: (Optional[sentry_sdk.Hub], Optional[Union[float, datetime]]) -> Optional[str]
        """Finishes the transaction and sends it to Sentry.
        All finished spans in the transaction will also be sent to Sentry.

        :param hub: The hub to use for this transaction.
            If not provided, the current hub will be used.
        :param end_timestamp: Optional timestamp that should
            be used as the end timestamp instead of the current time.

        :return: The event ID if the transaction was sent to Sentry,
            otherwise None.
        """
        if self.timestamp is not None:
            # This transaction is already finished, ignore.
            return None

        hub = hub or self.hub or sentry_sdk.Hub.current
        client = hub.client

        if client is None:
            # We have no client and therefore nowhere to send this transaction.
            return None

        # This is a de facto proxy for checking if sampled = False
        if self._span_recorder is None:
            logger.debug("Discarding transaction because sampled = False")

            # This is not entirely accurate because discards here are not
            # exclusively based on the sample rate but also on the traces
            # sampler; we handle both the same way here.
            if client.transport and has_tracing_enabled(client.options):
                if client.monitor and client.monitor.downsample_factor > 0:
                    reason = "backpressure"
                else:
                    reason = "sample_rate"

                client.transport.record_lost_event(reason, data_category="transaction")

            return None

        if not self.name:
            logger.warning(
                "Transaction has no name, falling back to `<unlabeled transaction>`."
            )
            self.name = "<unlabeled transaction>"

        super(Transaction, self).finish(hub, end_timestamp)

        if not self.sampled:
            # At this point a `sampled = None` should have already been resolved
            # to a concrete decision.
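            # (That resolution happens in `_set_initial_sampling_decision`,
            # which `start_transaction` invokes when the transaction is
            # created, so a `sampled` of None here means the transaction never
            # went through it.)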
if self.sampled is None: logger.warning("Discarding transaction without sampling decision.") return None finished_spans = [ span.to_json() for span in self._span_recorder.spans if span.timestamp is not None ] # we do this to break the circular reference of transaction -> span # recorder -> span -> containing transaction (which is where we started) # before either the spans or the transaction goes out of scope and has # to be garbage collected self._span_recorder = None contexts = {} contexts.update(self._contexts) contexts.update({"trace": self.get_trace_context()}) event = { "type": "transaction", "transaction": self.name, "transaction_info": {"source": self.source}, "contexts": contexts, "tags": self._tags, "timestamp": self.timestamp, "start_timestamp": self.start_timestamp, "spans": finished_spans, } # type: Event if self._profile is not None and self._profile.valid(): event["profile"] = self._profile self._profile = None event["measurements"] = self._measurements # This is here since `to_json` is not invoked. This really should # be gone when we switch to onlyspans. if self._local_aggregator is not None: metrics_summary = self._local_aggregator.to_json() if metrics_summary: event["_metrics_summary"] = metrics_summary return hub.capture_event(event) def set_measurement(self, name, value, unit=""): # type: (str, float, MeasurementUnit) -> None self._measurements[name] = {"value": value, "unit": unit} def set_context(self, key, value): # type: (str, Any) -> None """Sets a context. Transactions can have multiple contexts and they should follow the format described in the "Contexts Interface" documentation. :param key: The name of the context. :param value: The information about the context. """ self._contexts[key] = value def set_http_status(self, http_status): # type: (int) -> None """Sets the status of the Transaction according to the given HTTP status. :param http_status: The HTTP status code.""" super(Transaction, self).set_http_status(http_status) self.set_context("response", {"status_code": http_status}) def to_json(self): # type: () -> Dict[str, Any] """Returns a JSON-compatible representation of the transaction.""" rv = super(Transaction, self).to_json() rv["name"] = self.name rv["source"] = self.source rv["sampled"] = self.sampled return rv def get_baggage(self): # type: () -> Baggage """Returns the :py:class:`~sentry_sdk.tracing_utils.Baggage` associated with the Transaction. The first time a new baggage with Sentry items is made, it will be frozen.""" if not self._baggage or self._baggage.mutable: self._baggage = Baggage.populate_from_transaction(self) return self._baggage def _set_initial_sampling_decision(self, sampling_context): # type: (SamplingContext) -> None """ Sets the transaction's sampling decision, according to the following precedence rules: 1. If a sampling decision is passed to `start_transaction` (`start_transaction(name: "my transaction", sampled: True)`), that decision will be used, regardless of anything else 2. If `traces_sampler` is defined, its decision will be used. It can choose to keep or ignore any parent sampling decision, or use the sampling context data to make its own decision or to choose a sample rate for the transaction. 3. If `traces_sampler` is not defined, but there's a parent sampling decision, the parent sampling decision will be used. 4. If `traces_sampler` is not defined and there's no parent sampling decision, `traces_sample_rate` will be used. 
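
        For illustration only (``my_traces_sampler`` and the 25% rate are
        hypothetical), a ``traces_sampler`` implementing rule 2 could inherit
        any parent decision and fall back to a fixed rate for new traces:

        .. code-block:: python

            def my_traces_sampler(sampling_context):
                if sampling_context.get("parent_sampled") is not None:
                    return sampling_context["parent_sampled"]
                return 0.25

            sentry_sdk.init(dsn="...", traces_sampler=my_traces_sampler)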
""" hub = self.hub or sentry_sdk.Hub.current client = hub.client options = (client and client.options) or {} transaction_description = "{op}transaction <{name}>".format( op=("<" + self.op + "> " if self.op else ""), name=self.name ) # nothing to do if there's no client or if tracing is disabled if not client or not has_tracing_enabled(options): self.sampled = False return # if the user has forced a sampling decision by passing a `sampled` # value when starting the transaction, go with that if self.sampled is not None: self.sample_rate = float(self.sampled) return # we would have bailed already if neither `traces_sampler` nor # `traces_sample_rate` were defined, so one of these should work; prefer # the hook if so sample_rate = ( options["traces_sampler"](sampling_context) if callable(options.get("traces_sampler")) else ( # default inheritance behavior sampling_context["parent_sampled"] if sampling_context["parent_sampled"] is not None else options["traces_sample_rate"] ) ) # Since this is coming from the user (or from a function provided by the # user), who knows what we might get. (The only valid values are # booleans or numbers between 0 and 1.) if not is_valid_sample_rate(sample_rate, source="Tracing"): logger.warning( "[Tracing] Discarding {transaction_description} because of invalid sample rate.".format( transaction_description=transaction_description, ) ) self.sampled = False return self.sample_rate = float(sample_rate) if client.monitor: self.sample_rate /= 2**client.monitor.downsample_factor # if the function returned 0 (or false), or if `traces_sample_rate` is # 0, it's a sign the transaction should be dropped if not self.sample_rate: logger.debug( "[Tracing] Discarding {transaction_description} because {reason}".format( transaction_description=transaction_description, reason=( "traces_sampler returned 0 or False" if callable(options.get("traces_sampler")) else "traces_sample_rate is set to 0" ), ) ) self.sampled = False return # Now we roll the dice. random.random is inclusive of 0, but not of 1, # so strict < is safe here. 
        # In case sample_rate is a boolean, cast it to a float
        # (True becomes 1.0 and False becomes 0.0)
        self.sampled = random.random() < self.sample_rate

        if self.sampled:
            logger.debug(
                "[Tracing] Starting {transaction_description}".format(
                    transaction_description=transaction_description,
                )
            )
        else:
            logger.debug(
                "[Tracing] Discarding {transaction_description} because it's not included in the random sample (sampling rate = {sample_rate})".format(
                    transaction_description=transaction_description,
                    sample_rate=self.sample_rate,
                )
            )


class NoOpSpan(Span):
    def __repr__(self):
        # type: () -> str
        return self.__class__.__name__

    @property
    def containing_transaction(self):
        # type: () -> Optional[Transaction]
        return None

    def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs):
        # type: (str, **Any) -> NoOpSpan
        return NoOpSpan()

    def new_span(self, **kwargs):
        # type: (**Any) -> NoOpSpan
        return self.start_child(**kwargs)

    def to_traceparent(self):
        # type: () -> str
        return ""

    def to_baggage(self):
        # type: () -> Optional[Baggage]
        return None

    def get_baggage(self):
        # type: () -> Optional[Baggage]
        return None

    def iter_headers(self):
        # type: () -> Iterator[Tuple[str, str]]
        return iter(())

    def set_tag(self, key, value):
        # type: (str, Any) -> None
        pass

    def set_data(self, key, value):
        # type: (str, Any) -> None
        pass

    def set_status(self, value):
        # type: (str) -> None
        pass

    def set_http_status(self, http_status):
        # type: (int) -> None
        pass

    def is_success(self):
        # type: () -> bool
        return True

    def to_json(self):
        # type: () -> Dict[str, Any]
        return {}

    def get_trace_context(self):
        # type: () -> Any
        return {}

    def finish(self, hub=None, end_timestamp=None):
        # type: (Optional[sentry_sdk.Hub], Optional[Union[float, datetime]]) -> Optional[str]
        pass

    def set_measurement(self, name, value, unit=""):
        # type: (str, float, MeasurementUnit) -> None
        pass

    def set_context(self, key, value):
        # type: (str, Any) -> None
        pass

    def init_span_recorder(self, maxlen):
        # type: (int) -> None
        pass

    def _set_initial_sampling_decision(self, sampling_context):
        # type: (SamplingContext) -> None
        pass


def trace(func=None):
    # type: (Any) -> Any
    """
    Decorator to start a child span under the existing current transaction.
    If there is no current transaction, then nothing will be traced.

    .. code-block::
        :caption: Usage

        import sentry_sdk

        @sentry_sdk.trace
        def my_function():
            ...

        @sentry_sdk.trace
        async def my_async_function():
            ...
    """
    if PY2:
        from sentry_sdk.tracing_utils_py2 import start_child_span_decorator
    else:
        from sentry_sdk.tracing_utils_py3 import start_child_span_decorator

    # This pattern allows usage of both @sentry_traced and @sentry_traced(...)
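    # For illustration, both forms below resolve to the same wrapped function
    # (in the bare form `func` is the function itself; in the called form
    # `func` is None, so the decorator itself is returned and then applied):
    #
    #     @sentry_sdk.trace
    #     def compute(): ...
    #
    #     @sentry_sdk.trace()
    #     def compute(): ...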
# See https://stackoverflow.com/questions/52126071/decorator-with-arguments-avoid-parenthesis-when-no-arguments/52126278 if func: return start_child_span_decorator(func) else: return start_child_span_decorator # Circular imports from sentry_sdk.tracing_utils import ( Baggage, EnvironHeaders, extract_sentrytrace_data, has_tracing_enabled, maybe_create_breadcrumbs_from_span, ) from sentry_sdk.metrics import LocalAggregator sentry-python-1.39.2/sentry_sdk/tracing_utils.py000066400000000000000000000364561454744723200221110ustar00rootroot00000000000000import contextlib import os import re import sys import sentry_sdk from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.utils import ( capture_internal_exceptions, Dsn, match_regex_list, to_string, is_sentry_url, _is_external_source, _module_in_list, ) from sentry_sdk._compat import PY2, iteritems from sentry_sdk._types import TYPE_CHECKING if PY2: from collections import Mapping from urllib import quote, unquote else: from collections.abc import Mapping from urllib.parse import quote, unquote if TYPE_CHECKING: import typing from typing import Any from typing import Dict from typing import Generator from typing import Optional from typing import Union from types import FrameType SENTRY_TRACE_REGEX = re.compile( "^[ \t]*" # whitespace "([0-9a-f]{32})?" # trace_id "-?([0-9a-f]{16})?" # span_id "-?([01])?" # sampled "[ \t]*$" # whitespace ) # This is a normal base64 regex, modified to reflect that fact that we strip the # trailing = or == off base64_stripped = ( # any of the characters in the base64 "alphabet", in multiples of 4 "([a-zA-Z0-9+/]{4})*" # either nothing or 2 or 3 base64-alphabet characters (see # https://en.wikipedia.org/wiki/Base64#Decoding_Base64_without_padding for # why there's never only 1 extra character) "([a-zA-Z0-9+/]{2,3})?" ) class EnvironHeaders(Mapping): # type: ignore def __init__( self, environ, # type: typing.Mapping[str, str] prefix="HTTP_", # type: str ): # type: (...) -> None self.environ = environ self.prefix = prefix def __getitem__(self, key): # type: (str) -> Optional[Any] return self.environ[self.prefix + key.replace("-", "_").upper()] def __len__(self): # type: () -> int return sum(1 for _ in iter(self)) def __iter__(self): # type: () -> Generator[str, None, None] for k in self.environ: if not isinstance(k, str): continue k = k.replace("-", "_").upper() if not k.startswith(self.prefix): continue yield k[len(self.prefix) :] def has_tracing_enabled(options): # type: (Optional[Dict[str, Any]]) -> bool """ Returns True if either traces_sample_rate or traces_sampler is defined and enable_tracing is set and not false. """ if options is None: return False return bool( options.get("enable_tracing") is not False and ( options.get("traces_sample_rate") is not None or options.get("traces_sampler") is not None ) ) @contextlib.contextmanager def record_sql_queries( hub, # type: sentry_sdk.Hub cursor, # type: Any query, # type: Any params_list, # type: Any paramstyle, # type: Optional[str] executemany, # type: bool record_cursor_repr=False, # type: bool ): # type: (...) 
-> Generator[sentry_sdk.tracing.Span, None, None]

    # TODO: Bring back capturing of params by default
    if hub.client and hub.client.options["_experiments"].get(
        "record_sql_params", False
    ):
        if not params_list or params_list == [None]:
            params_list = None

        if paramstyle == "pyformat":
            paramstyle = "format"
    else:
        params_list = None
        paramstyle = None

    query = _format_sql(cursor, query)

    data = {}
    if params_list is not None:
        data["db.params"] = params_list
    if paramstyle is not None:
        data["db.paramstyle"] = paramstyle
    if executemany:
        data["db.executemany"] = True
    if record_cursor_repr and cursor is not None:
        data["db.cursor"] = cursor

    with capture_internal_exceptions():
        hub.add_breadcrumb(message=query, category="query", data=data)

    with hub.start_span(op=OP.DB, description=query) as span:
        for k, v in data.items():
            span.set_data(k, v)
        yield span


def maybe_create_breadcrumbs_from_span(hub, span):
    # type: (sentry_sdk.Hub, sentry_sdk.tracing.Span) -> None
    if span.op == OP.DB_REDIS:
        hub.add_breadcrumb(
            message=span.description, type="redis", category="redis", data=span._tags
        )
    elif span.op == OP.HTTP_CLIENT:
        hub.add_breadcrumb(type="http", category="httplib", data=span._data)
    elif span.op == "subprocess":
        hub.add_breadcrumb(
            type="subprocess",
            category="subprocess",
            message=span.description,
            data=span._data,
        )


def add_query_source(hub, span):
    # type: (sentry_sdk.Hub, sentry_sdk.tracing.Span) -> None
    """
    Adds OTel compatible source code information to the span
    """
    client = hub.client
    if client is None:
        return

    if span.timestamp is None or span.start_timestamp is None:
        return

    should_add_query_source = client.options.get("enable_db_query_source", False)
    if not should_add_query_source:
        return

    duration = span.timestamp - span.start_timestamp
    threshold = client.options.get("db_query_source_threshold_ms", 0)
    # Compare the full duration against the threshold. (Using the timedelta's
    # `microseconds` attribute here would be a bug: it only holds the
    # sub-second component, so queries longer than one second could be
    # misclassified as fast.)
    slow_query = duration.total_seconds() * 1000 > threshold

    if not slow_query:
        return

    project_root = client.options["project_root"]
    in_app_include = client.options.get("in_app_include")
    in_app_exclude = client.options.get("in_app_exclude")

    # Find the correct frame
    frame = sys._getframe()  # type: Union[FrameType, None]
    while frame is not None:
        try:
            abs_path = frame.f_code.co_filename
            if abs_path and PY2:
                abs_path = os.path.abspath(abs_path)
        except Exception:
            abs_path = ""

        try:
            namespace = frame.f_globals.get("__name__")  # type: Optional[str]
        except Exception:
            namespace = None

        is_sentry_sdk_frame = namespace is not None and namespace.startswith(
            "sentry_sdk."
) should_be_included = not _is_external_source(abs_path) if namespace is not None: if in_app_exclude and _module_in_list(namespace, in_app_exclude): should_be_included = False if in_app_include and _module_in_list(namespace, in_app_include): # in_app_include takes precedence over in_app_exclude, so doing it # at the end should_be_included = True if ( abs_path.startswith(project_root) and should_be_included and not is_sentry_sdk_frame ): break frame = frame.f_back else: frame = None # Set the data if frame is not None: try: lineno = frame.f_lineno except Exception: lineno = None if lineno is not None: span.set_data(SPANDATA.CODE_LINENO, frame.f_lineno) try: namespace = frame.f_globals.get("__name__") except Exception: namespace = None if namespace is not None: span.set_data(SPANDATA.CODE_NAMESPACE, namespace) try: filepath = frame.f_code.co_filename except Exception: filepath = None if filepath is not None: if project_root is not None and filepath.startswith(project_root): in_app_path = filepath.replace(project_root, "").lstrip(os.sep) else: in_app_path = filepath span.set_data(SPANDATA.CODE_FILEPATH, in_app_path) try: code_function = frame.f_code.co_name except Exception: code_function = None if code_function is not None: span.set_data(SPANDATA.CODE_FUNCTION, frame.f_code.co_name) def extract_sentrytrace_data(header): # type: (Optional[str]) -> Optional[Dict[str, Union[str, bool, None]]] """ Given a `sentry-trace` header string, return a dictionary of data. """ if not header: return None if header.startswith("00-") and header.endswith("-00"): header = header[3:-3] match = SENTRY_TRACE_REGEX.match(header) if not match: return None trace_id, parent_span_id, sampled_str = match.groups() parent_sampled = None if trace_id: trace_id = "{:032x}".format(int(trace_id, 16)) if parent_span_id: parent_span_id = "{:016x}".format(int(parent_span_id, 16)) if sampled_str: parent_sampled = sampled_str != "0" return { "trace_id": trace_id, "parent_span_id": parent_span_id, "parent_sampled": parent_sampled, } def _format_sql(cursor, sql): # type: (Any, str) -> Optional[str] real_sql = None # If we're using psycopg2, it could be that we're # looking at a query that uses Composed objects. Use psycopg2's mogrify # function to format the query. We lose per-parameter trimming but gain # accuracy in formatting. try: if hasattr(cursor, "mogrify"): real_sql = cursor.mogrify(sql) if isinstance(real_sql, bytes): real_sql = real_sql.decode(cursor.connection.encoding) except Exception: real_sql = None return real_sql or to_string(sql) class Baggage(object): """ The W3C Baggage header information (see https://www.w3.org/TR/baggage/). 
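
    For illustration, an incoming header may mix Sentry items (recognized via
    the ``sentry-`` prefix below) with third-party items, e.g.::

        sentry-trace_id=771a43a4192642f0b136d5159a501700,sentry-sample_rate=0.25,thirdparty=value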
""" __slots__ = ("sentry_items", "third_party_items", "mutable") SENTRY_PREFIX = "sentry-" SENTRY_PREFIX_REGEX = re.compile("^sentry-") def __init__( self, sentry_items, # type: Dict[str, str] third_party_items="", # type: str mutable=True, # type: bool ): self.sentry_items = sentry_items self.third_party_items = third_party_items self.mutable = mutable @classmethod def from_incoming_header(cls, header): # type: (Optional[str]) -> Baggage """ freeze if incoming header already has sentry baggage """ sentry_items = {} third_party_items = "" mutable = True if header: for item in header.split(","): if "=" not in item: continue with capture_internal_exceptions(): item = item.strip() key, val = item.split("=") if Baggage.SENTRY_PREFIX_REGEX.match(key): baggage_key = unquote(key.split("-")[1]) sentry_items[baggage_key] = unquote(val) mutable = False else: third_party_items += ("," if third_party_items else "") + item return Baggage(sentry_items, third_party_items, mutable) @classmethod def from_options(cls, scope): # type: (sentry_sdk.scope.Scope) -> Optional[Baggage] sentry_items = {} # type: Dict[str, str] third_party_items = "" mutable = False client = sentry_sdk.Hub.current.client if client is None or scope._propagation_context is None: return Baggage(sentry_items) options = client.options propagation_context = scope._propagation_context if propagation_context is not None and "trace_id" in propagation_context: sentry_items["trace_id"] = propagation_context["trace_id"] if options.get("environment"): sentry_items["environment"] = options["environment"] if options.get("release"): sentry_items["release"] = options["release"] if options.get("dsn"): sentry_items["public_key"] = Dsn(options["dsn"]).public_key if options.get("traces_sample_rate"): sentry_items["sample_rate"] = options["traces_sample_rate"] user = (scope and scope._user) or {} if user.get("segment"): sentry_items["user_segment"] = user["segment"] return Baggage(sentry_items, third_party_items, mutable) @classmethod def populate_from_transaction(cls, transaction): # type: (sentry_sdk.tracing.Transaction) -> Baggage """ Populate fresh baggage entry with sentry_items and make it immutable if this is the head SDK which originates traces. """ hub = transaction.hub or sentry_sdk.Hub.current client = hub.client sentry_items = {} # type: Dict[str, str] if not client: return Baggage(sentry_items) options = client.options or {} user = (hub.scope and hub.scope._user) or {} sentry_items["trace_id"] = transaction.trace_id if options.get("environment"): sentry_items["environment"] = options["environment"] if options.get("release"): sentry_items["release"] = options["release"] if options.get("dsn"): sentry_items["public_key"] = Dsn(options["dsn"]).public_key if ( transaction.name and transaction.source not in LOW_QUALITY_TRANSACTION_SOURCES ): sentry_items["transaction"] = transaction.name if user.get("segment"): sentry_items["user_segment"] = user["segment"] if transaction.sample_rate is not None: sentry_items["sample_rate"] = str(transaction.sample_rate) if transaction.sampled is not None: sentry_items["sampled"] = "true" if transaction.sampled else "false" # there's an existing baggage but it was mutable, # which is why we are creating this new baggage. # However, if by chance the user put some sentry items in there, give them precedence. 
if transaction._baggage and transaction._baggage.sentry_items: sentry_items.update(transaction._baggage.sentry_items) return Baggage(sentry_items, mutable=False) def freeze(self): # type: () -> None self.mutable = False def dynamic_sampling_context(self): # type: () -> Dict[str, str] header = {} for key, item in iteritems(self.sentry_items): header[key] = item return header def serialize(self, include_third_party=False): # type: (bool) -> str items = [] for key, val in iteritems(self.sentry_items): with capture_internal_exceptions(): item = Baggage.SENTRY_PREFIX + quote(key) + "=" + quote(str(val)) items.append(item) if include_third_party: items.append(self.third_party_items) return ",".join(items) def should_propagate_trace(hub, url): # type: (sentry_sdk.Hub, str) -> bool """ Returns True if url matches trace_propagation_targets configured in the given hub. Otherwise, returns False. """ client = hub.client # type: Any trace_propagation_targets = client.options["trace_propagation_targets"] if is_sentry_url(hub, url): return False return match_regex_list(url, trace_propagation_targets, substring_matching=True) def normalize_incoming_data(incoming_data): # type: (Dict[str, Any]) -> Dict[str, Any] """ Normalizes incoming data so the keys are all lowercase with dashes instead of underscores and stripped from known prefixes. """ data = {} for key, value in incoming_data.items(): if key.startswith("HTTP_"): key = key[5:] key = key.replace("_", "-").lower() data[key] = value return data # Circular imports from sentry_sdk.tracing import LOW_QUALITY_TRANSACTION_SOURCES sentry-python-1.39.2/sentry_sdk/tracing_utils_py2.py000066400000000000000000000023341454744723200226670ustar00rootroot00000000000000from functools import wraps import sentry_sdk from sentry_sdk import get_current_span from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.consts import OP from sentry_sdk.utils import logger, qualname_from_function if TYPE_CHECKING: from typing import Any def start_child_span_decorator(func): # type: (Any) -> Any """ Decorator to add child spans for functions. This is the Python 2 compatible version of the decorator. Duplicated code from ``sentry_sdk.tracing_utils_python3.start_child_span_decorator``. See also ``sentry_sdk.tracing.trace()``. """ @wraps(func) def func_with_tracing(*args, **kwargs): # type: (*Any, **Any) -> Any span = get_current_span(sentry_sdk.Hub.current) if span is None: logger.warning( "Can not create a child span for %s. " "Please start a Sentry transaction before calling this function.", qualname_from_function(func), ) return func(*args, **kwargs) with span.start_child( op=OP.FUNCTION, description=qualname_from_function(func), ): return func(*args, **kwargs) return func_with_tracing sentry-python-1.39.2/sentry_sdk/tracing_utils_py3.py000066400000000000000000000041421454744723200226670ustar00rootroot00000000000000import inspect from functools import wraps import sentry_sdk from sentry_sdk import get_current_span from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.consts import OP from sentry_sdk.utils import logger, qualname_from_function if TYPE_CHECKING: from typing import Any def start_child_span_decorator(func): # type: (Any) -> Any """ Decorator to add child spans for functions. This is the Python 3 compatible version of the decorator. For Python 2 there is duplicated code here: ``sentry_sdk.tracing_utils_python2.start_child_span_decorator()``. See also ``sentry_sdk.tracing.trace()``. 
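
    A usage sketch (``fetch_user`` is illustrative); in application code this
    decorator is normally applied indirectly via ``sentry_sdk.trace``:

    .. code-block:: python

        @start_child_span_decorator
        async def fetch_user(user_id):
            ...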
""" # Asynchronous case if inspect.iscoroutinefunction(func): @wraps(func) async def func_with_tracing(*args, **kwargs): # type: (*Any, **Any) -> Any span = get_current_span(sentry_sdk.Hub.current) if span is None: logger.warning( "Can not create a child span for %s. " "Please start a Sentry transaction before calling this function.", qualname_from_function(func), ) return await func(*args, **kwargs) with span.start_child( op=OP.FUNCTION, description=qualname_from_function(func), ): return await func(*args, **kwargs) # Synchronous case else: @wraps(func) def func_with_tracing(*args, **kwargs): # type: (*Any, **Any) -> Any span = get_current_span(sentry_sdk.Hub.current) if span is None: logger.warning( "Can not create a child span for %s. " "Please start a Sentry transaction before calling this function.", qualname_from_function(func), ) return func(*args, **kwargs) with span.start_child( op=OP.FUNCTION, description=qualname_from_function(func), ): return func(*args, **kwargs) return func_with_tracing sentry-python-1.39.2/sentry_sdk/transport.py000066400000000000000000000457751454744723200213020ustar00rootroot00000000000000from __future__ import print_function import io import urllib3 import certifi import gzip import time from datetime import timedelta from collections import defaultdict from sentry_sdk.utils import Dsn, logger, capture_internal_exceptions, json_dumps from sentry_sdk.worker import BackgroundWorker from sentry_sdk.envelope import Envelope, Item, PayloadRef from sentry_sdk._compat import datetime_utcnow from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from datetime import datetime from typing import Any from typing import Callable from typing import Dict from typing import Iterable from typing import Optional from typing import Tuple from typing import Type from typing import Union from typing import DefaultDict from urllib3.poolmanager import PoolManager from urllib3.poolmanager import ProxyManager from sentry_sdk._types import Event, EndpointType DataCategory = Optional[str] try: from urllib.request import getproxies except ImportError: from urllib import getproxies # type: ignore class Transport(object): """Baseclass for all transports. A transport is used to send an event to sentry. """ parsed_dsn = None # type: Optional[Dsn] def __init__( self, options=None # type: Optional[Dict[str, Any]] ): # type: (...) -> None self.options = options if options and options["dsn"] is not None and options["dsn"]: self.parsed_dsn = Dsn(options["dsn"]) else: self.parsed_dsn = None def capture_event( self, event # type: Event ): # type: (...) -> None """ This gets invoked with the event dictionary when an event should be sent to sentry. """ raise NotImplementedError() def capture_envelope( self, envelope # type: Envelope ): # type: (...) -> None """ Send an envelope to Sentry. Envelopes are a data container format that can hold any type of data submitted to Sentry. We use it for transactions and sessions, but regular "error" events should go through `capture_event` for backwards compat. """ raise NotImplementedError() def flush( self, timeout, # type: float callback=None, # type: Optional[Any] ): # type: (...) -> None """Wait `timeout` seconds for the current events to be sent out.""" pass def kill(self): # type: () -> None """Forcefully kills the transport.""" pass def record_lost_event( self, reason, # type: str data_category=None, # type: Optional[str] item=None, # type: Optional[Item] ): # type: (...) 
-> None """This increments a counter for event loss by reason and data category. """ return None def is_healthy(self): # type: () -> bool return True def __del__(self): # type: () -> None try: self.kill() except Exception: pass def _parse_rate_limits(header, now=None): # type: (Any, Optional[datetime]) -> Iterable[Tuple[DataCategory, datetime]] if now is None: now = datetime_utcnow() for limit in header.split(","): try: retry_after, categories, _ = limit.strip().split(":", 2) retry_after = now + timedelta(seconds=int(retry_after)) for category in categories and categories.split(";") or (None,): yield category, retry_after except (LookupError, ValueError): continue class HttpTransport(Transport): """The default HTTP transport.""" def __init__( self, options # type: Dict[str, Any] ): # type: (...) -> None from sentry_sdk.consts import VERSION Transport.__init__(self, options) assert self.parsed_dsn is not None self.options = options # type: Dict[str, Any] self._worker = BackgroundWorker(queue_size=options["transport_queue_size"]) self._auth = self.parsed_dsn.to_auth("sentry.python/%s" % VERSION) self._disabled_until = {} # type: Dict[DataCategory, datetime] self._retry = urllib3.util.Retry() self._discarded_events = defaultdict( int ) # type: DefaultDict[Tuple[str, str], int] self._last_client_report_sent = time.time() compresslevel = options.get("_experiments", {}).get( "transport_zlib_compression_level" ) self._compresslevel = 9 if compresslevel is None else int(compresslevel) num_pools = options.get("_experiments", {}).get("transport_num_pools") self._num_pools = 2 if num_pools is None else int(num_pools) self._pool = self._make_pool( self.parsed_dsn, http_proxy=options["http_proxy"], https_proxy=options["https_proxy"], ca_certs=options["ca_certs"], proxy_headers=options["proxy_headers"], ) from sentry_sdk import Hub self.hub_cls = Hub def record_lost_event( self, reason, # type: str data_category=None, # type: Optional[str] item=None, # type: Optional[Item] ): # type: (...) -> None if not self.options["send_client_reports"]: return quantity = 1 if item is not None: data_category = item.data_category if data_category == "attachment": # quantity of 0 is actually 1 as we do not want to count # empty attachments as actually empty. quantity = len(item.get_bytes()) or 1 elif data_category is None: raise TypeError("data category not provided") self._discarded_events[data_category, reason] += quantity def _update_rate_limits(self, response): # type: (urllib3.BaseHTTPResponse) -> None # new sentries with more rate limit insights. We honor this header # no matter of the status code to update our internal rate limits. header = response.headers.get("x-sentry-rate-limits") if header: logger.warning("Rate-limited via x-sentry-rate-limits") self._disabled_until.update(_parse_rate_limits(header)) # old sentries only communicate global rate limit hits via the # retry-after header on 429. This header can also be emitted on new # sentries if a proxy in front wants to globally slow things down. elif response.status == 429: logger.warning("Rate-limited via 429") self._disabled_until[None] = datetime_utcnow() + timedelta( seconds=self._retry.get_retry_after(response) or 60 ) def _send_request( self, body, # type: bytes headers, # type: Dict[str, str] endpoint_type="store", # type: EndpointType envelope=None, # type: Optional[Envelope] ): # type: (...) 
-> None def record_loss(reason): # type: (str) -> None if envelope is None: self.record_lost_event(reason, data_category="error") else: for item in envelope.items: self.record_lost_event(reason, item=item) headers.update( { "User-Agent": str(self._auth.client), "X-Sentry-Auth": str(self._auth.to_header()), } ) try: response = self._pool.request( "POST", str(self._auth.get_api_url(endpoint_type)), body=body, headers=headers, ) except Exception: self.on_dropped_event("network") record_loss("network_error") raise try: self._update_rate_limits(response) if response.status == 429: # if we hit a 429. Something was rate limited but we already # acted on this in `self._update_rate_limits`. Note that we # do not want to record event loss here as we will have recorded # an outcome in relay already. self.on_dropped_event("status_429") pass elif response.status >= 300 or response.status < 200: logger.error( "Unexpected status code: %s (body: %s)", response.status, response.data, ) self.on_dropped_event("status_{}".format(response.status)) record_loss("network_error") finally: response.close() def on_dropped_event(self, reason): # type: (str) -> None return None def _fetch_pending_client_report(self, force=False, interval=60): # type: (bool, int) -> Optional[Item] if not self.options["send_client_reports"]: return None if not (force or self._last_client_report_sent < time.time() - interval): return None discarded_events = self._discarded_events self._discarded_events = defaultdict(int) self._last_client_report_sent = time.time() if not discarded_events: return None return Item( PayloadRef( json={ "timestamp": time.time(), "discarded_events": [ {"reason": reason, "category": category, "quantity": quantity} for ( (category, reason), quantity, ) in discarded_events.items() ], } ), type="client_report", ) def _flush_client_reports(self, force=False): # type: (bool) -> None client_report = self._fetch_pending_client_report(force=force, interval=60) if client_report is not None: self.capture_envelope(Envelope(items=[client_report])) def _check_disabled(self, category): # type: (str) -> bool def _disabled(bucket): # type: (Any) -> bool ts = self._disabled_until.get(bucket) return ts is not None and ts > datetime_utcnow() return _disabled(category) or _disabled(None) def _is_rate_limited(self): # type: () -> bool return any(ts > datetime_utcnow() for ts in self._disabled_until.values()) def _is_worker_full(self): # type: () -> bool return self._worker.full() def is_healthy(self): # type: () -> bool return not (self._is_worker_full() or self._is_rate_limited()) def _send_event( self, event # type: Event ): # type: (...) -> None if self._check_disabled("error"): self.on_dropped_event("self_rate_limits") self.record_lost_event("ratelimit_backoff", data_category="error") return None body = io.BytesIO() if self._compresslevel == 0: body.write(json_dumps(event)) else: with gzip.GzipFile( fileobj=body, mode="w", compresslevel=self._compresslevel ) as f: f.write(json_dumps(event)) assert self.parsed_dsn is not None logger.debug( "Sending event, type:%s level:%s event_id:%s project:%s host:%s" % ( event.get("type") or "null", event.get("level") or "null", event.get("event_id") or "null", self.parsed_dsn.project_id, self.parsed_dsn.host, ) ) headers = { "Content-Type": "application/json", } if self._compresslevel > 0: headers["Content-Encoding"] = "gzip" self._send_request(body.getvalue(), headers=headers) return None def _send_envelope( self, envelope # type: Envelope ): # type: (...) 
-> None # remove all items from the envelope which are over quota new_items = [] for item in envelope.items: if self._check_disabled(item.data_category): if item.data_category in ("transaction", "error", "default"): self.on_dropped_event("self_rate_limits") self.record_lost_event("ratelimit_backoff", item=item) else: new_items.append(item) # Since we're modifying the envelope here make a copy so that others # that hold references do not see their envelope modified. envelope = Envelope(headers=envelope.headers, items=new_items) if not envelope.items: return None # since we're already in the business of sending out an envelope here # check if we have one pending for the stats session envelopes so we # can attach it to this enveloped scheduled for sending. This will # currently typically attach the client report to the most recent # session update. client_report_item = self._fetch_pending_client_report(interval=30) if client_report_item is not None: envelope.items.append(client_report_item) body = io.BytesIO() if self._compresslevel == 0: envelope.serialize_into(body) else: with gzip.GzipFile( fileobj=body, mode="w", compresslevel=self._compresslevel ) as f: envelope.serialize_into(f) assert self.parsed_dsn is not None logger.debug( "Sending envelope [%s] project:%s host:%s", envelope.description, self.parsed_dsn.project_id, self.parsed_dsn.host, ) headers = { "Content-Type": "application/x-sentry-envelope", } if self._compresslevel > 0: headers["Content-Encoding"] = "gzip" self._send_request( body.getvalue(), headers=headers, endpoint_type="envelope", envelope=envelope, ) return None def _get_pool_options(self, ca_certs): # type: (Optional[Any]) -> Dict[str, Any] return { "num_pools": self._num_pools, "cert_reqs": "CERT_REQUIRED", "ca_certs": ca_certs or certifi.where(), } def _in_no_proxy(self, parsed_dsn): # type: (Dsn) -> bool no_proxy = getproxies().get("no") if not no_proxy: return False for host in no_proxy.split(","): host = host.strip() if parsed_dsn.host.endswith(host) or parsed_dsn.netloc.endswith(host): return True return False def _make_pool( self, parsed_dsn, # type: Dsn http_proxy, # type: Optional[str] https_proxy, # type: Optional[str] ca_certs, # type: Optional[Any] proxy_headers, # type: Optional[Dict[str, str]] ): # type: (...) -> Union[PoolManager, ProxyManager] proxy = None no_proxy = self._in_no_proxy(parsed_dsn) # try HTTPS first if parsed_dsn.scheme == "https" and (https_proxy != ""): proxy = https_proxy or (not no_proxy and getproxies().get("https")) # maybe fallback to HTTP proxy if not proxy and (http_proxy != ""): proxy = http_proxy or (not no_proxy and getproxies().get("http")) opts = self._get_pool_options(ca_certs) if proxy: if proxy_headers: opts["proxy_headers"] = proxy_headers if proxy.startswith("socks"): use_socks_proxy = True try: # Check if PySocks depencency is available from urllib3.contrib.socks import SOCKSProxyManager except ImportError: use_socks_proxy = False logger.warning( "You have configured a SOCKS proxy (%s) but support for SOCKS proxies is not installed. Disabling proxy support. Please add `PySocks` (or `urllib3` with the `[socks]` extra) to your dependencies.", proxy, ) if use_socks_proxy: return SOCKSProxyManager(proxy, **opts) else: return urllib3.PoolManager(**opts) else: return urllib3.ProxyManager(proxy, **opts) else: return urllib3.PoolManager(**opts) def capture_event( self, event # type: Event ): # type: (...) 
-> None hub = self.hub_cls.current def send_event_wrapper(): # type: () -> None with hub: with capture_internal_exceptions(): self._send_event(event) self._flush_client_reports() if not self._worker.submit(send_event_wrapper): self.on_dropped_event("full_queue") self.record_lost_event("queue_overflow", data_category="error") def capture_envelope( self, envelope # type: Envelope ): # type: (...) -> None hub = self.hub_cls.current def send_envelope_wrapper(): # type: () -> None with hub: with capture_internal_exceptions(): self._send_envelope(envelope) self._flush_client_reports() if not self._worker.submit(send_envelope_wrapper): self.on_dropped_event("full_queue") for item in envelope.items: self.record_lost_event("queue_overflow", item=item) def flush( self, timeout, # type: float callback=None, # type: Optional[Any] ): # type: (...) -> None logger.debug("Flushing HTTP transport") if timeout > 0: self._worker.submit(lambda: self._flush_client_reports(force=True)) self._worker.flush(timeout, callback) def kill(self): # type: () -> None logger.debug("Killing HTTP transport") self._worker.kill() class _FunctionTransport(Transport): def __init__( self, func # type: Callable[[Event], None] ): # type: (...) -> None Transport.__init__(self) self._func = func def capture_event( self, event # type: Event ): # type: (...) -> None self._func(event) return None def make_transport(options): # type: (Dict[str, Any]) -> Optional[Transport] ref_transport = options["transport"] # If no transport is given, we use the http transport class if ref_transport is None: transport_cls = HttpTransport # type: Type[Transport] elif isinstance(ref_transport, Transport): return ref_transport elif isinstance(ref_transport, type) and issubclass(ref_transport, Transport): transport_cls = ref_transport elif callable(ref_transport): return _FunctionTransport(ref_transport) # if a transport class is given only instantiate it if the dsn is not # empty or None if options["dsn"]: return transport_cls(options) return None sentry-python-1.39.2/sentry_sdk/utils.py000066400000000000000000001401501454744723200203650ustar00rootroot00000000000000import base64 import json import linecache import logging import math import os import re import subprocess import sys import threading import time from collections import namedtuple from copy import copy from decimal import Decimal from numbers import Real try: # Python 3 from urllib.parse import parse_qs from urllib.parse import unquote from urllib.parse import urlencode from urllib.parse import urlsplit from urllib.parse import urlunsplit except ImportError: # Python 2 from cgi import parse_qs # type: ignore from urllib import unquote # type: ignore from urllib import urlencode # type: ignore from urlparse import urlsplit # type: ignore from urlparse import urlunsplit # type: ignore try: # Python 3 FileNotFoundError except NameError: # Python 2 FileNotFoundError = IOError try: # Python 3.11 from builtins import BaseExceptionGroup except ImportError: # Python 3.10 and below BaseExceptionGroup = None # type: ignore from datetime import datetime from functools import partial try: from functools import partialmethod _PARTIALMETHOD_AVAILABLE = True except ImportError: _PARTIALMETHOD_AVAILABLE = False import sentry_sdk from sentry_sdk._compat import PY2, PY33, PY37, implements_str, text_type, urlparse from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.consts import DEFAULT_MAX_VALUE_LENGTH if TYPE_CHECKING: from types import FrameType, TracebackType from typing import ( Any, Callable, 
ContextManager, Dict, Iterator, List, Optional, Set, Tuple, Type, Union, ) from sentry_sdk._types import EndpointType, ExcInfo epoch = datetime(1970, 1, 1) # The logger is created here but initialized in the debug support module logger = logging.getLogger("sentry_sdk.errors") _installed_modules = None BASE64_ALPHABET = re.compile(r"^[a-zA-Z0-9/+=]*$") SENSITIVE_DATA_SUBSTITUTE = "[Filtered]" def json_dumps(data): # type: (Any) -> bytes """Serialize data into a compact JSON representation encoded as UTF-8.""" return json.dumps(data, allow_nan=False, separators=(",", ":")).encode("utf-8") def _get_debug_hub(): # type: () -> Optional[sentry_sdk.Hub] # This function is replaced by debug.py pass def get_git_revision(): # type: () -> Optional[str] try: with open(os.path.devnull, "w+") as null: revision = ( subprocess.Popen( ["git", "rev-parse", "HEAD"], stdout=subprocess.PIPE, stderr=null, stdin=null, ) .communicate()[0] .strip() .decode("utf-8") ) except (OSError, IOError, FileNotFoundError): return None return revision def get_default_release(): # type: () -> Optional[str] """Try to guess a default release.""" release = os.environ.get("SENTRY_RELEASE") if release: return release release = get_git_revision() if release: return release for var in ( "HEROKU_SLUG_COMMIT", "SOURCE_VERSION", "CODEBUILD_RESOLVED_SOURCE_VERSION", "CIRCLE_SHA1", "GAE_DEPLOYMENT_ID", ): release = os.environ.get(var) if release: return release return None def get_sdk_name(installed_integrations): # type: (List[str]) -> str """Return the SDK name including the name of the used web framework.""" # Note: I can not use for example sentry_sdk.integrations.django.DjangoIntegration.identifier # here because if django is not installed the integration is not accessible. framework_integrations = [ "django", "flask", "fastapi", "bottle", "falcon", "quart", "sanic", "starlette", "chalice", "serverless", "pyramid", "tornado", "aiohttp", "aws_lambda", "gcp", "beam", "asgi", "wsgi", ] for integration in framework_integrations: if integration in installed_integrations: return "sentry.python.{}".format(integration) return "sentry.python" class CaptureInternalException(object): __slots__ = () def __enter__(self): # type: () -> ContextManager[Any] return self def __exit__(self, ty, value, tb): # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]) -> bool if ty is not None and value is not None: capture_internal_exception((ty, value, tb)) return True _CAPTURE_INTERNAL_EXCEPTION = CaptureInternalException() def capture_internal_exceptions(): # type: () -> ContextManager[Any] return _CAPTURE_INTERNAL_EXCEPTION def capture_internal_exception(exc_info): # type: (ExcInfo) -> None hub = _get_debug_hub() if hub is not None: hub._capture_internal_exception(exc_info) def to_timestamp(value): # type: (datetime) -> float return (value - epoch).total_seconds() def format_timestamp(value): # type: (datetime) -> str return value.strftime("%Y-%m-%dT%H:%M:%S.%fZ") def event_hint_with_exc_info(exc_info=None): # type: (Optional[ExcInfo]) -> Dict[str, Optional[ExcInfo]] """Creates a hint with the exc info filled in.""" if exc_info is None: exc_info = sys.exc_info() else: exc_info = exc_info_from_error(exc_info) if exc_info[0] is None: exc_info = None return {"exc_info": exc_info} class BadDsn(ValueError): """Raised on invalid DSNs.""" @implements_str class Dsn(object): """Represents a DSN.""" def __init__(self, value): # type: (Union[Dsn, str]) -> None if isinstance(value, Dsn): self.__dict__ = dict(value.__dict__) return 
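        # For illustration, a DSN of the (hypothetical) form
        #     "https://abcdef0123456789@o0.ingest.sentry.io/42"
        # parses below into scheme="https", public_key="abcdef0123456789",
        # host="o0.ingest.sentry.io", port=443, and project_id="42".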
        parts = urlparse.urlsplit(text_type(value))

        if parts.scheme not in ("http", "https"):
            raise BadDsn("Unsupported scheme %r" % parts.scheme)
        self.scheme = parts.scheme

        if parts.hostname is None:
            raise BadDsn("Missing hostname")

        self.host = parts.hostname

        if parts.port is None:
            self.port = self.scheme == "https" and 443 or 80  # type: int
        else:
            self.port = parts.port

        if not parts.username:
            raise BadDsn("Missing public key")

        self.public_key = parts.username
        self.secret_key = parts.password

        path = parts.path.rsplit("/", 1)

        try:
            self.project_id = text_type(int(path.pop()))
        except (ValueError, TypeError):
            raise BadDsn("Invalid project in DSN (%r)" % (parts.path or "")[1:])

        self.path = "/".join(path) + "/"

    @property
    def netloc(self):
        # type: () -> str
        """The netloc part of a DSN."""
        rv = self.host
        if (self.scheme, self.port) not in (("http", 80), ("https", 443)):
            rv = "%s:%s" % (rv, self.port)
        return rv

    def to_auth(self, client=None):
        # type: (Optional[Any]) -> Auth
        """Returns the auth info object for this dsn."""
        return Auth(
            scheme=self.scheme,
            host=self.netloc,
            path=self.path,
            project_id=self.project_id,
            public_key=self.public_key,
            secret_key=self.secret_key,
            client=client,
        )

    def __str__(self):
        # type: () -> str
        return "%s://%s%s@%s%s%s" % (
            self.scheme,
            self.public_key,
            self.secret_key and ":" + self.secret_key or "",
            self.netloc,
            self.path,
            self.project_id,
        )


class Auth(object):
    """Helper object that represents the auth info."""

    def __init__(
        self,
        scheme,
        host,
        project_id,
        public_key,
        secret_key=None,
        version=7,
        client=None,
        path="/",
    ):
        # type: (str, str, str, str, Optional[str], int, Optional[Any], str) -> None
        self.scheme = scheme
        self.host = host
        self.path = path
        self.project_id = project_id
        self.public_key = public_key
        self.secret_key = secret_key
        self.version = version
        self.client = client

    @property
    def store_api_url(self):
        # type: () -> str
        """Returns the API url for storing events.

        Deprecated: use get_api_url instead.
        """
        return self.get_api_url(type="store")

    def get_api_url(
        self, type="store"  # type: EndpointType
    ):
        # type: (...) -> str
        """Returns the API url for storing events."""
        return "%s://%s%sapi/%s/%s/" % (
            self.scheme,
            self.host,
            self.path,
            self.project_id,
            type,
        )

    def to_header(self):
        # type: () -> str
        """Returns the auth header as a string."""
        rv = [("sentry_key", self.public_key), ("sentry_version", self.version)]
        if self.client is not None:
            rv.append(("sentry_client", self.client))
        if self.secret_key is not None:
            rv.append(("sentry_secret", self.secret_key))
        return "Sentry " + ", ".join("%s=%s" % (key, value) for key, value in rv)


class AnnotatedValue(object):
    """
    Meta information for a data field in the event payload.
    This is to tell Relay that we have tampered with the field's value.

    See:
    https://github.com/getsentry/relay/blob/be12cd49a0f06ea932ed9b9f93a655de5d6ad6d1/relay-general/src/types/meta.rs#L407-L423
    """

    __slots__ = ("value", "metadata")

    def __init__(self, value, metadata):
        # type: (Optional[Any], Dict[str, Any]) -> None
        self.value = value
        self.metadata = metadata

    @classmethod
    def removed_because_raw_data(cls):
        # type: () -> AnnotatedValue
        """The value was removed because it could not be parsed.
        This is done for request body values that are not JSON nor a form."""
        return AnnotatedValue(
            value="",
            metadata={
                "rem": [  # Remark
                    [
                        "!raw",  # Unparsable raw data
                        "x",  # The field's original value was removed
                    ]
                ]
            },
        )

    @classmethod
    def removed_because_over_size_limit(cls):
        # type: () -> AnnotatedValue
        """The actual value was removed because the size of the field exceeded
        the configured maximum size (specified with the max_request_body_size
        sdk option)."""
        return AnnotatedValue(
            value="",
            metadata={
                "rem": [  # Remark
                    [
                        "!config",  # Because of configured maximum size
                        "x",  # The field's original value was removed
                    ]
                ]
            },
        )

    @classmethod
    def substituted_because_contains_sensitive_data(cls):
        # type: () -> AnnotatedValue
        """The actual value was removed because it contained sensitive information."""
        return AnnotatedValue(
            value=SENSITIVE_DATA_SUBSTITUTE,
            metadata={
                "rem": [  # Remark
                    [
                        "!config",  # Because of SDK configuration (in this case the config is the hard coded removal of certain django cookies)
                        "s",  # The field's original value was substituted
                    ]
                ]
            },
        )


if TYPE_CHECKING:
    from typing import TypeVar

    T = TypeVar("T")
    Annotated = Union[AnnotatedValue, T]


def get_type_name(cls):
    # type: (Optional[type]) -> Optional[str]
    return getattr(cls, "__qualname__", None) or getattr(cls, "__name__", None)


def get_type_module(cls):
    # type: (Optional[type]) -> Optional[str]
    mod = getattr(cls, "__module__", None)
    if mod not in (None, "builtins", "__builtins__"):
        return mod
    return None


def should_hide_frame(frame):
    # type: (FrameType) -> bool
    try:
        mod = frame.f_globals["__name__"]
        if mod.startswith("sentry_sdk."):
            return True
    except (AttributeError, KeyError):
        pass

    for flag_name in "__traceback_hide__", "__tracebackhide__":
        try:
            if frame.f_locals[flag_name]:
                return True
        except Exception:
            pass

    return False


def iter_stacks(tb):
    # type: (Optional[TracebackType]) -> Iterator[TracebackType]
    tb_ = tb  # type: Optional[TracebackType]
    while tb_ is not None:
        if not should_hide_frame(tb_.tb_frame):
            yield tb_
        tb_ = tb_.tb_next


def get_lines_from_file(
    filename,  # type: str
    lineno,  # type: int
    max_length=None,  # type: Optional[int]
    loader=None,  # type: Optional[Any]
    module=None,  # type: Optional[str]
):
    # type: (...) -> Tuple[List[Annotated[str]], Optional[Annotated[str]], List[Annotated[str]]]
    context_lines = 5
    source = None

    if loader is not None and hasattr(loader, "get_source"):
        try:
            source_str = loader.get_source(module)  # type: Optional[str]
        except (ImportError, IOError):
            source_str = None
        if source_str is not None:
            source = source_str.splitlines()

    if source is None:
        try:
            source = linecache.getlines(filename)
        except (OSError, IOError):
            return [], None, []

    if not source:
        return [], None, []

    lower_bound = max(0, lineno - context_lines)
    upper_bound = min(lineno + 1 + context_lines, len(source))

    try:
        pre_context = [
            strip_string(line.strip("\r\n"), max_length=max_length)
            for line in source[lower_bound:lineno]
        ]

        context_line = strip_string(source[lineno].strip("\r\n"), max_length=max_length)

        post_context = [
            strip_string(line.strip("\r\n"), max_length=max_length)
            for line in source[(lineno + 1) : upper_bound]
        ]

        return pre_context, context_line, post_context
    except IndexError:
        # the file may have changed since it was loaded into memory
        return [], None, []


def get_source_context(
    frame,  # type: FrameType
    tb_lineno,  # type: int
    max_value_length=None,  # type: Optional[int]
):
    # type: (...) -> Tuple[List[Annotated[str]], Optional[Annotated[str]], List[Annotated[str]]]
    try:
        abs_path = frame.f_code.co_filename  # type: Optional[str]
    except Exception:
        abs_path = None

    try:
        module = frame.f_globals["__name__"]
    except Exception:
        return [], None, []

    try:
        loader = frame.f_globals["__loader__"]
    except Exception:
        loader = None

    lineno = tb_lineno - 1
    if lineno is not None and abs_path:
        return get_lines_from_file(
            abs_path, lineno, max_value_length, loader=loader, module=module
        )

    return [], None, []


def safe_str(value):
    # type: (Any) -> str
    try:
        return text_type(value)
    except Exception:
        return safe_repr(value)


if PY2:

    def safe_repr(value):
        # type: (Any) -> str
        try:
            rv = repr(value).decode("utf-8", "replace")

            # At this point `rv` contains a bunch of literal escape codes, like
            # this (exaggerated example):
            #
            #     u"\\x2f"
            #
            # But we want to show this string as:
            #
            #     u"/"
            try:
                # unicode-escape does this job, but can only decode latin1. So we
                # attempt to encode in latin1.
                return rv.encode("latin1").decode("unicode-escape")
            except Exception:
                # Since usually strings aren't latin1 this can break. In those
                # cases we just give up.
                return rv
        except Exception:
            # If e.g. the call to `repr` already fails
            return "<broken repr>"

else:

    def safe_repr(value):
        # type: (Any) -> str
        try:
            return repr(value)
        except Exception:
            return "<broken repr>"


def filename_for_module(module, abs_path):
    # type: (Optional[str], Optional[str]) -> Optional[str]
    if not abs_path or not module:
        return abs_path

    try:
        if abs_path.endswith(".pyc"):
            abs_path = abs_path[:-1]

        base_module = module.split(".", 1)[0]
        if base_module == module:
            return os.path.basename(abs_path)

        base_module_path = sys.modules[base_module].__file__
        if not base_module_path:
            return abs_path

        return abs_path.split(base_module_path.rsplit(os.sep, 2)[0], 1)[-1].lstrip(
            os.sep
        )
    except Exception:
        return abs_path


def serialize_frame(
    frame,
    tb_lineno=None,
    include_local_variables=True,
    include_source_context=True,
    max_value_length=None,
):
    # type: (FrameType, Optional[int], bool, bool, Optional[int]) -> Dict[str, Any]
    f_code = getattr(frame, "f_code", None)
    if not f_code:
        abs_path = None
        function = None
    else:
        abs_path = frame.f_code.co_filename
        function = frame.f_code.co_name
    try:
        module = frame.f_globals["__name__"]
    except Exception:
        module = None

    if tb_lineno is None:
        tb_lineno = frame.f_lineno

    rv = {
        "filename": filename_for_module(module, abs_path) or None,
        "abs_path": os.path.abspath(abs_path) if abs_path else None,
        "function": function or "<unknown>",
        "module": module,
        "lineno": tb_lineno,
    }  # type: Dict[str, Any]

    if include_source_context:
        rv["pre_context"], rv["context_line"], rv["post_context"] = get_source_context(
            frame, tb_lineno, max_value_length
        )

    if include_local_variables:
        rv["vars"] = copy(frame.f_locals)

    return rv


def current_stacktrace(
    include_local_variables=True,  # type: bool
    include_source_context=True,  # type: bool
    max_value_length=None,  # type: Optional[int]
):
    # type: (...) -> Dict[str, Any]
    __tracebackhide__ = True
    frames = []

    f = sys._getframe()  # type: Optional[FrameType]
    while f is not None:
        if not should_hide_frame(f):
            frames.append(
                serialize_frame(
                    f,
                    include_local_variables=include_local_variables,
                    include_source_context=include_source_context,
                    max_value_length=max_value_length,
                )
            )
        f = f.f_back

    frames.reverse()

    return {"frames": frames}


def get_errno(exc_value):
    # type: (BaseException) -> Optional[Any]
    return getattr(exc_value, "errno", None)


def get_error_message(exc_value):
    # type: (Optional[BaseException]) -> str
    return (
        getattr(exc_value, "message", "")
        or getattr(exc_value, "detail", "")
        or safe_str(exc_value)
    )


def single_exception_from_error_tuple(
    exc_type,  # type: Optional[type]
    exc_value,  # type: Optional[BaseException]
    tb,  # type: Optional[TracebackType]
    client_options=None,  # type: Optional[Dict[str, Any]]
    mechanism=None,  # type: Optional[Dict[str, Any]]
    exception_id=None,  # type: Optional[int]
    parent_id=None,  # type: Optional[int]
    source=None,  # type: Optional[str]
):
    # type: (...) -> Dict[str, Any]
    """
    Creates a dict that goes into the event's `exception.values` list and is
    ingestible by Sentry.

    See the Exception Interface documentation for more details:
    https://develop.sentry.dev/sdk/event-payloads/exception/
    """
    exception_value = {}  # type: Dict[str, Any]
    exception_value["mechanism"] = (
        mechanism.copy() if mechanism else {"type": "generic", "handled": True}
    )
    if exception_id is not None:
        exception_value["mechanism"]["exception_id"] = exception_id

    if exc_value is not None:
        errno = get_errno(exc_value)
    else:
        errno = None

    if errno is not None:
        exception_value["mechanism"].setdefault("meta", {}).setdefault(
            "errno", {}
        ).setdefault("number", errno)

    if source is not None:
        exception_value["mechanism"]["source"] = source

    is_root_exception = exception_id == 0
    if not is_root_exception and parent_id is not None:
        exception_value["mechanism"]["parent_id"] = parent_id
        exception_value["mechanism"]["type"] = "chained"

    if is_root_exception and "type" not in exception_value["mechanism"]:
        exception_value["mechanism"]["type"] = "generic"

    is_exception_group = BaseExceptionGroup is not None and isinstance(
        exc_value, BaseExceptionGroup
    )
    if is_exception_group:
        exception_value["mechanism"]["is_exception_group"] = True

    exception_value["module"] = get_type_module(exc_type)
    exception_value["type"] = get_type_name(exc_type)
    exception_value["value"] = get_error_message(exc_value)

    if client_options is None:
        include_local_variables = True
        include_source_context = True
        max_value_length = DEFAULT_MAX_VALUE_LENGTH  # fallback
    else:
        include_local_variables = client_options["include_local_variables"]
        include_source_context = client_options["include_source_context"]
        max_value_length = client_options["max_value_length"]

    frames = [
        serialize_frame(
            tb.tb_frame,
            tb_lineno=tb.tb_lineno,
            include_local_variables=include_local_variables,
            include_source_context=include_source_context,
            max_value_length=max_value_length,
        )
        for tb in iter_stacks(tb)
    ]

    if frames:
        exception_value["stacktrace"] = {"frames": frames}

    return exception_value


HAS_CHAINED_EXCEPTIONS = hasattr(Exception, "__suppress_context__")

if HAS_CHAINED_EXCEPTIONS:

    def walk_exception_chain(exc_info):
        # type: (ExcInfo) -> Iterator[ExcInfo]
        exc_type, exc_value, tb = exc_info

        seen_exceptions = []
        seen_exception_ids = set()  # type: Set[int]

        while (
            exc_type is not None
            and exc_value is not None
            and id(exc_value) not in seen_exception_ids
        ):
            yield exc_type, exc_value, tb

            # Avoid hashing random types we don't know anything
            # about. Use the list to keep a ref so that the `id` is
            # not used for another object.
            seen_exceptions.append(exc_value)
            seen_exception_ids.add(id(exc_value))

            if exc_value.__suppress_context__:
                cause = exc_value.__cause__
            else:
                cause = exc_value.__context__
            if cause is None:
                break
            exc_type = type(cause)
            exc_value = cause
            tb = getattr(cause, "__traceback__", None)

else:

    def walk_exception_chain(exc_info):
        # type: (ExcInfo) -> Iterator[ExcInfo]
        yield exc_info


def exceptions_from_error(
    exc_type,  # type: Optional[type]
    exc_value,  # type: Optional[BaseException]
    tb,  # type: Optional[TracebackType]
    client_options=None,  # type: Optional[Dict[str, Any]]
    mechanism=None,  # type: Optional[Dict[str, Any]]
    exception_id=0,  # type: int
    parent_id=0,  # type: int
    source=None,  # type: Optional[str]
):
    # type: (...) -> Tuple[int, List[Dict[str, Any]]]
    """
    Creates the list of exceptions.
    This can include chained exceptions and exceptions from an ExceptionGroup.

    See the Exception Interface documentation for more details:
    https://develop.sentry.dev/sdk/event-payloads/exception/
    """
    parent = single_exception_from_error_tuple(
        exc_type=exc_type,
        exc_value=exc_value,
        tb=tb,
        client_options=client_options,
        mechanism=mechanism,
        exception_id=exception_id,
        parent_id=parent_id,
        source=source,
    )
    exceptions = [parent]

    parent_id = exception_id
    exception_id += 1

    should_suppress_context = (
        hasattr(exc_value, "__suppress_context__")
        and exc_value.__suppress_context__  # type: ignore
    )
    if should_suppress_context:
        # Add direct cause.
        # The field `__cause__` is set when raised with the exception (using the `from` keyword).
        exception_has_cause = (
            exc_value
            and hasattr(exc_value, "__cause__")
            and exc_value.__cause__ is not None
        )
        if exception_has_cause:
            cause = exc_value.__cause__  # type: ignore
            (exception_id, child_exceptions) = exceptions_from_error(
                exc_type=type(cause),
                exc_value=cause,
                tb=getattr(cause, "__traceback__", None),
                client_options=client_options,
                mechanism=mechanism,
                exception_id=exception_id,
                source="__cause__",
            )
            exceptions.extend(child_exceptions)

    else:
        # Add indirect cause.
        # The field `__context__` is assigned if another exception occurs while handling the exception.
        exception_has_content = (
            exc_value
            and hasattr(exc_value, "__context__")
            and exc_value.__context__ is not None
        )
        if exception_has_content:
            context = exc_value.__context__  # type: ignore
            (exception_id, child_exceptions) = exceptions_from_error(
                exc_type=type(context),
                exc_value=context,
                tb=getattr(context, "__traceback__", None),
                client_options=client_options,
                mechanism=mechanism,
                exception_id=exception_id,
                source="__context__",
            )
            exceptions.extend(child_exceptions)

    # Add exceptions from an ExceptionGroup.
    is_exception_group = exc_value and hasattr(exc_value, "exceptions")
    if is_exception_group:
        for idx, e in enumerate(exc_value.exceptions):  # type: ignore
            (exception_id, child_exceptions) = exceptions_from_error(
                exc_type=type(e),
                exc_value=e,
                tb=getattr(e, "__traceback__", None),
                client_options=client_options,
                mechanism=mechanism,
                exception_id=exception_id,
                parent_id=parent_id,
                source="exceptions[%s]" % idx,
            )
            exceptions.extend(child_exceptions)

    return (exception_id, exceptions)


def exceptions_from_error_tuple(
    exc_info,  # type: ExcInfo
    client_options=None,  # type: Optional[Dict[str, Any]]
    mechanism=None,  # type: Optional[Dict[str, Any]]
):
    # type: (...) -> List[Dict[str, Any]]
    exc_type, exc_value, tb = exc_info

    is_exception_group = BaseExceptionGroup is not None and isinstance(
        exc_value, BaseExceptionGroup
    )

    if is_exception_group:
        (_, exceptions) = exceptions_from_error(
            exc_type=exc_type,
            exc_value=exc_value,
            tb=tb,
            client_options=client_options,
            mechanism=mechanism,
            exception_id=0,
            parent_id=0,
        )
    else:
        exceptions = []
        for exc_type, exc_value, tb in walk_exception_chain(exc_info):
            exceptions.append(
                single_exception_from_error_tuple(
                    exc_type, exc_value, tb, client_options, mechanism
                )
            )

    exceptions.reverse()

    return exceptions


def to_string(value):
    # type: (str) -> str
    try:
        return text_type(value)
    except UnicodeDecodeError:
        return repr(value)[1:-1]


def iter_event_stacktraces(event):
    # type: (Dict[str, Any]) -> Iterator[Dict[str, Any]]
    if "stacktrace" in event:
        yield event["stacktrace"]

    if "threads" in event:
        for thread in event["threads"].get("values") or ():
            if "stacktrace" in thread:
                yield thread["stacktrace"]

    if "exception" in event:
        for exception in event["exception"].get("values") or ():
            if "stacktrace" in exception:
                yield exception["stacktrace"]


def iter_event_frames(event):
    # type: (Dict[str, Any]) -> Iterator[Dict[str, Any]]
    for stacktrace in iter_event_stacktraces(event):
        for frame in stacktrace.get("frames") or ():
            yield frame


def handle_in_app(event, in_app_exclude=None, in_app_include=None, project_root=None):
    # type: (Dict[str, Any], Optional[List[str]], Optional[List[str]], Optional[str]) -> Dict[str, Any]
    for stacktrace in iter_event_stacktraces(event):
        set_in_app_in_frames(
            stacktrace.get("frames"),
            in_app_exclude=in_app_exclude,
            in_app_include=in_app_include,
            project_root=project_root,
        )

    return event


def set_in_app_in_frames(frames, in_app_exclude, in_app_include, project_root=None):
    # type: (Any, Optional[List[str]], Optional[List[str]], Optional[str]) -> Optional[Any]
    if not frames:
        return None

    for frame in frames:
        # if frame has already been marked as in_app, skip it
        current_in_app = frame.get("in_app")
        if current_in_app is not None:
            continue

        module = frame.get("module")

        # check if module in frame is in the list of modules to include
        if _module_in_list(module, in_app_include):
            frame["in_app"] = True
            continue

        # check if module in frame is in the list of modules to exclude
        if _module_in_list(module, in_app_exclude):
            frame["in_app"] = False
            continue

        # if frame has no abs_path, skip further checks
        abs_path = frame.get("abs_path")
        if abs_path is None:
            continue

        if _is_external_source(abs_path):
            frame["in_app"] = False
            continue

        if _is_in_project_root(abs_path, project_root):
            frame["in_app"] = True
            continue

    return frames


def exc_info_from_error(error):
    # type: (Union[BaseException, ExcInfo]) -> ExcInfo
    if isinstance(error, tuple) and len(error) == 3:
        exc_type, exc_value, tb = error
    elif isinstance(error, BaseException):
        tb = getattr(error, "__traceback__", None)
        if tb is not None:
            exc_type = type(error)
            exc_value = error
        else:
            exc_type, exc_value, tb = sys.exc_info()
            if exc_value is not error:
                tb = None
                exc_value = error
                exc_type = type(error)
    else:
        raise ValueError("Expected Exception object to report, got %s!" % type(error))

    return exc_type, exc_value, tb


def event_from_exception(
    exc_info,  # type: Union[BaseException, ExcInfo]
    client_options=None,  # type: Optional[Dict[str, Any]]
    mechanism=None,  # type: Optional[Dict[str, Any]]
):
    # type: (...) -> Tuple[Dict[str, Any], Dict[str, Any]]
    exc_info = exc_info_from_error(exc_info)
    hint = event_hint_with_exc_info(exc_info)
    return (
        {
            "level": "error",
            "exception": {
                "values": exceptions_from_error_tuple(
                    exc_info, client_options, mechanism
                )
            },
        },
        hint,
    )


def _module_in_list(name, items):
    # type: (str, Optional[List[str]]) -> bool
    if name is None:
        return False

    if not items:
        return False

    for item in items:
        if item == name or name.startswith(item + "."):
            return True

    return False


def _is_external_source(abs_path):
    # type: (str) -> bool
    # check if frame is in 'site-packages' or 'dist-packages'
    external_source = (
        re.search(r"[\\/](?:dist|site)-packages[\\/]", abs_path) is not None
    )
    return external_source


def _is_in_project_root(abs_path, project_root):
    # type: (str, Optional[str]) -> bool
    if project_root is None:
        return False

    # check if path is in the project root
    if abs_path.startswith(project_root):
        return True

    return False


def strip_string(value, max_length=None):
    # type: (str, Optional[int]) -> Union[AnnotatedValue, str]
    if not value:
        return value

    if max_length is None:
        max_length = DEFAULT_MAX_VALUE_LENGTH

    length = len(value.encode("utf-8"))

    if length > max_length:
        return AnnotatedValue(
            value=value[: max_length - 3] + "...",
            metadata={
                "len": length,
                "rem": [["!limit", "x", max_length - 3, max_length]],
            },
        )

    return value


def parse_version(version):
    # type: (str) -> Optional[Tuple[int, ...]]
    """
    Parses a version string into a tuple of integers.
    This uses the parsing logic from PEP 440:
    https://peps.python.org/pep-0440/#appendix-b-parsing-version-strings-with-regular-expressions
    """
    VERSION_PATTERN = r"""  # noqa: N806
        v?
        (?:
            (?:(?P<epoch>[0-9]+)!)?                           # epoch
            (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
            (?P<pre>                                          # pre-release
                [-_\.]?
                (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
                [-_\.]?
                (?P<pre_n>[0-9]+)?
            )?
            (?P<post>                                         # post release
                (?:-(?P<post_n1>[0-9]+))
                |
                (?:
                    [-_\.]?
                    (?P<post_l>post|rev|r)
                    [-_\.]?
                    (?P<post_n2>[0-9]+)?
                )
            )?
            (?P<dev>                                          # dev release
                [-_\.]?
                (?P<dev_l>dev)
                [-_\.]?
                (?P<dev_n>[0-9]+)?
            )?
        )
        (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
    """

    pattern = re.compile(
        r"^\s*" + VERSION_PATTERN + r"\s*$",
        re.VERBOSE | re.IGNORECASE,
    )

    try:
        release = pattern.match(version).groupdict()["release"]  # type: ignore
        release_tuple = tuple(map(int, release.split(".")[:3]))  # type: Tuple[int, ...]
    except (TypeError, ValueError, AttributeError):
        return None

    return release_tuple
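
# Example (illustrative, not part of the module): only the release segment
# feeds the returned tuple, capped at three components; unparsable strings
# yield None.
#
#     parse_version("1.39.2")         # -> (1, 39, 2)
#     parse_version("2.0.0rc1")       # -> (2, 0, 0)
#     parse_version("1.2.3.4")        # -> (1, 2, 3)
#     parse_version("not-a-version")  # -> None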


def _is_contextvars_broken():
    # type: () -> bool
    """
    Returns whether gevent/eventlet have patched the stdlib in a way where thread locals are now more "correct" than contextvars.
    """
    try:
        import gevent  # type: ignore
        from gevent.monkey import is_object_patched  # type: ignore

        # Get the MAJOR and MINOR version numbers of Gevent
        version_tuple = tuple(
            [int(part) for part in re.split(r"a|b|rc|\.", gevent.__version__)[:2]]
        )
        if is_object_patched("threading", "local"):
            # Gevent 20.9.0 depends on Greenlet 0.4.17 which natively handles switching
            # context vars when greenlets are switched, so, Gevent 20.9.0+ is all fine.
            # Ref: https://github.com/gevent/gevent/blob/83c9e2ae5b0834b8f84233760aabe82c3ba065b4/src/gevent/monkey.py#L604-L609
            # Gevent 20.5, that doesn't depend on Greenlet 0.4.17 with native support
            # for contextvars, is able to patch both thread locals and contextvars, in
            # that case, check if contextvars are effectively patched.
            if (
                # Gevent 20.9.0+
                (sys.version_info >= (3, 7) and version_tuple >= (20, 9))
                # Gevent 20.5.0+ or Python < 3.7
                or (is_object_patched("contextvars", "ContextVar"))
            ):
                return False

            return True
    except ImportError:
        pass

    try:
        import greenlet  # type: ignore
        from eventlet.patcher import is_monkey_patched  # type: ignore

        greenlet_version = parse_version(greenlet.__version__)

        if greenlet_version is None:
            logger.error(
                "Internal error in Sentry SDK: Could not parse Greenlet version from greenlet.__version__."
            )
            return False

        if is_monkey_patched("thread") and greenlet_version < (0, 5):
            return True
    except ImportError:
        pass

    return False


def _make_threadlocal_contextvars(local):
    # type: (type) -> type
    class ContextVar(object):
        # Super-limited impl of ContextVar

        def __init__(self, name):
            # type: (str) -> None
            self._name = name
            self._local = local()

        def get(self, default):
            # type: (Any) -> Any
            return getattr(self._local, "value", default)

        def set(self, value):
            # type: (Any) -> None
            self._local.value = value

    return ContextVar
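
# Example (illustrative): the shim only supports get(default) and set(value),
# which is all the SDK needs; values are isolated per thread (or greenlet,
# depending on the `local` type passed in).
#
#     import threading
#
#     ContextVar = _make_threadlocal_contextvars(threading.local)
#     cv = ContextVar("scope")
#     cv.get(None)   # -> None (nothing set in this thread yet)
#     cv.set(42)
#     cv.get(None)   # -> 42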


def _get_contextvars():
    # type: () -> Tuple[bool, type]
    """
    Figure out the "right" contextvars installation to use. Returns a
    `contextvars.ContextVar`-like class with a limited API.

    See https://docs.sentry.io/platforms/python/contextvars/ for more information.
    """
    if not _is_contextvars_broken():
        # aiocontextvars is a PyPI package that ensures that the contextvars
        # backport (also a PyPI package) works with asyncio under Python 3.6
        #
        # Import it if available.
        if sys.version_info < (3, 7):
            # `aiocontextvars` is absolutely required for functional
            # contextvars on Python 3.6.
            try:
                from aiocontextvars import ContextVar

                return True, ContextVar
            except ImportError:
                pass
        else:
            # On Python 3.7 contextvars are functional.
            try:
                from contextvars import ContextVar

                return True, ContextVar
            except ImportError:
                pass

    # Fall back to basic thread-local usage.

    from threading import local

    return False, _make_threadlocal_contextvars(local)


HAS_REAL_CONTEXTVARS, ContextVar = _get_contextvars()

CONTEXTVARS_ERROR_MESSAGE = """

With asyncio/ASGI applications, the Sentry SDK requires a functional
installation of `contextvars` to avoid leaking scope/context data across
requests.

Please refer to https://docs.sentry.io/platforms/python/contextvars/ for more information.
"""


def qualname_from_function(func):
    # type: (Callable[..., Any]) -> Optional[str]
    """Return the qualified name of func. Works with regular function, lambda, partial and partialmethod."""
    func_qualname = None  # type: Optional[str]

    # Python 2
    try:
        return "%s.%s.%s" % (
            func.im_class.__module__,  # type: ignore
            func.im_class.__name__,  # type: ignore
            func.__name__,
        )
    except Exception:
        pass

    prefix, suffix = "", ""

    if (
        _PARTIALMETHOD_AVAILABLE
        and hasattr(func, "_partialmethod")
        and isinstance(func._partialmethod, partialmethod)
    ):
        prefix, suffix = "partialmethod()"
        func = func._partialmethod.func
    elif isinstance(func, partial) and hasattr(func.func, "__name__"):
        prefix, suffix = "partial()"
        func = func.func

    if hasattr(func, "__qualname__"):
        func_qualname = func.__qualname__
    elif hasattr(func, "__name__"):  # Python 2.7 has no __qualname__
        func_qualname = func.__name__

    # Python 3: methods, functions, classes
    if func_qualname is not None:
        if hasattr(func, "__module__"):
            func_qualname = func.__module__ + "." + func_qualname
        func_qualname = prefix + func_qualname + suffix

    return func_qualname
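
# Example (illustrative): partials are unwrapped and reported with a
# "partial(<function ...>)" wrapper around the qualified name. "my_module"
# below is a stand-in for wherever `handler` is defined.
#
#     from functools import partial
#
#     def handler(a, b):
#         ...
#
#     qualname_from_function(partial(handler, 1))
#     # -> "partial(<function my_module.handler>)"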


def transaction_from_function(func):
    # type: (Callable[..., Any]) -> Optional[str]
    return qualname_from_function(func)


disable_capture_event = ContextVar("disable_capture_event")


class ServerlessTimeoutWarning(Exception):  # noqa: N818
    """Raised when a serverless method is about to reach its timeout."""

    pass


class TimeoutThread(threading.Thread):
    """Creates a Thread which runs (sleeps) for a time duration equal to
    waiting_time and raises a custom ServerlessTimeout exception.
    """

    def __init__(self, waiting_time, configured_timeout):
        # type: (float, int) -> None
        threading.Thread.__init__(self)
        self.waiting_time = waiting_time
        self.configured_timeout = configured_timeout
        self._stop_event = threading.Event()

    def stop(self):
        # type: () -> None
        self._stop_event.set()

    def run(self):
        # type: () -> None

        self._stop_event.wait(self.waiting_time)

        if self._stop_event.is_set():
            return

        integer_configured_timeout = int(self.configured_timeout)

        # Round the configured timeout (in seconds) up to the next whole second
        if integer_configured_timeout < self.configured_timeout:
            integer_configured_timeout = integer_configured_timeout + 1

        # Raising Exception after timeout duration is reached
        raise ServerlessTimeoutWarning(
            "WARNING : Function is expected to get timed out. Configured timeout duration = {} seconds.".format(
                integer_configured_timeout
            )
        )
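
# Usage sketch (illustrative): start the timer alongside the real work and
# stop it if the work finishes first; otherwise the thread raises
# ServerlessTimeoutWarning from its own run(). `do_work` is hypothetical.
#
#     timeout_thread = TimeoutThread(waiting_time=9.5, configured_timeout=10)
#     timeout_thread.start()
#     try:
#         do_work()
#     finally:
#         timeout_thread.stop()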


def to_base64(original):
    # type: (str) -> Optional[str]
    """
    Convert a string to base64, via UTF-8. Returns None on invalid input.
    """
    base64_string = None

    try:
        utf8_bytes = original.encode("UTF-8")
        base64_bytes = base64.b64encode(utf8_bytes)
        base64_string = base64_bytes.decode("UTF-8")
    except Exception as err:
        logger.warning("Unable to encode {orig} to base64:".format(orig=original), err)

    return base64_string


def from_base64(base64_string):
    # type: (str) -> Optional[str]
    """
    Convert a string from base64, via UTF-8. Returns None on invalid input.
    """
    utf8_string = None

    try:
        only_valid_chars = BASE64_ALPHABET.match(base64_string)
        assert only_valid_chars

        base64_bytes = base64_string.encode("UTF-8")
        utf8_bytes = base64.b64decode(base64_bytes)
        utf8_string = utf8_bytes.decode("UTF-8")
    except Exception as err:
        logger.warning(
            "Unable to decode {b64} from base64:".format(b64=base64_string), err
        )

    return utf8_string
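
# Example (illustrative): the two helpers round-trip UTF-8 text and return
# None (plus a warning log) instead of raising on bad input.
#
#     to_base64("hello")          # -> "aGVsbG8="
#     from_base64("aGVsbG8=")     # -> "hello"
#     from_base64("not base64!")  # -> None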


Components = namedtuple("Components", ["scheme", "netloc", "path", "query", "fragment"])


def sanitize_url(url, remove_authority=True, remove_query_values=True, split=False):
    # type: (str, bool, bool, bool) -> Union[str, Components]
    """
    Removes the authority and query parameter values from a given URL.
    """
    parsed_url = urlsplit(url)
    query_params = parse_qs(parsed_url.query, keep_blank_values=True)

    # strip username:password (netloc can be usr:pwd@example.com)
    if remove_authority:
        netloc_parts = parsed_url.netloc.split("@")
        if len(netloc_parts) > 1:
            netloc = "%s:%s@%s" % (
                SENSITIVE_DATA_SUBSTITUTE,
                SENSITIVE_DATA_SUBSTITUTE,
                netloc_parts[-1],
            )
        else:
            netloc = parsed_url.netloc
    else:
        netloc = parsed_url.netloc

    # strip values from query string
    if remove_query_values:
        query_string = unquote(
            urlencode({key: SENSITIVE_DATA_SUBSTITUTE for key in query_params})
        )
    else:
        query_string = parsed_url.query

    components = Components(
        scheme=parsed_url.scheme,
        netloc=netloc,
        query=query_string,
        path=parsed_url.path,
        fragment=parsed_url.fragment,
    )

    if split:
        return components
    else:
        return urlunsplit(components)
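
# Example (illustrative): credentials in the netloc and all query values are
# replaced with SENSITIVE_DATA_SUBSTITUTE ("[sub]" below); path and fragment
# are kept as-is.
#
#     sanitize_url("https://user:pwd@example.com/api?token=abc")
#     # -> "https://[sub]:[sub]@example.com/api?token=[sub]"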


ParsedUrl = namedtuple("ParsedUrl", ["url", "query", "fragment"])


def parse_url(url, sanitize=True):
    # type: (str, bool) -> ParsedUrl
    """
    Splits a URL into a URL (including path), query and fragment. If sanitize is True, the query
    parameters will be sanitized to remove sensitive data. The authority (username and password)
    in the URL will always be removed.
    """
    parsed_url = sanitize_url(
        url, remove_authority=True, remove_query_values=sanitize, split=True
    )

    base_url = urlunsplit(
        Components(
            scheme=parsed_url.scheme,  # type: ignore
            netloc=parsed_url.netloc,  # type: ignore
            query="",
            path=parsed_url.path,  # type: ignore
            fragment="",
        )
    )

    return ParsedUrl(
        url=base_url,
        query=parsed_url.query,  # type: ignore
        fragment=parsed_url.fragment,  # type: ignore
    )
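
# Example (illustrative): with the default sanitize=True the query values are
# scrubbed but the keys survive, and the fragment is returned separately.
# SENSITIVE_DATA_SUBSTITUTE stands in for the scrubbed placeholder.
#
#     parse_url("https://example.com/search?q=secret#top")
#     # -> ParsedUrl(url="https://example.com/search",
#     #              query="q=%s" % SENSITIVE_DATA_SUBSTITUTE,
#     #              fragment="top")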


def is_valid_sample_rate(rate, source):
    # type: (Any, str) -> bool
    """
    Checks the given sample rate to make sure it is valid type and value (a
    boolean or a number between 0 and 1, inclusive).
    """

    # both booleans and NaN are instances of Real, so a) checking for Real
    # checks for the possibility of a boolean also, b) we have to check
    # separately for NaN, and c) Decimal does not derive from Real, so we
    # need to check for it explicitly as well
    if not isinstance(rate, (Real, Decimal)) or math.isnan(rate):
        logger.warning(
            "{source} Given sample rate is invalid. Sample rate must be a boolean or a number between 0 and 1. Got {rate} of type {type}.".format(
                source=source, rate=rate, type=type(rate)
            )
        )
        return False

    # in case rate is a boolean, it will get cast to 1 if it's True and 0 if it's False
    rate = float(rate)
    if rate < 0 or rate > 1:
        logger.warning(
            "{source} Given sample rate is invalid. Sample rate must be between 0 and 1. Got {rate}.".format(
                source=source, rate=rate
            )
        )
        return False

    return True
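
# Example (illustrative; "[tracing]" is a hypothetical source label): booleans
# count as valid rates (True ~ 1.0, False ~ 0.0), while out-of-range numbers,
# NaN and strings are rejected with a warning.
#
#     is_valid_sample_rate(0.25, source="[tracing]")   # True
#     is_valid_sample_rate(True, source="[tracing]")   # True
#     is_valid_sample_rate(1.5, source="[tracing]")    # False
#     is_valid_sample_rate("0.5", source="[tracing]")  # False (string, not a number)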


def match_regex_list(item, regex_list=None, substring_matching=False):
    # type: (str, Optional[List[str]], bool) -> bool
    if regex_list is None:
        return False

    for item_matcher in regex_list:
        if not substring_matching and item_matcher[-1] != "$":
            item_matcher += "$"

        matched = re.search(item_matcher, item)
        if matched:
            return True

    return False
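
# Example (illustrative): by default each matcher is anchored with a trailing
# "$" unless it already ends in one; with substring_matching=True the pattern
# may match anywhere in the item.
#
#     match_regex_list("api.example.com", [r"api\..*"])   # True (full match)
#     match_regex_list("api.example.com", [r"example"])   # False (anchored)
#     match_regex_list("api.example.com", [r"example"], substring_matching=True)  # True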


def is_sentry_url(hub, url):
    # type: (sentry_sdk.Hub, str) -> bool
    """
    Determines whether the given URL matches the Sentry DSN.
    """
    return (
        hub.client is not None
        and hub.client.transport is not None
        and hub.client.transport.parsed_dsn is not None
        and hub.client.transport.parsed_dsn.netloc in url
    )


def _generate_installed_modules():
    # type: () -> Iterator[Tuple[str, str]]
    try:
        from importlib import metadata

        for dist in metadata.distributions():
            name = dist.metadata["Name"]
            # `metadata` values may be `None`, see:
            # https://github.com/python/cpython/issues/91216
            # and
            # https://github.com/python/importlib_metadata/issues/371
            if name is not None:
                version = metadata.version(name)
                if version is not None:
                    yield _normalize_module_name(name), version

    except ImportError:
        # < py3.8
        try:
            import pkg_resources
        except ImportError:
            return

        for info in pkg_resources.working_set:
            yield _normalize_module_name(info.key), info.version


def _normalize_module_name(name):
    # type: (str) -> str
    return name.lower()


def _get_installed_modules():
    # type: () -> Dict[str, str]
    global _installed_modules
    if _installed_modules is None:
        _installed_modules = dict(_generate_installed_modules())
    return _installed_modules


def package_version(package):
    # type: (str) -> Optional[Tuple[int, ...]]
    installed_packages = _get_installed_modules()
    version = installed_packages.get(package)
    if version is None:
        return None

    return parse_version(version)
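
# Example (illustrative): package_version combines the installed-modules
# registry with parse_version; names are normalized to lowercase first.
#
#     package_version("sentry-sdk")     # -> e.g. (1, 39, 2), whatever is installed
#     package_version("not-a-package")  # -> None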


if PY37:

    def nanosecond_time():
        # type: () -> int
        return time.perf_counter_ns()

elif PY33:

    def nanosecond_time():
        # type: () -> int
        return int(time.perf_counter() * 1e9)

else:

    def nanosecond_time():
        # type: () -> int
        return int(time.time() * 1e9)


if PY2:

    def now():
        # type: () -> float
        return time.time()

else:

    def now():
        # type: () -> float
        return time.perf_counter()
sentry-python-1.39.2/sentry_sdk/worker.py000066400000000000000000000107301454744723200205360ustar00rootroot00000000000000import os
import threading

from time import sleep, time
from sentry_sdk._compat import check_thread_support
from sentry_sdk._queue import Queue, FullError
from sentry_sdk.utils import logger
from sentry_sdk.consts import DEFAULT_QUEUE_SIZE

from sentry_sdk._types import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Any
    from typing import Optional
    from typing import Callable


_TERMINATOR = object()


class BackgroundWorker(object):
    def __init__(self, queue_size=DEFAULT_QUEUE_SIZE):
        # type: (int) -> None
        check_thread_support()
        self._queue = Queue(queue_size)  # type: Queue
        self._lock = threading.Lock()
        self._thread = None  # type: Optional[threading.Thread]
        self._thread_for_pid = None  # type: Optional[int]

    @property
    def is_alive(self):
        # type: () -> bool
        if self._thread_for_pid != os.getpid():
            return False
        if not self._thread:
            return False
        return self._thread.is_alive()

    def _ensure_thread(self):
        # type: () -> None
        if not self.is_alive:
            self.start()

    def _timed_queue_join(self, timeout):
        # type: (float) -> bool
        deadline = time() + timeout
        queue = self._queue

        queue.all_tasks_done.acquire()

        try:
            while queue.unfinished_tasks:
                delay = deadline - time()
                if delay <= 0:
                    return False
                queue.all_tasks_done.wait(timeout=delay)

            return True
        finally:
            queue.all_tasks_done.release()

    def start(self):
        # type: () -> None
        with self._lock:
            if not self.is_alive:
                self._thread = threading.Thread(
                    target=self._target, name="raven-sentry.BackgroundWorker"
                )
                self._thread.daemon = True
                try:
                    self._thread.start()
                    self._thread_for_pid = os.getpid()
                except RuntimeError:
                    # At this point we can no longer start because the interpreter
                    # is already shutting down.  Sadly at this point we can no longer
                    # send out events.
                    self._thread = None

    def kill(self):
        # type: () -> None
        """
        Kill worker thread. Returns immediately. Not useful for
        waiting on shutdown for events, use `flush` for that.
        """
        logger.debug("background worker got kill request")
        with self._lock:
            if self._thread:
                try:
                    self._queue.put_nowait(_TERMINATOR)
                except FullError:
                    logger.debug("background worker queue full, kill failed")

                self._thread = None
                self._thread_for_pid = None

    def flush(self, timeout, callback=None):
        # type: (float, Optional[Any]) -> None
        logger.debug("background worker got flush request")
        with self._lock:
            if self.is_alive and timeout > 0.0:
                self._wait_flush(timeout, callback)
        logger.debug("background worker flushed")

    def full(self):
        # type: () -> bool
        return self._queue.full()

    def _wait_flush(self, timeout, callback):
        # type: (float, Optional[Any]) -> None
        initial_timeout = min(0.1, timeout)
        if not self._timed_queue_join(initial_timeout):
            pending = self._queue.qsize() + 1
            logger.debug("%d event(s) pending on flush", pending)
            if callback is not None:
                callback(pending, timeout)

            if not self._timed_queue_join(timeout - initial_timeout):
                pending = self._queue.qsize() + 1
                logger.error("flush timed out, dropped %s events", pending)

    def submit(self, callback):
        # type: (Callable[[], None]) -> bool
        self._ensure_thread()
        try:
            self._queue.put_nowait(callback)
            return True
        except FullError:
            return False

    def _target(self):
        # type: () -> None
        while True:
            callback = self._queue.get()
            try:
                if callback is _TERMINATOR:
                    break
                try:
                    callback()
                except Exception:
                    logger.error("Failed processing job", exc_info=True)
            finally:
                self._queue.task_done()
            sleep(0)
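

# Usage sketch (illustrative, not part of the module): the worker runs queued
# callbacks on a single daemon thread. `send_event` is a hypothetical
# zero-argument callable.
#
#     worker = BackgroundWorker()
#     worker.submit(send_event)   # returns False if the queue is full
#     worker.flush(timeout=2.0)   # wait up to ~2s for pending callbacks
#     worker.kill()               # request shutdown without waiting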
sentry-python-1.39.2/setup.py000066400000000000000000000076611454744723200162110ustar00rootroot00000000000000#!/usr/bin/env python

"""
Sentry-Python - Sentry SDK for Python
=====================================

**Sentry-Python is an SDK for Sentry.** Check out `GitHub
<https://github.com/getsentry/sentry-python>`_ to find out more.
"""

import os
from setuptools import setup, find_packages

here = os.path.abspath(os.path.dirname(__file__))


def get_file_text(file_name):
    with open(os.path.join(here, file_name)) as in_file:
        return in_file.read()


setup(
    name="sentry-sdk",
    version="1.39.2",
    author="Sentry Team and Contributors",
    author_email="hello@sentry.io",
    url="https://github.com/getsentry/sentry-python",
    project_urls={
        "Documentation": "https://docs.sentry.io/platforms/python/",
        "Changelog": "https://github.com/getsentry/sentry-python/blob/master/CHANGELOG.md",
    },
    description="Python client for Sentry (https://sentry.io)",
    long_description=get_file_text("README.md"),
    long_description_content_type="text/markdown",
    packages=find_packages(exclude=("tests", "tests.*")),
    # PEP 561
    package_data={"sentry_sdk": ["py.typed"]},
    zip_safe=False,
    license="MIT",
    install_requires=[
        'urllib3>=1.25.7; python_version<="3.4"',
        'urllib3>=1.26.9; python_version=="3.5"',
        'urllib3>=1.26.11; python_version>="3.6"',
        "certifi",
    ],
    extras_require={
        "aiohttp": ["aiohttp>=3.5"],
        "arq": ["arq>=0.23"],
        "asyncpg": ["asyncpg>=0.23"],
        "beam": ["apache-beam>=2.12"],
        "bottle": ["bottle>=0.12.13"],
        "celery": ["celery>=3"],
        "chalice": ["chalice>=1.16.0"],
        "clickhouse-driver": ["clickhouse-driver>=0.2.0"],
        "django": ["django>=1.8"],
        "falcon": ["falcon>=1.4"],
        "fastapi": ["fastapi>=0.79.0"],
        "flask": ["flask>=0.11", "blinker>=1.1", "markupsafe"],
        "grpcio": ["grpcio>=1.21.1"],
        "httpx": ["httpx>=0.16.0"],
        "huey": ["huey>=2"],
        "loguru": ["loguru>=0.5"],
        "opentelemetry": ["opentelemetry-distro>=0.35b0"],
        "opentelemetry-experimental": [
            "opentelemetry-distro~=0.40b0",
            "opentelemetry-instrumentation-aiohttp-client~=0.40b0",
            "opentelemetry-instrumentation-django~=0.40b0",
            "opentelemetry-instrumentation-fastapi~=0.40b0",
            "opentelemetry-instrumentation-flask~=0.40b0",
            "opentelemetry-instrumentation-requests~=0.40b0",
            "opentelemetry-instrumentation-sqlite3~=0.40b0",
            "opentelemetry-instrumentation-urllib~=0.40b0",
        ],
        "pure_eval": ["pure_eval", "executing", "asttokens"],
        "pymongo": ["pymongo>=3.1"],
        "pyspark": ["pyspark>=2.4.4"],
        "quart": ["quart>=0.16.1", "blinker>=1.1"],
        "rq": ["rq>=0.6"],
        "sanic": ["sanic>=0.8"],
        "sqlalchemy": ["sqlalchemy>=1.2"],
        "starlette": ["starlette>=0.19.1"],
        "starlite": ["starlite>=1.48"],
        "tornado": ["tornado>=5"],
    },
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Environment :: Web Environment",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: BSD License",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
        "Programming Language :: Python :: 3.12",
        "Topic :: Software Development :: Libraries :: Python Modules",
    ],
    options={"bdist_wheel": {"universal": "1"}},
)
sentry-python-1.39.2/test-requirements.txt000066400000000000000000000007401454744723200207270ustar00rootroot00000000000000pip  # always use newest pip
mock ; python_version<'3.3'
pytest
pytest-cov==2.8.1
pytest-forked<=1.4.0
pytest-localserver==0.5.1  # TODO(py3): 0.6.0 drops 2.7 support: https://github.com/pytest-dev/pytest-localserver/releases/tag/v0.6.0
pytest-watch==4.2.0
tox==3.7.0
jsonschema==3.2.0
pyrsistent==0.16.0 # TODO(py3): 0.17.0 requires python3, see https://github.com/tobgu/pyrsistent/issues/205
executing<2.0.0  # TODO(py3): 2.0.0 requires python3
asttokens
responses
pysocks
ipdb
sentry-python-1.39.2/tests/000077500000000000000000000000001454744723200156275ustar00rootroot00000000000000sentry-python-1.39.2/tests/__init__.py000066400000000000000000000006361454744723200177450ustar00rootroot00000000000000import sys

import pytest

# This is used in _capture_internal_warnings. We need to run this at import
# time because that's where many deprecation warnings might get thrown.
#
# This lives in tests/__init__.py because apparently even tests/conftest.py
# gets loaded too late.
assert "sentry_sdk" not in sys.modules

_warning_recorder_mgr = pytest.warns(None)
_warning_recorder = _warning_recorder_mgr.__enter__()
sentry-python-1.39.2/tests/conftest.py000066400000000000000000000462021454744723200200320ustar00rootroot00000000000000import json
import os
import socket
from threading import Thread
from contextlib import contextmanager

import pytest
import jsonschema

try:
    import gevent
except ImportError:
    gevent = None

try:
    import eventlet
except ImportError:
    eventlet = None

try:
    # Python 2
    import BaseHTTPServer

    HTTPServer = BaseHTTPServer.HTTPServer
    BaseHTTPRequestHandler = BaseHTTPServer.BaseHTTPRequestHandler
except Exception:
    # Python 3
    from http.server import BaseHTTPRequestHandler, HTTPServer


try:
    from unittest import mock
except ImportError:
    import mock

import sentry_sdk
from sentry_sdk._compat import iteritems, reraise, string_types, PY2
from sentry_sdk.envelope import Envelope
from sentry_sdk.integrations import _processed_integrations  # noqa: F401
from sentry_sdk.profiler import teardown_profiler
from sentry_sdk.transport import Transport
from sentry_sdk.utils import capture_internal_exceptions

from tests import _warning_recorder, _warning_recorder_mgr

from sentry_sdk._types import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Optional
    from collections.abc import Iterator


SENTRY_EVENT_SCHEMA = "./checkouts/data-schemas/relay/event.schema.json"

if not os.path.isfile(SENTRY_EVENT_SCHEMA):
    SENTRY_EVENT_SCHEMA = None
else:
    with open(SENTRY_EVENT_SCHEMA) as f:
        SENTRY_EVENT_SCHEMA = json.load(f)

try:
    import pytest_benchmark
except ImportError:

    @pytest.fixture
    def benchmark():
        return lambda x: x()

else:
    del pytest_benchmark


@pytest.fixture(autouse=True)
def internal_exceptions(request, monkeypatch):
    errors = []
    if "tests_internal_exceptions" in request.keywords:
        return

    def _capture_internal_exception(self, exc_info):
        errors.append(exc_info)

    @request.addfinalizer
    def _():
        # reraise the errors so that this just acts as a pass-through (that
        # happens to keep track of the errors which pass through it)
        for e in errors:
            reraise(*e)

    monkeypatch.setattr(
        sentry_sdk.Hub, "_capture_internal_exception", _capture_internal_exception
    )

    return errors


@pytest.fixture(autouse=True, scope="session")
def _capture_internal_warnings():
    yield

    _warning_recorder_mgr.__exit__(None, None, None)
    recorder = _warning_recorder

    for warning in recorder:
        try:
            if isinstance(warning.message, ResourceWarning):
                continue
        except NameError:
            pass

        if "sentry_sdk" not in str(warning.filename) and "sentry-sdk" not in str(
            warning.filename
        ):
            continue

        # pytest-django
        if "getfuncargvalue" in str(warning.message):
            continue

        # Happens when re-initializing the SDK
        if "but it was only enabled on init()" in str(warning.message):
            continue

        # sanic's usage of aiohttp for test client
        if "verify_ssl is deprecated, use ssl=False instead" in str(warning.message):
            continue

        if "getargspec" in str(warning.message) and warning.filename.endswith(
            ("pyramid/config/util.py", "pyramid/config/views.py")
        ):
            continue

        if "isAlive() is deprecated" in str(
            warning.message
        ) and warning.filename.endswith("celery/utils/timer2.py"):
            continue

        if "collections.abc" in str(warning.message) and warning.filename.endswith(
            ("celery/canvas.py", "werkzeug/datastructures.py", "tornado/httputil.py")
        ):
            continue

        # Django 1.7 emits a (seemingly) false-positive warning for our test
        # app and suggests to use a middleware that does not exist in later
        # Django versions.
        if "SessionAuthenticationMiddleware" in str(warning.message):
            continue

        if "Something has already installed a non-asyncio" in str(warning.message):
            continue

        if "dns.hash" in str(warning.message) or "dns/namedict" in warning.filename:
            continue

        raise AssertionError(warning)


@pytest.fixture
def monkeypatch_test_transport(monkeypatch, validate_event_schema):
    def check_event(event):
        def check_string_keys(map):
            for key, value in iteritems(map):
                assert isinstance(key, string_types)
                if isinstance(value, dict):
                    check_string_keys(value)

        with capture_internal_exceptions():
            check_string_keys(event)
            validate_event_schema(event)

    def check_envelope(envelope):
        with capture_internal_exceptions():
            # There used to be a check here for errors are not sent in envelopes.
            # We changed the behaviour to send errors in envelopes when tracing is enabled.
            # This is checked in test_client.py::test_sending_events_with_tracing
            # and test_client.py::test_sending_events_with_no_tracing
            pass

    def inner(client):
        monkeypatch.setattr(
            client, "transport", TestTransport(check_event, check_envelope)
        )

    return inner


@pytest.fixture
def validate_event_schema(tmpdir):
    def inner(event):
        if SENTRY_EVENT_SCHEMA:
            jsonschema.validate(instance=event, schema=SENTRY_EVENT_SCHEMA)

    return inner


@pytest.fixture
def reset_integrations():
    """
    Use with caution, sometimes we really need to start
    with a clean slate to ensure monkeypatching works well,
    but this also means some other stuff will be monkeypatched twice.
    """
    global _processed_integrations
    _processed_integrations.clear()


@pytest.fixture
def sentry_init(monkeypatch_test_transport, request):
    def inner(*a, **kw):
        hub = sentry_sdk.Hub.current
        client = sentry_sdk.Client(*a, **kw)
        hub.bind_client(client)
        if "transport" not in kw:
            monkeypatch_test_transport(sentry_sdk.Hub.current.client)

    if request.node.get_closest_marker("forked"):
        # Do not run isolation if the test is already running in
        # ultimate isolation (seems to be required for celery tests that
        # fork)
        yield inner
    else:
        with sentry_sdk.Hub(None):
            yield inner


class TestTransport(Transport):
    def __init__(self, capture_event_callback, capture_envelope_callback):
        Transport.__init__(self)
        self.capture_event = capture_event_callback
        self.capture_envelope = capture_envelope_callback
        self._queue = None


@pytest.fixture
def capture_events(monkeypatch):
    def inner():
        events = []
        test_client = sentry_sdk.Hub.current.client
        old_capture_event = test_client.transport.capture_event
        old_capture_envelope = test_client.transport.capture_envelope

        def append_event(event):
            events.append(event)
            return old_capture_event(event)

        def append_envelope(envelope):
            for item in envelope:
                if item.headers.get("type") in ("event", "transaction"):
                    test_client.transport.capture_event(item.payload.json)
            return old_capture_envelope(envelope)

        monkeypatch.setattr(test_client.transport, "capture_event", append_event)
        monkeypatch.setattr(test_client.transport, "capture_envelope", append_envelope)
        return events

    return inner


@pytest.fixture
def capture_envelopes(monkeypatch):
    def inner():
        envelopes = []
        test_client = sentry_sdk.Hub.current.client
        old_capture_event = test_client.transport.capture_event
        old_capture_envelope = test_client.transport.capture_envelope

        def append_event(event):
            envelope = Envelope()
            envelope.add_event(event)
            envelopes.append(envelope)
            return old_capture_event(event)

        def append_envelope(envelope):
            envelopes.append(envelope)
            return old_capture_envelope(envelope)

        monkeypatch.setattr(test_client.transport, "capture_event", append_event)
        monkeypatch.setattr(test_client.transport, "capture_envelope", append_envelope)
        return envelopes

    return inner


@pytest.fixture
def capture_client_reports(monkeypatch):
    def inner():
        reports = []
        test_client = sentry_sdk.Hub.current.client

        def record_lost_event(reason, data_category=None, item=None):
            if data_category is None:
                data_category = item.data_category
            return reports.append((reason, data_category))

        monkeypatch.setattr(
            test_client.transport, "record_lost_event", record_lost_event
        )
        return reports

    return inner


@pytest.fixture
def capture_events_forksafe(monkeypatch, capture_events, request):
    def inner():
        capture_events()

        events_r, events_w = os.pipe()
        events_r = os.fdopen(events_r, "rb", 0)
        events_w = os.fdopen(events_w, "wb", 0)

        test_client = sentry_sdk.Hub.current.client

        old_capture_event = test_client.transport.capture_event

        def append(event):
            events_w.write(json.dumps(event).encode("utf-8"))
            events_w.write(b"\n")
            return old_capture_event(event)

        def flush(timeout=None, callback=None):
            events_w.write(b"flush\n")

        monkeypatch.setattr(test_client.transport, "capture_event", append)
        monkeypatch.setattr(test_client, "flush", flush)

        return EventStreamReader(events_r, events_w)

    return inner


class EventStreamReader(object):
    def __init__(self, read_file, write_file):
        self.read_file = read_file
        self.write_file = write_file

    def read_event(self):
        return json.loads(self.read_file.readline().decode("utf-8"))

    def read_flush(self):
        assert self.read_file.readline() == b"flush\n"


# scope=session ensures that fixture is run earlier
@pytest.fixture(
    scope="session",
    params=[None, "eventlet", "gevent"],
    ids=("threads", "eventlet", "greenlet"),
)
def maybe_monkeypatched_threading(request):
    if request.param == "eventlet":
        if eventlet is None:
            pytest.skip("no eventlet installed")

        try:
            eventlet.monkey_patch()
        except AttributeError as e:
            if "'thread.RLock' object has no attribute" in str(e):
                # https://bitbucket.org/pypy/pypy/issues/2962/gevent-cannot-patch-rlock-under-pypy-27-7
                pytest.skip("https://github.com/eventlet/eventlet/issues/546")
            else:
                raise
    elif request.param == "gevent":
        if gevent is None:
            pytest.skip("no gevent installed")
        try:
            gevent.monkey.patch_all()
        except Exception as e:
            if "_RLock__owner" in str(e):
                pytest.skip("https://github.com/gevent/gevent/issues/1380")
            else:
                raise
    else:
        assert request.param is None

    return request.param


@pytest.fixture
def render_span_tree():
    def inner(event):
        assert event["type"] == "transaction"

        by_parent = {}
        for span in event["spans"]:
            by_parent.setdefault(span["parent_span_id"], []).append(span)

        def render_span(span):
            yield "- op={}: description={}".format(
                json.dumps(span.get("op")), json.dumps(span.get("description"))
            )
            for subspan in by_parent.get(span["span_id"]) or ():
                for line in render_span(subspan):
                    yield "  {}".format(line)

        root_span = event["contexts"]["trace"]

        # Return a list instead of a multiline string because black will know better how to format that
        return "\n".join(render_span(root_span))

    return inner
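
# Example output (illustrative) from the fixture above, for a transaction with
# one nested span (json.dumps renders missing values as null):
#
#     - op="http.server": description=null
#       - op="db": description="SELECT 1"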


@pytest.fixture(name="StringContaining")
def string_containing_matcher():
    """
    An object which matches any string containing the substring passed to the
    object at instantiation time.

    Useful for assert_called_with, assert_any_call, etc.

    Used like this:

    >>> f = mock.Mock()
    >>> f("dogs are great")
    >>> f.assert_any_call("dogs") # will raise AssertionError
    Traceback (most recent call last):
        ...
    AssertionError: mock('dogs') call not found
    >>> f.assert_any_call(StringContaining("dogs")) # no AssertionError

    """

    class StringContaining(object):
        def __init__(self, substring):
            self.substring = substring

            try:
                # the `unicode` type only exists in python 2, so if this blows up,
                # we must be in py3 and have the `bytes` type
                self.valid_types = (str, unicode)
            except NameError:
                self.valid_types = (str, bytes)

        def __eq__(self, test_string):
            if not isinstance(test_string, self.valid_types):
                return False

            # this is safe even in py2 because as of 2.6, `bytes` exists in py2
            # as an alias for `str`
            if isinstance(test_string, bytes):
                test_string = test_string.decode()

            if len(self.substring) > len(test_string):
                return False

            return self.substring in test_string

        def __ne__(self, test_string):
            return not self.__eq__(test_string)

    return StringContaining


def _safe_is_equal(x, y):
    """
    Compares two values, preferring to use the first's __eq__ method if it
    exists and is implemented.

    Accounts for py2/py3 differences (like ints in py2 not having a __eq__
    method), as well as the incomparability of certain types exposed by using
    raw __eq__ () rather than ==.
    """

    # Prefer using __eq__ directly to ensure that examples like
    #
    #   maisey = Dog()
    #   maisey.name = "Maisey the Dog"
    #   maisey == ObjectDescribedBy(attrs={"name": StringContaining("Maisey")})
    #
    # evaluate to True (in other words, examples where the values in self.attrs
    # might also have custom __eq__ methods; this makes sure those methods get
    # used if possible)
    try:
        is_equal = x.__eq__(y)
    except AttributeError:
        is_equal = NotImplemented

    # this can happen on its own, too (i.e. without an AttributeError being
    # thrown), which is why this is separate from the except block above
    if is_equal == NotImplemented:
        # using == smoothes out weird variations exposed by raw __eq__
        return x == y

    return is_equal


@pytest.fixture(name="DictionaryContaining")
def dictionary_containing_matcher():
    """
    An object which matches any dictionary containing all key-value pairs from
    the dictionary passed to the object at instantiation time.

    Useful for assert_called_with, assert_any_call, etc.

    Used like this:

    >>> f = mock.Mock()
    >>> f({"dogs": "yes", "cats": "maybe"})
    >>> f.assert_any_call({"dogs": "yes"}) # will raise AssertionError
    Traceback (most recent call last):
        ...
    AssertionError: mock({'dogs': 'yes'}) call not found
    >>> f.assert_any_call(DictionaryContaining({"dogs": "yes"})) # no AssertionError
    """

    class DictionaryContaining(object):
        def __init__(self, subdict):
            self.subdict = subdict

        def __eq__(self, test_dict):
            if not isinstance(test_dict, dict):
                return False

            if len(self.subdict) > len(test_dict):
                return False

            for key, value in self.subdict.items():
                try:
                    test_value = test_dict[key]
                except KeyError:  # missing key
                    return False

                if not _safe_is_equal(value, test_value):
                    return False

            return True

        def __ne__(self, test_dict):
            return not self.__eq__(test_dict)

    return DictionaryContaining


@pytest.fixture(name="ObjectDescribedBy")
def object_described_by_matcher():
    """
    An object which matches any other object with the given properties.

    Available properties currently are "type" (a type object) and "attrs" (a
    dictionary).

    Useful for assert_called_with, assert_any_call, etc.

    Used like this:

    >>> class Dog(object):
    ...     pass
    ...
    >>> maisey = Dog()
    >>> maisey.name = "Maisey"
    >>> maisey.age = 7
    >>> f = mock.Mock()
    >>> f(maisey)
    >>> f.assert_any_call(ObjectDescribedBy(type=Dog)) # no AssertionError
    >>> f.assert_any_call(ObjectDescribedBy(attrs={"name": "Maisey"})) # no AssertionError
    """

    class ObjectDescribedBy(object):
        def __init__(self, type=None, attrs=None):
            self.type = type
            self.attrs = attrs

        def __eq__(self, test_obj):
            if self.type:
                if not isinstance(test_obj, self.type):
                    return False

            if self.attrs:
                for attr_name, attr_value in self.attrs.items():
                    try:
                        test_value = getattr(test_obj, attr_name)
                    except AttributeError:  # missing attribute
                        return False

                    if not _safe_is_equal(attr_value, test_value):
                        return False

            return True

        def __ne__(self, test_obj):
            return not self.__eq__(test_obj)

    return ObjectDescribedBy


@pytest.fixture
def teardown_profiling():
    yield
    teardown_profiler()


class MockServerRequestHandler(BaseHTTPRequestHandler):
    def do_GET(self):  # noqa: N802
        # Process an HTTP GET request and return a response with an HTTP 200 status.
        self.send_response(200)
        self.end_headers()
        return


def get_free_port():
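    # Bind to port 0 so the OS assigns a free ephemeral port, then close the
    # socket to release it. Nothing holds the port afterwards, so another
    # process could in principle grab it before the caller binds it; that
    # race is acceptable for these tests.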
    s = socket.socket(socket.AF_INET, type=socket.SOCK_STREAM)
    s.bind(("localhost", 0))
    _, port = s.getsockname()
    s.close()
    return port


def create_mock_http_server():
    # Start a mock server to test outgoing http requests
    mock_server_port = get_free_port()
    mock_server = HTTPServer(("localhost", mock_server_port), MockServerRequestHandler)
    mock_server_thread = Thread(target=mock_server.serve_forever)
    mock_server_thread.daemon = True
    mock_server_thread.start()

    return mock_server_port
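
# Illustrative usage (a sketch; any HTTP client works in place of urlopen):
#
#     port = create_mock_http_server()
#     urlopen("http://localhost:{}/".format(port))
#
# The server runs on a daemon thread, so it is torn down with the process.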


def unpack_werkzeug_response(response):
    # werkzeug < 2.1 returns a tuple as the client response; newer versions
    # return a response object
    try:
        return response.get_data(), response.status, response.headers
    except AttributeError:
        content, status, headers = response
        return b"".join(content), status, headers


def werkzeug_set_cookie(client, servername, key, value):
    # client.set_cookie has a different signature in different werkzeug versions
    try:
        client.set_cookie(servername, key, value)
    except TypeError:
        client.set_cookie(key, value)


@contextmanager
def patch_start_tracing_child(fake_transaction_is_none=False):
    # type: (bool) -> Iterator[Optional[mock.MagicMock]]
    if not fake_transaction_is_none:
        fake_transaction = mock.MagicMock()
        fake_start_child = mock.MagicMock()
        fake_transaction.start_child = fake_start_child
    else:
        fake_transaction = None
        fake_start_child = None

    version = "2" if PY2 else "3"

    with mock.patch(
        "sentry_sdk.tracing_utils_py%s.get_current_span" % version,
        return_value=fake_transaction,
    ):
        yield fake_start_child
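
# Illustrative usage (a sketch):
#
#     with patch_start_tracing_child() as fake_start_child:
#         ...  # exercise code that starts a child span on the current span
#         assert fake_start_child.call_count == 1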
sentry-python-1.39.2/tests/integrations/__init__.py
sentry-python-1.39.2/tests/integrations/aiohttp/__init__.py
import pytest

pytest.importorskip("aiohttp")
sentry-python-1.39.2/tests/integrations/aiohttp/test_aiohttp.py
import asyncio
import json
from contextlib import suppress

import pytest
from aiohttp import web
from aiohttp.client import ServerDisconnectedError
from aiohttp.web_request import Request

from sentry_sdk import capture_message, start_transaction
from sentry_sdk.integrations.aiohttp import AioHttpIntegration

try:
    from unittest import mock  # python 3.3 and above
except ImportError:
    import mock  # python < 3.3


@pytest.mark.asyncio
async def test_basic(sentry_init, aiohttp_client, capture_events):
    sentry_init(integrations=[AioHttpIntegration()])

    async def hello(request):
        1 / 0

    app = web.Application()
    app.router.add_get("/", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    resp = await client.get("/")
    assert resp.status == 500

    (event,) = events

    assert (
        event["transaction"]
        == "tests.integrations.aiohttp.test_aiohttp.test_basic..hello"
    )

    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    request = event["request"]
    host = request["headers"]["Host"]

    assert request["env"] == {"REMOTE_ADDR": "127.0.0.1"}
    assert request["method"] == "GET"
    assert request["query_string"] == ""
    assert request.get("data") is None
    assert request["url"] == "http://{host}/".format(host=host)
    assert request["headers"] == {
        "Accept": "*/*",
        "Accept-Encoding": "gzip, deflate",
        "Host": host,
        "User-Agent": request["headers"]["User-Agent"],
        "baggage": mock.ANY,
        "sentry-trace": mock.ANY,
    }


@pytest.mark.asyncio
async def test_post_body_not_read(sentry_init, aiohttp_client, capture_events):
    from sentry_sdk.integrations.aiohttp import BODY_NOT_READ_MESSAGE

    sentry_init(integrations=[AioHttpIntegration()])

    body = {"some": "value"}

    async def hello(request):
        1 / 0

    app = web.Application()
    app.router.add_post("/", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    resp = await client.post("/", json=body)
    assert resp.status == 500

    (event,) = events
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    request = event["request"]

    assert request["env"] == {"REMOTE_ADDR": "127.0.0.1"}
    assert request["method"] == "POST"
    assert request["data"] == BODY_NOT_READ_MESSAGE


@pytest.mark.asyncio
async def test_post_body_read(sentry_init, aiohttp_client, capture_events):
    sentry_init(integrations=[AioHttpIntegration()])

    body = {"some": "value"}

    async def hello(request):
        await request.json()
        1 / 0

    app = web.Application()
    app.router.add_post("/", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    resp = await client.post("/", json=body)
    assert resp.status == 500

    (event,) = events
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    request = event["request"]

    assert request["env"] == {"REMOTE_ADDR": "127.0.0.1"}
    assert request["method"] == "POST"
    assert request["data"] == json.dumps(body)


@pytest.mark.asyncio
async def test_403_not_captured(sentry_init, aiohttp_client, capture_events):
    sentry_init(integrations=[AioHttpIntegration()])

    async def hello(request):
        raise web.HTTPForbidden()

    app = web.Application()
    app.router.add_get("/", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    resp = await client.get("/")
    assert resp.status == 403

    assert not events


@pytest.mark.asyncio
async def test_cancelled_error_not_captured(
    sentry_init, aiohttp_client, capture_events
):
    sentry_init(integrations=[AioHttpIntegration()])

    async def hello(request):
        raise asyncio.CancelledError()

    app = web.Application()
    app.router.add_get("/", hello)

    events = capture_events()
    client = await aiohttp_client(app)

    with suppress(ServerDisconnectedError):
        # Intended `aiohttp` interaction: server will disconnect if it
        # encounters `asyncio.CancelledError`
        await client.get("/")

    assert not events


@pytest.mark.asyncio
async def test_half_initialized(sentry_init, aiohttp_client, capture_events):
    sentry_init(integrations=[AioHttpIntegration()])
    sentry_init()

    async def hello(request):
        return web.Response(text="hello")

    app = web.Application()
    app.router.add_get("/", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    resp = await client.get("/")
    assert resp.status == 200

    assert events == []


@pytest.mark.asyncio
async def test_tracing(sentry_init, aiohttp_client, capture_events):
    sentry_init(integrations=[AioHttpIntegration()], traces_sample_rate=1.0)

    async def hello(request):
        return web.Response(text="hello")

    app = web.Application()
    app.router.add_get("/", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    resp = await client.get("/")
    assert resp.status == 200

    (event,) = events

    assert event["type"] == "transaction"
    assert (
        event["transaction"]
        == "tests.integrations.aiohttp.test_aiohttp.test_tracing..hello"
    )


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "url,transaction_style,expected_transaction,expected_source",
    [
        (
            "/message",
            "handler_name",
            "tests.integrations.aiohttp.test_aiohttp.test_transaction_style..hello",
            "component",
        ),
        (
            "/message",
            "method_and_path_pattern",
            "GET /{var}",
            "route",
        ),
    ],
)
async def test_transaction_style(
    sentry_init,
    aiohttp_client,
    capture_events,
    url,
    transaction_style,
    expected_transaction,
    expected_source,
):
    sentry_init(
        integrations=[AioHttpIntegration(transaction_style=transaction_style)],
        traces_sample_rate=1.0,
    )

    async def hello(request):
        return web.Response(text="hello")

    app = web.Application()
    app.router.add_get(r"/{var}", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    resp = await client.get(url)
    assert resp.status == 200

    (event,) = events

    assert event["type"] == "transaction"
    assert event["transaction"] == expected_transaction
    assert event["transaction_info"] == {"source": expected_source}


@pytest.mark.asyncio
async def test_traces_sampler_gets_request_object_in_sampling_context(
    sentry_init,
    aiohttp_client,
    DictionaryContaining,  # noqa:N803
    ObjectDescribedBy,
):
    traces_sampler = mock.Mock()
    sentry_init(
        integrations=[AioHttpIntegration()],
        traces_sampler=traces_sampler,
    )

    async def kangaroo_handler(request):
        return web.Response(text="dogs are great")

    app = web.Application()
    app.router.add_get("/tricks/kangaroo", kangaroo_handler)

    client = await aiohttp_client(app)
    await client.get("/tricks/kangaroo")

    traces_sampler.assert_any_call(
        DictionaryContaining(
            {
                "aiohttp_request": ObjectDescribedBy(
                    type=Request, attrs={"method": "GET", "path": "/tricks/kangaroo"}
                )
            }
        )
    )


@pytest.mark.asyncio
async def test_has_trace_if_performance_enabled(
    sentry_init, aiohttp_client, capture_events
):
    sentry_init(integrations=[AioHttpIntegration()], traces_sample_rate=1.0)

    async def hello(request):
        capture_message("It's a good day to try dividing by 0")
        1 / 0

    app = web.Application()
    app.router.add_get("/", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    resp = await client.get("/")
    assert resp.status == 500

    msg_event, error_event, transaction_event = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert transaction_event["contexts"]["trace"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert (
        error_event["contexts"]["trace"]["trace_id"]
        == transaction_event["contexts"]["trace"]["trace_id"]
        == msg_event["contexts"]["trace"]["trace_id"]
    )


@pytest.mark.asyncio
async def test_has_trace_if_performance_disabled(
    sentry_init, aiohttp_client, capture_events
):
    sentry_init(integrations=[AioHttpIntegration()])

    async def hello(request):
        capture_message("It's a good day to try dividing by 0")
        1 / 0

    app = web.Application()
    app.router.add_get("/", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    resp = await client.get("/")
    assert resp.status == 500

    msg_event, error_event = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert (
        error_event["contexts"]["trace"]["trace_id"]
        == msg_event["contexts"]["trace"]["trace_id"]
    )


@pytest.mark.asyncio
async def test_trace_from_headers_if_performance_enabled(
    sentry_init, aiohttp_client, capture_events
):
    sentry_init(integrations=[AioHttpIntegration()], traces_sample_rate=1.0)

    async def hello(request):
        capture_message("It's a good day to try dividing by 0")
        1 / 0

    app = web.Application()
    app.router.add_get("/", hello)

    events = capture_events()

    # The aiohttp_client is instrumented, so it will generate the sentry-trace
    # header and add it to the request. Get the sentry-trace header from the
    # request so we can later compare it with the captured events.
    client = await aiohttp_client(app)
    resp = await client.get("/")
    sentry_trace_header = resp.request_info.headers.get("sentry-trace")
    trace_id = sentry_trace_header.split("-")[0]

    assert resp.status == 500

    msg_event, error_event, transaction_event = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert transaction_event["contexts"]["trace"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
    assert transaction_event["contexts"]["trace"]["trace_id"] == trace_id


@pytest.mark.asyncio
async def test_trace_from_headers_if_performance_disabled(
    sentry_init, aiohttp_client, capture_events
):
    sentry_init(integrations=[AioHttpIntegration()])

    async def hello(request):
        capture_message("It's a good day to try dividing by 0")
        1 / 0

    app = web.Application()
    app.router.add_get("/", hello)

    events = capture_events()

    # The aiohttp_client is instrumented, so it will generate the sentry-trace
    # header and add it to the request. Get the sentry-trace header from the
    # request so we can later compare it with the captured events.
    client = await aiohttp_client(app)
    resp = await client.get("/")
    sentry_trace_header = resp.request_info.headers.get("sentry-trace")
    trace_id = sentry_trace_header.split("-")[0]

    assert resp.status == 500

    msg_event, error_event = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
    assert error_event["contexts"]["trace"]["trace_id"] == trace_id


@pytest.mark.asyncio
async def test_crumb_capture(
    sentry_init, aiohttp_raw_server, aiohttp_client, loop, capture_events
):
    def before_breadcrumb(crumb, hint):
        crumb["data"]["extra"] = "foo"
        return crumb

    sentry_init(
        integrations=[AioHttpIntegration()], before_breadcrumb=before_breadcrumb
    )

    async def handler(request):
        return web.Response(text="OK")

    raw_server = await aiohttp_raw_server(handler)

    with start_transaction():
        events = capture_events()

        client = await aiohttp_client(raw_server)
        resp = await client.get("/")
        assert resp.status == 200
        capture_message("Testing!")

        (event,) = events

        crumb = event["breadcrumbs"]["values"][0]
        assert crumb["type"] == "http"
        assert crumb["category"] == "httplib"
        assert crumb["data"] == {
            "url": "http://127.0.0.1:{}/".format(raw_server.port),
            "http.fragment": "",
            "http.method": "GET",
            "http.query": "",
            "http.response.status_code": 200,
            "reason": "OK",
            "extra": "foo",
        }


@pytest.mark.asyncio
async def test_outgoing_trace_headers(sentry_init, aiohttp_raw_server, aiohttp_client):
    sentry_init(
        integrations=[AioHttpIntegration()],
        traces_sample_rate=1.0,
    )

    async def handler(request):
        return web.Response(text="OK")

    raw_server = await aiohttp_raw_server(handler)

    with start_transaction(
        name="/interactions/other-dogs/new-dog",
        op="greeting.sniff",
        # make the trace_id distinguishable between transactions
        trace_id="0123456789012345678901234567890",
    ) as transaction:
        client = await aiohttp_client(raw_server)
        resp = await client.get("/")
        request_span = transaction._span_recorder.spans[-1]

        assert resp.request_info.headers[
            "sentry-trace"
        ] == "{trace_id}-{parent_span_id}-{sampled}".format(
            trace_id=transaction.trace_id,
            parent_span_id=request_span.span_id,
            sampled=1,
        )


@pytest.mark.asyncio
async def test_outgoing_trace_headers_append_to_baggage(
    sentry_init, aiohttp_raw_server, aiohttp_client
):
    sentry_init(
        integrations=[AioHttpIntegration()],
        traces_sample_rate=1.0,
        release="d08ebdb9309e1b004c6f52202de58a09c2268e42",
    )

    async def handler(request):
        return web.Response(text="OK")

    raw_server = await aiohttp_raw_server(handler)

    with start_transaction(
        name="/interactions/other-dogs/new-dog",
        op="greeting.sniff",
        trace_id="0123456789012345678901234567890",
    ):
        client = await aiohttp_client(raw_server)
        resp = await client.get("/", headers={"bagGage": "custom=value"})

        assert (
            resp.request_info.headers["baggage"]
            == "custom=value,sentry-trace_id=0123456789012345678901234567890,sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true"
        )
sentry-python-1.39.2/tests/integrations/argv/test_argv.py
import sys

from sentry_sdk import capture_message
from sentry_sdk.integrations.argv import ArgvIntegration


def test_basic(sentry_init, capture_events, monkeypatch):
    sentry_init(integrations=[ArgvIntegration()])

    argv = ["foo", "bar", "baz"]
    monkeypatch.setattr(sys, "argv", argv)

    events = capture_events()
    capture_message("hi")
    (event,) = events
    assert event["extra"]["sys.argv"] == argv
sentry-python-1.39.2/tests/integrations/ariadne/__init__.py
import pytest

pytest.importorskip("ariadne")
pytest.importorskip("fastapi")
pytest.importorskip("flask")
sentry-python-1.39.2/tests/integrations/ariadne/test_ariadne.py
from ariadne import gql, graphql_sync, ObjectType, QueryType, make_executable_schema
from ariadne.asgi import GraphQL
from fastapi import FastAPI
from fastapi.testclient import TestClient
from flask import Flask, request, jsonify

from sentry_sdk.integrations.ariadne import AriadneIntegration
from sentry_sdk.integrations.fastapi import FastApiIntegration
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.starlette import StarletteIntegration


def schema_factory():
    type_defs = gql(
        """
        type Query {
            greeting(name: String): Greeting
            error: String
        }

        type Greeting {
            name: String
        }
    """
    )

    query = QueryType()
    greeting = ObjectType("Greeting")

    @query.field("greeting")
    def resolve_greeting(*_, **kwargs):
        name = kwargs.pop("name")
        return {"name": name}

    @query.field("error")
    def resolve_error(obj, *_):
        raise RuntimeError("resolver failed")

    @greeting.field("name")
    def resolve_name(obj, *_):
        return "Hello, {}!".format(obj["name"])

    return make_executable_schema(type_defs, query)


def test_capture_request_and_response_if_send_pii_is_on_async(
    sentry_init, capture_events
):
    sentry_init(
        send_default_pii=True,
        integrations=[
            AriadneIntegration(),
            FastApiIntegration(),
            StarletteIntegration(),
        ],
    )
    events = capture_events()

    schema = schema_factory()

    async_app = FastAPI()
    async_app.mount("/graphql/", GraphQL(schema))

    query = {"query": "query ErrorQuery {error}"}
    client = TestClient(async_app)
    client.post("/graphql", json=query)

    assert len(events) == 1

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne"
    assert event["contexts"]["response"] == {
        "data": {
            "data": {"error": None},
            "errors": [
                {
                    "locations": [{"column": 19, "line": 1}],
                    "message": "resolver failed",
                    "path": ["error"],
                }
            ],
        }
    }
    assert event["request"]["api_target"] == "graphql"
    assert event["request"]["data"] == query


def test_capture_request_and_response_if_send_pii_is_on_sync(
    sentry_init, capture_events
):
    sentry_init(
        send_default_pii=True,
        integrations=[AriadneIntegration(), FlaskIntegration()],
    )
    events = capture_events()

    schema = schema_factory()

    sync_app = Flask(__name__)

    @sync_app.route("/graphql", methods=["POST"])
    def graphql_server():
        data = request.get_json()
        success, result = graphql_sync(schema, data)
        return jsonify(result), 200

    query = {"query": "query ErrorQuery {error}"}
    client = sync_app.test_client()
    client.post("/graphql", json=query)

    assert len(events) == 1

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne"
    assert event["contexts"]["response"] == {
        "data": {
            "data": {"error": None},
            "errors": [
                {
                    "locations": [{"column": 19, "line": 1}],
                    "message": "resolver failed",
                    "path": ["error"],
                }
            ],
        }
    }
    assert event["request"]["api_target"] == "graphql"
    assert event["request"]["data"] == query


def test_do_not_capture_request_and_response_if_send_pii_is_off_async(
    sentry_init, capture_events
):
    sentry_init(
        integrations=[
            AriadneIntegration(),
            FastApiIntegration(),
            StarletteIntegration(),
        ],
    )
    events = capture_events()

    schema = schema_factory()

    async_app = FastAPI()
    async_app.mount("/graphql/", GraphQL(schema))

    query = {"query": "query ErrorQuery {error}"}
    client = TestClient(async_app)
    client.post("/graphql", json=query)

    assert len(events) == 1

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne"
    assert "data" not in event["request"]
    assert "response" not in event["contexts"]


def test_do_not_capture_request_and_response_if_send_pii_is_off_sync(
    sentry_init, capture_events
):
    sentry_init(
        integrations=[AriadneIntegration(), FlaskIntegration()],
    )
    events = capture_events()

    schema = schema_factory()

    sync_app = Flask(__name__)

    @sync_app.route("/graphql", methods=["POST"])
    def graphql_server():
        data = request.get_json()
        success, result = graphql_sync(schema, data)
        return jsonify(result), 200

    query = {"query": "query ErrorQuery {error}"}
    client = sync_app.test_client()
    client.post("/graphql", json=query)

    assert len(events) == 1

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne"
    assert "data" not in event["request"]
    assert "response" not in event["contexts"]


def test_capture_validation_error(sentry_init, capture_events):
    sentry_init(
        send_default_pii=True,
        integrations=[
            AriadneIntegration(),
            FastApiIntegration(),
            StarletteIntegration(),
        ],
    )
    events = capture_events()

    schema = schema_factory()

    async_app = FastAPI()
    async_app.mount("/graphql/", GraphQL(schema))

    query = {"query": "query ErrorQuery {doesnt_exist}"}
    client = TestClient(async_app)
    client.post("/graphql", json=query)

    assert len(events) == 1

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne"
    assert event["contexts"]["response"] == {
        "data": {
            "errors": [
                {
                    "locations": [{"column": 19, "line": 1}],
                    "message": "Cannot query field 'doesnt_exist' on type 'Query'.",
                }
            ]
        }
    }
    assert event["request"]["api_target"] == "graphql"
    assert event["request"]["data"] == query


def test_no_event_if_no_errors_async(sentry_init, capture_events):
    sentry_init(
        integrations=[
            AriadneIntegration(),
            FastApiIntegration(),
            StarletteIntegration(),
        ],
    )
    events = capture_events()

    schema = schema_factory()

    async_app = FastAPI()
    async_app.mount("/graphql/", GraphQL(schema))

    query = {
        "query": "query GreetingQuery($name: String) { greeting(name: $name) {name} }",
        "variables": {"name": "some name"},
    }
    client = TestClient(async_app)
    client.post("/graphql", json=query)

    assert len(events) == 0


def test_no_event_if_no_errors_sync(sentry_init, capture_events):
    sentry_init(
        integrations=[AriadneIntegration(), FlaskIntegration()],
    )
    events = capture_events()

    schema = schema_factory()

    sync_app = Flask(__name__)

    @sync_app.route("/graphql", methods=["POST"])
    def graphql_server():
        data = request.get_json()
        success, result = graphql_sync(schema, data)
        return jsonify(result), 200

    query = {
        "query": "query GreetingQuery($name: String) { greeting(name: $name) {name} }",
        "variables": {"name": "some name"},
    }
    client = sync_app.test_client()
    client.post("/graphql", json=query)

    assert len(events) == 0
sentry-python-1.39.2/tests/integrations/arq/__init__.py
import pytest

pytest.importorskip("arq")
sentry-python-1.39.2/tests/integrations/arq/test_arq.py
import asyncio
import pytest

from sentry_sdk import start_transaction, Hub
from sentry_sdk.integrations.arq import ArqIntegration

import arq.worker
from arq import cron
from arq.connections import ArqRedis
from arq.jobs import Job
from arq.utils import timestamp_ms

from fakeredis.aioredis import FakeRedis


def async_partial(async_fn, *args, **kwargs):
    # asyncio.iscoroutinefunction (used in the integration code) does not, on
    # Python < 3.8, detect async functions wrapped in functools.partial objects.
    # This partial implementation returns a coroutine instead.
    async def wrapped(ctx):
        return await async_fn(ctx, *args, **kwargs)

    return wrapped


@pytest.fixture(autouse=True)
def patch_fakeredis_info_command():
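    # Some fakeredis versions do not implement the INFO command, which arq
    # issues when connecting (presumably the reason this shim exists); patch
    # in a minimal stub that simply echoes the requested section back.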
    from fakeredis._fakesocket import FakeSocket

    if not hasattr(FakeSocket, "info"):
        from fakeredis._commands import command
        from fakeredis._helpers import SimpleString

        @command((SimpleString,), name="info")
        def info(self, section):
            return section

        FakeSocket.info = info


@pytest.fixture
def init_arq(sentry_init):
    def inner(
        cls_functions=None,
        cls_cron_jobs=None,
        kw_functions=None,
        kw_cron_jobs=None,
        allow_abort_jobs_=False,
    ):
        cls_functions = cls_functions or []
        cls_cron_jobs = cls_cron_jobs or []

        kwargs = {}
        if kw_functions is not None:
            kwargs["functions"] = kw_functions
        if kw_cron_jobs is not None:
            kwargs["cron_jobs"] = kw_cron_jobs

        sentry_init(
            integrations=[ArqIntegration()],
            traces_sample_rate=1.0,
            send_default_pii=True,
            debug=True,
        )

        server = FakeRedis()
        pool = ArqRedis(pool_or_conn=server.connection_pool)

        class WorkerSettings:
            functions = cls_functions
            cron_jobs = cls_cron_jobs
            redis_pool = pool
            allow_abort_jobs = allow_abort_jobs_

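        # Drop empty class attributes so they do not shadow the functions and
        # cron_jobs passed to create_worker as keyword arguments.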
        if not WorkerSettings.functions:
            del WorkerSettings.functions
        if not WorkerSettings.cron_jobs:
            del WorkerSettings.cron_jobs

        worker = arq.worker.create_worker(WorkerSettings, **kwargs)

        return pool, worker

    return inner


@pytest.mark.asyncio
async def test_job_result(init_arq):
    async def increase(ctx, num):
        return num + 1

    increase.__qualname__ = increase.__name__

    pool, worker = init_arq([increase])

    job = await pool.enqueue_job("increase", 3)

    assert isinstance(job, Job)

    await worker.run_job(job.job_id, timestamp_ms())
    result = await job.result()
    job_result = await job.result_info()

    assert result == 4
    assert job_result.result == 4


@pytest.mark.asyncio
async def test_job_retry(capture_events, init_arq):
    async def retry_job(ctx):
        if ctx["job_try"] < 2:
            raise arq.worker.Retry

    retry_job.__qualname__ = retry_job.__name__

    pool, worker = init_arq([retry_job])

    job = await pool.enqueue_job("retry_job")

    events = capture_events()

    await worker.run_job(job.job_id, timestamp_ms())

    event = events.pop(0)
    assert event["contexts"]["trace"]["status"] == "aborted"
    assert event["transaction"] == "retry_job"
    assert event["tags"]["arq_task_id"] == job.job_id
    assert event["extra"]["arq-job"]["retry"] == 1

    await worker.run_job(job.job_id, timestamp_ms())

    event = events.pop(0)
    assert event["contexts"]["trace"]["status"] == "ok"
    assert event["transaction"] == "retry_job"
    assert event["tags"]["arq_task_id"] == job.job_id
    assert event["extra"]["arq-job"]["retry"] == 2


@pytest.mark.parametrize(
    "source", [("cls_functions", "cls_cron_jobs"), ("kw_functions", "kw_cron_jobs")]
)
@pytest.mark.parametrize("job_fails", [True, False], ids=["error", "success"])
@pytest.mark.asyncio
async def test_job_transaction(capture_events, init_arq, source, job_fails):
    async def division(_, a, b=0):
        return a / b

    division.__qualname__ = division.__name__

    cron_func = async_partial(division, a=1, b=int(not job_fails))
    cron_func.__qualname__ = division.__name__

    cron_job = cron(cron_func, minute=0, run_at_startup=True)

    functions_key, cron_jobs_key = source
    pool, worker = init_arq(**{functions_key: [division], cron_jobs_key: [cron_job]})

    events = capture_events()

    job = await pool.enqueue_job("division", 1, b=int(not job_fails))
    await worker.run_job(job.job_id, timestamp_ms())

    loop = asyncio.get_event_loop()
    task = loop.create_task(worker.async_run())
    await asyncio.sleep(1)

    task.cancel()

    await worker.close()

    if job_fails:
        error_func_event = events.pop(0)
        error_cron_event = events.pop(1)

        assert error_func_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
        assert error_func_event["exception"]["values"][0]["mechanism"]["type"] == "arq"

        func_extra = error_func_event["extra"]["arq-job"]
        assert func_extra["task"] == "division"

        assert error_cron_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
        assert error_cron_event["exception"]["values"][0]["mechanism"]["type"] == "arq"

        cron_extra = error_cron_event["extra"]["arq-job"]
        assert cron_extra["task"] == "cron:division"

    [func_event, cron_event] = events

    assert func_event["type"] == "transaction"
    assert func_event["transaction"] == "division"
    assert func_event["transaction_info"] == {"source": "task"}

    assert "arq_task_id" in func_event["tags"]
    assert "arq_task_retry" in func_event["tags"]

    func_extra = func_event["extra"]["arq-job"]

    assert func_extra["task"] == "division"
    assert func_extra["kwargs"] == {"b": int(not job_fails)}
    assert func_extra["retry"] == 1

    assert cron_event["type"] == "transaction"
    assert cron_event["transaction"] == "cron:division"
    assert cron_event["transaction_info"] == {"source": "task"}

    assert "arq_task_id" in cron_event["tags"]
    assert "arq_task_retry" in cron_event["tags"]

    cron_extra = cron_event["extra"]["arq-job"]

    assert cron_extra["task"] == "cron:division"
    assert cron_extra["kwargs"] == {}
    assert cron_extra["retry"] == 1


@pytest.mark.parametrize("source", ["cls_functions", "kw_functions"])
@pytest.mark.asyncio
async def test_enqueue_job(capture_events, init_arq, source):
    async def dummy_job(_):
        pass

    pool, _ = init_arq(**{source: [dummy_job]})

    events = capture_events()

    with start_transaction() as transaction:
        await pool.enqueue_job("dummy_job")

    (event,) = events

    assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id
    assert event["contexts"]["trace"]["span_id"] == transaction.span_id

    assert len(event["spans"])
    assert event["spans"][0]["op"] == "queue.submit.arq"
    assert event["spans"][0]["description"] == "dummy_job"


@pytest.mark.asyncio
async def test_execute_job_without_integration(init_arq):
    async def dummy_job(_ctx):
        pass

    dummy_job.__qualname__ = dummy_job.__name__

    pool, worker = init_arq([dummy_job])
    # remove the integration to trigger the edge case
    Hub.current.client.integrations.pop("arq")

    job = await pool.enqueue_job("dummy_job")

    await worker.run_job(job.job_id, timestamp_ms())

    assert await job.result() is None
sentry-python-1.39.2/tests/integrations/asgi/__init__.py
import pytest

pytest.importorskip("asyncio")
pytest.importorskip("pytest_asyncio")
pytest.importorskip("async_asgi_testclient")
sentry-python-1.39.2/tests/integrations/asgi/test_asgi.py
import sys

from collections import Counter

import pytest
import sentry_sdk
from sentry_sdk import capture_message
from sentry_sdk.integrations._asgi_common import _get_ip, _get_headers
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware, _looks_like_asgi3

from async_asgi_testclient import TestClient


minimum_python_36 = pytest.mark.skipif(
    sys.version_info < (3, 6), reason="ASGI is only supported in Python >= 3.6"
)


@pytest.fixture
def asgi3_app():
    async def app(scope, receive, send):
        if scope["type"] == "lifespan":
            while True:
                message = await receive()
                if message["type"] == "lifespan.startup":
                    await send({"type": "lifespan.startup.complete"})
                elif message["type"] == "lifespan.shutdown":
                    await send({"type": "lifespan.shutdown.complete"})
                    return
        elif (
            scope["type"] == "http"
            and "route" in scope
            and scope["route"] == "/trigger/error"
        ):
            1 / 0

        await send(
            {
                "type": "http.response.start",
                "status": 200,
                "headers": [
                    [b"content-type", b"text/plain"],
                ],
            }
        )

        await send(
            {
                "type": "http.response.body",
                "body": b"Hello, world!",
            }
        )

    return app


@pytest.fixture
def asgi3_app_with_error():
    async def send_with_error(event):
        1 / 0

    async def app(scope, receive, send):
        if scope["type"] == "lifespan":
            while True:
                message = await receive()
                if message["type"] == "lifespan.startup":
                    ...  # Do some startup here!
                    await send({"type": "lifespan.startup.complete"})
                elif message["type"] == "lifespan.shutdown":
                    ...  # Do some shutdown here!
                    await send({"type": "lifespan.shutdown.complete"})
                    return
        else:
            await send_with_error(
                {
                    "type": "http.response.start",
                    "status": 200,
                    "headers": [
                        [b"content-type", b"text/plain"],
                    ],
                }
            )
            await send_with_error(
                {
                    "type": "http.response.body",
                    "body": b"Hello, world!",
                }
            )

    return app


@pytest.fixture
def asgi3_app_with_error_and_msg():
    async def app(scope, receive, send):
        await send(
            {
                "type": "http.response.start",
                "status": 200,
                "headers": [
                    [b"content-type", b"text/plain"],
                ],
            }
        )

        capture_message("Let's try dividing by 0")
        1 / 0

        await send(
            {
                "type": "http.response.body",
                "body": b"Hello, world!",
            }
        )

    return app


@pytest.fixture
def asgi3_ws_app():
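    # message() is evaluated eagerly while the websocket.send payload is
    # built: it records a message event and then raises ValueError, which is
    # what test_websocket below asserts on.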
    def message():
        capture_message("Some message to the world!")
        raise ValueError("Oh no")

    async def app(scope, receive, send):
        await send(
            {
                "type": "websocket.send",
                "text": message(),
            }
        )

    return app


@minimum_python_36
def test_invalid_transaction_style(asgi3_app):
    with pytest.raises(ValueError) as exp:
        SentryAsgiMiddleware(asgi3_app, transaction_style="URL")

    assert (
        str(exp.value)
        == "Invalid value for transaction_style: URL (must be in ('endpoint', 'url'))"
    )


@minimum_python_36
@pytest.mark.asyncio
async def test_capture_transaction(
    sentry_init,
    asgi3_app,
    capture_events,
):
    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
    app = SentryAsgiMiddleware(asgi3_app)

    async with TestClient(app) as client:
        events = capture_events()
        await client.get("/some_url?somevalue=123")

    (transaction_event,) = events

    assert transaction_event["type"] == "transaction"
    assert transaction_event["transaction"] == "/some_url"
    assert transaction_event["transaction_info"] == {"source": "url"}
    assert transaction_event["contexts"]["trace"]["op"] == "http.server"
    assert transaction_event["request"] == {
        "headers": {
            "host": "localhost",
            "remote-addr": "127.0.0.1",
            "user-agent": "ASGI-Test-Client",
        },
        "method": "GET",
        "query_string": "somevalue=123",
        "url": "http://localhost/some_url",
    }


@minimum_python_36
@pytest.mark.asyncio
async def test_capture_transaction_with_error(
    sentry_init,
    asgi3_app_with_error,
    capture_events,
    DictionaryContaining,  # noqa: N803
):
    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
    app = SentryAsgiMiddleware(asgi3_app_with_error)

    events = capture_events()
    with pytest.raises(ZeroDivisionError):
        async with TestClient(app) as client:
            await client.get("/some_url")

    (
        error_event,
        transaction_event,
    ) = events

    assert error_event["transaction"] == "/some_url"
    assert error_event["transaction_info"] == {"source": "url"}
    assert error_event["contexts"]["trace"]["op"] == "http.server"
    assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
    assert error_event["exception"]["values"][0]["value"] == "division by zero"
    assert error_event["exception"]["values"][0]["mechanism"]["handled"] is False
    assert error_event["exception"]["values"][0]["mechanism"]["type"] == "asgi"

    assert transaction_event["type"] == "transaction"
    assert transaction_event["contexts"]["trace"] == DictionaryContaining(
        error_event["contexts"]["trace"]
    )
    assert transaction_event["contexts"]["trace"]["status"] == "internal_error"
    assert transaction_event["transaction"] == error_event["transaction"]
    assert transaction_event["request"] == error_event["request"]


@minimum_python_36
@pytest.mark.asyncio
async def test_has_trace_if_performance_enabled(
    sentry_init,
    asgi3_app_with_error_and_msg,
    capture_events,
):
    sentry_init(traces_sample_rate=1.0)
    app = SentryAsgiMiddleware(asgi3_app_with_error_and_msg)

    with pytest.raises(ZeroDivisionError):
        async with TestClient(app) as client:
            events = capture_events()
            await client.get("/")

    msg_event, error_event, transaction_event = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert transaction_event["contexts"]["trace"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert (
        error_event["contexts"]["trace"]["trace_id"]
        == transaction_event["contexts"]["trace"]["trace_id"]
        == msg_event["contexts"]["trace"]["trace_id"]
    )


@minimum_python_36
@pytest.mark.asyncio
async def test_has_trace_if_performance_disabled(
    sentry_init,
    asgi3_app_with_error_and_msg,
    capture_events,
):
    sentry_init()
    app = SentryAsgiMiddleware(asgi3_app_with_error_and_msg)

    with pytest.raises(ZeroDivisionError):
        async with TestClient(app) as client:
            events = capture_events()
            await client.get("/")

    msg_event, error_event = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]


@minimum_python_36
@pytest.mark.asyncio
async def test_trace_from_headers_if_performance_enabled(
    sentry_init,
    asgi3_app_with_error_and_msg,
    capture_events,
):
    sentry_init(traces_sample_rate=1.0)
    app = SentryAsgiMiddleware(asgi3_app_with_error_and_msg)

    trace_id = "582b43a4192642f0b136d5159a501701"
    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)

    with pytest.raises(ZeroDivisionError):
        async with TestClient(app) as client:
            events = capture_events()
            await client.get("/", headers={"sentry-trace": sentry_trace_header})

    msg_event, error_event, transaction_event = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert transaction_event["contexts"]["trace"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
    assert transaction_event["contexts"]["trace"]["trace_id"] == trace_id


@minimum_python_36
@pytest.mark.asyncio
async def test_trace_from_headers_if_performance_disabled(
    sentry_init,
    asgi3_app_with_error_and_msg,
    capture_events,
):
    sentry_init()
    app = SentryAsgiMiddleware(asgi3_app_with_error_and_msg)

    trace_id = "582b43a4192642f0b136d5159a501701"
    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)

    with pytest.raises(ZeroDivisionError):
        async with TestClient(app) as client:
            events = capture_events()
            await client.get("/", headers={"sentry-trace": sentry_trace_header})

    msg_event, error_event = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]
    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]
    assert error_event["contexts"]["trace"]["trace_id"] == trace_id


@minimum_python_36
@pytest.mark.asyncio
async def test_websocket(sentry_init, asgi3_ws_app, capture_events, request):
    sentry_init(debug=True, send_default_pii=True)

    events = capture_events()

    asgi3_ws_app = SentryAsgiMiddleware(asgi3_ws_app)

    scope = {
        "type": "websocket",
        "endpoint": asgi3_app,
        "client": ("127.0.0.1", 60457),
        "route": "some_url",
        "headers": [
            ("accept", "*/*"),
        ],
    }

    with pytest.raises(ValueError):
        async with TestClient(asgi3_ws_app, scope=scope) as client:
            async with client.websocket_connect("/ws") as ws:
                await ws.receive_text()

    msg_event, error_event = events

    assert msg_event["message"] == "Some message to the world!"

    (exc,) = error_event["exception"]["values"]
    assert exc["type"] == "ValueError"
    assert exc["value"] == "Oh no"


@minimum_python_36
@pytest.mark.asyncio
async def test_auto_session_tracking_with_aggregates(
    sentry_init, asgi3_app, capture_envelopes
):
    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
    app = SentryAsgiMiddleware(asgi3_app)

    scope = {
        "endpoint": asgi3_app,
        "client": ("127.0.0.1", 60457),
    }
    with pytest.raises(ZeroDivisionError):
        envelopes = capture_envelopes()
        async with TestClient(app, scope=scope) as client:
            scope["route"] = "/some/fine/url"
            await client.get("/some/fine/url")
            scope["route"] = "/some/fine/url"
            await client.get("/some/fine/url")
            scope["route"] = "/trigger/error"
            await client.get("/trigger/error")

    sentry_sdk.flush()

    count_item_types = Counter()
    for envelope in envelopes:
        count_item_types[envelope.items[0].type] += 1

    assert count_item_types["transaction"] == 3
    assert count_item_types["event"] == 1
    assert count_item_types["sessions"] == 1
    assert len(envelopes) == 5

    session_aggregates = envelopes[-1].items[0].payload.json["aggregates"]
    assert session_aggregates[0]["exited"] == 2
    assert session_aggregates[0]["crashed"] == 1
    assert len(session_aggregates) == 1


@minimum_python_36
@pytest.mark.parametrize(
    "url,transaction_style,expected_transaction,expected_source",
    [
        (
            "/message",
            "url",
            "generic ASGI request",
            "route",
        ),
        (
            "/message",
            "endpoint",
            "tests.integrations.asgi.test_asgi.asgi3_app..app",
            "component",
        ),
    ],
)
@pytest.mark.asyncio
async def test_transaction_style(
    sentry_init,
    asgi3_app,
    capture_events,
    url,
    transaction_style,
    expected_transaction,
    expected_source,
):
    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
    app = SentryAsgiMiddleware(asgi3_app, transaction_style=transaction_style)

    scope = {
        "endpoint": asgi3_app,
        "route": url,
        "client": ("127.0.0.1", 60457),
    }

    async with TestClient(app, scope=scope) as client:
        events = capture_events()
        await client.get(url)

    (transaction_event,) = events

    assert transaction_event["transaction"] == expected_transaction
    assert transaction_event["transaction_info"] == {"source": expected_source}


def mock_asgi2_app():
    pass


class MockAsgi2App:
    def __call__():
        pass


class MockAsgi3App(MockAsgi2App):
    def __await__():
        pass

    async def __call__():
        pass


@minimum_python_36
def test_looks_like_asgi3(asgi3_app):
    # branch: inspect.isclass(app)
    assert _looks_like_asgi3(MockAsgi3App)
    assert not _looks_like_asgi3(MockAsgi2App)

    # branch: inspect.isfunction(app)
    assert _looks_like_asgi3(asgi3_app)
    assert not _looks_like_asgi3(mock_asgi2_app)

    # branch: else
    asgi3 = MockAsgi3App()
    assert _looks_like_asgi3(asgi3)
    asgi2 = MockAsgi2App()
    assert not _looks_like_asgi3(asgi2)


@minimum_python_36
def test_get_ip_x_forwarded_for():
    headers = [
        (b"x-forwarded-for", b"8.8.8.8"),
    ]
    scope = {
        "client": ("127.0.0.1", 60457),
        "headers": headers,
    }
    ip = _get_ip(scope)
    assert ip == "8.8.8.8"

    # x-forwarded-for overrides x-real-ip
    headers = [
        (b"x-forwarded-for", b"8.8.8.8"),
        (b"x-real-ip", b"10.10.10.10"),
    ]
    scope = {
        "client": ("127.0.0.1", 60457),
        "headers": headers,
    }
    ip = _get_ip(scope)
    assert ip == "8.8.8.8"

    # when multiple x-forwarded-for headers are present, the first one is taken
    headers = [
        (b"x-forwarded-for", b"5.5.5.5"),
        (b"x-forwarded-for", b"6.6.6.6"),
        (b"x-forwarded-for", b"7.7.7.7"),
    ]
    scope = {
        "client": ("127.0.0.1", 60457),
        "headers": headers,
    }
    ip = _get_ip(scope)
    assert ip == "5.5.5.5"


@minimum_python_36
def test_get_ip_x_real_ip():
    headers = [
        (b"x-real-ip", b"10.10.10.10"),
    ]
    scope = {
        "client": ("127.0.0.1", 60457),
        "headers": headers,
    }
    ip = _get_ip(scope)
    assert ip == "10.10.10.10"

    # x-forwarded-for overrides x-real-ip
    headers = [
        (b"x-forwarded-for", b"8.8.8.8"),
        (b"x-real-ip", b"10.10.10.10"),
    ]
    scope = {
        "client": ("127.0.0.1", 60457),
        "headers": headers,
    }
    ip = _get_ip(scope)
    assert ip == "8.8.8.8"


@minimum_python_36
def test_get_ip():
    # if no headers are provided, the ip is taken from the client.
    headers = []
    scope = {
        "client": ("127.0.0.1", 60457),
        "headers": headers,
    }
    ip = _get_ip(scope)
    assert ip == "127.0.0.1"

    # the x-forwarded-for header overrides the ip from the client
    headers = [
        (b"x-forwarded-for", b"8.8.8.8"),
    ]
    scope = {
        "client": ("127.0.0.1", 60457),
        "headers": headers,
    }
    ip = _get_ip(scope)
    assert ip == "8.8.8.8"

    # the x-real-ip header overrides the ip from the client
    headers = [
        (b"x-real-ip", b"10.10.10.10"),
    ]
    scope = {
        "client": ("127.0.0.1", 60457),
        "headers": headers,
    }
    ip = _get_ip(scope)
    assert ip == "10.10.10.10"


@minimum_python_36
def test_get_headers():
    headers = [
        (b"x-real-ip", b"10.10.10.10"),
        (b"some_header", b"123"),
        (b"some_header", b"abc"),
    ]
    scope = {
        "client": ("127.0.0.1", 60457),
        "headers": headers,
    }
    headers = _get_headers(scope)
    assert headers == {
        "x-real-ip": "10.10.10.10",
        "some_header": "123, abc",
    }


@minimum_python_36
@pytest.mark.asyncio
@pytest.mark.parametrize(
    "request_url,transaction_style,expected_transaction_name,expected_transaction_source",
    [
        (
            "/message/123456",
            "endpoint",
            "/message/123456",
            "url",
        ),
        (
            "/message/123456",
            "url",
            "/message/123456",
            "url",
        ),
    ],
)
async def test_transaction_name(
    sentry_init,
    request_url,
    transaction_style,
    expected_transaction_name,
    expected_transaction_source,
    asgi3_app,
    capture_envelopes,
):
    """
    Tests that the transaction name is something meaningful.
    """
    sentry_init(
        traces_sample_rate=1.0,
        debug=True,
    )

    envelopes = capture_envelopes()

    app = SentryAsgiMiddleware(asgi3_app, transaction_style=transaction_style)

    async with TestClient(app) as client:
        await client.get(request_url)

    (transaction_envelope,) = envelopes
    transaction_event = transaction_envelope.get_transaction_event()

    assert transaction_event["transaction"] == expected_transaction_name
    assert (
        transaction_event["transaction_info"]["source"] == expected_transaction_source
    )


@minimum_python_36
@pytest.mark.asyncio
@pytest.mark.parametrize(
    "request_url, transaction_style,expected_transaction_name,expected_transaction_source",
    [
        (
            "/message/123456",
            "endpoint",
            "/message/123456",
            "url",
        ),
        (
            "/message/123456",
            "url",
            "/message/123456",
            "url",
        ),
    ],
)
async def test_transaction_name_in_traces_sampler(
    sentry_init,
    request_url,
    transaction_style,
    expected_transaction_name,
    expected_transaction_source,
    asgi3_app,
):
    """
    Tests that a custom traces_sampler sees a meaningful transaction name.
    In this case it is the URL or endpoint, because the route is not known yet.
    """

    def dummy_traces_sampler(sampling_context):
        assert (
            sampling_context["transaction_context"]["name"] == expected_transaction_name
        )
        assert (
            sampling_context["transaction_context"]["source"]
            == expected_transaction_source
        )

    sentry_init(
        traces_sampler=dummy_traces_sampler,
        traces_sample_rate=1.0,
        debug=True,
    )

    app = SentryAsgiMiddleware(asgi3_app, transaction_style=transaction_style)

    async with TestClient(app) as client:
        await client.get(request_url)
sentry-python-1.39.2/tests/integrations/asyncio/__init__.py
sentry-python-1.39.2/tests/integrations/asyncio/test_asyncio_py3.py
import asyncio
import inspect
import sys

import pytest

import sentry_sdk
from sentry_sdk.consts import OP
from sentry_sdk.integrations.asyncio import AsyncioIntegration, patch_asyncio

try:
    from unittest.mock import MagicMock, patch
except ImportError:
    from mock import MagicMock, patch

try:
    from contextvars import Context, ContextVar
except ImportError:
    pass  # All tests will be skipped with incompatible versions


minimum_python_37 = pytest.mark.skipif(
    sys.version_info < (3, 7), reason="Asyncio tests need Python >= 3.7"
)


minimum_python_311 = pytest.mark.skipif(
    sys.version_info < (3, 11),
    reason="Asyncio task context parameter was introduced in Python 3.11",
)


async def foo():
    await asyncio.sleep(0.01)


async def bar():
    await asyncio.sleep(0.01)


async def boom():
    1 / 0


@pytest.fixture(scope="session")
def event_loop(request):
    """Create an instance of the default event loop for each test case."""
    loop = asyncio.get_event_loop_policy().new_event_loop()
    yield loop
    loop.close()


def get_sentry_task_factory(mock_get_running_loop):
    """
    Patches (mocked) asyncio and gets the sentry_task_factory.
    """
    mock_loop = mock_get_running_loop.return_value
    patch_asyncio()
    patched_factory = mock_loop.set_task_factory.call_args[0][0]

    return patched_factory
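

# For orientation, a minimal sketch of the *shape* such a task factory takes.
# This is illustrative only: `_sketch_task_factory` is a hypothetical name, and
# the real factory installed by patch_asyncio() additionally wraps the
# coroutine so each task runs with its own Sentry Hub/scope. The factory tests
# below assert exactly this delegation behavior against mocks.
def _sketch_task_factory(loop, coro, **kwargs):
    # Defer to a previously installed factory if there is one...
    orig_factory = loop.get_task_factory()
    if orig_factory is not None:
        return orig_factory(loop, coro, **kwargs)
    # ...otherwise fall back to creating a plain asyncio.Task on this loop.
    return asyncio.Task(coro, loop=loop, **kwargs)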


@minimum_python_37
@pytest.mark.asyncio
async def test_create_task(
    sentry_init,
    capture_events,
    event_loop,
):
    sentry_init(
        traces_sample_rate=1.0,
        send_default_pii=True,
        debug=True,
        integrations=[
            AsyncioIntegration(),
        ],
    )

    events = capture_events()

    with sentry_sdk.start_transaction(name="test_transaction_for_create_task"):
        with sentry_sdk.start_span(op="root", description="not so important"):
            tasks = [event_loop.create_task(foo()), event_loop.create_task(bar())]
            await asyncio.wait(tasks, return_when=asyncio.FIRST_EXCEPTION)

            sentry_sdk.flush()

    (transaction_event,) = events

    assert transaction_event["spans"][0]["op"] == "root"
    assert transaction_event["spans"][0]["description"] == "not so important"

    assert transaction_event["spans"][1]["op"] == OP.FUNCTION
    assert transaction_event["spans"][1]["description"] == "foo"
    assert (
        transaction_event["spans"][1]["parent_span_id"]
        == transaction_event["spans"][0]["span_id"]
    )

    assert transaction_event["spans"][2]["op"] == OP.FUNCTION
    assert transaction_event["spans"][2]["description"] == "bar"
    assert (
        transaction_event["spans"][2]["parent_span_id"]
        == transaction_event["spans"][0]["span_id"]
    )


@minimum_python_37
@pytest.mark.asyncio
async def test_gather(
    sentry_init,
    capture_events,
):
    sentry_init(
        traces_sample_rate=1.0,
        send_default_pii=True,
        debug=True,
        integrations=[
            AsyncioIntegration(),
        ],
    )

    events = capture_events()

    with sentry_sdk.start_transaction(name="test_transaction_for_gather"):
        with sentry_sdk.start_span(op="root", description="not so important"):
            await asyncio.gather(foo(), bar(), return_exceptions=True)

        sentry_sdk.flush()

    (transaction_event,) = events

    assert transaction_event["spans"][0]["op"] == "root"
    assert transaction_event["spans"][0]["description"] == "not so important"

    assert transaction_event["spans"][1]["op"] == OP.FUNCTION
    assert transaction_event["spans"][1]["description"] == "foo"
    assert (
        transaction_event["spans"][1]["parent_span_id"]
        == transaction_event["spans"][0]["span_id"]
    )

    assert transaction_event["spans"][2]["op"] == OP.FUNCTION
    assert transaction_event["spans"][2]["description"] == "bar"
    assert (
        transaction_event["spans"][2]["parent_span_id"]
        == transaction_event["spans"][0]["span_id"]
    )


@minimum_python_37
@pytest.mark.asyncio
async def test_exception(
    sentry_init,
    capture_events,
    event_loop,
):
    sentry_init(
        traces_sample_rate=1.0,
        send_default_pii=True,
        debug=True,
        integrations=[
            AsyncioIntegration(),
        ],
    )

    events = capture_events()

    with sentry_sdk.start_transaction(name="test_exception"):
        with sentry_sdk.start_span(op="root", description="not so important"):
            tasks = [event_loop.create_task(boom()), event_loop.create_task(bar())]
            await asyncio.wait(tasks, return_when=asyncio.FIRST_EXCEPTION)

            sentry_sdk.flush()

    (error_event, _) = events

    assert error_event["transaction"] == "test_exception"
    assert error_event["contexts"]["trace"]["op"] == "function"
    assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
    assert error_event["exception"]["values"][0]["value"] == "division by zero"
    assert error_event["exception"]["values"][0]["mechanism"]["handled"] is False
    assert error_event["exception"]["values"][0]["mechanism"]["type"] == "asyncio"


@minimum_python_37
@pytest.mark.asyncio
async def test_task_result(sentry_init):
    sentry_init(
        integrations=[
            AsyncioIntegration(),
        ],
    )

    async def add(a, b):
        return a + b

    result = await asyncio.create_task(add(1, 2))
    assert result == 3, result


@minimum_python_311
@pytest.mark.asyncio
async def test_task_with_context(sentry_init):
    """
    Integration test to ensure working context parameter in Python 3.11+
    """
    sentry_init(
        integrations=[
            AsyncioIntegration(),
        ],
    )

    var = ContextVar("var")
    var.set("original value")

    async def change_value():
        var.set("changed value")

    async def retrieve_value():
        return var.get()

    # Create a context and run both tasks within the context
    ctx = Context()
    async with asyncio.TaskGroup() as tg:
        tg.create_task(change_value(), context=ctx)
        retrieve_task = tg.create_task(retrieve_value(), context=ctx)

    assert retrieve_task.result() == "changed value"


@minimum_python_37
@patch("asyncio.get_running_loop")
def test_patch_asyncio(mock_get_running_loop):
    """
    Test that the patch_asyncio function will patch the task factory.
    """
    mock_loop = mock_get_running_loop.return_value

    patch_asyncio()

    assert mock_loop.set_task_factory.called

    set_task_factory_args, _ = mock_loop.set_task_factory.call_args
    assert len(set_task_factory_args) == 1

    sentry_task_factory, *_ = set_task_factory_args
    assert callable(sentry_task_factory)


@minimum_python_37
@pytest.mark.forked
@patch("asyncio.get_running_loop")
@patch("sentry_sdk.integrations.asyncio.Task")
def test_sentry_task_factory_no_factory(MockTask, mock_get_running_loop):  # noqa: N803
    mock_loop = mock_get_running_loop.return_value
    mock_coro = MagicMock()

    # Set the original task factory to None
    mock_loop.get_task_factory.return_value = None

    # Retrieve sentry task factory (since it is an inner function within patch_asyncio)
    sentry_task_factory = get_sentry_task_factory(mock_get_running_loop)

    # The call we are testing
    ret_val = sentry_task_factory(mock_loop, mock_coro)

    assert MockTask.called
    assert ret_val == MockTask.return_value

    task_args, task_kwargs = MockTask.call_args
    assert len(task_args) == 1

    coro_param, *_ = task_args
    assert inspect.iscoroutine(coro_param)

    assert "loop" in task_kwargs
    assert task_kwargs["loop"] == mock_loop


@minimum_python_37
@pytest.mark.forked
@patch("asyncio.get_running_loop")
def test_sentry_task_factory_with_factory(mock_get_running_loop):
    mock_loop = mock_get_running_loop.return_value
    mock_coro = MagicMock()

    # The original task factory will be mocked out here, let's retrieve the value for later
    orig_task_factory = mock_loop.get_task_factory.return_value

    # Retrieve sentry task factory (since it is an inner function within patch_asyncio)
    sentry_task_factory = get_sentry_task_factory(mock_get_running_loop)

    # The call we are testing
    ret_val = sentry_task_factory(mock_loop, mock_coro)

    assert orig_task_factory.called
    assert ret_val == orig_task_factory.return_value

    task_factory_args, _ = orig_task_factory.call_args
    assert len(task_factory_args) == 2

    loop_arg, coro_arg = task_factory_args
    assert loop_arg == mock_loop
    assert inspect.iscoroutine(coro_arg)


@minimum_python_311
@patch("asyncio.get_running_loop")
@patch("sentry_sdk.integrations.asyncio.Task")
def test_sentry_task_factory_context_no_factory(
    MockTask, mock_get_running_loop  # noqa: N803
):
    mock_loop = mock_get_running_loop.return_value
    mock_coro = MagicMock()
    mock_context = MagicMock()

    # Set the original task factory to None
    mock_loop.get_task_factory.return_value = None

    # Retrieve sentry task factory (since it is an inner function within patch_asyncio)
    sentry_task_factory = get_sentry_task_factory(mock_get_running_loop)

    # The call we are testing
    ret_val = sentry_task_factory(mock_loop, mock_coro, context=mock_context)

    assert MockTask.called
    assert ret_val == MockTask.return_value

    task_args, task_kwargs = MockTask.call_args
    assert len(task_args) == 1

    coro_param, *_ = task_args
    assert inspect.iscoroutine(coro_param)

    assert "loop" in task_kwargs
    assert task_kwargs["loop"] == mock_loop
    assert "context" in task_kwargs
    assert task_kwargs["context"] == mock_context


@minimum_python_311
@patch("asyncio.get_running_loop")
def test_sentry_task_factory_context_with_factory(mock_get_running_loop):
    mock_loop = mock_get_running_loop.return_value
    mock_coro = MagicMock()
    mock_context = MagicMock()

    # The original task factory will be mocked out here, let's retrieve the value for later
    orig_task_factory = mock_loop.get_task_factory.return_value

    # Retrieve sentry task factory (since it is an inner function within patch_asyncio)
    sentry_task_factory = get_sentry_task_factory(mock_get_running_loop)

    # The call we are testing
    ret_val = sentry_task_factory(mock_loop, mock_coro, context=mock_context)

    assert orig_task_factory.called
    assert ret_val == orig_task_factory.return_value

    task_factory_args, task_factory_kwargs = orig_task_factory.call_args
    assert len(task_factory_args) == 2

    loop_arg, coro_arg = task_factory_args
    assert loop_arg == mock_loop
    assert inspect.iscoroutine(coro_arg)

    assert "context" in task_factory_kwargs
    assert task_factory_kwargs["context"] == mock_context
sentry-python-1.39.2/tests/integrations/asyncpg/000077500000000000000000000000001454744723200220015ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/asyncpg/__init__.py000066400000000000000000000001241454744723200241070ustar00rootroot00000000000000import pytest

pytest.importorskip("asyncpg")
pytest.importorskip("pytest_asyncio")
sentry-python-1.39.2/tests/integrations/asyncpg/test_asyncpg.py000066400000000000000000000356311454744723200250660ustar00rootroot00000000000000"""
Tests need pytest-asyncio installed.

Tests need a local PostgreSQL instance running; this can best be done using
```sh
docker run --rm --name some-postgres -e POSTGRES_USER=foo -e POSTGRES_PASSWORD=bar -d -p 5432:5432 postgres
```

The tests use the following credentials to establish a database connection.
"""
import os


PG_NAME = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_NAME", "postgres")
PG_USER = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_USER", "foo")
PG_PASSWORD = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_PASSWORD", "bar")
PG_HOST = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_HOST", "localhost")
PG_PORT = 5432


import datetime

import asyncpg
import pytest

import pytest_asyncio

from asyncpg import connect, Connection

from sentry_sdk import capture_message, start_transaction
from sentry_sdk.integrations.asyncpg import AsyncPGIntegration
from sentry_sdk.consts import SPANDATA


PG_CONNECTION_URI = "postgresql://{}:{}@{}/{}".format(
    PG_USER, PG_PASSWORD, PG_HOST, PG_NAME
)
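# Breadcrumb payload that every successful `connect` call is expected to
# produce; reused in the assertions below (once per pooled connection in
# test_connection_pool).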
CRUMBS_CONNECT = {
    "category": "query",
    "data": {
        "db.name": PG_NAME,
        "db.system": "postgresql",
        "db.user": PG_USER,
        "server.address": PG_HOST,
        "server.port": PG_PORT,
    },
    "message": "connect",
    "type": "default",
}


@pytest_asyncio.fixture(autouse=True)
async def _clean_pg():
    conn = await connect(PG_CONNECTION_URI)
    await conn.execute("DROP TABLE IF EXISTS users")
    await conn.execute(
        """
            CREATE TABLE users(
                id serial PRIMARY KEY,
                name text,
                password text,
                dob date
            )
        """
    )
    await conn.close()


@pytest.mark.asyncio
async def test_connect(sentry_init, capture_events) -> None:
    sentry_init(
        integrations=[AsyncPGIntegration()],
        _experiments={"record_sql_params": True},
    )
    events = capture_events()

    conn: Connection = await connect(PG_CONNECTION_URI)

    await conn.close()

    capture_message("hi")

    (event,) = events

    for crumb in event["breadcrumbs"]["values"]:
        del crumb["timestamp"]

    assert event["breadcrumbs"]["values"] == [CRUMBS_CONNECT]


@pytest.mark.asyncio
async def test_execute(sentry_init, capture_events) -> None:
    sentry_init(
        integrations=[AsyncPGIntegration()],
        _experiments={"record_sql_params": True},
    )
    events = capture_events()

    conn: Connection = await connect(PG_CONNECTION_URI)

    await conn.execute(
        "INSERT INTO users(name, password, dob) VALUES ('Alice', 'pw', '1990-12-25')",
    )

    await conn.execute(
        "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
        "Bob",
        "secret_pw",
        datetime.date(1984, 3, 1),
    )

    row = await conn.fetchrow("SELECT * FROM users WHERE name = $1", "Bob")
    assert row == (2, "Bob", "secret_pw", datetime.date(1984, 3, 1))

    row = await conn.fetchrow("SELECT * FROM users WHERE name = 'Bob'")
    assert row == (2, "Bob", "secret_pw", datetime.date(1984, 3, 1))

    await conn.close()

    capture_message("hi")

    (event,) = events

    for crumb in event["breadcrumbs"]["values"]:
        del crumb["timestamp"]

    assert event["breadcrumbs"]["values"] == [
        CRUMBS_CONNECT,
        {
            "category": "query",
            "data": {},
            "message": "INSERT INTO users(name, password, dob) VALUES ('Alice', 'pw', '1990-12-25')",
            "type": "default",
        },
        {
            "category": "query",
            "data": {},
            "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
            "type": "default",
        },
        {
            "category": "query",
            "data": {},
            "message": "SELECT * FROM users WHERE name = $1",
            "type": "default",
        },
        {
            "category": "query",
            "data": {},
            "message": "SELECT * FROM users WHERE name = 'Bob'",
            "type": "default",
        },
    ]


@pytest.mark.asyncio
async def test_execute_many(sentry_init, capture_events) -> None:
    sentry_init(
        integrations=[AsyncPGIntegration()],
        _experiments={"record_sql_params": True},
    )
    events = capture_events()

    conn: Connection = await connect(PG_CONNECTION_URI)

    await conn.executemany(
        "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
        [
            ("Bob", "secret_pw", datetime.date(1984, 3, 1)),
            ("Alice", "pw", datetime.date(1990, 12, 25)),
        ],
    )

    await conn.close()

    capture_message("hi")

    (event,) = events

    for crumb in event["breadcrumbs"]["values"]:
        del crumb["timestamp"]

    assert event["breadcrumbs"]["values"] == [
        CRUMBS_CONNECT,
        {
            "category": "query",
            "data": {"db.executemany": True},
            "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
            "type": "default",
        },
    ]


@pytest.mark.asyncio
async def test_record_params(sentry_init, capture_events) -> None:
    sentry_init(
        integrations=[AsyncPGIntegration(record_params=True)],
        _experiments={"record_sql_params": True},
    )
    events = capture_events()

    conn: Connection = await connect(PG_CONNECTION_URI)

    await conn.execute(
        "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
        "Bob",
        "secret_pw",
        datetime.date(1984, 3, 1),
    )

    await conn.close()

    capture_message("hi")

    (event,) = events

    for crumb in event["breadcrumbs"]["values"]:
        del crumb["timestamp"]

    assert event["breadcrumbs"]["values"] == [
        CRUMBS_CONNECT,
        {
            "category": "query",
            "data": {
                "db.params": ["Bob", "secret_pw", "datetime.date(1984, 3, 1)"],
                "db.paramstyle": "format",
            },
            "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
            "type": "default",
        },
    ]


@pytest.mark.asyncio
async def test_cursor(sentry_init, capture_events) -> None:
    sentry_init(
        integrations=[AsyncPGIntegration()],
        _experiments={"record_sql_params": True},
    )
    events = capture_events()

    conn: Connection = await connect(PG_CONNECTION_URI)

    await conn.executemany(
        "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
        [
            ("Bob", "secret_pw", datetime.date(1984, 3, 1)),
            ("Alice", "pw", datetime.date(1990, 12, 25)),
        ],
    )

    async with conn.transaction():
        # Postgres requires non-scrollable cursors to be created
        # and used in a transaction.
        async for record in conn.cursor(
            "SELECT * FROM users WHERE dob > $1", datetime.date(1970, 1, 1)
        ):
            print(record)

    await conn.close()

    capture_message("hi")

    (event,) = events

    for crumb in event["breadcrumbs"]["values"]:
        del crumb["timestamp"]

    assert event["breadcrumbs"]["values"] == [
        CRUMBS_CONNECT,
        {
            "category": "query",
            "data": {"db.executemany": True},
            "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
            "type": "default",
        },
        {"category": "query", "data": {}, "message": "BEGIN;", "type": "default"},
        {
            "category": "query",
            "data": {},
            "message": "SELECT * FROM users WHERE dob > $1",
            "type": "default",
        },
        {"category": "query", "data": {}, "message": "COMMIT;", "type": "default"},
    ]


@pytest.mark.asyncio
async def test_cursor_manual(sentry_init, capture_events) -> None:
    sentry_init(
        integrations=[AsyncPGIntegration()],
        _experiments={"record_sql_params": True},
    )
    events = capture_events()

    conn: Connection = await connect(PG_CONNECTION_URI)

    await conn.executemany(
        "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
        [
            ("Bob", "secret_pw", datetime.date(1984, 3, 1)),
            ("Alice", "pw", datetime.date(1990, 12, 25)),
        ],
    )
    async with conn.transaction():
        # Postgres requires non-scrollable cursors to be created
        # and used in a transaction.
        cur = await conn.cursor(
            "SELECT * FROM users WHERE dob > $1", datetime.date(1970, 1, 1)
        )
        record = await cur.fetchrow()
        print(record)
        while await cur.forward(1):
            record = await cur.fetchrow()
            print(record)

    await conn.close()

    capture_message("hi")

    (event,) = events

    for crumb in event["breadcrumbs"]["values"]:
        del crumb["timestamp"]

    assert event["breadcrumbs"]["values"] == [
        CRUMBS_CONNECT,
        {
            "category": "query",
            "data": {"db.executemany": True},
            "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
            "type": "default",
        },
        {"category": "query", "data": {}, "message": "BEGIN;", "type": "default"},
        {
            "category": "query",
            "data": {},
            "message": "SELECT * FROM users WHERE dob > $1",
            "type": "default",
        },
        {"category": "query", "data": {}, "message": "COMMIT;", "type": "default"},
    ]


@pytest.mark.asyncio
async def test_prepared_stmt(sentry_init, capture_events) -> None:
    sentry_init(
        integrations=[AsyncPGIntegration()],
        _experiments={"record_sql_params": True},
    )
    events = capture_events()

    conn: Connection = await connect(PG_CONNECTION_URI)

    await conn.executemany(
        "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
        [
            ("Bob", "secret_pw", datetime.date(1984, 3, 1)),
            ("Alice", "pw", datetime.date(1990, 12, 25)),
        ],
    )

    stmt = await conn.prepare("SELECT * FROM users WHERE name = $1")

    print(await stmt.fetchval("Bob"))
    print(await stmt.fetchval("Alice"))

    await conn.close()

    capture_message("hi")

    (event,) = events

    for crumb in event["breadcrumbs"]["values"]:
        del crumb["timestamp"]

    assert event["breadcrumbs"]["values"] == [
        CRUMBS_CONNECT,
        {
            "category": "query",
            "data": {"db.executemany": True},
            "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
            "type": "default",
        },
        {
            "category": "query",
            "data": {},
            "message": "SELECT * FROM users WHERE name = $1",
            "type": "default",
        },
    ]


@pytest.mark.asyncio
async def test_connection_pool(sentry_init, capture_events) -> None:
    sentry_init(
        integrations=[AsyncPGIntegration()],
        _experiments={"record_sql_params": True},
    )
    events = capture_events()

    pool_size = 2

    pool = await asyncpg.create_pool(
        PG_CONNECTION_URI, min_size=pool_size, max_size=pool_size
    )

    async with pool.acquire() as conn:
        await conn.execute(
            "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
            "Bob",
            "secret_pw",
            datetime.date(1984, 3, 1),
        )

    async with pool.acquire() as conn:
        row = await conn.fetchrow("SELECT * FROM users WHERE name = $1", "Bob")
        assert row == (1, "Bob", "secret_pw", datetime.date(1984, 3, 1))

    await pool.close()

    capture_message("hi")

    (event,) = events

    for crumb in event["breadcrumbs"]["values"]:
        del crumb["timestamp"]

    assert event["breadcrumbs"]["values"] == [
        # The connection pool opens pool_size connections so we have the crumbs pool_size times
        *[CRUMBS_CONNECT] * pool_size,
        {
            "category": "query",
            "data": {},
            "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
            "type": "default",
        },
        {
            "category": "query",
            "data": {},
            "message": "SELECT pg_advisory_unlock_all();\n"
            "CLOSE ALL;\n"
            "UNLISTEN *;\n"
            "RESET ALL;",
            "type": "default",
        },
        {
            "category": "query",
            "data": {},
            "message": "SELECT * FROM users WHERE name = $1",
            "type": "default",
        },
        {
            "category": "query",
            "data": {},
            "message": "SELECT pg_advisory_unlock_all();\n"
            "CLOSE ALL;\n"
            "UNLISTEN *;\n"
            "RESET ALL;",
            "type": "default",
        },
    ]


@pytest.mark.asyncio
@pytest.mark.parametrize("enable_db_query_source", [None, False])
async def test_query_source_disabled(
    sentry_init, capture_events, enable_db_query_source
):
    sentry_options = {
        "integrations": [AsyncPGIntegration()],
        "enable_tracing": True,
    }
    if enable_db_query_source is not None:
        sentry_options["enable_db_query_source"] = enable_db_query_source
        sentry_options["db_query_source_threshold_ms"] = 0

    sentry_init(**sentry_options)

    events = capture_events()

    with start_transaction(name="test_transaction", sampled=True):
        conn: Connection = await connect(PG_CONNECTION_URI)

        await conn.execute(
            "INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')",
        )

        await conn.close()

    (event,) = events

    span = event["spans"][-1]
    assert span["description"].startswith("INSERT INTO")

    data = span.get("data", {})

    assert SPANDATA.CODE_LINENO not in data
    assert SPANDATA.CODE_NAMESPACE not in data
    assert SPANDATA.CODE_FILEPATH not in data
    assert SPANDATA.CODE_FUNCTION not in data


@pytest.mark.asyncio
async def test_query_source(sentry_init, capture_events):
    sentry_init(
        integrations=[AsyncPGIntegration()],
        enable_tracing=True,
        enable_db_query_source=True,
        db_query_source_threshold_ms=0,
    )

    events = capture_events()

    with start_transaction(name="test_transaction", sampled=True):
        conn: Connection = await connect(PG_CONNECTION_URI)

        await conn.execute(
            "INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')",
        )

        await conn.close()

    (event,) = events

    span = event["spans"][-1]
    assert span["description"].startswith("INSERT INTO")

    data = span.get("data", {})

    assert SPANDATA.CODE_LINENO in data
    assert SPANDATA.CODE_NAMESPACE in data
    assert SPANDATA.CODE_FILEPATH in data
    assert SPANDATA.CODE_FUNCTION in data

    assert type(data.get(SPANDATA.CODE_LINENO)) == int
    assert data.get(SPANDATA.CODE_LINENO) > 0
    assert (
        data.get(SPANDATA.CODE_NAMESPACE) == "tests.integrations.asyncpg.test_asyncpg"
    )
    assert data.get(SPANDATA.CODE_FILEPATH).endswith(
        "tests/integrations/asyncpg/test_asyncpg.py"
    )

    is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
    assert is_relative_path

    assert data.get(SPANDATA.CODE_FUNCTION) == "test_query_source"
sentry-python-1.39.2/tests/integrations/aws_lambda/000077500000000000000000000000001454744723200224275ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/aws_lambda/__init__.py000066400000000000000000000000541454744723200245370ustar00rootroot00000000000000import pytest

pytest.importorskip("boto3")
sentry-python-1.39.2/tests/integrations/aws_lambda/client.py000066400000000000000000000302461454744723200242640ustar00rootroot00000000000000import base64
import boto3
import glob
import hashlib
import os
import subprocess
import sys
import tempfile

from sentry_sdk.consts import VERSION as SDK_VERSION
from sentry_sdk.utils import get_git_revision

AWS_REGION_NAME = "us-east-1"
AWS_CREDENTIALS = {
    "aws_access_key_id": os.environ["SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID"],
    "aws_secret_access_key": os.environ["SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY"],
}
AWS_LAMBDA_EXECUTION_ROLE_NAME = "lambda-ex"
AWS_LAMBDA_EXECUTION_ROLE_ARN = None
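# AWS_LAMBDA_EXECUTION_ROLE_ARN is populated lazily by
# _get_or_create_lambda_execution_role() below.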


def _install_dependencies(base_dir, subprocess_kwargs):
    """
    Installs dependencies for AWS Lambda function
    """
    setup_cfg = os.path.join(base_dir, "setup.cfg")
    with open(setup_cfg, "w") as f:
        f.write("[install]\nprefix=")

    # Install requirements for Lambda Layer (these are more limited than the SDK requirements,
    # because Lambda does not support the newest versions of some packages)
    subprocess.check_call(
        [
            sys.executable,
            "-m",
            "pip",
            "install",
            "-r",
            "aws-lambda-layer-requirements.txt",
            "--target",
            base_dir,
        ],
        **subprocess_kwargs,
    )
    # Install requirements used for testing
    subprocess.check_call(
        [
            sys.executable,
            "-m",
            "pip",
            "install",
            "mock==3.0.0",
            "funcsigs",
            "--target",
            base_dir,
        ],
        **subprocess_kwargs,
    )
    # Create a source distribution of the Sentry SDK (in parent directory of base_dir)
    subprocess.check_call(
        [
            sys.executable,
            "setup.py",
            "sdist",
            "--dist-dir",
            os.path.dirname(base_dir),
        ],
        **subprocess_kwargs,
    )
    # Install the created Sentry SDK source distribution into the target directory
    # Do not install the dependencies of the SDK, because they were installed by aws-lambda-layer-requirements.txt above
    source_distribution_archive = glob.glob(
        "{}/*.tar.gz".format(os.path.dirname(base_dir))
    )[0]
    subprocess.check_call(
        [
            sys.executable,
            "-m",
            "pip",
            "install",
            source_distribution_archive,
            "--no-deps",
            "--target",
            base_dir,
        ],
        **subprocess_kwargs,
    )


def _create_lambda_function_zip(base_dir):
    """
    Zips the given base_dir omitting Python cache files
    """
    subprocess.run(
        [
            "zip",
            "-q",
            "-x",
            "**/__pycache__/*",
            "-r",
            "lambda-function-package.zip",
            "./",
        ],
        cwd=base_dir,
        check=True,
    )


def _create_lambda_package(
    base_dir, code, initial_handler, layer, syntax_check, subprocess_kwargs
):
    """
    Creates deployable packages (as zip files) for the AWS Lambda function
    and, optionally, the accompanying Sentry Lambda layer.
    """
    if initial_handler:
        # If Initial handler value is provided i.e. it is not the default
        # `test_lambda.test_handler`, then create another dir level so that our path is
        # test_dir.test_lambda.test_handler
        test_dir_path = os.path.join(base_dir, "test_dir")
        python_init_file = os.path.join(test_dir_path, "__init__.py")
        os.makedirs(test_dir_path)
        with open(python_init_file, "w"):
            # Create __init__ file to make it a python package
            pass

        test_lambda_py = os.path.join(base_dir, "test_dir", "test_lambda.py")
    else:
        test_lambda_py = os.path.join(base_dir, "test_lambda.py")

    with open(test_lambda_py, "w") as f:
        f.write(code)

    if syntax_check:
        # Check file for valid syntax first, and that the integration does not
        # crash when not running in Lambda (but rather a local deployment tool
        # such as chalice's)
        subprocess.check_call([sys.executable, test_lambda_py])

    if layer is None:
        _install_dependencies(base_dir, subprocess_kwargs)
        _create_lambda_function_zip(base_dir)

    else:
        _create_lambda_function_zip(base_dir)

        # Create Lambda layer zip package
        from scripts.build_aws_lambda_layer import build_packaged_zip

        build_packaged_zip(
            base_dir=base_dir,
            make_dist=True,
            out_zip_filename="lambda-layer-package.zip",
        )


def _get_or_create_lambda_execution_role():
    global AWS_LAMBDA_EXECUTION_ROLE_ARN

    policy = """{
        "Version": "2012-10-17",
        "Statement": [
            {
                "Effect": "Allow",
                "Principal": {
                    "Service": "lambda.amazonaws.com"
                },
                "Action": "sts:AssumeRole"
            }
        ]
    }
    """
    iam_client = boto3.client(
        "iam",
        region_name=AWS_REGION_NAME,
        **AWS_CREDENTIALS,
    )

    try:
        response = iam_client.get_role(RoleName=AWS_LAMBDA_EXECUTION_ROLE_NAME)
        AWS_LAMBDA_EXECUTION_ROLE_ARN = response["Role"]["Arn"]
    except iam_client.exceptions.NoSuchEntityException:
        # create role for lambda execution
        response = iam_client.create_role(
            RoleName=AWS_LAMBDA_EXECUTION_ROLE_NAME,
            AssumeRolePolicyDocument=policy,
        )
        AWS_LAMBDA_EXECUTION_ROLE_ARN = response["Role"]["Arn"]

        # attach policy to role
        iam_client.attach_role_policy(
            RoleName=AWS_LAMBDA_EXECUTION_ROLE_NAME,
            PolicyArn="arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole",
        )


def get_boto_client():
    _get_or_create_lambda_execution_role()

    return boto3.client(
        "lambda",
        region_name=AWS_REGION_NAME,
        **AWS_CREDENTIALS,
    )


def run_lambda_function(
    client,
    runtime,
    code,
    payload,
    add_finalizer,
    syntax_check=True,
    timeout=30,
    layer=None,
    initial_handler=None,
    subprocess_kwargs=(),
):
    """
    Creates a Lambda function with the given code, and invokes it.

    If the same code is run multiple times, the function will NOT be
    created anew each time; instead, the existing function will be reused.
    """
    subprocess_kwargs = dict(subprocess_kwargs)

    # Making a unique function name depending on all the code that is run in it (function code plus SDK version)
    # The name needs to be short so the generated event/envelope json blobs are small enough to be output
    # in the log result of the Lambda function.
    rev = get_git_revision() or SDK_VERSION
    function_hash = hashlib.shake_256((code + rev).encode("utf-8")).hexdigest(6)
    fn_name = "test_{}".format(function_hash)
    full_fn_name = "{}_{}".format(
        fn_name, runtime.replace(".", "").replace("python", "py")
    )

    function_exists_in_aws = True
    try:
        client.get_function(
            FunctionName=full_fn_name,
        )
        print(
            "Lambda function in AWS already existing, taking it (and do not create a local one)"
        )
    except client.exceptions.ResourceNotFoundException:
        function_exists_in_aws = False

    if not function_exists_in_aws:
        tmp_base_dir = tempfile.gettempdir()
        base_dir = os.path.join(tmp_base_dir, fn_name)
        dir_already_existing = os.path.isdir(base_dir)

        if dir_already_existing:
            print("Local Lambda function directory already exists, skipping creation")

        if not dir_already_existing:
            os.mkdir(base_dir)
            _create_lambda_package(
                base_dir, code, initial_handler, layer, syntax_check, subprocess_kwargs
            )

            @add_finalizer
            def clean_up():
                # this closes the web socket so we don't get a
                #   ResourceWarning: unclosed <ssl.SSLSocket ... >
                # warning on every test
                # based on https://github.com/boto/botocore/pull/1810
                # (if that's ever merged, this can just become client.close())
                session = client._endpoint.http_session
                managers = [session._manager] + list(session._proxy_managers.values())
                for manager in managers:
                    manager.clear()

        layers = []
        environment = {}
        handler = initial_handler or "test_lambda.test_handler"

        if layer is not None:
            with open(
                os.path.join(base_dir, "lambda-layer-package.zip"), "rb"
            ) as lambda_layer_zip:
                response = client.publish_layer_version(
                    LayerName="python-serverless-sdk-test",
                    Description="Created as part of testsuite for getsentry/sentry-python",
                    Content={"ZipFile": lambda_layer_zip.read()},
                )

            layers = [response["LayerVersionArn"]]
            handler = (
                "sentry_sdk.integrations.init_serverless_sdk.sentry_lambda_handler"
            )
            environment = {
                "Variables": {
                    "SENTRY_INITIAL_HANDLER": initial_handler
                    or "test_lambda.test_handler",
                    "SENTRY_DSN": "https://123abc@example.com/123",
                    "SENTRY_TRACES_SAMPLE_RATE": "1.0",
                }
            }

        try:
            with open(
                os.path.join(base_dir, "lambda-function-package.zip"), "rb"
            ) as lambda_function_zip:
                client.create_function(
                    Description="Created as part of testsuite for getsentry/sentry-python",
                    FunctionName=full_fn_name,
                    Runtime=runtime,
                    Timeout=timeout,
                    Role=AWS_LAMBDA_EXECUTION_ROLE_ARN,
                    Handler=handler,
                    Code={"ZipFile": lambda_function_zip.read()},
                    Environment=environment,
                    Layers=layers,
                )

                waiter = client.get_waiter("function_active_v2")
                waiter.wait(FunctionName=full_fn_name)
        except client.exceptions.ResourceConflictException:
            print(
                "Lambda function already exists, this is fine, we will just invoke it."
            )

    response = client.invoke(
        FunctionName=full_fn_name,
        InvocationType="RequestResponse",
        LogType="Tail",
        Payload=payload,
    )

    assert 200 <= response["StatusCode"] < 300, response
    return response


# This is for inspecting new Python runtime environments in AWS Lambda
# If you need to debug a new runtime, use this REPL to run arbitrary Python or bash commands
# in that runtime in a Lambda function:
#
#    pip3 install click
#    python3 tests/integrations/aws_lambda/client.py --runtime=python4.0
#


_REPL_CODE = """
import os

def test_handler(event, context):
    line = {line!r}
    if line.startswith(">>> "):
        exec(line[4:])
    elif line.startswith("$ "):
        os.system(line[2:])
    else:
        print("Start a line with $ or >>>")

    return b""
"""

try:
    import click
except ImportError:
    pass
else:

    @click.command()
    @click.option(
        "--runtime", required=True, help="name of the runtime to use, eg python3.11"
    )
    @click.option("--verbose", is_flag=True, default=False)
    def repl(runtime, verbose):
        """
        Launch a "REPL" against AWS Lambda to inspect their runtime.
        """

        cleanup = []
        client = get_boto_client()

        print("Start a line with `$ ` to run shell commands, or `>>> ` to run Python")

        while True:
            line = input()

            response = run_lambda_function(
                client,
                runtime,
                _REPL_CODE.format(line=line),
                b"",
                cleanup.append,
                subprocess_kwargs={
                    "stdout": subprocess.DEVNULL,
                    "stderr": subprocess.DEVNULL,
                }
                if not verbose
                else {},
            )

            for line in base64.b64decode(response["LogResult"]).splitlines():
                print(line.decode("utf8"))

            for f in cleanup:
                f()

            cleanup = []

    if __name__ == "__main__":
        repl()
sentry-python-1.39.2/tests/integrations/aws_lambda/test_aws.py000066400000000000000000000670751454744723200246510ustar00rootroot00000000000000"""
# AWS Lambda System Tests

This testsuite uses boto3 to upload actual Lambda functions to AWS Lambda and invoke them.

For running test locally you need to set these env vars:
(You can find the values in the Sentry password manager by searching for "AWS Lambda for Python SDK Tests").

    export SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID="..."
    export SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY="..."


You can use `scripts/aws-cleanup.sh` to delete all files generated by this test suite.


If you need to debug a new runtime, use this REPL to run arbitrary Python or bash commands
in that runtime in a Lambda function: (see the bottom of client.py for more information.)

    pip3 install click
    python3 tests/integrations/aws_lambda/client.py --runtime=python4.0

IMPORTANT:

While this test suite runs, temporary folders are created for compiling the Lambda functions.
These temporary folders will not be cleaned up, because in CI the generated files have to be shared
between tests and thus the folders cannot be deleted right after use.

If you run your tests locally, you need to clean up the temporary folders manually. The location of
the temporary folders is printed when running a test.
"""

import base64
import json
import re
from textwrap import dedent

import pytest


LAMBDA_PRELUDE = """
from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration, get_lambda_bootstrap
import sentry_sdk
import json
import time

from sentry_sdk.transport import HttpTransport

def truncate_data(data):
    # AWS Lambda truncates the log output to 4kb, which is small enough to miss
    # parts of even a single error-event/transaction-envelope pair if considered
    # in full, so only grab the data we need.

    cleaned_data = {}

    if data.get("type") is not None:
        cleaned_data["type"] = data["type"]

    if data.get("contexts") is not None:
        cleaned_data["contexts"] = {}

        if data["contexts"].get("trace") is not None:
            cleaned_data["contexts"]["trace"] = data["contexts"].get("trace")

    if data.get("transaction") is not None:
        cleaned_data["transaction"] = data.get("transaction")

    if data.get("request") is not None:
        cleaned_data["request"] = data.get("request")

    if data.get("tags") is not None:
        cleaned_data["tags"] = data.get("tags")

    if data.get("exception") is not None:
        cleaned_data["exception"] = data.get("exception")

        for value in cleaned_data["exception"]["values"]:
            for frame in value.get("stacktrace", {}).get("frames", []):
                del frame["vars"]
                del frame["pre_context"]
                del frame["context_line"]
                del frame["post_context"]

    if data.get("extra") is not None:
        cleaned_data["extra"] = {}

        for key in data["extra"].keys():
            if key == "lambda":
                for lambda_key in data["extra"]["lambda"].keys():
                    if lambda_key in ["function_name"]:
                        cleaned_data["extra"].setdefault("lambda", {})[lambda_key] = data["extra"]["lambda"][lambda_key]
            elif key == "cloudwatch logs":
                for cloudwatch_key in data["extra"]["cloudwatch logs"].keys():
                    if cloudwatch_key in ["url", "log_group", "log_stream"]:
                        cleaned_data["extra"].setdefault("cloudwatch logs", {})[cloudwatch_key] = data["extra"]["cloudwatch logs"][cloudwatch_key]

    if data.get("level") is not None:
        cleaned_data["level"] = data.get("level")

    if data.get("message") is not None:
        cleaned_data["message"] = data.get("message")

    if "contexts" not in cleaned_data:
        raise Exception(json.dumps(data))

    return cleaned_data

def event_processor(event):
    return truncate_data(event)

def envelope_processor(envelope):
    (item,) = envelope.items
    item_json = json.loads(item.get_bytes())

    return truncate_data(item_json)


class TestTransport(HttpTransport):
    def _send_event(self, event):
        event = event_processor(event)
        print("\\nEVENT: {}\\n".format(json.dumps(event)))

    def _send_envelope(self, envelope):
        envelope = envelope_processor(envelope)
        print("\\nENVELOPE: {}\\n".format(json.dumps(envelope)))

def init_sdk(timeout_warning=False, **extra_init_args):
    sentry_sdk.init(
        dsn="https://123abc@example.com/123",
        transport=TestTransport,
        integrations=[AwsLambdaIntegration(timeout_warning=timeout_warning)],
        shutdown_timeout=10,
        **extra_init_args
    )
"""


@pytest.fixture
def lambda_client():
    from tests.integrations.aws_lambda.client import get_boto_client

    return get_boto_client()


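# Parametrized fixture: every test that uses run_lambda_function below is
# executed once per runtime listed here.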
@pytest.fixture(
    params=[
        "python3.7",
        "python3.8",
        "python3.9",
        "python3.10",
        "python3.11",
    ]
)
def lambda_runtime(request):
    return request.param


@pytest.fixture
def run_lambda_function(request, lambda_client, lambda_runtime):
    def inner(
        code, payload, timeout=30, syntax_check=True, layer=None, initial_handler=None
    ):
        from tests.integrations.aws_lambda.client import run_lambda_function

        response = run_lambda_function(
            client=lambda_client,
            runtime=lambda_runtime,
            code=code,
            payload=payload,
            add_finalizer=request.addfinalizer,
            timeout=timeout,
            syntax_check=syntax_check,
            layer=layer,
            initial_handler=initial_handler,
        )

        # Make sure the "ENVELOPE:" and "EVENT:" log entries are always starting a new line. (Sometimes they don't.)
        response["LogResult"] = (
            base64.b64decode(response["LogResult"])
            .replace(b"EVENT:", b"\nEVENT:")
            .replace(b"ENVELOPE:", b"\nENVELOPE:")
            .splitlines()
        )
        response["Payload"] = json.loads(response["Payload"].read().decode("utf-8"))
        del response["ResponseMetadata"]

        events = []
        envelopes = []

        for line in response["LogResult"]:
            print("AWS:", line)
            if line.startswith(b"EVENT: "):
                line = line[len(b"EVENT: ") :]
                events.append(json.loads(line.decode("utf-8")))
            elif line.startswith(b"ENVELOPE: "):
                line = line[len(b"ENVELOPE: ") :]
                envelopes.append(json.loads(line.decode("utf-8")))
            else:
                continue

        return envelopes, events, response

    return inner
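
# Tests unpack the fixture's return value as
# `envelopes, events, response = run_lambda_function(code, payload)`:
# `envelopes` holds the transaction payloads, `events` the error events, and
# `response` the raw boto3 invoke result (minus its ResponseMetadata).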


def test_basic(run_lambda_function):
    _, events, response = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk()

        def test_handler(event, context):
            raise Exception("Oh!")
        """
        ),
        b'{"foo": "bar"}',
    )

    assert response["FunctionError"] == "Unhandled"

    (event,) = events
    assert event["level"] == "error"
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "Exception"
    assert exception["value"] == "Oh!"

    (frame1,) = exception["stacktrace"]["frames"]
    assert frame1["filename"] == "test_lambda.py"
    assert frame1["abs_path"] == "/var/task/test_lambda.py"
    assert frame1["function"] == "test_handler"

    assert frame1["in_app"] is True

    assert exception["mechanism"]["type"] == "aws_lambda"
    assert not exception["mechanism"]["handled"]

    assert event["extra"]["lambda"]["function_name"].startswith("test_")

    logs_url = event["extra"]["cloudwatch logs"]["url"]
    assert logs_url.startswith("https://console.aws.amazon.com/cloudwatch/home?region=")
    assert not re.search("(=;|=$)", logs_url)
    assert event["extra"]["cloudwatch logs"]["log_group"].startswith(
        "/aws/lambda/test_"
    )

    log_stream_re = "^[0-9]{4}/[0-9]{2}/[0-9]{2}/\\[[^\\]]+][a-f0-9]+$"
    log_stream = event["extra"]["cloudwatch logs"]["log_stream"]

    assert re.match(log_stream_re, log_stream)


def test_initialization_order(run_lambda_function):
    """Zappa lazily imports our code, so by the time we monkeypatch the handler
    as seen by AWS already runs. At this point at least draining the queue
    should work."""

    _, events, _ = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
            def test_handler(event, context):
                init_sdk()
                sentry_sdk.capture_exception(Exception("Oh!"))
        """
        ),
        b'{"foo": "bar"}',
    )

    (event,) = events

    assert event["level"] == "error"
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "Exception"
    assert exception["value"] == "Oh!"


def test_request_data(run_lambda_function):
    _, events, _ = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk()
        def test_handler(event, context):
            sentry_sdk.capture_message("hi")
            return "ok"
        """
        ),
        payload=b"""
        {
          "resource": "/asd",
          "path": "/asd",
          "httpMethod": "GET",
          "headers": {
            "Host": "iwsz2c7uwi.execute-api.us-east-1.amazonaws.com",
            "User-Agent": "custom",
            "X-Forwarded-Proto": "https"
          },
          "queryStringParameters": {
            "bonkers": "true"
          },
          "pathParameters": null,
          "stageVariables": null,
          "requestContext": {
            "identity": {
              "sourceIp": "213.47.147.207",
              "userArn": "42"
            }
          },
          "body": null,
          "isBase64Encoded": false
        }
        """,
    )

    (event,) = events

    assert event["request"] == {
        "headers": {
            "Host": "iwsz2c7uwi.execute-api.us-east-1.amazonaws.com",
            "User-Agent": "custom",
            "X-Forwarded-Proto": "https",
        },
        "method": "GET",
        "query_string": {"bonkers": "true"},
        "url": "https://iwsz2c7uwi.execute-api.us-east-1.amazonaws.com/asd",
    }


def test_init_error(run_lambda_function, lambda_runtime):
    _, events, _ = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk()
        func()
        """
        ),
        b'{"foo": "bar"}',
        syntax_check=False,
    )

    (event,) = events
    assert event["exception"]["values"][0]["value"] == "name 'func' is not defined"


def test_timeout_error(run_lambda_function):
    _, events, _ = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk(timeout_warning=True)

        def test_handler(event, context):
            time.sleep(10)
            return 0
        """
        ),
        b'{"foo": "bar"}',
        timeout=2,
    )

    (event,) = events
    assert event["level"] == "error"
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ServerlessTimeoutWarning"
    assert exception["value"] in (
        "WARNING : Function is expected to get timed out. Configured timeout duration = 3 seconds.",
        "WARNING : Function is expected to get timed out. Configured timeout duration = 2 seconds.",
    )

    assert exception["mechanism"]["type"] == "threading"
    assert not exception["mechanism"]["handled"]

    assert event["extra"]["lambda"]["function_name"].startswith("test_")

    logs_url = event["extra"]["cloudwatch logs"]["url"]
    assert logs_url.startswith("https://console.aws.amazon.com/cloudwatch/home?region=")
    assert not re.search("(=;|=$)", logs_url)
    assert event["extra"]["cloudwatch logs"]["log_group"].startswith(
        "/aws/lambda/test_"
    )

    log_stream_re = "^[0-9]{4}/[0-9]{2}/[0-9]{2}/\\[[^\\]]+][a-f0-9]+$"
    log_stream = event["extra"]["cloudwatch logs"]["log_stream"]

    assert re.match(log_stream_re, log_stream)


def test_performance_no_error(run_lambda_function):
    envelopes, _, _ = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=1.0)

        def test_handler(event, context):
            return "test_string"
        """
        ),
        b'{"foo": "bar"}',
    )

    (envelope,) = envelopes

    assert envelope["type"] == "transaction"
    assert envelope["contexts"]["trace"]["op"] == "function.aws"
    assert envelope["transaction"].startswith("test_")
    assert envelope["transaction"] in envelope["request"]["url"]


def test_performance_error(run_lambda_function):
    envelopes, _, _ = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=1.0)

        def test_handler(event, context):
            raise Exception("Oh!")
        """
        ),
        b'{"foo": "bar"}',
    )

    (
        error_event,
        transaction_event,
    ) = envelopes

    assert error_event["level"] == "error"
    (exception,) = error_event["exception"]["values"]
    assert exception["type"] == "Exception"
    assert exception["value"] == "Oh!"

    assert transaction_event["type"] == "transaction"
    assert transaction_event["contexts"]["trace"]["op"] == "function.aws"
    assert transaction_event["transaction"].startswith("test_")
    assert transaction_event["transaction"] in transaction_event["request"]["url"]


@pytest.mark.parametrize(
    "aws_event, has_request_data, batch_size",
    [
        (b"1231", False, 1),
        (b"11.21", False, 1),
        (b'"Good dog!"', False, 1),
        (b"true", False, 1),
        (
            b"""
            [
                {"good dog": "Maisey"},
                {"good dog": "Charlie"},
                {"good dog": "Cory"},
                {"good dog": "Bodhi"}
            ]
            """,
            False,
            4,
        ),
        (
            b"""
            [
                {
                    "headers": {
                        "Host": "x.io",
                        "X-Forwarded-Proto": "http"
                    },
                    "httpMethod": "GET",
                    "path": "/somepath",
                    "queryStringParameters": {
                        "done": "true"
                    },
                    "dog": "Maisey"
                },
                {
                    "headers": {
                        "Host": "x.io",
                        "X-Forwarded-Proto": "http"
                    },
                    "httpMethod": "GET",
                    "path": "/somepath",
                    "queryStringParameters": {
                        "done": "true"
                    },
                    "dog": "Charlie"
                }
            ]
            """,
            True,
            2,
        ),
    ],
)
def test_non_dict_event(
    run_lambda_function,
    aws_event,
    has_request_data,
    batch_size,
    DictionaryContaining,  # noqa:N803
):
    envelopes, _, response = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=1.0)

        def test_handler(event, context):
            raise Exception("Oh?")
        """
        ),
        aws_event,
    )

    assert response["FunctionError"] == "Unhandled"

    (
        error_event,
        transaction_event,
    ) = envelopes
    assert error_event["level"] == "error"
    assert error_event["contexts"]["trace"]["op"] == "function.aws"

    function_name = error_event["extra"]["lambda"]["function_name"]
    assert function_name.startswith("test_")
    assert error_event["transaction"] == function_name

    exception = error_event["exception"]["values"][0]
    assert exception["type"] == "Exception"
    assert exception["value"] == "Oh?"
    assert exception["mechanism"]["type"] == "aws_lambda"

    assert transaction_event["type"] == "transaction"
    assert transaction_event["contexts"]["trace"] == DictionaryContaining(
        error_event["contexts"]["trace"]
    )
    assert transaction_event["contexts"]["trace"]["status"] == "internal_error"
    assert transaction_event["transaction"] == error_event["transaction"]
    assert transaction_event["request"]["url"] == error_event["request"]["url"]

    if has_request_data:
        request_data = {
            "headers": {"Host": "x.io", "X-Forwarded-Proto": "http"},
            "method": "GET",
            "url": "http://x.io/somepath",
            "query_string": {
                "done": "true",
            },
        }
    else:
        request_data = {"url": "awslambda:///{}".format(function_name)}

    assert error_event["request"] == request_data
    assert transaction_event["request"] == request_data

    if batch_size > 1:
        assert error_event["tags"]["batch_size"] == batch_size
        assert error_event["tags"]["batch_request"] is True
        assert transaction_event["tags"]["batch_size"] == batch_size
        assert transaction_event["tags"]["batch_request"] is True


def test_traces_sampler_gets_correct_values_in_sampling_context(
    run_lambda_function,
    DictionaryContaining,  # noqa:N803
    ObjectDescribedBy,
    StringContaining,
):
    # TODO: This whole thing is a little hacky, specifically around the need to
    # get `conftest.py` code into the AWS runtime, which is why there's both
    # `inspect.getsource` and a copy of `_safe_is_equal` included directly in
    # the code below. Ideas which have been discussed to fix this:

    # - Include the test suite as a module installed in the package which is
    #   shot up to AWS
    # - In client.py, copy `conftest.py` (or wherever the necessary code lives)
    #   from the test suite into the main SDK directory so it gets included as
    #   "part of the SDK"

    # It's also worth noting why it's necessary to run the assertions in the AWS
    # runtime rather than asserting on side effects the way we do with events
    # and envelopes. The reasons are two-fold:

    # - We're testing against the `LambdaContext` class, which only exists in
    #   the AWS runtime
    # - If we were to transmit call args data they way we transmit event and
    #   envelope data (through JSON), we'd quickly run into the problem that all
    #   sorts of stuff isn't serializable by `json.dumps` out of the box, up to
    #   and including `datetime` objects (so anything with a timestamp is
    #   automatically out)

    # Perhaps these challenges can be solved in a cleaner and more systematic
    # way if we ever decide to refactor the entire AWS testing apparatus.

    import inspect

    _, _, response = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(inspect.getsource(StringContaining))
        + dedent(inspect.getsource(DictionaryContaining))
        + dedent(inspect.getsource(ObjectDescribedBy))
        + dedent(
            """
            try:
                from unittest import mock  # python 3.3 and above
            except ImportError:
                import mock  # python < 3.3

            def _safe_is_equal(x, y):
                # copied from conftest.py - see docstring and comments there
                try:
                    is_equal = x.__eq__(y)
                except AttributeError:
                    is_equal = NotImplemented

                if is_equal == NotImplemented:
                    # using == smoothes out weird variations exposed by raw __eq__
                    return x == y

                return is_equal

            def test_handler(event, context):
                # this runs after the transaction has started, which means we
                # can make assertions about traces_sampler
                try:
                    traces_sampler.assert_any_call(
                        DictionaryContaining(
                            {
                                "aws_event": DictionaryContaining({
                                    "httpMethod": "GET",
                                    "path": "/sit/stay/rollover",
                                    "headers": {"Host": "x.io", "X-Forwarded-Proto": "http"},
                                }),
                                "aws_context": ObjectDescribedBy(
                                    type=get_lambda_bootstrap().LambdaContext,
                                    attrs={
                                        'function_name': StringContaining("test_"),
                                        'function_version': '$LATEST',
                                    }
                                )
                            }
                        )
                    )
                except AssertionError:
                    # catch the error and return it because the error itself will
                    # get swallowed by the SDK as an "internal exception"
                    return {"AssertionError raised": True,}

                return {"AssertionError raised": False,}


            traces_sampler = mock.Mock(return_value=True)

            init_sdk(
                traces_sampler=traces_sampler,
            )
        """
        ),
        b'{"httpMethod": "GET", "path": "/sit/stay/rollover", "headers": {"Host": "x.io", "X-Forwarded-Proto": "http"}}',
    )

    assert response["Payload"]["AssertionError raised"] is False


def test_serverless_no_code_instrumentation(run_lambda_function):
    """
    Test that ensures that, just by adding a Lambda layer containing the
    Python SDK and without any code changes, Sentry is able to capture errors.
    """

    for initial_handler in [
        None,
        "test_dir/test_lambda.test_handler",
        "test_dir.test_lambda.test_handler",
    ]:
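        # initial_handler=None exercises the default handler wiring, while the
        # other two values cover the path-style ("dir/file.func") and dotted
        # ("module.sub.func") handler strings the layer must resolve.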
        print("Testing Initial Handler ", initial_handler)
        _, _, response = run_lambda_function(
            dedent(
                """
            import sentry_sdk

            def test_handler(event, context):
                current_client = sentry_sdk.Hub.current.client

                assert current_client is not None

                assert len(current_client.options['integrations']) == 1
                assert isinstance(current_client.options['integrations'][0],
                                  sentry_sdk.integrations.aws_lambda.AwsLambdaIntegration)

                raise Exception("Oh!")
            """
            ),
            b'{"foo": "bar"}',
            layer=True,
            initial_handler=initial_handler,
        )
        assert response["FunctionError"] == "Unhandled"
        assert response["StatusCode"] == 200

        assert response["Payload"]["errorType"] != "AssertionError"

        assert response["Payload"]["errorType"] == "Exception"
        assert response["Payload"]["errorMessage"] == "Oh!"

        assert "sentry_handler" in response["LogResult"][3].decode("utf-8")


def test_error_has_new_trace_context_performance_enabled(run_lambda_function):
    envelopes, _, _ = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=1.0)

        def test_handler(event, context):
            sentry_sdk.capture_message("hi")
            raise Exception("Oh!")
        """
        ),
        payload=b'{"foo": "bar"}',
    )

    (msg_event, error_event, transaction_event) = envelopes

    assert "trace" in msg_event["contexts"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert "trace" in error_event["contexts"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert "trace" in transaction_event["contexts"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
        == transaction_event["contexts"]["trace"]["trace_id"]
    )


def test_error_has_new_trace_context_performance_disabled(run_lambda_function):
    _, events, _ = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=None)  # this is the default, just added for clarity

        def test_handler(event, context):
            sentry_sdk.capture_message("hi")
            raise Exception("Oh!")
        """
        ),
        payload=b'{"foo": "bar"}',
    )

    (msg_event, error_event) = events

    assert "trace" in msg_event["contexts"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert "trace" in error_event["contexts"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
    )


def test_error_has_existing_trace_context_performance_enabled(run_lambda_function):
    trace_id = "471a43a4192642f0b136d5159a501701"
    parent_span_id = "6e8f22c393e68f19"
    parent_sampled = 1
    sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)
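    # -> "471a43a4192642f0b136d5159a501701-6e8f22c393e68f19-1", i.e. the
    # documented "<trace_id>-<parent_span_id>-<parent_sampled>" format.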

    # We simulate here AWS API Gateway's behavior of passing HTTP headers
    # as the `headers` dict in the event passed to the Lambda function.
    payload = {
        "headers": {
            "sentry-trace": sentry_trace_header,
        }
    }

    envelopes, _, _ = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=1.0)

        def test_handler(event, context):
            sentry_sdk.capture_message("hi")
            raise Exception("Oh!")
        """
        ),
        payload=json.dumps(payload).encode(),
    )

    (msg_event, error_event, transaction_event) = envelopes

    assert "trace" in msg_event["contexts"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert "trace" in error_event["contexts"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert "trace" in transaction_event["contexts"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
        == transaction_event["contexts"]["trace"]["trace_id"]
        == "471a43a4192642f0b136d5159a501701"
    )


def test_error_has_existing_trace_context_performance_disabled(run_lambda_function):
    trace_id = "471a43a4192642f0b136d5159a501701"
    parent_span_id = "6e8f22c393e68f19"
    parent_sampled = 1
    sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)

    # We simulate here AWS API Gateway's behavior of passing HTTP headers
    # as the `headers` dict in the event passed to the Lambda function.
    payload = {
        "headers": {
            "sentry-trace": sentry_trace_header,
        }
    }

    _, events, _ = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=None)  # this is the default, just added for clarity

        def test_handler(event, context):
            sentry_sdk.capture_message("hi")
            raise Exception("Oh!")
        """
        ),
        payload=json.dumps(payload).encode(),
    )

    (msg_event, error_event) = events

    assert "trace" in msg_event["contexts"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert "trace" in error_event["contexts"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
        == "471a43a4192642f0b136d5159a501701"
    )


def test_basic_with_eventbridge_source(run_lambda_function):
    _, events, response = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk()

        def test_handler(event, context):
            raise Exception("Oh!")
        """
        ),
        b'[{"topic":"lps-ranges","partition":1,"offset":0,"timestamp":1701268939207,"timestampType":"CREATE_TIME","key":"REDACTED","value":"REDACTED","headers":[],"eventSourceArn":"REDACTED","bootstrapServers":"REDACTED","eventSource":"aws:kafka","eventSourceKey":"lps-ranges-1"}]',
    )

    assert response["FunctionError"] == "Unhandled"

    (event,) = events
    assert event["level"] == "error"
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "Exception"
    assert exception["value"] == "Oh!"

sentry-python-1.39.2/tests/integrations/beam/__init__.py
import pytest

pytest.importorskip("apache_beam")

sentry-python-1.39.2/tests/integrations/beam/test_beam.py
import pytest
import inspect

import dill

from sentry_sdk.integrations.beam import (
    BeamIntegration,
    _wrap_task_call,
    _wrap_inspect_call,
)

from apache_beam.typehints.trivial_inference import instance_to_type
from apache_beam.typehints.decorators import getcallargs_forhints
from apache_beam.transforms.core import DoFn, ParDo, _DoFnParam, CallableWrapperDoFn
from apache_beam.runners.common import DoFnInvoker, DoFnContext
from apache_beam.utils.windowed_value import WindowedValue

try:
    from apache_beam.runners.common import OutputHandler
except ImportError:
    from apache_beam.runners.common import OutputProcessor as OutputHandler


def foo():
    return True


def bar(x, y):
    # print(x + y)
    return True


def baz(x, y=2):
    # print(x + y)
    return True


class A:
    def __init__(self, fn):
        self.r = "We are in A"
        self.fn = fn
        self._inspect_fn = _wrap_inspect_call(self, "fn")

    def process(self):
        return self.fn()


class B(A, object):
    def fa(self, x, element=False, another_element=False):
        if x or (element and not another_element):
            # print(self.r)
            return True
        1 / 0
        return False

    def __init__(self):
        self.r = "We are in B"
        super(B, self).__init__(self.fa)


class SimpleFunc(DoFn):
    def process(self, x):
        if x:
            1 / 0
        return [True]


class PlaceHolderFunc(DoFn):
    def process(self, x, timestamp=DoFn.TimestampParam, wx=DoFn.WindowParam):
        if isinstance(timestamp, _DoFnParam) or isinstance(wx, _DoFnParam):
            raise Exception("Bad instance")
        if x:
            1 / 0
        yield True


def fail(x):
    if x:
        1 / 0
    return [True]


test_parent = A(foo)
test_child = B()
test_simple = SimpleFunc()
test_place_holder = PlaceHolderFunc()
test_callable = CallableWrapperDoFn(fail)


# Cannot call simple functions or placeholder test.
@pytest.mark.parametrize(
    "obj,f,args,kwargs",
    [
        [test_parent, "fn", (), {}],
        [test_child, "fn", (False,), {"element": True}],
        [test_child, "fn", (True,), {}],
        [test_simple, "process", (False,), {}],
        [test_callable, "process", (False,), {}],
    ],
)
def test_monkey_patch_call(obj, f, args, kwargs):
    func = getattr(obj, f)

    assert func(*args, **kwargs)
    assert _wrap_task_call(func)(*args, **kwargs)


@pytest.mark.parametrize("f", [foo, bar, baz, test_parent.fn, test_child.fn])
def test_monkey_patch_pickle(f):
    f_temp = _wrap_task_call(f)
    assert dill.pickles(f_temp), "{} is not pickling correctly!".format(f)

    # Pickle everything
    s1 = dill.dumps(f_temp)
    s2 = dill.loads(s1)
    dill.dumps(s2)


@pytest.mark.parametrize(
    "f,args,kwargs",
    [
        [foo, (), {}],
        [bar, (1, 5), {}],
        [baz, (1,), {}],
        [test_parent.fn, (), {}],
        [test_child.fn, (False,), {"element": True}],
        [test_child.fn, (True,), {}],
    ],
)
def test_monkey_patch_signature(f, args, kwargs):
    arg_types = [instance_to_type(v) for v in args]
    kwargs_types = {k: instance_to_type(v) for (k, v) in kwargs.items()}
    f_temp = _wrap_task_call(f)
    try:
        getcallargs_forhints(f, *arg_types, **kwargs_types)
    except Exception:
        print("Failed on {} with parameters {}, {}".format(f, args, kwargs))
        raise
    try:
        getcallargs_forhints(f_temp, *arg_types, **kwargs_types)
    except Exception:
        print("Failed on {} with parameters {}, {}".format(f_temp, args, kwargs))
        raise
    try:
        expected_signature = inspect.signature(f)
        test_signature = inspect.signature(f_temp)
        assert (
            expected_signature == test_signature
        ), "Failed on {}, signature {} does not match {}".format(
            f, expected_signature, test_signature
        )
    except Exception:
        # expected to pass for py2.7
        pass


class _OutputHandler(OutputHandler):
    def process_outputs(
        self, windowed_input_element, results, watermark_estimator=None
    ):
        self.handle_process_outputs(
            windowed_input_element, results, watermark_estimator
        )

    def handle_process_outputs(
        self, windowed_input_element, results, watermark_estimator=None
    ):
        print(windowed_input_element)
        try:
            for result in results:
                assert result
        except StopIteration:
            print("In here")


@pytest.fixture
def init_beam(sentry_init):
    def inner(fn):
        sentry_init(default_integrations=False, integrations=[BeamIntegration()])
        # Little hack to avoid having to run the whole pipeline.
        pardo = ParDo(fn)
        signature = pardo._signature
        output_processor = _OutputHandler()
        return DoFnInvoker.create_invoker(
            signature, output_processor, DoFnContext("test")
        )

    return inner


@pytest.mark.parametrize("fn", [test_simple, test_callable, test_place_holder])
def test_invoker_normal(init_beam, fn):
    invoker = init_beam(fn)
    print("Normal testing {} with {} invoker.".format(fn, invoker))
    windowed_value = WindowedValue(False, 0, [None])
    invoker.invoke_process(windowed_value)


@pytest.mark.parametrize("fn", [test_simple, test_callable, test_place_holder])
def test_invoker_exception(init_beam, capture_events, capture_exceptions, fn):
    invoker = init_beam(fn)
    events = capture_events()

    print("Exception testing {} with {} invoker.".format(fn, invoker))
    # Window value will always have one value for the process to run.
    windowed_value = WindowedValue(True, 0, [None])
    try:
        invoker.invoke_process(windowed_value)
    except Exception:
        pass

    (event,) = events
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    assert exception["mechanism"]["type"] == "beam"

sentry-python-1.39.2/tests/integrations/boto3/__init__.py
import pytest
import os

pytest.importorskip("boto3")
xml_fixture_path = os.path.dirname(os.path.abspath(__file__))


def read_fixture(name):
    with open(os.path.join(xml_fixture_path, name), "rb") as f:
        return f.read()

sentry-python-1.39.2/tests/integrations/boto3/aws_mock.py
from io import BytesIO
from botocore.awsrequest import AWSResponse


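# Minimal stand-in for botocore's streaming body: stream() mirrors the
# chunked-read generator interface of the real raw HTTP response.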
class Body(BytesIO):
    def stream(self, **kwargs):
        contents = self.read()
        while contents:
            yield contents
            contents = self.read()


class MockResponse(object):
    def __init__(self, client, status_code, headers, body):
        self._client = client
        self._status_code = status_code
        self._headers = headers
        self._body = body

    def __enter__(self):
        self._client.meta.events.register("before-send", self)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self._client.meta.events.unregister("before-send", self)

    def __call__(self, request, **kwargs):
        return AWSResponse(
            request.url,
            self._status_code,
            self._headers,
            Body(self._body),
        )
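
# Typical usage (see test_s3.py below): wrap a client call in
# "with MockResponse(client, 200, {}, body):" so the "before-send" handler
# registered in __enter__ short-circuits botocore with the canned AWSResponse.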

sentry-python-1.39.2/tests/integrations/boto3/s3_list.xml
<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
    <Name>marshalls-furious-bucket</Name>
    <Prefix></Prefix>
    <Marker></Marker>
    <MaxKeys>1000</MaxKeys>
    <EncodingType>url</EncodingType>
    <IsTruncated>false</IsTruncated>
    <Contents>
        <Key>foo.txt</Key>
        <LastModified>2020-10-24T00:13:39.000Z</LastModified>
        <ETag>&quot;a895ba674b4abd01b5d67cfd7074b827&quot;</ETag>
        <Size>2064537</Size>
        <Owner>
            <ID>bef397f7e536914d1ff1bbdb105ed90bcfd06269456bf4a06c6e2e54564daf7</ID>
        </Owner>
        <StorageClass>STANDARD</StorageClass>
    </Contents>
    <Contents>
        <Key>bar.txt</Key>
        <LastModified>2020-10-02T15:15:20.000Z</LastModified>
        <ETag>&quot;a895ba674b4abd01b5d67cfd7074b827&quot;</ETag>
        <Size>2064537</Size>
        <Owner>
            <ID>bef397f7e536914d1ff1bbdb105ed90bcfd06269456bf4a06c6e2e54564daf7</ID>
        </Owner>
        <StorageClass>STANDARD</StorageClass>
    </Contents>
</ListBucketResult>

sentry-python-1.39.2/tests/integrations/boto3/test_s3.py
import pytest

import boto3

from sentry_sdk import Hub
from sentry_sdk.integrations.boto3 import Boto3Integration
from tests.integrations.boto3.aws_mock import MockResponse
from tests.integrations.boto3 import read_fixture

try:
    from unittest import mock  # python 3.3 and above
except ImportError:
    import mock  # python < 3.3


session = boto3.Session(
    aws_access_key_id="-",
    aws_secret_access_key="-",
)
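
# Placeholder credentials: no request ever reaches AWS, because every test
# below registers a MockResponse "before-send" handler on the client.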


def test_basic(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()])
    events = capture_events()

    s3 = session.resource("s3")
    with Hub.current.start_transaction() as transaction, MockResponse(
        s3.meta.client, 200, {}, read_fixture("s3_list.xml")
    ):
        bucket = s3.Bucket("bucket")
        items = [obj for obj in bucket.objects.all()]
        assert len(items) == 2
        assert items[0].key == "foo.txt"
        assert items[1].key == "bar.txt"
        transaction.finish()

    (event,) = events
    assert event["type"] == "transaction"
    assert len(event["spans"]) == 1
    (span,) = event["spans"]
    assert span["op"] == "http.client"
    assert span["description"] == "aws.s3.ListObjects"


def test_streaming(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()])
    events = capture_events()

    s3 = session.resource("s3")
    with Hub.current.start_transaction() as transaction, MockResponse(
        s3.meta.client, 200, {}, b"hello"
    ):
        obj = s3.Bucket("bucket").Object("foo.pdf")
        body = obj.get()["Body"]
        assert body.read(1) == b"h"
        assert body.read(2) == b"el"
        assert body.read(3) == b"lo"
        assert body.read(1) == b""
        transaction.finish()

    (event,) = events
    assert event["type"] == "transaction"
    assert len(event["spans"]) == 2

    span1 = event["spans"][0]
    assert span1["op"] == "http.client"
    assert span1["description"] == "aws.s3.GetObject"
    assert span1["data"] == {
        "http.method": "GET",
        "aws.request.url": "https://bucket.s3.amazonaws.com/foo.pdf",
        "http.fragment": "",
        "http.query": "",
    }

    span2 = event["spans"][1]
    assert span2["op"] == "http.client.stream"
    assert span2["description"] == "aws.s3.GetObject"
    assert span2["parent_span_id"] == span1["span_id"]


def test_streaming_close(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()])
    events = capture_events()

    s3 = session.resource("s3")
    with Hub.current.start_transaction() as transaction, MockResponse(
        s3.meta.client, 200, {}, b"hello"
    ):
        obj = s3.Bucket("bucket").Object("foo.pdf")
        body = obj.get()["Body"]
        assert body.read(1) == b"h"
        body.close()  # close partially-read stream
        transaction.finish()

    (event,) = events
    assert event["type"] == "transaction"
    assert len(event["spans"]) == 2
    span1 = event["spans"][0]
    assert span1["op"] == "http.client"
    span2 = event["spans"][1]
    assert span2["op"] == "http.client.stream"


@pytest.mark.tests_internal_exceptions
def test_omit_url_data_if_parsing_fails(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()])
    events = capture_events()

    s3 = session.resource("s3")

    with mock.patch(
        "sentry_sdk.integrations.boto3.parse_url",
        side_effect=ValueError,
    ):
        with Hub.current.start_transaction() as transaction, MockResponse(
            s3.meta.client, 200, {}, read_fixture("s3_list.xml")
        ):
            bucket = s3.Bucket("bucket")
            items = [obj for obj in bucket.objects.all()]
            assert len(items) == 2
            assert items[0].key == "foo.txt"
            assert items[1].key == "bar.txt"
            transaction.finish()

    (event,) = events
    assert event["spans"][0]["data"] == {
        "http.method": "GET",
        # no url data
    }

sentry-python-1.39.2/tests/integrations/bottle/__init__.py
import pytest

pytest.importorskip("bottle")

sentry-python-1.39.2/tests/integrations/bottle/test_bottle.py
import json
import pytest
import logging

from io import BytesIO
from bottle import Bottle, debug as set_debug, abort, redirect
from sentry_sdk import capture_message
from sentry_sdk.serializer import MAX_DATABAG_BREADTH

from sentry_sdk.integrations.logging import LoggingIntegration
from werkzeug.test import Client

import sentry_sdk.integrations.bottle as bottle_sentry


@pytest.fixture(scope="function")
def app(sentry_init):
    app = Bottle()

    @app.route("/message")
    def hi():
        capture_message("hi")
        return "ok"

    @app.route("/message/")
    def hi_with_id(message_id):
        capture_message("hi")
        return "ok"

    @app.route("/message-named-route", name="hi")
    def named_hi():
        capture_message("hi")
        return "ok"

    yield app


@pytest.fixture
def get_client(app):
    def inner():
        return Client(app)

    return inner


def test_has_context(sentry_init, app, capture_events, get_client):
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])
    events = capture_events()

    client = get_client()
    response = client.get("/message")
    assert response[1] == "200 OK"

    (event,) = events
    assert event["message"] == "hi"
    assert "data" not in event["request"]
    assert event["request"]["url"] == "http://localhost/message"


@pytest.mark.parametrize(
    "url,transaction_style,expected_transaction,expected_source",
    [
        ("/message", "endpoint", "hi", "component"),
        ("/message", "url", "/message", "route"),
        ("/message/123456", "url", "/message/", "route"),
        ("/message-named-route", "endpoint", "hi", "component"),
    ],
)
def test_transaction_style(
    sentry_init,
    url,
    transaction_style,
    expected_transaction,
    expected_source,
    capture_events,
    get_client,
):
    sentry_init(
        integrations=[
            bottle_sentry.BottleIntegration(transaction_style=transaction_style)
        ]
    )
    events = capture_events()

    client = get_client()
    response = client.get(url)
    assert response[1] == "200 OK"

    (event,) = events
    # We use endswith() because in Python 2.7 it is "test_bottle.hi"
    # and in later Pythons "test_bottle.app.<locals>.hi"
    assert event["transaction"].endswith(expected_transaction)
    assert event["transaction_info"] == {"source": expected_source}


@pytest.mark.parametrize("debug", (True, False), ids=["debug", "nodebug"])
@pytest.mark.parametrize("catchall", (True, False), ids=["catchall", "nocatchall"])
def test_errors(
    sentry_init, capture_exceptions, capture_events, app, debug, catchall, get_client
):
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])

    app.catchall = catchall
    set_debug(mode=debug)

    exceptions = capture_exceptions()
    events = capture_events()

    @app.route("/")
    def index():
        1 / 0

    client = get_client()
    try:
        client.get("/")
    except ZeroDivisionError:
        pass

    (exc,) = exceptions
    assert isinstance(exc, ZeroDivisionError)

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "bottle"
    assert event["exception"]["values"][0]["mechanism"]["handled"] is False


def test_large_json_request(sentry_init, capture_events, app, get_client):
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])

    data = {"foo": {"bar": "a" * 2000}}

    @app.route("/", method="POST")
    def index():
        import bottle

        assert bottle.request.json == data
        assert bottle.request.body.read() == json.dumps(data).encode("ascii")
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = get_client()
    response = client.get("/")

    response = client.post("/", content_type="application/json", data=json.dumps(data))
    assert response[1] == "200 OK"

    (event,) = events
    assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
    }
    assert len(event["request"]["data"]["foo"]["bar"]) == 1024


@pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
def test_empty_json_request(sentry_init, capture_events, app, data, get_client):
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])

    @app.route("/", method="POST")
    def index():
        import bottle

        assert bottle.request.json == data
        assert bottle.request.body.read() == json.dumps(data).encode("ascii")
        # assert not bottle.request.forms
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = get_client()
    response = client.post("/", content_type="application/json", data=json.dumps(data))
    assert response[1] == "200 OK"

    (event,) = events
    assert event["request"]["data"] == data


def test_medium_formdata_request(sentry_init, capture_events, app, get_client):
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])

    data = {"foo": "a" * 2000}

    @app.route("/", method="POST")
    def index():
        import bottle

        assert bottle.request.forms["foo"] == data["foo"]
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = get_client()
    response = client.post("/", data=data)
    assert response[1] == "200 OK"

    (event,) = events
    assert event["_meta"]["request"]["data"]["foo"] == {
        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
    }
    assert len(event["request"]["data"]["foo"]) == 1024


@pytest.mark.parametrize("input_char", ["a", b"a"])
def test_too_large_raw_request(
    sentry_init, input_char, capture_events, app, get_client
):
    sentry_init(
        integrations=[bottle_sentry.BottleIntegration()], max_request_body_size="small"
    )

    data = input_char * 2000

    @app.route("/", method="POST")
    def index():
        import bottle

        if isinstance(data, bytes):
            assert bottle.request.body.read() == data
        else:
            assert bottle.request.body.read() == data.encode("ascii")
        assert not bottle.request.json
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = get_client()
    response = client.post("/", data=data)
    assert response[1] == "200 OK"

    (event,) = events
    assert event["_meta"]["request"]["data"] == {"": {"rem": [["!config", "x"]]}}
    assert not event["request"]["data"]


def test_files_and_form(sentry_init, capture_events, app, get_client):
    sentry_init(
        integrations=[bottle_sentry.BottleIntegration()], max_request_body_size="always"
    )

    data = {"foo": "a" * 2000, "file": (BytesIO(b"hello"), "hello.txt")}

    @app.route("/", method="POST")
    def index():
        import bottle

        assert list(bottle.request.forms) == ["foo"]
        assert list(bottle.request.files) == ["file"]
        assert not bottle.request.json
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = get_client()
    response = client.post("/", data=data)
    assert response[1] == "200 OK"

    (event,) = events
    assert event["_meta"]["request"]["data"]["foo"] == {
        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
    }
    assert len(event["request"]["data"]["foo"]) == 1024

    assert event["_meta"]["request"]["data"]["file"] == {
        "": {
            "rem": [["!raw", "x"]],
        }
    }
    assert not event["request"]["data"]["file"]


def test_json_not_truncated_if_max_request_body_size_is_always(
    sentry_init, capture_events, app, get_client
):
    sentry_init(
        integrations=[bottle_sentry.BottleIntegration()], max_request_body_size="always"
    )

    data = {
        "key{}".format(i): "value{}".format(i) for i in range(MAX_DATABAG_BREADTH + 10)
    }

    @app.route("/", method="POST")
    def index():
        import bottle

        assert bottle.request.json == data
        assert bottle.request.body.read() == json.dumps(data).encode("ascii")
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = get_client()

    response = client.post("/", content_type="application/json", data=json.dumps(data))
    assert response[1] == "200 OK"

    (event,) = events
    assert event["request"]["data"] == data


@pytest.mark.parametrize(
    "integrations",
    [
        [bottle_sentry.BottleIntegration()],
        [bottle_sentry.BottleIntegration(), LoggingIntegration(event_level="ERROR")],
    ],
)
def test_errors_not_reported_twice(
    sentry_init, integrations, capture_events, app, get_client
):
    sentry_init(integrations=integrations)

    app.catchall = False

    logger = logging.getLogger("bottle.app")

    @app.route("/")
    def index():
        try:
            1 / 0
        except Exception as e:
            logger.exception(e)
            raise e

    events = capture_events()

    client = get_client()
    with pytest.raises(ZeroDivisionError):
        client.get("/")

    assert len(events) == 1


def test_logging(sentry_init, capture_events, app, get_client):
    # ensure that Bottle's logger magic doesn't break ours
    sentry_init(
        integrations=[
            bottle_sentry.BottleIntegration(),
            LoggingIntegration(event_level="ERROR"),
        ]
    )

    @app.route("/")
    def index():
        app.logger.error("hi")
        return "ok"

    events = capture_events()

    client = get_client()
    client.get("/")

    (event,) = events
    assert event["level"] == "error"


def test_mount(app, capture_exceptions, capture_events, sentry_init, get_client):
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])

    app.catchall = False

    def crashing_app(environ, start_response):
        1 / 0

    app.mount("/wsgi/", crashing_app)

    client = Client(app)

    exceptions = capture_exceptions()
    events = capture_events()

    with pytest.raises(ZeroDivisionError) as exc:
        client.get("/wsgi/")

    (error,) = exceptions

    assert error is exc.value

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "bottle"
    assert event["exception"]["values"][0]["mechanism"]["handled"] is False


def test_500(sentry_init, capture_events, app, get_client):
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])

    set_debug(False)
    app.catchall = True

    @app.route("/")
    def index():
        1 / 0

    @app.error(500)
    def error_handler(err):
        capture_message("error_msg")
        return "My error"

    events = capture_events()

    client = get_client()
    response = client.get("/")
    assert response[1] == "500 Internal Server Error"

    _, event = events
    assert event["message"] == "error_msg"


def test_error_in_errorhandler(sentry_init, capture_events, app, get_client):
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])

    set_debug(False)
    app.catchall = True

    @app.route("/")
    def index():
        raise ValueError()

    @app.error(500)
    def error_handler(err):
        1 / 0

    events = capture_events()

    client = get_client()

    with pytest.raises(ZeroDivisionError):
        client.get("/")

    event1, event2 = events

    (exception,) = event1["exception"]["values"]
    assert exception["type"] == "ValueError"

    exception = event2["exception"]["values"][0]
    assert exception["type"] == "ZeroDivisionError"


def test_bad_request_not_captured(sentry_init, capture_events, app, get_client):
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])
    events = capture_events()

    @app.route("/")
    def index():
        abort(400, "bad request in")

    client = get_client()

    client.get("/")

    assert not events


def test_no_exception_on_redirect(sentry_init, capture_events, app, get_client):
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])
    events = capture_events()

    @app.route("/")
    def index():
        redirect("/here")

    @app.route("/here")
    def here():
        return "here"

    client = get_client()

    client.get("/")

    assert not events

sentry-python-1.39.2/tests/integrations/celery/__init__.py
import pytest

pytest.importorskip("celery")

sentry-python-1.39.2/tests/integrations/celery/test_celery.py
import threading

import pytest

from sentry_sdk import Hub, configure_scope, start_transaction, get_current_span
from sentry_sdk.integrations.celery import (
    CeleryIntegration,
    _get_headers,
    _wrap_apply_async,
)

from sentry_sdk._compat import text_type

from celery import Celery, VERSION
from celery.bin import worker

try:
    from unittest import mock  # python 3.3 and above
except ImportError:
    import mock  # python < 3.3


@pytest.fixture
def connect_signal(request):
    def inner(signal, f):
        signal.connect(f)
        request.addfinalizer(lambda: signal.disconnect(f))

    return inner


@pytest.fixture
def init_celery(sentry_init, request):
    def inner(propagate_traces=True, backend="always_eager", **kwargs):
        sentry_init(
            integrations=[CeleryIntegration(propagate_traces=propagate_traces)],
            **kwargs
        )
        celery = Celery(__name__)

        if backend == "always_eager":
            if VERSION < (4,):
                celery.conf.CELERY_ALWAYS_EAGER = True
            else:
                celery.conf.task_always_eager = True
        elif backend == "redis":
            # broken on celery 3
            if VERSION < (4,):
                pytest.skip("Redis backend broken for some reason")

            # this backend requires capture_events_forksafe
            celery.conf.worker_max_tasks_per_child = 1
            celery.conf.worker_concurrency = 1
            celery.conf.broker_url = "redis://127.0.0.1:6379"
            celery.conf.result_backend = "redis://127.0.0.1:6379"
            celery.conf.task_always_eager = False

            Hub.main.bind_client(Hub.current.client)
            request.addfinalizer(lambda: Hub.main.bind_client(None))

            # Once we drop celery 3 we can use the celery_worker fixture
            if VERSION < (5,):
                worker_fn = worker.worker(app=celery).run
            else:
                from celery.bin.base import CLIContext

                worker_fn = lambda: worker.worker(
                    obj=CLIContext(app=celery, no_color=True, workdir=".", quiet=False),
                    args=[],
                )

            worker_thread = threading.Thread(target=worker_fn)
            worker_thread.daemon = True
            worker_thread.start()
        else:
            raise ValueError(backend)

        return celery

    return inner


@pytest.fixture
def celery(init_celery):
    return init_celery()


@pytest.fixture(
    params=[
        lambda task, x, y: (
            task.delay(x, y),
            {"args": [x, y], "kwargs": {}},
        ),
        lambda task, x, y: (
            task.apply_async((x, y)),
            {"args": [x, y], "kwargs": {}},
        ),
        lambda task, x, y: (
            task.apply_async(args=(x, y)),
            {"args": [x, y], "kwargs": {}},
        ),
        lambda task, x, y: (
            task.apply_async(kwargs=dict(x=x, y=y)),
            {"args": [], "kwargs": {"x": x, "y": y}},
        ),
    ]
)
def celery_invocation(request):
    """
    Invokes a task in the multiple ways Celery allows, testing our apply_async monkeypatch.

    Currently limited to a task signature of the form foo(x, y)
    """
    return request.param


def test_simple_with_performance(capture_events, init_celery, celery_invocation):
    celery = init_celery(traces_sample_rate=1.0)
    events = capture_events()

    @celery.task(name="dummy_task")
    def dummy_task(x, y):
        foo = 42  # noqa
        return x / y

    with start_transaction(op="unit test transaction") as transaction:
        celery_invocation(dummy_task, 1, 2)
        _, expected_context = celery_invocation(dummy_task, 1, 0)

    (_, error_event, _, _) = events

    assert error_event["contexts"]["trace"]["trace_id"] == transaction.trace_id
    assert error_event["contexts"]["trace"]["span_id"] != transaction.span_id
    assert error_event["transaction"] == "dummy_task"
    assert "celery_task_id" in error_event["tags"]
    assert error_event["extra"]["celery-job"] == dict(
        task_name="dummy_task", **expected_context
    )

    (exception,) = error_event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    assert exception["mechanism"]["type"] == "celery"
    assert exception["stacktrace"]["frames"][0]["vars"]["foo"] == "42"


def test_simple_without_performance(capture_events, init_celery, celery_invocation):
    celery = init_celery(traces_sample_rate=None)
    events = capture_events()

    @celery.task(name="dummy_task")
    def dummy_task(x, y):
        foo = 42  # noqa
        return x / y

    with configure_scope() as scope:
        celery_invocation(dummy_task, 1, 2)
        _, expected_context = celery_invocation(dummy_task, 1, 0)

        (error_event,) = events

        assert (
            error_event["contexts"]["trace"]["trace_id"]
            == scope._propagation_context["trace_id"]
        )
        assert (
            error_event["contexts"]["trace"]["span_id"]
            != scope._propagation_context["span_id"]
        )
        assert error_event["transaction"] == "dummy_task"
        assert "celery_task_id" in error_event["tags"]
        assert error_event["extra"]["celery-job"] == dict(
            task_name="dummy_task", **expected_context
        )

        (exception,) = error_event["exception"]["values"]
        assert exception["type"] == "ZeroDivisionError"
        assert exception["mechanism"]["type"] == "celery"
        assert exception["stacktrace"]["frames"][0]["vars"]["foo"] == "42"


@pytest.mark.parametrize("task_fails", [True, False], ids=["error", "success"])
def test_transaction_events(capture_events, init_celery, celery_invocation, task_fails):
    celery = init_celery(traces_sample_rate=1.0)

    @celery.task(name="dummy_task")
    def dummy_task(x, y):
        return x / y

    # XXX: For some reason the first call does not get instrumented properly.
    celery_invocation(dummy_task, 1, 1)

    events = capture_events()

    with start_transaction(name="submission") as transaction:
        celery_invocation(dummy_task, 1, 0 if task_fails else 1)

    if task_fails:
        error_event = events.pop(0)
        assert error_event["contexts"]["trace"]["trace_id"] == transaction.trace_id
        assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"

    execution_event, submission_event = events
    assert execution_event["transaction"] == "dummy_task"
    assert execution_event["transaction_info"] == {"source": "task"}

    assert submission_event["transaction"] == "submission"
    assert submission_event["transaction_info"] == {"source": "custom"}

    assert execution_event["type"] == submission_event["type"] == "transaction"
    assert execution_event["contexts"]["trace"]["trace_id"] == transaction.trace_id
    assert submission_event["contexts"]["trace"]["trace_id"] == transaction.trace_id

    if task_fails:
        assert execution_event["contexts"]["trace"]["status"] == "internal_error"
    else:
        assert execution_event["contexts"]["trace"]["status"] == "ok"

    assert execution_event["spans"] == []
    assert submission_event["spans"] == [
        {
            "description": "dummy_task",
            "op": "queue.submit.celery",
            "parent_span_id": submission_event["contexts"]["trace"]["span_id"],
            "same_process_as_parent": True,
            "span_id": submission_event["spans"][0]["span_id"],
            "start_timestamp": submission_event["spans"][0]["start_timestamp"],
            "timestamp": submission_event["spans"][0]["timestamp"],
            "trace_id": text_type(transaction.trace_id),
        }
    ]


def test_no_stackoverflows(celery):
    """We used to have a bug in the Celery integration where its monkeypatching
    was repeated for every task invocation, leading to stackoverflows.

    See https://github.com/getsentry/sentry-python/issues/265
    """

    results = []

    @celery.task(name="dummy_task")
    def dummy_task():
        with configure_scope() as scope:
            scope.set_tag("foo", "bar")

        results.append(42)

    for _ in range(10000):
        dummy_task.delay()

    assert results == [42] * 10000

    with configure_scope() as scope:
        assert not scope._tags


def test_simple_no_propagation(capture_events, init_celery):
    celery = init_celery(propagate_traces=False)
    events = capture_events()

    @celery.task(name="dummy_task")
    def dummy_task():
        1 / 0

    with start_transaction() as transaction:
        dummy_task.delay()

    (event,) = events
    assert event["contexts"]["trace"]["trace_id"] != transaction.trace_id
    assert event["transaction"] == "dummy_task"
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"


def test_ignore_expected(capture_events, celery):
    events = capture_events()

    @celery.task(name="dummy_task", throws=(ZeroDivisionError,))
    def dummy_task(x, y):
        return x / y

    dummy_task.delay(1, 2)
    dummy_task.delay(1, 0)
    assert not events


def test_broken_prerun(init_celery, connect_signal):
    from celery.signals import task_prerun

    stack_lengths = []

    def crash(*args, **kwargs):
        # scope should exist in prerun
        stack_lengths.append(len(Hub.current._stack))
        1 / 0

    # Order here is important to reproduce the bug: In Celery 3, a crashing
    # prerun would prevent other preruns from running.

    connect_signal(task_prerun, crash)
    celery = init_celery()

    assert len(Hub.current._stack) == 1

    @celery.task(name="dummy_task")
    def dummy_task(x, y):
        stack_lengths.append(len(Hub.current._stack))
        return x / y

    if VERSION >= (4,):
        dummy_task.delay(2, 2)
    else:
        with pytest.raises(ZeroDivisionError):
            dummy_task.delay(2, 2)

    assert len(Hub.current._stack) == 1
    if VERSION < (4,):
        assert stack_lengths == [2]
    else:
        assert stack_lengths == [2, 2]


@pytest.mark.xfail(
    (4, 2, 0) <= VERSION < (4, 4, 3),
    strict=True,
    reason="https://github.com/celery/celery/issues/4661",
)
def test_retry(celery, capture_events):
    events = capture_events()
    failures = [True, True, False]
    runs = []

    @celery.task(name="dummy_task", bind=True)
    def dummy_task(self):
        runs.append(1)
        try:
            if failures.pop(0):
                1 / 0
        except Exception as exc:
            self.retry(max_retries=2, exc=exc)

    dummy_task.delay()

    assert len(runs) == 3
    assert not events

    failures = [True, True, True]
    runs = []

    dummy_task.delay()

    assert len(runs) == 3
    (event,) = events
    exceptions = event["exception"]["values"]

    for e in exceptions:
        assert e["type"] == "ZeroDivisionError"


# TODO: This test hangs when running tests with `tox --parallel auto`. Find out why and fix it!
@pytest.mark.skip
@pytest.mark.forked
def test_redis_backend_trace_propagation(init_celery, capture_events_forksafe):
    celery = init_celery(traces_sample_rate=1.0, backend="redis", debug=True)

    events = capture_events_forksafe()

    runs = []

    @celery.task(name="dummy_task", bind=True)
    def dummy_task(self):
        runs.append(1)
        1 / 0

    with start_transaction(name="submit_celery"):
        # Curious: Cannot use delay() here or py2.7-celery-4.2 crashes
        res = dummy_task.apply_async()

    with pytest.raises(Exception):  # noqa: B017
        # Celery 4.1 raises a gibberish exception
        res.wait()

    # if this is nonempty, the worker never really forked
    assert not runs

    submit_transaction = events.read_event()
    assert submit_transaction["type"] == "transaction"
    assert submit_transaction["transaction"] == "submit_celery"

    # 4 spans expected, because the Redis integration was auto-enabled.
    assert len(submit_transaction["spans"]) == 4
    span = submit_transaction["spans"][0]
    assert span["op"] == "queue.submit.celery"
    assert span["description"] == "dummy_task"

    event = events.read_event()
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"

    transaction = events.read_event()
    assert (
        transaction["contexts"]["trace"]["trace_id"]
        == event["contexts"]["trace"]["trace_id"]
        == submit_transaction["contexts"]["trace"]["trace_id"]
    )

    events.read_flush()

    # if this is nonempty, the worker never really forked
    assert not runs


@pytest.mark.forked
@pytest.mark.parametrize("newrelic_order", ["sentry_first", "sentry_last"])
def test_newrelic_interference(init_celery, newrelic_order, celery_invocation):
    def instrument_newrelic():
        import celery.app.trace as celery_mod
        from newrelic.hooks.application_celery import instrument_celery_execute_trace

        assert hasattr(celery_mod, "build_tracer")
        instrument_celery_execute_trace(celery_mod)

    if newrelic_order == "sentry_first":
        celery = init_celery()
        instrument_newrelic()
    elif newrelic_order == "sentry_last":
        instrument_newrelic()
        celery = init_celery()
    else:
        raise ValueError(newrelic_order)

    @celery.task(name="dummy_task", bind=True)
    def dummy_task(self, x, y):
        return x / y

    assert dummy_task.apply(kwargs={"x": 1, "y": 1}).wait() == 1
    assert celery_invocation(dummy_task, 1, 1)[0].wait() == 1


def test_traces_sampler_gets_task_info_in_sampling_context(
    init_celery, celery_invocation, DictionaryContaining  # noqa:N803
):
    traces_sampler = mock.Mock()
    celery = init_celery(traces_sampler=traces_sampler)

    @celery.task(name="dog_walk")
    def walk_dogs(x, y):
        dogs, route = x
        num_loops = y
        return dogs, route, num_loops

    _, args_kwargs = celery_invocation(
        walk_dogs, [["Maisey", "Charlie", "Bodhi", "Cory"], "Dog park round trip"], 1
    )

    traces_sampler.assert_any_call(
        # depending on the iteration of celery_invocation, the data might be
        # passed as args or as kwargs, so make this generic
        DictionaryContaining({"celery_job": dict(task="dog_walk", **args_kwargs)})
    )


def test_abstract_task(capture_events, celery, celery_invocation):
    events = capture_events()

    class AbstractTask(celery.Task):
        abstract = True

        def __call__(self, *args, **kwargs):
            try:
                return self.run(*args, **kwargs)
            except ZeroDivisionError:
                return None

    @celery.task(name="dummy_task", base=AbstractTask)
    def dummy_task(x, y):
        return x / y

    with start_transaction():
        celery_invocation(dummy_task, 1, 0)

    assert not events


def test_task_headers(celery):
    """
    Test that the headers set in the Celery Beat auto-instrumentation are passed to the celery signal handlers
    """
    sentry_crons_setup = {
        "sentry-monitor-slug": "some-slug",
        "sentry-monitor-config": {"some": "config"},
        "sentry-monitor-check-in-id": "123abc",
    }

    @celery.task(name="dummy_task", bind=True)
    def dummy_task(self, x, y):
        return _get_headers(self)

    # This is how the Celery Beat auto-instrumentation starts a task
    # in the monkey patched version of `apply_async`
    # in `sentry_sdk/integrations/celery.py::_wrap_apply_async()`
    result = dummy_task.apply_async(args=(1, 0), headers=sentry_crons_setup)
    assert result.get() == sentry_crons_setup


def test_baggage_propagation(init_celery):
    celery = init_celery(traces_sample_rate=1.0, release="abcdef")

    @celery.task(name="dummy_task", bind=True)
    def dummy_task(self, x, y):
        return _get_headers(self)

    with start_transaction() as transaction:
        result = dummy_task.apply_async(
            args=(1, 0),
            headers={"baggage": "custom=value"},
        ).get()

        assert sorted(result["baggage"].split(",")) == sorted(
            [
                "sentry-release=abcdef",
                "sentry-trace_id={}".format(transaction.trace_id),
                "sentry-environment=production",
                "sentry-sample_rate=1.0",
                "sentry-sampled=true",
                "custom=value",
            ]
        )


def test_sentry_propagate_traces_override(init_celery):
    """
    Test if the `sentry-propagate-traces` header given to `apply_async`
    overrides the `propagate_traces` parameter in the integration constructor.
    """
    celery = init_celery(
        propagate_traces=True, traces_sample_rate=1.0, release="abcdef"
    )

    @celery.task(name="dummy_task", bind=True)
    def dummy_task(self, message):
        trace_id = get_current_span().trace_id
        return trace_id

    with start_transaction() as transaction:
        transaction_trace_id = transaction.trace_id

        # should propagate trace
        task_transaction_id = dummy_task.apply_async(
            args=("some message",),
        ).get()
        assert transaction_trace_id == task_transaction_id

        # should NOT propagate trace (overrides `propagate_traces` parameter in integration constructor)
        task_transaction_id = dummy_task.apply_async(
            args=("another message",),
            headers={"sentry-propagate-traces": False},
        ).get()
        assert transaction_trace_id != task_transaction_id


def test_apply_async_manually_span(sentry_init):
    sentry_init(
        integrations=[CeleryIntegration()],
    )

    def dummy_function(*args, **kwargs):
        headers = kwargs.get("headers")
        assert "sentry-trace" in headers
        assert "baggage" in headers

    wrapped = _wrap_apply_async(dummy_function)
    wrapped(mock.MagicMock(), (), headers={})


def test_apply_async_from_beat_no_span(sentry_init):
    sentry_init(
        integrations=[CeleryIntegration()],
    )

    def dummy_function(*args, **kwargs):
        headers = kwargs.get("headers")
        assert "sentry-trace" not in headers
        assert "baggage" not in headers

    wrapped = _wrap_apply_async(dummy_function)
    wrapped(
        mock.MagicMock(),
        [
            "BEAT",
        ],
        headers={},
    )
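
    # The ["BEAT"] positional argument mimics the sentinel that the Celery
    # Beat auto-instrumentation passes to apply_async; it is what suppresses
    # the sentry-trace/baggage header injection asserted above.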


def test_apply_async_no_args(init_celery):
    celery = init_celery()

    @celery.task
    def example_task():
        return "success"

    try:
        result = example_task.apply_async(None, {})
    except TypeError:
        pytest.fail("Calling `apply_async` without arguments raised a TypeError")

    assert result.get() == "success"

sentry-python-1.39.2/tests/integrations/celery/test_celery_beat_crons.py
import datetime
import sys

import pytest

from sentry_sdk.integrations.celery import (
    _get_headers,
    _get_humanized_interval,
    _get_monitor_config,
    _patch_beat_apply_entry,
    crons_task_success,
    crons_task_failure,
    crons_task_retry,
)
from sentry_sdk.crons import MonitorStatus
from celery.schedules import crontab, schedule

try:
    from unittest import mock  # python 3.3 and above
    from unittest.mock import MagicMock
except ImportError:
    import mock  # python < 3.3
    from mock import MagicMock


def test_get_headers():
    fake_task = MagicMock()
    fake_task.request = {
        "bla": "blub",
        "foo": "bar",
    }

    assert _get_headers(fake_task) == {}

    fake_task.request.update(
        {
            "headers": {
                "bla": "blub",
            },
        }
    )

    assert _get_headers(fake_task) == {"bla": "blub"}

    fake_task.request.update(
        {
            "headers": {
                "headers": {
                    "tri": "blub",
                    "bar": "baz",
                },
                "bla": "blub",
            },
        }
    )

    assert _get_headers(fake_task) == {"bla": "blub", "tri": "blub", "bar": "baz"}


@pytest.mark.parametrize(
    "seconds, expected_tuple",
    [
        (0, (0, "second")),
        (1, (1, "second")),
        (0.00001, (0, "second")),
        (59, (59, "second")),
        (60, (1, "minute")),
        (100, (1, "minute")),
        (1000, (16, "minute")),
        (10000, (2, "hour")),
        (100000, (1, "day")),
        (100000000, (1157, "day")),
    ],
)
def test_get_humanized_interval(seconds, expected_tuple):
    assert _get_humanized_interval(seconds) == expected_tuple
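
    # e.g. 10000 // 3600 == 2, so the helper truncates toward the largest
    # fitting unit ("2 hour") rather than rounding.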


def test_crons_task_success():
    fake_task = MagicMock()
    fake_task.request = {
        "headers": {
            "sentry-monitor-slug": "test123",
            "sentry-monitor-check-in-id": "1234567890",
            "sentry-monitor-start-timestamp-s": 200.1,
            "sentry-monitor-config": {
                "schedule": {
                    "type": "interval",
                    "value": 3,
                    "unit": "day",
                },
                "timezone": "Europe/Vienna",
            },
            "sentry-monitor-some-future-key": "some-future-value",
        },
    }

    with mock.patch(
        "sentry_sdk.integrations.celery.capture_checkin"
    ) as mock_capture_checkin:
        with mock.patch(
            "sentry_sdk.integrations.celery._now_seconds_since_epoch",
            return_value=500.5,
        ):
            crons_task_success(fake_task)

            mock_capture_checkin.assert_called_once_with(
                monitor_slug="test123",
                monitor_config={
                    "schedule": {
                        "type": "interval",
                        "value": 3,
                        "unit": "day",
                    },
                    "timezone": "Europe/Vienna",
                },
                duration=300.4,
                check_in_id="1234567890",
                status=MonitorStatus.OK,
            )


def test_crons_task_failure():
    fake_task = MagicMock()
    fake_task.request = {
        "headers": {
            "sentry-monitor-slug": "test123",
            "sentry-monitor-check-in-id": "1234567890",
            "sentry-monitor-start-timestamp-s": 200.1,
            "sentry-monitor-config": {
                "schedule": {
                    "type": "interval",
                    "value": 3,
                    "unit": "day",
                },
                "timezone": "Europe/Vienna",
            },
            "sentry-monitor-some-future-key": "some-future-value",
        },
    }

    with mock.patch(
        "sentry_sdk.integrations.celery.capture_checkin"
    ) as mock_capture_checkin:
        with mock.patch(
            "sentry_sdk.integrations.celery._now_seconds_since_epoch",
            return_value=500.5,
        ):
            crons_task_failure(fake_task)

            mock_capture_checkin.assert_called_once_with(
                monitor_slug="test123",
                monitor_config={
                    "schedule": {
                        "type": "interval",
                        "value": 3,
                        "unit": "day",
                    },
                    "timezone": "Europe/Vienna",
                },
                duration=300.4,
                check_in_id="1234567890",
                status=MonitorStatus.ERROR,
            )


def test_crons_task_retry():
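    # Task retries are reported to Sentry Crons as ERROR check-ins.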
    fake_task = MagicMock()
    fake_task.request = {
        "headers": {
            "sentry-monitor-slug": "test123",
            "sentry-monitor-check-in-id": "1234567890",
            "sentry-monitor-start-timestamp-s": 200.1,
            "sentry-monitor-config": {
                "schedule": {
                    "type": "interval",
                    "value": 3,
                    "unit": "day",
                },
                "timezone": "Europe/Vienna",
            },
            "sentry-monitor-some-future-key": "some-future-value",
        },
    }

    with mock.patch(
        "sentry_sdk.integrations.celery.capture_checkin"
    ) as mock_capture_checkin:
        with mock.patch(
            "sentry_sdk.integrations.celery._now_seconds_since_epoch",
            return_value=500.5,
        ):
            crons_task_retry(fake_task)

            mock_capture_checkin.assert_called_once_with(
                monitor_slug="test123",
                monitor_config={
                    "schedule": {
                        "type": "interval",
                        "value": 3,
                        "unit": "day",
                    },
                    "timezone": "Europe/Vienna",
                },
                duration=300.4,
                check_in_id="1234567890",
                status=MonitorStatus.ERROR,
            )


def test_get_monitor_config_crontab():
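    # Timezone resolution order exercised below: the schedule's own tz first,
    # then the Celery app's timezone, and finally a UTC fallback.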
    app = MagicMock()
    app.timezone = "Europe/Vienna"

    # schedule with the default timezone
    celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10")

    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
    assert monitor_config == {
        "schedule": {
            "type": "crontab",
            "value": "*/10 12 3 * *",
        },
        "timezone": "UTC",  # the default because `crontab` does not know about the app
    }
    assert "unit" not in monitor_config["schedule"]

    # schedule with the timezone from the app
    celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10", app=app)

    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
    assert monitor_config == {
        "schedule": {
            "type": "crontab",
            "value": "*/10 12 3 * *",
        },
        "timezone": "Europe/Vienna",  # the timezone from the app
    }

    # schedule without a timezone, the celery integration will read the config from the app
    celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10")
    celery_schedule.tz = None

    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
    assert monitor_config == {
        "schedule": {
            "type": "crontab",
            "value": "*/10 12 3 * *",
        },
        "timezone": "Europe/Vienna",  # the timezone from the app
    }

    # schedule without a timezone, and an app without timezone, the celery integration will fall back to UTC
    app = MagicMock()
    app.timezone = None

    celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10")
    celery_schedule.tz = None
    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
    assert monitor_config == {
        "schedule": {
            "type": "crontab",
            "value": "*/10 12 3 * *",
        },
        "timezone": "UTC",  # default timezone from celery integration
    }


def test_get_monitor_config_seconds():
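    # Intervals shorter than one minute are not supported by Sentry Crons, so
    # the integration should warn and return an empty monitor config.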
    app = MagicMock()
    app.timezone = "Europe/Vienna"

    celery_schedule = schedule(run_every=3)  # seconds

    with mock.patch(
        "sentry_sdk.integrations.celery.logger.warning"
    ) as mock_logger_warning:
        monitor_config = _get_monitor_config(celery_schedule, app, "foo")
        mock_logger_warning.assert_called_with(
            "Intervals shorter than one minute are not supported by Sentry Crons. Monitor '%s' has an interval of %s seconds. Use the `exclude_beat_tasks` option in the celery integration to exclude it.",
            "foo",
            3,
        )
        assert monitor_config == {}


def test_get_monitor_config_minutes():
    app = MagicMock()
    app.timezone = "Europe/Vienna"

    # schedule with the default timezone
    celery_schedule = schedule(run_every=60)  # seconds

    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
    assert monitor_config == {
        "schedule": {
            "type": "interval",
            "value": 1,
            "unit": "minute",
        },
        "timezone": "UTC",
    }

    # schedule with the timezone from the app
    celery_schedule = schedule(run_every=60, app=app)  # seconds

    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
    assert monitor_config == {
        "schedule": {
            "type": "interval",
            "value": 1,
            "unit": "minute",
        },
        "timezone": "Europe/Vienna",  # the timezone from the app
    }

    # schedule without a timezone, the celery integration will read the config from the app
    celery_schedule = schedule(run_every=60)  # seconds
    celery_schedule.tz = None

    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
    assert monitor_config == {
        "schedule": {
            "type": "interval",
            "value": 1,
            "unit": "minute",
        },
        "timezone": "Europe/Vienna",  # the timezone from the app
    }

    # schedule without a timezone, and an app without timezone, the celery integration will fall back to UTC
    app = MagicMock()
    app.timezone = None

    celery_schedule = schedule(run_every=60)  # seconds
    celery_schedule.tz = None

    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
    assert monitor_config == {
        "schedule": {
            "type": "interval",
            "value": 1,
            "unit": "minute",
        },
        "timezone": "UTC",  # default timezone from celery integration
    }


def test_get_monitor_config_unknown():
    app = MagicMock()
    app.timezone = "Europe/Vienna"

    unknown_celery_schedule = MagicMock()
    monitor_config = _get_monitor_config(unknown_celery_schedule, app, "foo")
    assert monitor_config == {}


def test_get_monitor_config_default_timezone():
    app = MagicMock()
    app.timezone = None

    celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10")

    monitor_config = _get_monitor_config(celery_schedule, app, "dummy_monitor_name")

    assert monitor_config["timezone"] == "UTC"


def test_get_monitor_config_timezone_in_app_conf():
    app = MagicMock()
    app.timezone = "Asia/Karachi"

    celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10")
    celery_schedule.tz = None

    monitor_config = _get_monitor_config(celery_schedule, app, "dummy_monitor_name")

    assert monitor_config["timezone"] == "Asia/Karachi"


@pytest.mark.skipif(
    sys.version_info < (3, 0),
    reason="no datetime.timezone for Python 2, so skipping this test.",
)
def test_get_monitor_config_timezone_in_celery_schedule():
    app = MagicMock()
    app.timezone = "Asia/Karachi"

    panama_tz = datetime.timezone(datetime.timedelta(hours=-5), name="America/Panama")

    celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10")
    celery_schedule.tz = panama_tz

    monitor_config = _get_monitor_config(celery_schedule, app, "dummy_monitor_name")

    assert monitor_config["timezone"] == str(panama_tz)


@pytest.mark.parametrize(
    "task_name,exclude_beat_tasks,task_in_excluded_beat_tasks",
    [
        ["some_task_name", ["xxx", "some_task.*"], True],
        ["some_task_name", ["xxx", "some_other_task.*"], False],
    ],
)
def test_exclude_beat_tasks_option(
    task_name, exclude_beat_tasks, task_in_excluded_beat_tasks
):
    """
    Test excluding Celery Beat tasks from automatic instrumentation.
    """
    fake_apply_entry = MagicMock()

    fake_scheduler = MagicMock()
    fake_scheduler.apply_entry = fake_apply_entry

    fake_integration = MagicMock()
    fake_integration.exclude_beat_tasks = exclude_beat_tasks

    fake_schedule_entry = MagicMock()
    fake_schedule_entry.name = task_name

    fake_get_monitor_config = MagicMock()

    with mock.patch(
        "sentry_sdk.integrations.celery.Scheduler", fake_scheduler
    ) as Scheduler:  # noqa: N806
        with mock.patch(
            "sentry_sdk.integrations.celery.Hub.current.get_integration",
            return_value=fake_integration,
        ):
            with mock.patch(
                "sentry_sdk.integrations.celery._get_monitor_config",
                fake_get_monitor_config,
            ) as _get_monitor_config:
                # Mimic CeleryIntegration patching of Scheduler.apply_entry()
                _patch_beat_apply_entry()
                # Mimic Celery Beat calling a task from the Beat schedule
                Scheduler.apply_entry(fake_scheduler, fake_schedule_entry)

                if task_in_excluded_beat_tasks:
                    # Only the original Scheduler.apply_entry() is called, _get_monitor_config is NOT called.
                    assert fake_apply_entry.call_count == 1
                    _get_monitor_config.assert_not_called()

                else:
                    # The original Scheduler.apply_entry() is called, AND _get_monitor_config is called.
                    assert fake_apply_entry.call_count == 1
                    assert _get_monitor_config.call_count == 1
sentry-python-1.39.2/tests/integrations/chalice/000077500000000000000000000000001454744723200217255ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/chalice/__init__.py000066400000000000000000000000561454744723200240370ustar00rootroot00000000000000import pytest

pytest.importorskip("chalice")
sentry-python-1.39.2/tests/integrations/chalice/test_chalice.py000066400000000000000000000104451454744723200247320ustar00rootroot00000000000000import pytest
import time
from chalice import Chalice, BadRequestError
from chalice.local import LambdaContext, LocalGateway

from sentry_sdk import capture_message
from sentry_sdk.integrations.chalice import CHALICE_VERSION, ChaliceIntegration
from sentry_sdk.utils import parse_version

from pytest_chalice.handlers import RequestHandler


def _generate_lambda_context(self):
    # Monkeypatch of LocalGateway._generate_lambda_context
    # to mock the Lambda timeout.
    # type: () -> LambdaContext
    if self._config.lambda_timeout is None:
        timeout = 10 * 1000
    else:
        timeout = self._config.lambda_timeout * 1000
    return LambdaContext(
        function_name=self._config.function_name,
        memory_size=self._config.lambda_memory_size,
        max_runtime_ms=timeout,
    )


@pytest.fixture
def app(sentry_init):
    sentry_init(integrations=[ChaliceIntegration()])
    app = Chalice(app_name="sentry_chalice")

    @app.route("/boom")
    def boom():
        raise Exception("boom goes the dynamite!")

    @app.route("/context")
    def has_request():
        raise Exception("boom goes the dynamite!")

    @app.route("/badrequest")
    def badrequest():
        raise BadRequestError("bad-request")

    @app.route("/message")
    def hi():
        capture_message("hi")
        return {"status": "ok"}

    @app.route("/message/{message_id}")
    def hi_with_id(message_id):
        capture_message("hi again")
        return {"status": "ok"}

    LocalGateway._generate_lambda_context = _generate_lambda_context

    return app


@pytest.fixture
def lambda_context_args():
    return ["lambda_name", 256]


def test_exception_boom(app, client: RequestHandler) -> None:
    response = client.get("/boom")
    assert response.status_code == 500
    assert response.json == {
        "Code": "InternalServerError",
        "Message": "An internal server error occurred.",
    }


def test_has_request(app, capture_events, client: RequestHandler):
    events = capture_events()

    response = client.get("/context")
    assert response.status_code == 500

    (event,) = events
    assert event["level"] == "error"
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "Exception"


def test_scheduled_event(app, lambda_context_args):
    @app.schedule("rate(1 minute)")
    def every_hour(event):
        raise Exception("schedule event!")

    context = LambdaContext(
        *lambda_context_args, max_runtime_ms=10000, time_source=time
    )

    lambda_event = {
        "version": "0",
        "account": "120987654312",
        "region": "us-west-1",
        "detail": {},
        "detail-type": "Scheduled Event",
        "source": "aws.events",
        "time": "1970-01-01T00:00:00Z",
        "id": "event-id",
        "resources": ["arn:aws:events:us-west-1:120987654312:rule/my-schedule"],
    }
    with pytest.raises(Exception) as exc_info:
        every_hour(lambda_event, context=context)
    assert str(exc_info.value) == "schedule event!"


@pytest.mark.skipif(
    parse_version(CHALICE_VERSION) >= (1, 28),
    reason="different behavior based on chalice version",
)
def test_bad_request_old(client: RequestHandler) -> None:
    response = client.get("/badrequest")

    assert response.status_code == 400
    assert response.json == {
        "Code": "BadRequestError",
        "Message": "BadRequestError: bad-request",
    }


@pytest.mark.skipif(
    parse_version(CHALICE_VERSION) < (1, 28),
    reason="different behavior based on chalice version",
)
def test_bad_request(client: RequestHandler) -> None:
    response = client.get("/badrequest")

    assert response.status_code == 400
    assert response.json == {
        "Code": "BadRequestError",
        "Message": "bad-request",
    }


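# Chalice serves every route through a single Lambda handler, so the expected
# transaction name is "api_handler" (source "component") for all URLs below.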
@pytest.mark.parametrize(
    "url,expected_transaction,expected_source",
    [
        ("/message", "api_handler", "component"),
        ("/message/123456", "api_handler", "component"),
    ],
)
def test_transaction(
    app,
    client: RequestHandler,
    capture_events,
    url,
    expected_transaction,
    expected_source,
):
    events = capture_events()

    response = client.get(url)
    assert response.status_code == 200

    (event,) = events
    assert event["transaction"] == expected_transaction
    assert event["transaction_info"] == {"source": expected_source}
sentry-python-1.39.2/tests/integrations/clickhouse_driver/000077500000000000000000000000001454744723200240415ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/clickhouse_driver/__init__.py000066400000000000000000000000701454744723200261470ustar00rootroot00000000000000import pytest

pytest.importorskip("clickhouse_driver")
sentry-python-1.39.2/tests/integrations/clickhouse_driver/test_clickhouse_driver.py000066400000000000000000000670151454744723200311670ustar00rootroot00000000000000"""
Tests need a local ClickHouse instance running. The easiest way to start one is:
```sh
docker run -d -p 18123:8123 -p 9000:9000 --name clickhouse-test --ulimit nofile=262144:262144 --rm clickhouse/clickhouse-server
```
"""
import clickhouse_driver
from clickhouse_driver import Client, connect

from sentry_sdk import start_transaction, capture_message
from sentry_sdk.integrations.clickhouse_driver import ClickhouseDriverIntegration

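# clickhouse-driver versions older than 0.2.6 do not surface query params to
# the integration, so "db.params" is stripped from the SELECT expectations below.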
EXPECT_PARAMS_IN_SELECT = True
if clickhouse_driver.VERSION < (0, 2, 6):
    EXPECT_PARAMS_IN_SELECT = False


def test_clickhouse_client_breadcrumbs(sentry_init, capture_events) -> None:
    sentry_init(
        integrations=[ClickhouseDriverIntegration()],
        _experiments={"record_sql_params": True},
    )
    events = capture_events()

    client = Client("localhost")
    client.execute("DROP TABLE IF EXISTS test")
    client.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
    client.execute("INSERT INTO test (x) VALUES", [{"x": 100}])
    client.execute("INSERT INTO test (x) VALUES", [[170], [200]])

    res = client.execute("SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150})
    assert res[0][0] == 370

    capture_message("hi")

    (event,) = events

    expected_breadcrumbs = [
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "message": "DROP TABLE IF EXISTS test",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "message": "CREATE TABLE test (x Int32) ENGINE = Memory",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "message": "INSERT INTO test (x) VALUES",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "message": "INSERT INTO test (x) VALUES",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "message": "SELECT sum(x) FROM test WHERE x > 150",
            "type": "default",
        },
    ]

    if not EXPECT_PARAMS_IN_SELECT:
        expected_breadcrumbs[-1]["data"].pop("db.params", None)

    for crumb in event["breadcrumbs"]["values"]:
        crumb.pop("timestamp", None)

    assert event["breadcrumbs"]["values"] == expected_breadcrumbs


def test_clickhouse_client_breadcrumbs_with_pii(sentry_init, capture_events) -> None:
    sentry_init(
        integrations=[ClickhouseDriverIntegration()],
        send_default_pii=True,
        _experiments={"record_sql_params": True},
    )
    events = capture_events()

    client = Client("localhost")
    client.execute("DROP TABLE IF EXISTS test")
    client.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
    client.execute("INSERT INTO test (x) VALUES", [{"x": 100}])
    client.execute("INSERT INTO test (x) VALUES", [[170], [200]])

    res = client.execute("SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150})
    assert res[0][0] == 370

    capture_message("hi")

    (event,) = events

    expected_breadcrumbs = [
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.result": [],
            },
            "message": "DROP TABLE IF EXISTS test",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.result": [],
            },
            "message": "CREATE TABLE test (x Int32) ENGINE = Memory",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.params": [{"x": 100}],
            },
            "message": "INSERT INTO test (x) VALUES",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.params": [[170], [200]],
            },
            "message": "INSERT INTO test (x) VALUES",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.result": [[370]],
                "db.params": {"minv": 150},
            },
            "message": "SELECT sum(x) FROM test WHERE x > 150",
            "type": "default",
        },
    ]

    if not EXPECT_PARAMS_IN_SELECT:
        expected_breadcrumbs[-1]["data"].pop("db.params", None)

    for crumb in event["breadcrumbs"]["values"]:
        crumb.pop("timestamp", None)

    assert event["breadcrumbs"]["values"] == expected_breadcrumbs


def test_clickhouse_client_spans(
    sentry_init, capture_events, capture_envelopes
) -> None:
    sentry_init(
        integrations=[ClickhouseDriverIntegration()],
        _experiments={"record_sql_params": True},
        traces_sample_rate=1.0,
    )
    events = capture_events()

    transaction_trace_id = None
    transaction_span_id = None

    with start_transaction(name="test_clickhouse_transaction") as transaction:
        transaction_trace_id = transaction.trace_id
        transaction_span_id = transaction.span_id

        client = Client("localhost")
        client.execute("DROP TABLE IF EXISTS test")
        client.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
        client.execute("INSERT INTO test (x) VALUES", [{"x": 100}])
        client.execute("INSERT INTO test (x) VALUES", [[170], [200]])

        res = client.execute(
            "SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150}
        )
        assert res[0][0] == 370

    (event,) = events

    expected_spans = [
        {
            "op": "db",
            "description": "DROP TABLE IF EXISTS test",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "description": "CREATE TABLE test (x Int32) ENGINE = Memory",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "description": "INSERT INTO test (x) VALUES",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "description": "INSERT INTO test (x) VALUES",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "description": "SELECT sum(x) FROM test WHERE x > 150",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
    ]

    if not EXPECT_PARAMS_IN_SELECT:
        expected_spans[-1]["data"].pop("db.params", None)

    for span in event["spans"]:
        span.pop("span_id", None)
        span.pop("start_timestamp", None)
        span.pop("timestamp", None)

    assert event["spans"] == expected_spans


def test_clickhouse_client_spans_with_pii(
    sentry_init, capture_events, capture_envelopes
) -> None:
    sentry_init(
        integrations=[ClickhouseDriverIntegration()],
        _experiments={"record_sql_params": True},
        traces_sample_rate=1.0,
        send_default_pii=True,
    )
    events = capture_events()

    transaction_trace_id = None
    transaction_span_id = None

    with start_transaction(name="test_clickhouse_transaction") as transaction:
        transaction_trace_id = transaction.trace_id
        transaction_span_id = transaction.span_id

        client = Client("localhost")
        client.execute("DROP TABLE IF EXISTS test")
        client.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
        client.execute("INSERT INTO test (x) VALUES", [{"x": 100}])
        client.execute("INSERT INTO test (x) VALUES", [[170], [200]])

        res = client.execute(
            "SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150}
        )
        assert res[0][0] == 370

    (event,) = events

    expected_spans = [
        {
            "op": "db",
            "description": "DROP TABLE IF EXISTS test",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.result": [],
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "description": "CREATE TABLE test (x Int32) ENGINE = Memory",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.result": [],
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "description": "INSERT INTO test (x) VALUES",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.params": [{"x": 100}],
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "description": "INSERT INTO test (x) VALUES",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.params": [[170], [200]],
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "description": "SELECT sum(x) FROM test WHERE x > 150",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.params": {"minv": 150},
                "db.result": [[370]],
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
    ]

    if not EXPECT_PARAMS_IN_SELECT:
        expected_spans[-1]["data"].pop("db.params", None)

    for span in event["spans"]:
        span.pop("span_id", None)
        span.pop("start_timestamp", None)
        span.pop("timestamp", None)

    assert event["spans"] == expected_spans


def test_clickhouse_dbapi_breadcrumbs(sentry_init, capture_events) -> None:
    sentry_init(
        integrations=[ClickhouseDriverIntegration()],
    )
    events = capture_events()

    conn = connect("clickhouse://localhost")
    cursor = conn.cursor()
    cursor.execute("DROP TABLE IF EXISTS test")
    cursor.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
    cursor.executemany("INSERT INTO test (x) VALUES", [{"x": 100}])
    cursor.executemany("INSERT INTO test (x) VALUES", [[170], [200]])
    cursor.execute("SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150})
    res = cursor.fetchall()

    assert res[0][0] == 370

    capture_message("hi")

    (event,) = events

    expected_breadcrumbs = [
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "message": "DROP TABLE IF EXISTS test",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "message": "CREATE TABLE test (x Int32) ENGINE = Memory",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "message": "INSERT INTO test (x) VALUES",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "message": "INSERT INTO test (x) VALUES",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "message": "SELECT sum(x) FROM test WHERE x > 150",
            "type": "default",
        },
    ]

    if not EXPECT_PARAMS_IN_SELECT:
        expected_breadcrumbs[-1]["data"].pop("db.params", None)

    for crumb in event["breadcrumbs"]["values"]:
        crumb.pop("timestamp", None)

    assert event["breadcrumbs"]["values"] == expected_breadcrumbs


def test_clickhouse_dbapi_breadcrumbs_with_pii(sentry_init, capture_events) -> None:
    sentry_init(
        integrations=[ClickhouseDriverIntegration()],
        send_default_pii=True,
    )
    events = capture_events()

    conn = connect("clickhouse://localhost")
    cursor = conn.cursor()
    cursor.execute("DROP TABLE IF EXISTS test")
    cursor.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
    cursor.executemany("INSERT INTO test (x) VALUES", [{"x": 100}])
    cursor.executemany("INSERT INTO test (x) VALUES", [[170], [200]])
    cursor.execute("SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150})
    res = cursor.fetchall()

    assert res[0][0] == 370

    capture_message("hi")

    (event,) = events

    expected_breadcrumbs = [
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.result": [[], []],
            },
            "message": "DROP TABLE IF EXISTS test",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.result": [[], []],
            },
            "message": "CREATE TABLE test (x Int32) ENGINE = Memory",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.params": [{"x": 100}],
            },
            "message": "INSERT INTO test (x) VALUES",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.params": [[170], [200]],
            },
            "message": "INSERT INTO test (x) VALUES",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.params": {"minv": 150},
                "db.result": [[["370"]], [["'sum(x)'", "'Int64'"]]],
            },
            "message": "SELECT sum(x) FROM test WHERE x > 150",
            "type": "default",
        },
    ]

    if not EXPECT_PARAMS_IN_SELECT:
        expected_breadcrumbs[-1]["data"].pop("db.params", None)

    for crumb in event["breadcrumbs"]["values"]:
        crumb.pop("timestamp", None)

    assert event["breadcrumbs"]["values"] == expected_breadcrumbs


def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) -> None:
    sentry_init(
        integrations=[ClickhouseDriverIntegration()],
        _experiments={"record_sql_params": True},
        traces_sample_rate=1.0,
    )
    events = capture_events()

    transaction_trace_id = None
    transaction_span_id = None

    with start_transaction(name="test_clickhouse_transaction") as transaction:
        transaction_trace_id = transaction.trace_id
        transaction_span_id = transaction.span_id

        conn = connect("clickhouse://localhost")
        cursor = conn.cursor()
        cursor.execute("DROP TABLE IF EXISTS test")
        cursor.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
        cursor.executemany("INSERT INTO test (x) VALUES", [{"x": 100}])
        cursor.executemany("INSERT INTO test (x) VALUES", [[170], [200]])
        cursor.execute("SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150})
        res = cursor.fetchall()

        assert res[0][0] == 370

    (event,) = events

    expected_spans = [
        {
            "op": "db",
            "description": "DROP TABLE IF EXISTS test",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "description": "CREATE TABLE test (x Int32) ENGINE = Memory",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "description": "INSERT INTO test (x) VALUES",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "description": "INSERT INTO test (x) VALUES",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "description": "SELECT sum(x) FROM test WHERE x > 150",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
    ]

    if not EXPECT_PARAMS_IN_SELECT:
        expected_spans[-1]["data"].pop("db.params", None)

    for span in event["spans"]:
        span.pop("span_id", None)
        span.pop("start_timestamp", None)
        span.pop("timestamp", None)

    assert event["spans"] == expected_spans


def test_clickhouse_dbapi_spans_with_pii(
    sentry_init, capture_events, capture_envelopes
) -> None:
    sentry_init(
        integrations=[ClickhouseDriverIntegration()],
        _experiments={"record_sql_params": True},
        traces_sample_rate=1.0,
        send_default_pii=True,
    )
    events = capture_events()

    transaction_trace_id = None
    transaction_span_id = None

    with start_transaction(name="test_clickhouse_transaction") as transaction:
        transaction_trace_id = transaction.trace_id
        transaction_span_id = transaction.span_id

        conn = connect("clickhouse://localhost")
        cursor = conn.cursor()
        cursor.execute("DROP TABLE IF EXISTS test")
        cursor.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
        cursor.executemany("INSERT INTO test (x) VALUES", [{"x": 100}])
        cursor.executemany("INSERT INTO test (x) VALUES", [[170], [200]])
        cursor.execute("SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150})
        res = cursor.fetchall()

        assert res[0][0] == 370

    (event,) = events

    expected_spans = [
        {
            "op": "db",
            "description": "DROP TABLE IF EXISTS test",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.result": [[], []],
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "description": "CREATE TABLE test (x Int32) ENGINE = Memory",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.result": [[], []],
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "description": "INSERT INTO test (x) VALUES",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.params": [{"x": 100}],
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "description": "INSERT INTO test (x) VALUES",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.params": [[170], [200]],
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "description": "SELECT sum(x) FROM test WHERE x > 150",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.params": {"minv": 150},
                "db.result": [[[370]], [["sum(x)", "Int64"]]],
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
    ]

    if not EXPECT_PARAMS_IN_SELECT:
        expected_spans[-1]["data"].pop("db.params", None)

    for span in event["spans"]:
        span.pop("span_id", None)
        span.pop("start_timestamp", None)
        span.pop("timestamp", None)

    assert event["spans"] == expected_spans
sentry-python-1.39.2/tests/integrations/cloud_resource_context/000077500000000000000000000000001454744723200251165ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/cloud_resource_context/__init__.py000066400000000000000000000000001454744723200272150ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/cloud_resource_context/test_cloud_resource_context.py000066400000000000000000000310441454744723200333120ustar00rootroot00000000000000import json

import pytest

try:
    from unittest import mock  # python 3.3 and above
    from unittest.mock import MagicMock
except ImportError:
    import mock  # python < 3.3
    from mock import MagicMock

from sentry_sdk.integrations.cloud_resource_context import (
    CLOUD_PLATFORM,
    CLOUD_PROVIDER,
)

AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD = {
    "accountId": "298817902971",
    "architecture": "x86_64",
    "availabilityZone": "us-east-1b",
    "billingProducts": None,
    "devpayProductCodes": None,
    "marketplaceProductCodes": None,
    "imageId": "ami-00874d747dde344fa",
    "instanceId": "i-07d3301297fe0a55a",
    "instanceType": "t2.small",
    "kernelId": None,
    "pendingTime": "2023-02-08T07:54:05Z",
    "privateIp": "171.131.65.115",
    "ramdiskId": None,
    "region": "us-east-1",
    "version": "2017-09-30",
}

try:
    # Python 3
    AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD_BYTES = bytes(
        json.dumps(AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD), "utf-8"
    )
except TypeError:
    # Python 2
    AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD_BYTES = bytes(
        json.dumps(AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD)
    ).encode("utf-8")

GCP_GCE_EXAMPLE_METADATA_PAYLOAD = {
    "instance": {
        "attributes": {},
        "cpuPlatform": "Intel Broadwell",
        "description": "",
        "disks": [
            {
                "deviceName": "tests-cloud-contexts-in-python-sdk",
                "index": 0,
                "interface": "SCSI",
                "mode": "READ_WRITE",
                "type": "PERSISTENT-BALANCED",
            }
        ],
        "guestAttributes": {},
        "hostname": "tests-cloud-contexts-in-python-sdk.c.client-infra-internal.internal",
        "id": 1535324527892303790,
        "image": "projects/debian-cloud/global/images/debian-11-bullseye-v20221206",
        "licenses": [{"id": "2853224013536823851"}],
        "machineType": "projects/542054129475/machineTypes/e2-medium",
        "maintenanceEvent": "NONE",
        "name": "tests-cloud-contexts-in-python-sdk",
        "networkInterfaces": [
            {
                "accessConfigs": [
                    {"externalIp": "134.30.53.15", "type": "ONE_TO_ONE_NAT"}
                ],
                "dnsServers": ["169.254.169.254"],
                "forwardedIps": [],
                "gateway": "10.188.0.1",
                "ip": "10.188.0.3",
                "ipAliases": [],
                "mac": "42:01:0c:7c:00:13",
                "mtu": 1460,
                "network": "projects/544954029479/networks/default",
                "subnetmask": "255.255.240.0",
                "targetInstanceIps": [],
            }
        ],
        "preempted": "FALSE",
        "remainingCpuTime": -1,
        "scheduling": {
            "automaticRestart": "TRUE",
            "onHostMaintenance": "MIGRATE",
            "preemptible": "FALSE",
        },
        "serviceAccounts": {},
        "tags": ["http-server", "https-server"],
        "virtualClock": {"driftToken": "0"},
        "zone": "projects/142954069479/zones/northamerica-northeast2-b",
    },
    "oslogin": {"authenticate": {"sessions": {}}},
    "project": {
        "attributes": {},
        "numericProjectId": 204954049439,
        "projectId": "my-project-internal",
    },
}

try:
    # Python 3
    GCP_GCE_EXAMPLE_METADATA_PAYLOAD_BYTES = bytes(
        json.dumps(GCP_GCE_EXAMPLE_METADATA_PAYLOAD), "utf-8"
    )
except TypeError:
    # Python 2
    GCP_GCE_EXAMPLE_METADATA_PAYLOAD_BYTES = bytes(
        json.dumps(GCP_GCE_EXAMPLE_METADATA_PAYLOAD)
    ).encode("utf-8")


def test_is_aws_http_error():
    from sentry_sdk.integrations.cloud_resource_context import (
        CloudResourceContextIntegration,
    )

    response = MagicMock()
    response.status = 405

    CloudResourceContextIntegration.http = MagicMock()
    CloudResourceContextIntegration.http.request = MagicMock(return_value=response)

    assert CloudResourceContextIntegration._is_aws() is False
    assert CloudResourceContextIntegration.aws_token == ""


def test_is_aws_ok():
    from sentry_sdk.integrations.cloud_resource_context import (
        CloudResourceContextIntegration,
    )

    response = MagicMock()
    response.status = 200
    response.data = b"something"
    CloudResourceContextIntegration.http = MagicMock()
    CloudResourceContextIntegration.http.request = MagicMock(return_value=response)

    assert CloudResourceContextIntegration._is_aws() is True
    assert CloudResourceContextIntegration.aws_token == "something"

    CloudResourceContextIntegration.http.request = MagicMock(
        side_effect=Exception("Test")
    )
    assert CloudResourceContextIntegration._is_aws() is False


def test_is_aws_exception():
    from sentry_sdk.integrations.cloud_resource_context import (
        CloudResourceContextIntegration,
    )

    CloudResourceContextIntegration.http = MagicMock()
    CloudResourceContextIntegration.http.request = MagicMock(
        side_effect=Exception("Test")
    )

    assert CloudResourceContextIntegration._is_aws() is False


@pytest.mark.parametrize(
    "http_status, response_data, expected_context",
    [
        [
            405,
            b"",
            {
                "cloud.provider": CLOUD_PROVIDER.AWS,
                "cloud.platform": CLOUD_PLATFORM.AWS_EC2,
            },
        ],
        [
            200,
            b"something-but-not-json",
            {
                "cloud.provider": CLOUD_PROVIDER.AWS,
                "cloud.platform": CLOUD_PLATFORM.AWS_EC2,
            },
        ],
        [
            200,
            AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD_BYTES,
            {
                "cloud.provider": "aws",
                "cloud.platform": "aws_ec2",
                "cloud.account.id": "298817902971",
                "cloud.availability_zone": "us-east-1b",
                "cloud.region": "us-east-1",
                "host.id": "i-07d3301297fe0a55a",
                "host.type": "t2.small",
            },
        ],
    ],
)
def test_get_aws_context(http_status, response_data, expected_context):
    from sentry_sdk.integrations.cloud_resource_context import (
        CloudResourceContextIntegration,
    )

    response = MagicMock()
    response.status = http_status
    response.data = response_data

    CloudResourceContextIntegration.http = MagicMock()
    CloudResourceContextIntegration.http.request = MagicMock(return_value=response)

    assert CloudResourceContextIntegration._get_aws_context() == expected_context


def test_is_gcp_http_error():
    from sentry_sdk.integrations.cloud_resource_context import (
        CloudResourceContextIntegration,
    )

    response = MagicMock()
    response.status = 405
    response.data = b'{"some": "json"}'
    CloudResourceContextIntegration.http = MagicMock()
    CloudResourceContextIntegration.http.request = MagicMock(return_value=response)

    assert CloudResourceContextIntegration._is_gcp() is False
    assert CloudResourceContextIntegration.gcp_metadata is None


def test_is_gcp_ok():
    from sentry_sdk.integrations.cloud_resource_context import (
        CloudResourceContextIntegration,
    )

    response = MagicMock()
    response.status = 200
    response.data = b'{"some": "json"}'
    CloudResourceContextIntegration.http = MagicMock()
    CloudResourceContextIntegration.http.request = MagicMock(return_value=response)

    assert CloudResourceContextIntegration._is_gcp() is True
    assert CloudResourceContextIntegration.gcp_metadata == {"some": "json"}


def test_is_gcp_exception():
    from sentry_sdk.integrations.cloud_resource_context import (
        CloudResourceContextIntegration,
    )

    CloudResourceContextIntegration.http = MagicMock()
    CloudResourceContextIntegration.http.request = MagicMock(
        side_effect=Exception("Test")
    )
    assert CloudResourceContextIntegration._is_gcp() is False


@pytest.mark.parametrize(
    "http_status, response_data, expected_context",
    [
        [
            405,
            None,
            {
                "cloud.provider": CLOUD_PROVIDER.GCP,
                "cloud.platform": CLOUD_PLATFORM.GCP_COMPUTE_ENGINE,
            },
        ],
        [
            200,
            b"something-but-not-json",
            {
                "cloud.provider": CLOUD_PROVIDER.GCP,
                "cloud.platform": CLOUD_PLATFORM.GCP_COMPUTE_ENGINE,
            },
        ],
        [
            200,
            GCP_GCE_EXAMPLE_METADATA_PAYLOAD_BYTES,
            {
                "cloud.provider": "gcp",
                "cloud.platform": "gcp_compute_engine",
                "cloud.account.id": "my-project-internal",
                "cloud.availability_zone": "northamerica-northeast2-b",
                "host.id": 1535324527892303790,
            },
        ],
    ],
)
def test_get_gcp_context(http_status, response_data, expected_context):
    from sentry_sdk.integrations.cloud_resource_context import (
        CloudResourceContextIntegration,
    )

    CloudResourceContextIntegration.gcp_metadata = None

    response = MagicMock()
    response.status = http_status
    response.data = response_data

    CloudResourceContextIntegration.http = MagicMock()
    CloudResourceContextIntegration.http.request = MagicMock(return_value=response)

    assert CloudResourceContextIntegration._get_gcp_context() == expected_context


@pytest.mark.parametrize(
    "is_aws, is_gcp, expected_provider",
    [
        [False, False, ""],
        [False, True, CLOUD_PROVIDER.GCP],
        [True, False, CLOUD_PROVIDER.AWS],
        [True, True, CLOUD_PROVIDER.AWS],
    ],
)
def test_get_cloud_provider(is_aws, is_gcp, expected_provider):
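    # If both AWS and GCP detection succeed, AWS is expected to take precedence.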
    from sentry_sdk.integrations.cloud_resource_context import (
        CloudResourceContextIntegration,
    )

    CloudResourceContextIntegration._is_aws = MagicMock(return_value=is_aws)
    CloudResourceContextIntegration._is_gcp = MagicMock(return_value=is_gcp)

    assert CloudResourceContextIntegration._get_cloud_provider() == expected_provider


@pytest.mark.parametrize(
    "cloud_provider",
    [
        CLOUD_PROVIDER.ALIBABA,
        CLOUD_PROVIDER.AZURE,
        CLOUD_PROVIDER.IBM,
        CLOUD_PROVIDER.TENCENT,
    ],
)
def test_get_cloud_resource_context_unsupported_providers(cloud_provider):
    from sentry_sdk.integrations.cloud_resource_context import (
        CloudResourceContextIntegration,
    )

    CloudResourceContextIntegration._get_cloud_provider = MagicMock(
        return_value=cloud_provider
    )

    assert CloudResourceContextIntegration._get_cloud_resource_context() == {}


@pytest.mark.parametrize(
    "cloud_provider",
    [
        CLOUD_PROVIDER.AWS,
        CLOUD_PROVIDER.GCP,
    ],
)
def test_get_cloud_resource_context_supported_providers(cloud_provider):
    from sentry_sdk.integrations.cloud_resource_context import (
        CloudResourceContextIntegration,
    )

    CloudResourceContextIntegration._get_cloud_provider = MagicMock(
        return_value=cloud_provider
    )

    assert CloudResourceContextIntegration._get_cloud_resource_context() != {}


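# setup_once() should set the "cloud_resource" context only when one was
# resolved, and only emit a warning (without setting context) for providers
# the integration does not support.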
@pytest.mark.parametrize(
    "cloud_provider, cloud_resource_context, warning_called, set_context_called",
    [
        ["", {}, False, False],
        [CLOUD_PROVIDER.AWS, {}, False, False],
        [CLOUD_PROVIDER.GCP, {}, False, False],
        [CLOUD_PROVIDER.AZURE, {}, True, False],
        [CLOUD_PROVIDER.ALIBABA, {}, True, False],
        [CLOUD_PROVIDER.IBM, {}, True, False],
        [CLOUD_PROVIDER.TENCENT, {}, True, False],
        ["", {"some": "context"}, False, True],
        [CLOUD_PROVIDER.AWS, {"some": "context"}, False, True],
        [CLOUD_PROVIDER.GCP, {"some": "context"}, False, True],
    ],
)
def test_setup_once(
    cloud_provider, cloud_resource_context, warning_called, set_context_called
):
    from sentry_sdk.integrations.cloud_resource_context import (
        CloudResourceContextIntegration,
    )

    CloudResourceContextIntegration.cloud_provider = cloud_provider
    CloudResourceContextIntegration._get_cloud_resource_context = MagicMock(
        return_value=cloud_resource_context
    )

    with mock.patch(
        "sentry_sdk.integrations.cloud_resource_context.set_context"
    ) as fake_set_context:
        with mock.patch(
            "sentry_sdk.integrations.cloud_resource_context.logger.warning"
        ) as fake_warning:
            CloudResourceContextIntegration.setup_once()

            if set_context_called:
                fake_set_context.assert_called_once_with(
                    "cloud_resource", cloud_resource_context
                )
            else:
                fake_set_context.assert_not_called()

            if warning_called:
                assert fake_warning.call_count == 1
            else:
                fake_warning.assert_not_called()
sentry-python-1.39.2/tests/integrations/conftest.py000066400000000000000000000010601454744723200225310ustar00rootroot00000000000000import pytest
import sentry_sdk


@pytest.fixture
def capture_exceptions(monkeypatch):
    def inner():
        errors = set()
        old_capture_event = sentry_sdk.Hub.capture_event

        def capture_event(self, event, hint=None):
            if hint:
                if "exc_info" in hint:
                    error = hint["exc_info"][1]
                    errors.add(error)
            return old_capture_event(self, event, hint=hint)

        monkeypatch.setattr(sentry_sdk.Hub, "capture_event", capture_event)
        return errors

    return inner
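
# Usage sketch for the fixture above (``test_example`` is hypothetical and not
# part of this suite): the returned set collects the raw exception objects.
#
#     def test_example(sentry_init, capture_exceptions):
#         sentry_init()
#         errors = capture_exceptions()
#         try:
#             1 / 0
#         except Exception:
#             sentry_sdk.capture_exception()
#         (error,) = errors
#         assert isinstance(error, ZeroDivisionError)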
sentry-python-1.39.2/tests/integrations/django/000077500000000000000000000000001454744723200215775ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/django/__init__.py000066400000000000000000000000551454744723200237100ustar00rootroot00000000000000import pytest

pytest.importorskip("django")
sentry-python-1.39.2/tests/integrations/django/asgi/000077500000000000000000000000001454744723200225225ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/django/asgi/__init__.py000066400000000000000000000000571454744723200246350ustar00rootroot00000000000000import pytest

pytest.importorskip("channels")
sentry-python-1.39.2/tests/integrations/django/asgi/image.png000066400000000000000000000004641454744723200243160ustar00rootroot00000000000000PNG


IHDR
	IDATWcHsWT,pƃϟ+e+FQ0}^-//CfR3
VWhgV׵d2ܺlzjVB!H#SM/;'15e0H6$[72iȃM32bXd;PS1KJ04`H2fÌ5b.rfO_`4;PלfŘ
M
fh@ 4x8LIENDB`sentry-python-1.39.2/tests/integrations/django/asgi/test_asgi.py000066400000000000000000000372671454744723200250750ustar00rootroot00000000000000import base64
import json
import os

import django
import pytest
from channels.testing import HttpCommunicator
from sentry_sdk import capture_message
from sentry_sdk.integrations.django import DjangoIntegration
from tests.integrations.django.myapp.asgi import channels_application

try:
    from django.urls import reverse
except ImportError:
    from django.core.urlresolvers import reverse

try:
    from unittest import mock  # python 3.3 and above
except ImportError:
    import mock  # python < 3.3

APPS = [channels_application]
if django.VERSION >= (3, 0):
    from tests.integrations.django.myapp.asgi import asgi_application

    APPS += [asgi_application]


@pytest.mark.parametrize("application", APPS)
@pytest.mark.asyncio
@pytest.mark.forked
async def test_basic(sentry_init, capture_events, application):
    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)

    events = capture_events()

    comm = HttpCommunicator(application, "GET", "/view-exc?test=query")
    response = await comm.get_response()
    assert response["status"] == 500

    (event,) = events

    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"

    # Test that the ASGI middleware got set up correctly. Right now this needs
    # to be installed manually (see myapp/asgi.py)
    assert event["transaction"] == "/view-exc"
    assert event["request"] == {
        "cookies": {},
        "headers": {},
        "method": "GET",
        "query_string": "test=query",
        "url": "/view-exc",
    }

    capture_message("hi")
    event = events[-1]
    assert "request" not in event


@pytest.mark.parametrize("application", APPS)
@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_async_views(sentry_init, capture_events, application):
    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)

    events = capture_events()

    comm = HttpCommunicator(application, "GET", "/async_message")
    response = await comm.get_response()
    assert response["status"] == 200

    (event,) = events

    assert event["transaction"] == "/async_message"
    assert event["request"] == {
        "cookies": {},
        "headers": {},
        "method": "GET",
        "query_string": None,
        "url": "/async_message",
    }


@pytest.mark.parametrize("application", APPS)
@pytest.mark.parametrize("endpoint", ["/sync/thread_ids", "/async/thread_ids"])
@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_active_thread_id(sentry_init, capture_envelopes, endpoint, application):
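    # Zero out the sample threshold so the profiler does not discard the short
    # profile produced by this single request.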
    with mock.patch("sentry_sdk.profiler.PROFILE_MINIMUM_SAMPLES", 0):
        sentry_init(
            integrations=[DjangoIntegration()],
            traces_sample_rate=1.0,
            _experiments={"profiles_sample_rate": 1.0},
        )

        envelopes = capture_envelopes()

        comm = HttpCommunicator(application, "GET", endpoint)
        response = await comm.get_response()
        assert response["status"] == 200, response["body"]

        await comm.wait()

        data = json.loads(response["body"])

        envelopes = list(envelopes)
        assert len(envelopes) == 1

        profiles = [item for item in envelopes[0].items if item.type == "profile"]
        assert len(profiles) == 1

        for profile in profiles:
            transactions = profile.payload.json["transactions"]
            assert len(transactions) == 1
            assert str(data["active"]) == transactions[0]["active_thread_id"]


@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_async_views_concurrent_execution(sentry_init, settings):
    import asyncio
    import time

    settings.MIDDLEWARE = []
    asgi_application.load_middleware(is_async=True)

    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)

    comm = HttpCommunicator(asgi_application, "GET", "/my_async_view")
    comm2 = HttpCommunicator(asgi_application, "GET", "/my_async_view")

    loop = asyncio.get_event_loop()

    start = time.time()

    r1 = loop.create_task(comm.get_response(timeout=5))
    r2 = loop.create_task(comm2.get_response(timeout=5))

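    # asyncio.wait returns the (done, pending) sets, so the unpacking order of
    # the two results is arbitrary; both are asserted identically below.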
    (resp1, resp2), _ = await asyncio.wait({r1, r2})

    end = time.time()

    assert resp1.result()["status"] == 200
    assert resp2.result()["status"] == 200

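    # my_async_view sleeps for one second, so finishing both requests in under
    # 1.5 seconds proves they ran concurrently rather than serially.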
    assert end - start < 1.5


@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_async_middleware_that_is_function_concurrent_execution(
    sentry_init, settings
):
    import asyncio
    import time

    settings.MIDDLEWARE = [
        "tests.integrations.django.myapp.middleware.simple_middleware"
    ]
    asgi_application.load_middleware(is_async=True)

    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)

    comm = HttpCommunicator(asgi_application, "GET", "/my_async_view")
    comm2 = HttpCommunicator(asgi_application, "GET", "/my_async_view")

    loop = asyncio.get_event_loop()

    start = time.time()

    r1 = loop.create_task(comm.get_response(timeout=5))
    r2 = loop.create_task(comm2.get_response(timeout=5))

    (resp1, resp2), _ = await asyncio.wait({r1, r2})

    end = time.time()

    assert resp1.result()["status"] == 200
    assert resp2.result()["status"] == 200

    assert end - start < 1.5


@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_async_middleware_spans(
    sentry_init, render_span_tree, capture_events, settings
):
    settings.MIDDLEWARE = [
        "django.contrib.sessions.middleware.SessionMiddleware",
        "django.contrib.auth.middleware.AuthenticationMiddleware",
        "django.middleware.csrf.CsrfViewMiddleware",
        "tests.integrations.django.myapp.settings.TestMiddleware",
    ]
    asgi_application.load_middleware(is_async=True)

    sentry_init(
        integrations=[DjangoIntegration(middleware_spans=True)],
        traces_sample_rate=1.0,
        _experiments={"record_sql_params": True},
    )

    events = capture_events()

    comm = HttpCommunicator(asgi_application, "GET", "/async_message")
    response = await comm.get_response()
    assert response["status"] == 200

    await comm.wait()

    message, transaction = events

    assert (
        render_span_tree(transaction)
        == """\
- op="http.server": description=null
  - op="event.django": description="django.db.reset_queries"
  - op="event.django": description="django.db.close_old_connections"
  - op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.__acall__"
    - op="middleware.django": description="django.contrib.auth.middleware.AuthenticationMiddleware.__acall__"
      - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.__acall__"
        - op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.__acall__"
          - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
          - op="view.render": description="async_message"
  - op="event.django": description="django.db.close_old_connections"
  - op="event.django": description="django.core.cache.close_caches"
  - op="event.django": description="django.core.handlers.base.reset_urlconf\""""
    )


@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_has_trace_if_performance_enabled(sentry_init, capture_events):
    sentry_init(integrations=[DjangoIntegration()], traces_sample_rate=1.0)

    events = capture_events()

    comm = HttpCommunicator(asgi_application, "GET", "/view-exc-with-msg")
    response = await comm.get_response()
    assert response["status"] == 500

    # ASGI Django does not create transactions per default,
    # so we do not have a transaction_event here.
    (msg_event, error_event) = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
    )


@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_has_trace_if_performance_disabled(sentry_init, capture_events):
    sentry_init(integrations=[DjangoIntegration()])

    events = capture_events()

    comm = HttpCommunicator(asgi_application, "GET", "/view-exc-with-msg")
    response = await comm.get_response()
    assert response["status"] == 500

    (msg_event, error_event) = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]
    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
    )


@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_trace_from_headers_if_performance_enabled(sentry_init, capture_events):
    sentry_init(integrations=[DjangoIntegration()], traces_sample_rate=1.0)

    events = capture_events()

    trace_id = "582b43a4192642f0b136d5159a501701"
    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)

    comm = HttpCommunicator(
        asgi_application,
        "GET",
        "/view-exc-with-msg",
        headers=[(b"sentry-trace", sentry_trace_header.encode())],
    )
    response = await comm.get_response()
    assert response["status"] == 500

    # ASGI Django does not create transactions per default,
    # so we do not have a transaction_event here.
    (msg_event, error_event) = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
    assert error_event["contexts"]["trace"]["trace_id"] == trace_id


@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_trace_from_headers_if_performance_disabled(sentry_init, capture_events):
    sentry_init(integrations=[DjangoIntegration()])

    events = capture_events()

    trace_id = "582b43a4192642f0b136d5159a501701"
    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)

    comm = HttpCommunicator(
        asgi_application,
        "GET",
        "/view-exc-with-msg",
        headers=[(b"sentry-trace", sentry_trace_header.encode())],
    )
    response = await comm.get_response()
    assert response["status"] == 500

    (msg_event, error_event) = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
    assert error_event["contexts"]["trace"]["trace_id"] == trace_id


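# The multipart body constructed below embeds image.png (PICTURE)
# base64-encoded between fixed boundary markers; the matching Content-Length
# header value is computed from the final encoded bytes.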
PICTURE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "image.png")
BODY_FORM = """--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="username"\r\n\r\nJane\r\n--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="password"\r\n\r\nhello123\r\n--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="photo"; filename="image.png"\r\nContent-Type: image/png\r\nContent-Transfer-Encoding: base64\r\n\r\n{{image_data}}\r\n--fd721ef49ea403a6--\r\n""".replace(
    "{{image_data}}", base64.b64encode(open(PICTURE, "rb").read()).decode("utf-8")
).encode(
    "utf-8"
)
BODY_FORM_CONTENT_LENGTH = str(len(BODY_FORM)).encode("utf-8")


@pytest.mark.parametrize("application", APPS)
@pytest.mark.parametrize(
    "send_default_pii,method,headers,url_name,body,expected_data",
    [
        (
            True,
            "POST",
            [(b"content-type", b"text/plain")],
            "post_echo_async",
            b"",
            None,
        ),
        (
            True,
            "POST",
            [(b"content-type", b"text/plain")],
            "post_echo_async",
            b"some raw text body",
            "",
        ),
        (
            True,
            "POST",
            [(b"content-type", b"application/json")],
            "post_echo_async",
            b'{"username":"xyz","password":"xyz"}',
            {"username": "xyz", "password": "xyz"},
        ),
        (
            True,
            "POST",
            [(b"content-type", b"application/xml")],
            "post_echo_async",
            b'<?xml version="1.0" encoding="UTF-8"?><root>Hello</root>',
            "",
        ),
        (
            True,
            "POST",
            [
                (b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"),
                (b"content-length", BODY_FORM_CONTENT_LENGTH),
            ],
            "post_echo_async",
            BODY_FORM,
            {"password": "hello123", "photo": "", "username": "Jane"},
        ),
        (
            False,
            "POST",
            [(b"content-type", b"text/plain")],
            "post_echo_async",
            b"",
            None,
        ),
        (
            False,
            "POST",
            [(b"content-type", b"text/plain")],
            "post_echo_async",
            b"some raw text body",
            "",
        ),
        (
            False,
            "POST",
            [(b"content-type", b"application/json")],
            "post_echo_async",
            b'{"username":"xyz","password":"xyz"}',
            {"username": "xyz", "password": "[Filtered]"},
        ),
        (
            False,
            "POST",
            [(b"content-type", b"application/xml")],
            "post_echo_async",
            b'<?xml version="1.0" encoding="UTF-8"?><root>Hello</root>',
            "",
        ),
        (
            False,
            "POST",
            [
                (b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"),
                (b"content-length", BODY_FORM_CONTENT_LENGTH),
            ],
            "post_echo_async",
            BODY_FORM,
            {"password": "[Filtered]", "photo": "", "username": "Jane"},
        ),
    ],
)
@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_asgi_request_body(
    sentry_init,
    capture_envelopes,
    application,
    send_default_pii,
    method,
    headers,
    url_name,
    body,
    expected_data,
):
    sentry_init(
        send_default_pii=send_default_pii,
        integrations=[
            DjangoIntegration(),
        ],
    )

    envelopes = capture_envelopes()

    comm = HttpCommunicator(
        application,
        method=method,
        headers=headers,
        path=reverse(url_name),
        body=body,
    )
    response = await comm.get_response()
    assert response["status"] == 200

    await comm.wait()
    assert response["body"] == body

    (envelope,) = envelopes
    event = envelope.get_event()

    if expected_data is not None:
        assert event["request"]["data"] == expected_data
    else:
        assert "data" not in event["request"]
sentry-python-1.39.2/tests/integrations/django/myapp/000077500000000000000000000000001454744723200227255ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/django/myapp/__init__.py000066400000000000000000000000001454744723200250240ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/django/myapp/asgi.py000066400000000000000000000007471454744723200242320ustar00rootroot00000000000000"""
ASGI entrypoint. Configures Django and then runs the application
defined in the ASGI_APPLICATION setting.
"""

import os
import django
from channels.routing import get_default_application

os.environ.setdefault(
    "DJANGO_SETTINGS_MODULE", "tests.integrations.django.myapp.settings"
)

django.setup()
channels_application = get_default_application()

if django.VERSION >= (3, 0):
    from django.core.asgi import get_asgi_application

    asgi_application = get_asgi_application()
sentry-python-1.39.2/tests/integrations/django/myapp/custom_urls.py000066400000000000000000000017731454744723200256660ustar00rootroot00000000000000"""myapp URL Configuration

The `urlpatterns` list routes URLs to views. For more information please see:
    https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
    1. Add an import:  from my_app import views
    2. Add a URL to urlpatterns:  path('', views.home, name='home')
Class-based views
    1. Add an import:  from other_app.views import Home
    2. Add a URL to urlpatterns:  path('', Home.as_view(), name='home')
Including another URLconf
    1. Import the include() function: from django.urls import include, path
    2. Add a URL to urlpatterns:  path('blog/', include('blog.urls'))
"""
from __future__ import absolute_import

try:
    from django.urls import path
except ImportError:
    from django.conf.urls import url

    def path(path, *args, **kwargs):
        return url("^{}$".format(path), *args, **kwargs)


from . import views

urlpatterns = [
    path("custom/ok", views.custom_ok, name="custom_ok"),
    path("custom/exc", views.custom_exc, name="custom_exc"),
]
sentry-python-1.39.2/tests/integrations/django/myapp/manage.py000066400000000000000000000004341454744723200245300ustar00rootroot00000000000000#!/usr/bin/env python
import os
import sys

if __name__ == "__main__":
    os.environ.setdefault(
        "DJANGO_SETTINGS_MODULE", "tests.integrations.django.myapp.settings"
    )

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
sentry-python-1.39.2/tests/integrations/django/myapp/management/000077500000000000000000000000001454744723200250415ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/django/myapp/management/__init__.py000066400000000000000000000000001454744723200271400ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/django/myapp/management/commands/000077500000000000000000000000001454744723200266425ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/django/myapp/management/commands/__init__.py000066400000000000000000000000001454744723200307410ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/django/myapp/management/commands/mycrash.py000066400000000000000000000002731454744723200306640ustar00rootroot00000000000000from django.core.management.base import BaseCommand


class Command(BaseCommand):
    def add_arguments(self, parser):
        pass

    def handle(self, *args, **options):
        1 / 0
sentry-python-1.39.2/tests/integrations/django/myapp/middleware.py000066400000000000000000000014201454744723200254110ustar00rootroot00000000000000import django

if django.VERSION >= (3, 1):
    import asyncio
    from django.utils.decorators import sync_and_async_middleware

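    # ``sync_and_async_middleware`` flags the factory as both sync- and
    # async-capable; Django then uses whichever handler below matches the
    # stack, chosen by whether ``get_response`` is a coroutine function.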
    @sync_and_async_middleware
    def simple_middleware(get_response):
        if asyncio.iscoroutinefunction(get_response):

            async def middleware(request):
                response = await get_response(request)
                return response

        else:

            def middleware(request):
                response = get_response(request)
                return response

        return middleware


def custom_urlconf_middleware(get_response):
    def middleware(request):
        request.urlconf = "tests.integrations.django.myapp.custom_urls"
        response = get_response(request)
        return response

    return middleware
sentry-python-1.39.2/tests/integrations/django/myapp/routing.py000066400000000000000000000007561454744723200247760ustar00rootroot00000000000000import channels
from channels.routing import ProtocolTypeRouter

try:
    from channels.http import AsgiHandler

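    # channels 2.x mounts the AsgiHandler class directly, while 3.x expects an
    # instance. (The lexicographic version compare assumes a single-digit
    # major version.)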
    if channels.__version__ < "3.0.0":
        django_asgi_app = AsgiHandler
    else:
        django_asgi_app = AsgiHandler()

except ModuleNotFoundError:
    # Since channels 4.0 ASGI handling is done by Django itself
    from django.core.asgi import get_asgi_application

    django_asgi_app = get_asgi_application()

application = ProtocolTypeRouter({"http": django_asgi_app})
sentry-python-1.39.2/tests/integrations/django/myapp/settings.py000066400000000000000000000120301454744723200251330ustar00rootroot00000000000000"""
Django settings for myapp project.

Generated by 'django-admin startproject' using Django 2.0.7.

For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/

For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""


# We shouldn't access settings while setting up integrations. Initialize SDK
# here to provoke any errors that might occur.
import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration

sentry_sdk.init(integrations=[DjangoIntegration()])


import os

try:
    # Django >= 1.10
    from django.utils.deprecation import MiddlewareMixin
except ImportError:
    # Not required for Django <= 1.9, see:
    # https://docs.djangoproject.com/en/1.10/topics/http/middleware/#upgrading-pre-django-1-10-style-middleware
    MiddlewareMixin = object

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = "u95e#xr$t3!vdux)fj11!*q*^w^^r#kiyrvt3kjui-t_k%m3op"

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = ["localhost"]


# Application definition

INSTALLED_APPS = [
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.staticfiles",
    "tests.integrations.django.myapp",
]


class TestMiddleware(MiddlewareMixin):
    def process_request(self, request):
        # https://github.com/getsentry/sentry-python/issues/837 -- We should
        # not touch the resolver_match because apparently people rely on it.
        if request.resolver_match:
            assert not getattr(request.resolver_match.callback, "__wrapped__", None)

        if "middleware-exc" in request.path:
            1 / 0

    def process_response(self, request, response):
        return response


def TestFunctionMiddleware(get_response):  # noqa: N802
    def middleware(request):
        return get_response(request)

    return middleware


MIDDLEWARE_CLASSES = [
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.middleware.csrf.CsrfViewMiddleware",
    "tests.integrations.django.myapp.settings.TestMiddleware",
]

if MiddlewareMixin is not object:
    MIDDLEWARE = MIDDLEWARE_CLASSES + [
        "tests.integrations.django.myapp.settings.TestFunctionMiddleware"
    ]


ROOT_URLCONF = "tests.integrations.django.myapp.urls"

TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [],
        "APP_DIRS": True,
        "OPTIONS": {
            "debug": True,
            "context_processors": [
                "django.template.context_processors.debug",
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "django.contrib.messages.context_processors.messages",
            ],
        },
    }
]

WSGI_APPLICATION = "tests.integrations.django.myapp.wsgi.application"


# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases

DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}}

try:
    import psycopg2  # noqa

    db_engine = "django.db.backends.postgresql"
    try:
        from django.db.backends import postgresql  # noqa: F401
    except ImportError:
        db_engine = "django.db.backends.postgresql_psycopg2"

    DATABASES["postgres"] = {
        "ENGINE": db_engine,
        "NAME": os.environ["SENTRY_PYTHON_TEST_POSTGRES_NAME"],
        "USER": os.environ["SENTRY_PYTHON_TEST_POSTGRES_USER"],
        "PASSWORD": os.environ["SENTRY_PYTHON_TEST_POSTGRES_PASSWORD"],
        "HOST": os.environ.get("SENTRY_PYTHON_TEST_POSTGRES_HOST", "localhost"),
        "PORT": 5432,
    }
except (ImportError, KeyError):
    from sentry_sdk.utils import logger

    logger.warning("No psycopg2 found, testing with SQLite.")


# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"
    },
    {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"},
    {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"},
    {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"},
]


# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/

LANGUAGE_CODE = "en-us"

TIME_ZONE = "UTC"

USE_I18N = True

USE_L10N = True

USE_TZ = False

TEMPLATE_DEBUG = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/

STATIC_URL = "/static/"

# django-channels specific
ASGI_APPLICATION = "tests.integrations.django.myapp.routing.application"
sentry-python-1.39.2/tests/integrations/django/myapp/templates/000077500000000000000000000000001454744723200247235ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/django/myapp/templates/error.html000066400000000000000000000001131454744723200267350ustar00rootroot000000000000001
2
3
4
5
6
7
8
9
{% invalid template tag %}
11
12
13
14
15
16
17
18
19
20
sentry-python-1.39.2/tests/integrations/django/myapp/templates/trace_meta.html000066400000000000000000000000301454744723200277060ustar00rootroot00000000000000{{ sentry_trace_meta }}
sentry-python-1.39.2/tests/integrations/django/myapp/templates/user_name.html000066400000000000000000000000431454744723200275640ustar00rootroot00000000000000{{ request.user }}: {{ user_age }}
sentry-python-1.39.2/tests/integrations/django/myapp/urls.py000066400000000000000000000100531454744723200242630ustar00rootroot00000000000000"""myapp URL Configuration

The `urlpatterns` list routes URLs to views. For more information please see:
    https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
    1. Add an import:  from my_app import views
    2. Add a URL to urlpatterns:  path('', views.home, name='home')
Class-based views
    1. Add an import:  from other_app.views import Home
    2. Add a URL to urlpatterns:  path('', Home.as_view(), name='home')
Including another URLconf
    1. Import the include() function: from django.urls import include, path
    2. Add a URL to urlpatterns:  path('blog/', include('blog.urls'))
"""
from __future__ import absolute_import

try:
    from django.urls import path
except ImportError:
    from django.conf.urls import url

    def path(path, *args, **kwargs):
        return url("^{}$".format(path), *args, **kwargs)


from . import views

urlpatterns = [
    path("view-exc", views.view_exc, name="view_exc"),
    path("view-exc-with-msg", views.view_exc_with_msg, name="view_exc_with_msg"),
    path("cached-view", views.cached_view, name="cached_view"),
    path("not-cached-view", views.not_cached_view, name="not_cached_view"),
    path(
        "view-with-cached-template-fragment",
        views.view_with_cached_template_fragment,
        name="view_with_cached_template_fragment",
    ),
    path(
        "read-body-and-view-exc",
        views.read_body_and_view_exc,
        name="read_body_and_view_exc",
    ),
    path("middleware-exc", views.message, name="middleware_exc"),
    path("message", views.message, name="message"),
    path("mylogin", views.mylogin, name="mylogin"),
    path("classbased", views.ClassBasedView.as_view(), name="classbased"),
    path("sentryclass", views.SentryClassBasedView(), name="sentryclass"),
    path(
        "sentryclass-csrf",
        views.SentryClassBasedViewWithCsrf(),
        name="sentryclass_csrf",
    ),
    path("post-echo", views.post_echo, name="post_echo"),
    path("template-exc", views.template_exc, name="template_exc"),
    path("template-test", views.template_test, name="template_test"),
    path("template-test2", views.template_test2, name="template_test2"),
    path("template-test3", views.template_test3, name="template_test3"),
    path("postgres-select", views.postgres_select, name="postgres_select"),
    path("postgres-select-slow", views.postgres_select_orm, name="postgres_select_orm"),
    path(
        "permission-denied-exc",
        views.permission_denied_exc,
        name="permission_denied_exc",
    ),
    path(
        "csrf-hello-not-exempt",
        views.csrf_hello_not_exempt,
        name="csrf_hello_not_exempt",
    ),
    path("sync/thread_ids", views.thread_ids_sync, name="thread_ids_sync"),
]

# async views
if views.async_message is not None:
    urlpatterns.append(path("async_message", views.async_message, name="async_message"))

if views.my_async_view is not None:
    urlpatterns.append(path("my_async_view", views.my_async_view, name="my_async_view"))

if views.thread_ids_async is not None:
    urlpatterns.append(
        path("async/thread_ids", views.thread_ids_async, name="thread_ids_async")
    )

if views.post_echo_async is not None:
    urlpatterns.append(
        path("post_echo_async", views.post_echo_async, name="post_echo_async")
    )

# rest framework
try:
    urlpatterns.append(
        path("rest-framework-exc", views.rest_framework_exc, name="rest_framework_exc")
    )
    urlpatterns.append(
        path(
            "rest-framework-read-body-and-exc",
            views.rest_framework_read_body_and_exc,
            name="rest_framework_read_body_and_exc",
        )
    )
    urlpatterns.append(path("rest-hello", views.rest_hello, name="rest_hello"))
    urlpatterns.append(
        path("rest-json-response", views.rest_json_response, name="rest_json_response")
    )
    urlpatterns.append(
        path(
            "rest-permission-denied-exc",
            views.rest_permission_denied_exc,
            name="rest_permission_denied_exc",
        )
    )
except AttributeError:
    pass

handler500 = views.handler500
handler404 = views.handler404
sentry-python-1.39.2/tests/integrations/django/myapp/views.py000066400000000000000000000136271454744723200244450ustar00rootroot00000000000000import json
import threading

from django import VERSION
from django.contrib.auth import login
from django.contrib.auth.models import User
from django.core.exceptions import PermissionDenied
from django.http import HttpResponse, HttpResponseNotFound, HttpResponseServerError
from django.shortcuts import render
from django.template import Context, Template
from django.template.response import TemplateResponse
from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_page
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import ListView

try:
    from rest_framework.decorators import api_view
    from rest_framework.response import Response

    @api_view(["POST"])
    def rest_framework_exc(request):
        1 / 0

    @api_view(["POST"])
    def rest_framework_read_body_and_exc(request):
        request.data
        1 / 0

    @api_view(["GET"])
    def rest_hello(request):
        return HttpResponse("ok")

    @api_view(["GET"])
    def rest_permission_denied_exc(request):
        raise PermissionDenied("bye")

    @api_view(["GET"])
    def rest_json_response(request):
        return Response(dict(ok=True))

except ImportError:
    pass


import sentry_sdk
from sentry_sdk import capture_message


@csrf_exempt
def view_exc(request):
    1 / 0


@csrf_exempt
def view_exc_with_msg(request):
    capture_message("oops")
    1 / 0


@cache_page(60)
def cached_view(request):
    return HttpResponse("ok")


def not_cached_view(request):
    return HttpResponse("ok")


def view_with_cached_template_fragment(request):
    template = Template(
        """{% load cache %}
        Not cached content goes here.
        {% cache 500 some_identifier %}
            And here some cached content.
        {% endcache %}
        """
    )
    rendered = template.render(Context({}))
    return HttpResponse(rendered)


# This is a "class based view" as previously found in the sentry codebase. The
# interesting property of this one is that csrf_exempt, as a class attribute,
# is not in __dict__, so regular use of functools.wraps will not forward the
# attribute.
class SentryClassBasedView(object):
    csrf_exempt = True

    def __call__(self, request):
        return HttpResponse("ok")

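# A minimal sketch (not exercised by the test suite; the helper name is ours)
# of the caveat described above: ``csrf_exempt`` lives on the class, not in
# the callable's own ``__dict__``, so plain functools.wraps silently drops it.
def _wraps_drops_class_attribute_sketch():
    import functools

    view = SentryClassBasedView()

    @functools.wraps(view.__call__)
    def wrapped(request):
        return view(request)

    # Visible on the instance via the class ...
    assert view.csrf_exempt is True
    # ... but not forwarded to the wrapper.
    assert getattr(wrapped, "csrf_exempt", None) is not True
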

class SentryClassBasedViewWithCsrf(object):
    def __call__(self, request):
        return HttpResponse("ok")


@csrf_exempt
def read_body_and_view_exc(request):
    request.read()
    1 / 0


@csrf_exempt
def message(request):
    sentry_sdk.capture_message("hi")
    return HttpResponse("ok")


@csrf_exempt
def mylogin(request):
    user = User.objects.create_user("john", "lennon@thebeatles.com", "johnpassword")
    user.backend = "django.contrib.auth.backends.ModelBackend"
    login(request, user)
    return HttpResponse("ok")


@csrf_exempt
def handler500(request):
    return HttpResponseServerError("Sentry error: %s" % sentry_sdk.last_event_id())


class ClassBasedView(ListView):
    model = None

    @method_decorator(csrf_exempt)
    def dispatch(self, request, *args, **kwargs):
        return super(ClassBasedView, self).dispatch(request, *args, **kwargs)

    def head(self, *args, **kwargs):
        sentry_sdk.capture_message("hi")
        return HttpResponse("")

    def post(self, *args, **kwargs):
        return HttpResponse("ok")


@csrf_exempt
def post_echo(request):
    sentry_sdk.capture_message("hi")
    return HttpResponse(request.body)


@csrf_exempt
def handler404(*args, **kwargs):
    sentry_sdk.capture_message("not found", level="error")
    return HttpResponseNotFound("404")


@csrf_exempt
def template_exc(request, *args, **kwargs):
    return render(request, "error.html")


@csrf_exempt
def template_test(request, *args, **kwargs):
    return render(request, "user_name.html", {"user_age": 20})


@csrf_exempt
def custom_ok(request, *args, **kwargs):
    return HttpResponse("custom ok")


@csrf_exempt
def custom_exc(request, *args, **kwargs):
    1 / 0


@csrf_exempt
def template_test2(request, *args, **kwargs):
    return TemplateResponse(
        request, ("user_name.html", "another_template.html"), {"user_age": 25}
    )


@csrf_exempt
def template_test3(request, *args, **kwargs):
    from sentry_sdk import Hub

    hub = Hub.current
    capture_message(hub.get_traceparent() + "\n" + hub.get_baggage())
    return render(request, "trace_meta.html", {})


@csrf_exempt
def postgres_select(request, *args, **kwargs):
    from django.db import connections

    cursor = connections["postgres"].cursor()
    cursor.execute("SELECT 1;")
    return HttpResponse("ok")


@csrf_exempt
def postgres_select_orm(request, *args, **kwargs):
    user = User.objects.using("postgres").all().first()
    return HttpResponse("ok {}".format(user))


@csrf_exempt
def permission_denied_exc(*args, **kwargs):
    raise PermissionDenied("bye")


def csrf_hello_not_exempt(*args, **kwargs):
    return HttpResponse("ok")


def thread_ids_sync(*args, **kwargs):
    response = json.dumps(
        {
            "main": threading.main_thread().ident,
            "active": threading.current_thread().ident,
        }
    )
    return HttpResponse(response)


if VERSION >= (3, 1):
    # Wrap the async views in exec() strings so this module still parses on
    # Python 2, where the async/await syntax is a SyntaxError.
    exec(
        """async def async_message(request):
    sentry_sdk.capture_message("hi")
    return HttpResponse("ok")"""
    )

    exec(
        """async def my_async_view(request):
    import asyncio
    await asyncio.sleep(1)
    return HttpResponse('Hello World')"""
    )

    exec(
        """async def thread_ids_async(request):
    response = json.dumps({
        "main": threading.main_thread().ident,
        "active": threading.current_thread().ident,
    })
    return HttpResponse(response)"""
    )

    exec(
        """async def post_echo_async(request):
    sentry_sdk.capture_message("hi")
    return HttpResponse(request.body)
post_echo_async.csrf_exempt = True"""
    )
else:
    async_message = None
    my_async_view = None
    thread_ids_async = None
    post_echo_async = None
sentry-python-1.39.2/tests/integrations/django/myapp/wsgi.py000066400000000000000000000006431454744723200242530ustar00rootroot00000000000000"""
WSGI config for myapp project.

It exposes the WSGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/
"""

import os

from django.core.wsgi import get_wsgi_application

os.environ.setdefault(
    "DJANGO_SETTINGS_MODULE", "tests.integrations.django.myapp.settings"
)

application = get_wsgi_application()
sentry-python-1.39.2/tests/integrations/django/test_basic.py000066400000000000000000001242111454744723200242720ustar00rootroot00000000000000from __future__ import absolute_import

import json
import os
import random
import re
import pytest
from functools import partial

from werkzeug.test import Client

from django import VERSION as DJANGO_VERSION
from django.contrib.auth.models import User
from django.core.management import execute_from_command_line
from django.db.utils import OperationalError, ProgrammingError, DataError

try:
    from django.urls import reverse
except ImportError:
    from django.core.urlresolvers import reverse

from sentry_sdk._compat import PY2, PY310
from sentry_sdk import capture_message, capture_exception, configure_scope
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.django import DjangoIntegration, _set_db_data
from sentry_sdk.integrations.django.signals_handlers import _get_receiver_name
from sentry_sdk.integrations.django.caching import _get_span_description
from sentry_sdk.integrations.executing import ExecutingIntegration
from sentry_sdk.tracing import Span
from tests.conftest import unpack_werkzeug_response
from tests.integrations.django.myapp.wsgi import application
from tests.integrations.django.utils import pytest_mark_django_db_decorator

DJANGO_VERSION = DJANGO_VERSION[:2]


@pytest.fixture
def client():
    return Client(application)


@pytest.fixture
def use_django_caching(settings):
    settings.CACHES = {
        "default": {
            "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
            "LOCATION": "unique-snowflake-%s" % random.randint(1, 1000000),
        }
    }


@pytest.fixture
def use_django_caching_with_middlewares(settings):
    settings.CACHES = {
        "default": {
            "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
            "LOCATION": "unique-snowflake-%s" % random.randint(1, 1000000),
        }
    }
    if hasattr(settings, "MIDDLEWARE"):
        middleware = settings.MIDDLEWARE
    elif hasattr(settings, "MIDDLEWARE_CLASSES"):
        middleware = settings.MIDDLEWARE_CLASSES
    else:
        middleware = None

    if middleware is not None:
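        # Django's cache middleware must bracket everything else:
        # UpdateCacheMiddleware first, FetchFromCacheMiddleware last.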
        middleware.insert(0, "django.middleware.cache.UpdateCacheMiddleware")
        middleware.append("django.middleware.cache.FetchFromCacheMiddleware")


def test_view_exceptions(sentry_init, client, capture_exceptions, capture_events):
    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
    exceptions = capture_exceptions()
    events = capture_events()
    client.get(reverse("view_exc"))

    (error,) = exceptions
    assert isinstance(error, ZeroDivisionError)

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "django"


def test_ensures_x_forwarded_header_is_honored_in_sdk_when_enabled_in_django(
    sentry_init, client, capture_exceptions, capture_events, settings
):
    """
    Test that ensures that if django settings.USE_X_FORWARDED_HOST is set to True,
    the SDK builds the request url from the `HTTP_X_FORWARDED_HOST` header.
    """
    settings.USE_X_FORWARDED_HOST = True

    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
    exceptions = capture_exceptions()
    events = capture_events()
    client.get(reverse("view_exc"), headers={"X_FORWARDED_HOST": "example.com"})

    (error,) = exceptions
    assert isinstance(error, ZeroDivisionError)

    (event,) = events
    assert event["request"]["url"] == "http://example.com/view-exc"


def test_ensures_x_forwarded_header_is_not_honored_when_unenabled_in_django(
    sentry_init, client, capture_exceptions, capture_events
):
    """
    Test that ensures that if django settings.USE_X_FORWARDED_HOST is set to False,
    the SDK builds the request url from the `HTTP_HOST` header.
    """
    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
    exceptions = capture_exceptions()
    events = capture_events()
    client.get(reverse("view_exc"), headers={"X_FORWARDED_HOST": "example.com"})

    (error,) = exceptions
    assert isinstance(error, ZeroDivisionError)

    (event,) = events
    assert event["request"]["url"] == "http://localhost/view-exc"


def test_middleware_exceptions(sentry_init, client, capture_exceptions):
    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
    exceptions = capture_exceptions()
    client.get(reverse("middleware_exc"))

    (error,) = exceptions
    assert isinstance(error, ZeroDivisionError)


def test_request_captured(sentry_init, client, capture_events):
    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
    events = capture_events()
    content, status, headers = unpack_werkzeug_response(client.get(reverse("message")))

    assert content == b"ok"

    (event,) = events
    assert event["transaction"] == "/message"
    assert event["request"] == {
        "cookies": {},
        "env": {"SERVER_NAME": "localhost", "SERVER_PORT": "80"},
        "headers": {"Host": "localhost"},
        "method": "GET",
        "query_string": "",
        "url": "http://localhost/message",
    }


def test_transaction_with_class_view(sentry_init, client, capture_events):
    sentry_init(
        integrations=[DjangoIntegration(transaction_style="function_name")],
        send_default_pii=True,
    )
    events = capture_events()
    content, status, headers = unpack_werkzeug_response(
        client.head(reverse("classbased"))
    )
    assert status.lower() == "200 ok"

    (event,) = events

    assert (
        event["transaction"] == "tests.integrations.django.myapp.views.ClassBasedView"
    )
    assert event["message"] == "hi"


def test_has_trace_if_performance_enabled(sentry_init, client, capture_events):
    sentry_init(
        integrations=[DjangoIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()
    client.head(reverse("view_exc_with_msg"))

    (msg_event, error_event, transaction_event) = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert transaction_event["contexts"]["trace"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
        == transaction_event["contexts"]["trace"]["trace_id"]
    )


def test_has_trace_if_performance_disabled(sentry_init, client, capture_events):
    sentry_init(
        integrations=[DjangoIntegration()],
    )
    events = capture_events()
    client.head(reverse("view_exc_with_msg"))

    (msg_event, error_event) = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
    )


def test_trace_from_headers_if_performance_enabled(sentry_init, client, capture_events):
    sentry_init(
        integrations=[DjangoIntegration()],
        traces_sample_rate=1.0,
    )

    events = capture_events()

    trace_id = "582b43a4192642f0b136d5159a501701"
    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)

    client.head(
        reverse("view_exc_with_msg"), headers={"sentry-trace": sentry_trace_header}
    )

    (msg_event, error_event, transaction_event) = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert transaction_event["contexts"]["trace"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
    assert transaction_event["contexts"]["trace"]["trace_id"] == trace_id


def test_trace_from_headers_if_performance_disabled(
    sentry_init, client, capture_events
):
    sentry_init(
        integrations=[DjangoIntegration()],
    )

    events = capture_events()

    trace_id = "582b43a4192642f0b136d5159a501701"
    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)

    client.head(
        reverse("view_exc_with_msg"), headers={"sentry-trace": sentry_trace_header}
    )

    (msg_event, error_event) = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
    assert error_event["contexts"]["trace"]["trace_id"] == trace_id


@pytest.mark.forked
@pytest.mark.django_db
def test_user_captured(sentry_init, client, capture_events):
    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
    events = capture_events()
    content, status, headers = unpack_werkzeug_response(client.get(reverse("mylogin")))
    assert content == b"ok"

    assert not events

    content, status, headers = unpack_werkzeug_response(client.get(reverse("message")))
    assert content == b"ok"

    (event,) = events

    assert event["user"] == {
        "email": "lennon@thebeatles.com",
        "username": "john",
        "id": "1",
    }


@pytest.mark.forked
@pytest.mark.django_db
def test_queryset_repr(sentry_init, capture_events):
    sentry_init(integrations=[DjangoIntegration()])
    events = capture_events()
    User.objects.create_user("john", "lennon@thebeatles.com", "johnpassword")

    try:
        my_queryset = User.objects.all()  # noqa
        1 / 0
    except Exception:
        capture_exception()

    (event,) = events

    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    (frame,) = exception["stacktrace"]["frames"]
    assert frame["vars"]["my_queryset"].startswith(
        "\n',
        rendered_meta,
    )
    assert match is not None
    assert match.group(1) == traceparent

    # Python 2 does not preserve sort order
    rendered_baggage = match.group(2)
    assert sorted(rendered_baggage.split(",")) == sorted(baggage.split(","))


@pytest.mark.parametrize("with_executing_integration", [[], [ExecutingIntegration()]])
def test_template_exception(
    sentry_init, client, capture_events, with_executing_integration
):
    sentry_init(integrations=[DjangoIntegration()] + with_executing_integration)
    events = capture_events()

    content, status, headers = unpack_werkzeug_response(
        client.get(reverse("template_exc"))
    )
    assert status.lower() == "500 internal server error"

    (event,) = events
    exception = event["exception"]["values"][-1]
    assert exception["type"] == "TemplateSyntaxError"

    frames = [
        f
        for f in exception["stacktrace"]["frames"]
        if not f["filename"].startswith("django/")
    ]
    view_frame, template_frame = frames[-2:]

    assert template_frame["context_line"] == "{% invalid template tag %}\n"
    assert template_frame["pre_context"] == ["5\n", "6\n", "7\n", "8\n", "9\n"]

    assert template_frame["post_context"] == ["11\n", "12\n", "13\n", "14\n", "15\n"]
    assert template_frame["lineno"] == 10
    assert template_frame["filename"].endswith("error.html")

    filenames = [
        (f.get("function"), f.get("module")) for f in exception["stacktrace"]["frames"]
    ]

    if with_executing_integration:
        assert filenames[-3:] == [
            ("Parser.parse", "django.template.base"),
            (None, None),
            ("Parser.invalid_block_tag", "django.template.base"),
        ]
    else:
        assert filenames[-3:] == [
            ("parse", "django.template.base"),
            (None, None),
            ("invalid_block_tag", "django.template.base"),
        ]


@pytest.mark.parametrize(
    "route", ["rest_framework_exc", "rest_framework_read_body_and_exc"]
)
@pytest.mark.parametrize(
    "ct,body",
    [
        ["application/json", {"foo": "bar"}],
        ["application/json", 1],
        ["application/json", "foo"],
        ["application/x-www-form-urlencoded", {"foo": "bar"}],
    ],
)
def test_rest_framework_basic(
    sentry_init, client, capture_events, capture_exceptions, ct, body, route
):
    pytest.importorskip("rest_framework")
    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
    exceptions = capture_exceptions()
    events = capture_events()

    if ct == "application/json":
        client.post(
            reverse(route), data=json.dumps(body), content_type="application/json"
        )
    elif ct == "application/x-www-form-urlencoded":
        client.post(reverse(route), data=body)
    else:
        raise AssertionError("unreachable")

    (error,) = exceptions
    assert isinstance(error, ZeroDivisionError)

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "django"

    assert event["request"]["data"] == body
    assert event["request"]["headers"]["Content-Type"] == ct


@pytest.mark.parametrize(
    "endpoint", ["rest_permission_denied_exc", "permission_denied_exc"]
)
def test_does_not_capture_403(sentry_init, client, capture_events, endpoint):
    if endpoint == "rest_permission_denied_exc":
        pytest.importorskip("rest_framework")

    sentry_init(integrations=[DjangoIntegration()])
    events = capture_events()

    _, status, _ = unpack_werkzeug_response(client.get(reverse(endpoint)))
    assert status.lower() == "403 forbidden"

    assert not events


def test_render_spans(sentry_init, client, capture_events, render_span_tree):
    sentry_init(
        integrations=[DjangoIntegration()],
        traces_sample_rate=1.0,
    )
    views_tests = [
        (
            reverse("template_test2"),
            '- op="template.render": description="[user_name.html, ...]"',
        ),
    ]
    if DJANGO_VERSION >= (1, 7):
        views_tests.append(
            (
                reverse("template_test"),
                '- op="template.render": description="user_name.html"',
            ),
        )

    for url, expected_line in views_tests:
        events = capture_events()
        client.get(url)
        transaction = events[0]
        assert expected_line in render_span_tree(transaction)


if DJANGO_VERSION >= (1, 10):
    EXPECTED_MIDDLEWARE_SPANS = """\
- op="http.server": description=null
  - op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.__call__"
    - op="middleware.django": description="django.contrib.auth.middleware.AuthenticationMiddleware.__call__"
      - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.__call__"
        - op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.__call__"
          - op="middleware.django": description="tests.integrations.django.myapp.settings.TestFunctionMiddleware.__call__"
            - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
            - op="view.render": description="message"\
"""
else:
    EXPECTED_MIDDLEWARE_SPANS = """\
- op="http.server": description=null
  - op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.process_request"
  - op="middleware.django": description="django.contrib.auth.middleware.AuthenticationMiddleware.process_request"
  - op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.process_request"
  - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
  - op="view.render": description="message"
  - op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.process_response"
  - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_response"
  - op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.process_response"\
"""


def test_middleware_spans(sentry_init, client, capture_events, render_span_tree):
    sentry_init(
        integrations=[
            DjangoIntegration(signals_spans=False),
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get(reverse("message"))

    message, transaction = events

    assert message["message"] == "hi"
    assert render_span_tree(transaction) == EXPECTED_MIDDLEWARE_SPANS


def test_middleware_spans_disabled(sentry_init, client, capture_events):
    sentry_init(
        integrations=[
            DjangoIntegration(middleware_spans=False, signals_spans=False),
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get(reverse("message"))

    message, transaction = events

    assert message["message"] == "hi"
    assert not len(transaction["spans"])


EXPECTED_SIGNALS_SPANS = """\
- op="http.server": description=null
  - op="event.django": description="django.db.reset_queries"
  - op="event.django": description="django.db.close_old_connections"\
"""


def test_signals_spans(sentry_init, client, capture_events, render_span_tree):
    sentry_init(
        integrations=[
            DjangoIntegration(middleware_spans=False),
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get(reverse("message"))

    message, transaction = events

    assert message["message"] == "hi"
    assert render_span_tree(transaction) == EXPECTED_SIGNALS_SPANS

    assert transaction["spans"][0]["op"] == "event.django"
    assert transaction["spans"][0]["description"] == "django.db.reset_queries"

    assert transaction["spans"][1]["op"] == "event.django"
    assert transaction["spans"][1]["description"] == "django.db.close_old_connections"


def test_signals_spans_disabled(sentry_init, client, capture_events):
    sentry_init(
        integrations=[
            DjangoIntegration(middleware_spans=False, signals_spans=False),
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get(reverse("message"))

    message, transaction = events

    assert message["message"] == "hi"
    assert not transaction["spans"]


def test_csrf(sentry_init, client):
    """
    Assert that CSRF view decorator works even with the view wrapped in our own
    callable.
    """

    sentry_init(integrations=[DjangoIntegration()])

    content, status, _headers = unpack_werkzeug_response(
        client.post(reverse("csrf_hello_not_exempt"))
    )
    assert status.lower() == "403 forbidden"

    content, status, _headers = unpack_werkzeug_response(
        client.post(reverse("sentryclass_csrf"))
    )
    assert status.lower() == "403 forbidden"

    content, status, _headers = unpack_werkzeug_response(
        client.post(reverse("sentryclass"))
    )
    assert status.lower() == "200 ok"
    assert content == b"ok"

    content, status, _headers = unpack_werkzeug_response(
        client.post(reverse("classbased"))
    )
    assert status.lower() == "200 ok"
    assert content == b"ok"

    content, status, _headers = unpack_werkzeug_response(
        client.post(reverse("message"))
    )
    assert status.lower() == "200 ok"
    assert content == b"ok"


@pytest.mark.skipif(DJANGO_VERSION < (2, 0), reason="Requires Django > 2.0")
def test_custom_urlconf_middleware(
    settings, sentry_init, client, capture_events, render_span_tree
):
    """
    Some middlewares (for instance in django-tenants) overwrite request.urlconf.
    Test that the resolver picks up the correct urlconf for transaction naming.
    """
    urlconf = "tests.integrations.django.myapp.middleware.custom_urlconf_middleware"
    settings.ROOT_URLCONF = ""
    settings.MIDDLEWARE.insert(0, urlconf)
    client.application.load_middleware()

    sentry_init(integrations=[DjangoIntegration()], traces_sample_rate=1.0)
    events = capture_events()

    content, status, _headers = unpack_werkzeug_response(client.get("/custom/ok"))
    assert status.lower() == "200 ok"
    assert content == b"custom ok"

    event = events.pop(0)
    assert event["transaction"] == "/custom/ok"
    assert "custom_urlconf_middleware" in render_span_tree(event)

    _content, status, _headers = unpack_werkzeug_response(client.get("/custom/exc"))
    assert status.lower() == "500 internal server error"

    error_event, transaction_event = events
    assert error_event["transaction"] == "/custom/exc"
    assert error_event["exception"]["values"][-1]["mechanism"]["type"] == "django"
    assert transaction_event["transaction"] == "/custom/exc"
    assert "custom_urlconf_middleware" in render_span_tree(transaction_event)

    settings.MIDDLEWARE.pop(0)
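

# A minimal sketch (not part of the original suite) of the kind of middleware
# the test above exercises; the assigned module path is illustrative.
# django-tenants and myapp's custom_urlconf_middleware both swap
# ``request.urlconf`` before URL resolution runs:
def _example_custom_urlconf_middleware(get_response):
    def middleware(request):
        # Point Django's resolver at an alternative URL module for this request.
        request.urlconf = "tests.integrations.django.myapp.custom_urls"
        return get_response(request)

    return middleware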


def test_get_receiver_name():
    def dummy(a, b):
        return a + b

    name = _get_receiver_name(dummy)

    if PY2:
        assert name == "tests.integrations.django.test_basic.dummy"
    else:
        assert (
            name
            == "tests.integrations.django.test_basic.test_get_receiver_name..dummy"
        )

    a_partial = partial(dummy)
    name = _get_receiver_name(a_partial)
    if PY310:
        assert name == "functools.partial(<function " + a_partial.func.__name__ + ">)"
    else:
        assert name == "partial(<function " + a_partial.func.__name__ + ">)"


@pytest.mark.forked
@pytest_mark_django_db_decorator()
@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
def test_cache_spans_disabled_middleware(
    sentry_init, client, capture_events, use_django_caching_with_middlewares
):
    sentry_init(
        integrations=[
            DjangoIntegration(
                cache_spans=False,
                middleware_spans=False,
                signals_spans=False,
            )
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get(reverse("not_cached_view"))
    client.get(reverse("not_cached_view"))

    (first_event, second_event) = events
    assert len(first_event["spans"]) == 0
    assert len(second_event["spans"]) == 0


@pytest.mark.forked
@pytest_mark_django_db_decorator()
@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
def test_cache_spans_disabled_decorator(
    sentry_init, client, capture_events, use_django_caching
):
    sentry_init(
        integrations=[
            DjangoIntegration(
                cache_spans=False,
                middleware_spans=False,
                signals_spans=False,
            )
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get(reverse("cached_view"))
    client.get(reverse("cached_view"))

    (first_event, second_event) = events
    assert len(first_event["spans"]) == 0
    assert len(second_event["spans"]) == 0


@pytest.mark.forked
@pytest_mark_django_db_decorator()
@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
def test_cache_spans_disabled_templatetag(
    sentry_init, client, capture_events, use_django_caching
):
    sentry_init(
        integrations=[
            DjangoIntegration(
                cache_spans=False,
                middleware_spans=False,
                signals_spans=False,
            )
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get(reverse("view_with_cached_template_fragment"))
    client.get(reverse("view_with_cached_template_fragment"))

    (first_event, second_event) = events
    assert len(first_event["spans"]) == 0
    assert len(second_event["spans"]) == 0


@pytest.mark.forked
@pytest_mark_django_db_decorator()
@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
def test_cache_spans_middleware(
    sentry_init, client, capture_events, use_django_caching_with_middlewares
):
    sentry_init(
        integrations=[
            DjangoIntegration(
                cache_spans=True,
                middleware_spans=False,
                signals_spans=False,
            )
        ],
        traces_sample_rate=1.0,
    )

    client.application.load_middleware()
    events = capture_events()

    client.get(reverse("not_cached_view"))
    client.get(reverse("not_cached_view"))

    (first_event, second_event) = events
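    # First request: only the cache-header lookup (a miss) is recorded. On the
    # second request the header lookup still misses, but the cached page is
    # found, so cache.hit is True and the item size is reported.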
    assert len(first_event["spans"]) == 1
    assert first_event["spans"][0]["op"] == "cache.get_item"
    assert first_event["spans"][0]["description"].startswith(
        "get views.decorators.cache.cache_header."
    )
    assert first_event["spans"][0]["data"] == {"cache.hit": False}

    assert len(second_event["spans"]) == 2
    assert second_event["spans"][0]["op"] == "cache.get_item"
    assert second_event["spans"][0]["description"].startswith(
        "get views.decorators.cache.cache_header."
    )
    assert second_event["spans"][0]["data"] == {"cache.hit": False}

    assert second_event["spans"][1]["op"] == "cache.get_item"
    assert second_event["spans"][1]["description"].startswith(
        "get views.decorators.cache.cache_page."
    )
    assert second_event["spans"][1]["data"]["cache.hit"]
    assert "cache.item_size" in second_event["spans"][1]["data"]


@pytest.mark.forked
@pytest_mark_django_db_decorator()
@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
def test_cache_spans_decorator(sentry_init, client, capture_events, use_django_caching):
    sentry_init(
        integrations=[
            DjangoIntegration(
                cache_spans=True,
                middleware_spans=False,
                signals_spans=False,
            )
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get(reverse("cached_view"))
    client.get(reverse("cached_view"))

    (first_event, second_event) = events
    assert len(first_event["spans"]) == 1
    assert first_event["spans"][0]["op"] == "cache.get_item"
    assert first_event["spans"][0]["description"].startswith(
        "get views.decorators.cache.cache_header."
    )
    assert first_event["spans"][0]["data"] == {"cache.hit": False}

    assert len(second_event["spans"]) == 2
    assert second_event["spans"][0]["op"] == "cache.get_item"
    assert second_event["spans"][0]["description"].startswith(
        "get views.decorators.cache.cache_header."
    )
    assert second_event["spans"][0]["data"] == {"cache.hit": False}

    assert second_event["spans"][1]["op"] == "cache.get_item"
    assert second_event["spans"][1]["description"].startswith(
        "get views.decorators.cache.cache_page."
    )
    assert second_event["spans"][1]["data"]["cache.hit"]
    assert "cache.item_size" in second_event["spans"][1]["data"]


@pytest.mark.forked
@pytest_mark_django_db_decorator()
@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
def test_cache_spans_templatetag(
    sentry_init, client, capture_events, use_django_caching
):
    sentry_init(
        integrations=[
            DjangoIntegration(
                cache_spans=True,
                middleware_spans=False,
                signals_spans=False,
            )
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get(reverse("view_with_cached_template_fragment"))
    client.get(reverse("view_with_cached_template_fragment"))

    (first_event, second_event) = events
    assert len(first_event["spans"]) == 1
    assert first_event["spans"][0]["op"] == "cache.get_item"
    assert first_event["spans"][0]["description"].startswith(
        "get template.cache.some_identifier."
    )
    assert first_event["spans"][0]["data"] == {"cache.hit": False}

    assert len(second_event["spans"]) == 1
    assert second_event["spans"][0]["op"] == "cache.get_item"
    assert second_event["spans"][0]["description"].startswith(
        "get template.cache.some_identifier."
    )
    assert second_event["spans"][0]["data"]["cache.hit"]
    assert "cache.item_size" in second_event["spans"][0]["data"]


@pytest.mark.parametrize(
    "method_name, args, kwargs, expected_description",
    [
        ("get", None, None, "get "),
        ("get", [], {}, "get "),
        ("get", ["bla", "blub", "foo"], {}, "get bla"),
        (
            "get_many",
            [["bla 1", "bla 2", "bla 3"], "blub", "foo"],
            {},
            "get_many ['bla 1', 'bla 2', 'bla 3']",
        ),
        (
            "get_many",
            [["bla 1", "bla 2", "bla 3"], "blub", "foo"],
            {"key": "bar"},
            "get_many ['bla 1', 'bla 2', 'bla 3']",
        ),
        ("get", [], {"key": "bar"}, "get bar"),
        (
            "get",
            "something",
            {},
            "get s",
        ),  # this should never happen, just making sure that we are not raising an exception in that case.
    ],
)
def test_cache_spans_get_span_description(
    method_name, args, kwargs, expected_description
):
    assert _get_span_description(method_name, args, kwargs) == expected_description
# File: sentry-python-1.39.2/tests/integrations/django/test_data_scrubbing.py
import pytest

from werkzeug.test import Client

from sentry_sdk.integrations.django import DjangoIntegration
from tests.conftest import werkzeug_set_cookie
from tests.integrations.django.myapp.wsgi import application
from tests.integrations.django.utils import pytest_mark_django_db_decorator

try:
    from django.urls import reverse
except ImportError:
    from django.core.urlresolvers import reverse


@pytest.fixture
def client():
    return Client(application)


@pytest.mark.forked
@pytest_mark_django_db_decorator()
def test_scrub_django_session_cookies_removed(
    sentry_init,
    client,
    capture_events,
):
    sentry_init(integrations=[DjangoIntegration()], send_default_pii=False)
    events = capture_events()
    werkzeug_set_cookie(client, "localhost", "sessionid", "123")
    werkzeug_set_cookie(client, "localhost", "csrftoken", "456")
    werkzeug_set_cookie(client, "localhost", "foo", "bar")
    client.get(reverse("view_exc"))

    (event,) = events
    assert "cookies" not in event["request"]


@pytest.mark.forked
@pytest_mark_django_db_decorator()
def test_scrub_django_session_cookies_filtered(
    sentry_init,
    client,
    capture_events,
):
    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
    events = capture_events()
    werkzeug_set_cookie(client, "localhost", "sessionid", "123")
    werkzeug_set_cookie(client, "localhost", "csrftoken", "456")
    werkzeug_set_cookie(client, "localhost", "foo", "bar")
    client.get(reverse("view_exc"))

    (event,) = events
    assert event["request"]["cookies"] == {
        "sessionid": "[Filtered]",
        "csrftoken": "[Filtered]",
        "foo": "bar",
    }


@pytest.mark.forked
@pytest_mark_django_db_decorator()
def test_scrub_django_custom_session_cookies_filtered(
    sentry_init,
    client,
    capture_events,
    settings,
):
    settings.SESSION_COOKIE_NAME = "my_sess"
    settings.CSRF_COOKIE_NAME = "csrf_secret"

    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
    events = capture_events()
    werkzeug_set_cookie(client, "localhost", "my_sess", "123")
    werkzeug_set_cookie(client, "localhost", "csrf_secret", "456")
    werkzeug_set_cookie(client, "localhost", "foo", "bar")
    client.get(reverse("view_exc"))

    (event,) = events
    assert event["request"]["cookies"] == {
        "my_sess": "[Filtered]",
        "csrf_secret": "[Filtered]",
        "foo": "bar",
    }
# File: sentry-python-1.39.2/tests/integrations/django/test_db_query_data.py
from __future__ import absolute_import

import os
import pytest

from django import VERSION as DJANGO_VERSION
from django.db import connections

try:
    from django.urls import reverse
except ImportError:
    from django.core.urlresolvers import reverse

from werkzeug.test import Client

from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.django import DjangoIntegration

from tests.conftest import unpack_werkzeug_response
from tests.integrations.django.utils import pytest_mark_django_db_decorator
from tests.integrations.django.myapp.wsgi import application


@pytest.fixture
def client():
    return Client(application)


@pytest.mark.forked
@pytest_mark_django_db_decorator(transaction=True)
@pytest.mark.parametrize("enable_db_query_source", [None, False])
def test_query_source_disabled(
    sentry_init, client, capture_events, enable_db_query_source
):
    sentry_options = {
        "integrations": [DjangoIntegration()],
        "send_default_pii": True,
        "traces_sample_rate": 1.0,
    }
    if enable_db_query_source is not None:
        sentry_options["enable_db_query_source"] = enable_db_query_source
        sentry_options["db_query_source_threshold_ms"] = 0

    sentry_init(**sentry_options)

    if "postgres" not in connections:
        pytest.skip("postgres tests disabled")

    # trigger Django to open a new connection by marking the existing one as None.
    connections["postgres"].connection = None

    events = capture_events()

    _, status, _ = unpack_werkzeug_response(client.get(reverse("postgres_select_orm")))
    assert status == "200 OK"

    (event,) = events
    for span in event["spans"]:
        if span.get("op") == "db" and "auth_user" in span.get("description"):
            data = span.get("data", {})

            assert SPANDATA.CODE_LINENO not in data
            assert SPANDATA.CODE_NAMESPACE not in data
            assert SPANDATA.CODE_FILEPATH not in data
            assert SPANDATA.CODE_FUNCTION not in data
            break
    else:
        raise AssertionError("No db span found")


@pytest.mark.forked
@pytest_mark_django_db_decorator(transaction=True)
def test_query_source(sentry_init, client, capture_events):
    sentry_init(
        integrations=[DjangoIntegration()],
        send_default_pii=True,
        traces_sample_rate=1.0,
        enable_db_query_source=True,
        db_query_source_threshold_ms=0,
    )

    if "postgres" not in connections:
        pytest.skip("postgres tests disabled")

    # trigger Django to open a new connection by marking the existing one as None.
    connections["postgres"].connection = None

    events = capture_events()

    _, status, _ = unpack_werkzeug_response(client.get(reverse("postgres_select_orm")))
    assert status == "200 OK"

    (event,) = events
    for span in event["spans"]:
        if span.get("op") == "db" and "auth_user" in span.get("description"):
            data = span.get("data", {})

            assert SPANDATA.CODE_LINENO in data
            assert SPANDATA.CODE_NAMESPACE in data
            assert SPANDATA.CODE_FILEPATH in data
            assert SPANDATA.CODE_FUNCTION in data

            assert type(data.get(SPANDATA.CODE_LINENO)) == int
            assert data.get(SPANDATA.CODE_LINENO) > 0

            assert (
                data.get(SPANDATA.CODE_NAMESPACE)
                == "tests.integrations.django.myapp.views"
            )
            assert data.get(SPANDATA.CODE_FILEPATH).endswith(
                "tests/integrations/django/myapp/views.py"
            )

            is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
            assert is_relative_path

            assert data.get(SPANDATA.CODE_FUNCTION) == "postgres_select_orm"

            break
    else:
        raise AssertionError("No db span found")


@pytest.mark.forked
@pytest_mark_django_db_decorator(transaction=True)
def test_query_source_with_in_app_exclude(sentry_init, client, capture_events):
    sentry_init(
        integrations=[DjangoIntegration()],
        send_default_pii=True,
        traces_sample_rate=1.0,
        enable_db_query_source=True,
        db_query_source_threshold_ms=0,
        in_app_exclude=["tests.integrations.django.myapp.views"],
    )

    if "postgres" not in connections:
        pytest.skip("postgres tests disabled")

    # trigger Django to open a new connection by marking the existing one as None.
    connections["postgres"].connection = None

    events = capture_events()

    _, status, _ = unpack_werkzeug_response(client.get(reverse("postgres_select_orm")))
    assert status == "200 OK"

    (event,) = events
    for span in event["spans"]:
        if span.get("op") == "db" and "auth_user" in span.get("description"):
            data = span.get("data", {})

            assert SPANDATA.CODE_LINENO in data
            assert SPANDATA.CODE_NAMESPACE in data
            assert SPANDATA.CODE_FILEPATH in data
            assert SPANDATA.CODE_FUNCTION in data

            assert type(data.get(SPANDATA.CODE_LINENO)) == int
            assert data.get(SPANDATA.CODE_LINENO) > 0

            if DJANGO_VERSION >= (1, 11):
                assert (
                    data.get(SPANDATA.CODE_NAMESPACE)
                    == "tests.integrations.django.myapp.settings"
                )
                assert data.get(SPANDATA.CODE_FILEPATH).endswith(
                    "tests/integrations/django/myapp/settings.py"
                )
                assert data.get(SPANDATA.CODE_FUNCTION) == "middleware"
            else:
                assert (
                    data.get(SPANDATA.CODE_NAMESPACE)
                    == "tests.integrations.django.test_db_query_data"
                )
                assert data.get(SPANDATA.CODE_FILEPATH).endswith(
                    "tests/integrations/django/test_db_query_data.py"
                )
                assert (
                    data.get(SPANDATA.CODE_FUNCTION)
                    == "test_query_source_with_in_app_exclude"
                )

            break
    else:
        raise AssertionError("No db span found")


@pytest.mark.forked
@pytest_mark_django_db_decorator(transaction=True)
def test_query_source_with_in_app_include(sentry_init, client, capture_events):
    sentry_init(
        integrations=[DjangoIntegration()],
        send_default_pii=True,
        traces_sample_rate=1.0,
        enable_db_query_source=True,
        db_query_source_threshold_ms=0,
        in_app_include=["django"],
    )

    if "postgres" not in connections:
        pytest.skip("postgres tests disabled")

    # trigger Django to open a new connection by marking the existing one as None.
    connections["postgres"].connection = None

    events = capture_events()

    _, status, _ = unpack_werkzeug_response(client.get(reverse("postgres_select_orm")))
    assert status == "200 OK"

    (event,) = events
    for span in event["spans"]:
        if span.get("op") == "db" and "auth_user" in span.get("description"):
            data = span.get("data", {})

            assert SPANDATA.CODE_LINENO in data
            assert SPANDATA.CODE_NAMESPACE in data
            assert SPANDATA.CODE_FILEPATH in data
            assert SPANDATA.CODE_FUNCTION in data

            assert type(data.get(SPANDATA.CODE_LINENO)) == int
            assert data.get(SPANDATA.CODE_LINENO) > 0

            assert data.get(SPANDATA.CODE_NAMESPACE) == "django.db.models.sql.compiler"
            assert data.get(SPANDATA.CODE_FILEPATH).endswith(
                "django/db/models/sql/compiler.py"
            )
            assert data.get(SPANDATA.CODE_FUNCTION) == "execute_sql"
            break
    else:
        raise AssertionError("No db span found")
# File: sentry-python-1.39.2/tests/integrations/django/test_transactions.py
from __future__ import absolute_import

import pytest
import django

try:
    from unittest import mock  # python 3.3 and above
except ImportError:
    import mock  # python < 3.3


# django<2.0 has only `url` with regex-based patterns.
# django>=2.0 renames `url` to `re_path`, and additionally introduces `path`
# for new-style URL patterns, e.g. <int:project_id>.
if django.VERSION >= (2, 0):
    from django.urls import path, re_path
    from django.urls.converters import PathConverter
    from django.conf.urls import include
else:
    from django.conf.urls import url as re_path, include

if django.VERSION < (1, 9):
    included_url_conf = (re_path(r"^foo/bar/(?P[\w]+)", lambda x: ""),), "", ""
else:
    included_url_conf = ((re_path(r"^foo/bar/(?P[\w]+)", lambda x: ""),), "")

from sentry_sdk.integrations.django.transactions import RavenResolver


example_url_conf = (
    re_path(r"^api/(?P[\w_-]+)/store/$", lambda x: ""),
    re_path(r"^api/(?P(v1|v2))/author/$", lambda x: ""),
    re_path(
        r"^api/(?P[^\/]+)/product/(?P(?:\d+|[A-Fa-f0-9-]{32,36}))/$",
        lambda x: "",
    ),
    re_path(r"^report/", lambda x: ""),
    re_path(r"^example/", include(included_url_conf)),
)
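
# RavenResolver substitutes each named regex group in a matched pattern with a
# "{group_name}" placeholder, turning a concrete request path into a
# parameterized transaction name; the tests below assert exactly that.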


def test_resolver_no_match():
    resolver = RavenResolver()
    result = resolver.resolve("/foo/bar", example_url_conf)
    assert result is None


def test_resolver_re_path_complex_match():
    resolver = RavenResolver()
    result = resolver.resolve("/api/1234/store/", example_url_conf)
    assert result == "/api/{project_id}/store/"


def test_resolver_re_path_complex_either_match():
    resolver = RavenResolver()
    result = resolver.resolve("/api/v1/author/", example_url_conf)
    assert result == "/api/{version}/author/"
    result = resolver.resolve("/api/v2/author/", example_url_conf)
    assert result == "/api/{version}/author/"


def test_resolver_re_path_included_match():
    resolver = RavenResolver()
    result = resolver.resolve("/example/foo/bar/baz", example_url_conf)
    assert result == "/example/foo/bar/{param}"


def test_resolver_re_path_multiple_groups():
    resolver = RavenResolver()
    result = resolver.resolve(
        "/api/myproject/product/cb4ef1caf3554c34ae134f3c1b3d605f/", example_url_conf
    )
    assert result == "/api/{project_id}/product/{pid}/"


@pytest.mark.skipif(
    django.VERSION < (2, 0),
    reason="Django>=2.0 required for  patterns",
)
def test_resolver_path_group():
    url_conf = (path("api/v2//store/", lambda x: ""),)
    resolver = RavenResolver()
    result = resolver.resolve("/api/v2/1234/store/", url_conf)
    assert result == "/api/v2/{project_id}/store/"


@pytest.mark.skipif(
    django.VERSION < (2, 0),
    reason="Django>=2.0 required for  patterns",
)
def test_resolver_path_multiple_groups():
    url_conf = (path("api/v2//product/", lambda x: ""),)
    resolver = RavenResolver()
    result = resolver.resolve("/api/v2/myproject/product/5689", url_conf)
    assert result == "/api/v2/{project_id}/product/{pid}"


@pytest.mark.skipif(
    django.VERSION < (2, 0),
    reason="Django>=2.0 required for  patterns",
)
def test_resolver_path_complex_path():
    class CustomPathConverter(PathConverter):
        regex = r"[^/]+(/[^/]+){0,2}"

    with mock.patch(
        "django.urls.resolvers.get_converter", return_value=CustomPathConverter
    ):
        url_conf = (path("api/v3/", lambda x: ""),)
        resolver = RavenResolver()
        result = resolver.resolve("/api/v3/abc/def/ghi", url_conf)
        assert result == "/api/v3/{my_path}"


@pytest.mark.skipif(
    django.VERSION < (2, 0),
    reason="Django>=2.0 required for  patterns",
)
def test_resolver_path_no_converter():
    url_conf = (path("api/v4/", lambda x: ""),)
    resolver = RavenResolver()
    result = resolver.resolve("/api/v4/myproject", url_conf)
    assert result == "/api/v4/{project_id}"
# File: sentry-python-1.39.2/tests/integrations/django/utils.py
from functools import partial

import pytest
import pytest_django


# Hack to prevent the experimental feature introduced in `pytest-django`
# version `4.3.0` (which requires explicitly allowing database access) from
# failing the tests
pytest_mark_django_db_decorator = partial(pytest.mark.django_db)
try:
    pytest_version = tuple(map(int, pytest_django.__version__.split(".")))
    if pytest_version > (4, 2, 0):
        pytest_mark_django_db_decorator = partial(
            pytest.mark.django_db, databases="__all__"
        )
except ValueError:
    if "dev" in pytest_django.__version__:
        pytest_mark_django_db_decorator = partial(
            pytest.mark.django_db, databases="__all__"
        )
except AttributeError:
    pass
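
# Usage sketch: the resulting decorator is applied to tests in place of the
# plain pytest.mark.django_db marker, e.g.
#
#     @pytest_mark_django_db_decorator(transaction=True)
#     def test_something(sentry_init, client):
#         ...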
# File: sentry-python-1.39.2/tests/integrations/excepthook/test_excepthook.py
import pytest
import sys
import subprocess

from textwrap import dedent


def test_excepthook(tmpdir):
    app = tmpdir.join("app.py")
    app.write(
        dedent(
            """
    from sentry_sdk import init, transport

    def send_event(self, event):
        print("capture event was called")
        print(event)

    transport.HttpTransport._send_event = send_event

    init("http://foobar@localhost/123")

    frame_value = "LOL"

    1/0
    """
        )
    )

    with pytest.raises(subprocess.CalledProcessError) as excinfo:
        subprocess.check_output([sys.executable, str(app)], stderr=subprocess.STDOUT)

    output = excinfo.value.output
    print(output)

    assert b"ZeroDivisionError" in output
    assert b"LOL" in output
    assert b"capture event was called" in output


def test_always_value_excepthook(tmpdir):
    app = tmpdir.join("app.py")
    app.write(
        dedent(
            """
    import sys
    from sentry_sdk import init, transport
    from sentry_sdk.integrations.excepthook import ExcepthookIntegration

    def send_event(self, event):
        print("capture event was called")
        print(event)

    transport.HttpTransport._send_event = send_event

    sys.ps1 = "always_value_test"
    init("http://foobar@localhost/123",
        integrations=[ExcepthookIntegration(always_run=True)]
    )

    frame_value = "LOL"

    1/0
    """
        )
    )

    with pytest.raises(subprocess.CalledProcessError) as excinfo:
        subprocess.check_output([sys.executable, str(app)], stderr=subprocess.STDOUT)

    output = excinfo.value.output
    print(output)

    assert b"ZeroDivisionError" in output
    assert b"LOL" in output
    assert b"capture event was called" in output
# File: sentry-python-1.39.2/tests/integrations/falcon/__init__.py
import pytest

pytest.importorskip("falcon")
# File: sentry-python-1.39.2/tests/integrations/falcon/test_falcon.py
from __future__ import absolute_import

import logging

import pytest

import falcon
import falcon.testing
import sentry_sdk
from sentry_sdk.integrations.falcon import FalconIntegration
from sentry_sdk.integrations.logging import LoggingIntegration
from sentry_sdk.utils import parse_version


try:
    import falcon.asgi
except ImportError:
    pass
else:
    import falcon.inspect  # We only need this module for the ASGI test


FALCON_VERSION = parse_version(falcon.__version__)


@pytest.fixture
def make_app(sentry_init):
    def inner():
        class MessageResource:
            def on_get(self, req, resp):
                sentry_sdk.capture_message("hi")
                resp.media = "hi"

        class MessageByIdResource:
            def on_get(self, req, resp, message_id):
                sentry_sdk.capture_message("hi")
                resp.media = "hi"

        class CustomError(Exception):
            pass

        class CustomErrorResource:
            def on_get(self, req, resp):
                raise CustomError()

        def custom_error_handler(*args, **kwargs):
            raise falcon.HTTPError(status=falcon.HTTP_400)

        app = falcon.API()
        app.add_route("/message", MessageResource())
        app.add_route("/message/{message_id:int}", MessageByIdResource())
        app.add_route("/custom-error", CustomErrorResource())

        app.add_error_handler(CustomError, custom_error_handler)

        return app

    return inner


@pytest.fixture
def make_client(make_app):
    def inner():
        app = make_app()
        return falcon.testing.TestClient(app)

    return inner


def test_has_context(sentry_init, capture_events, make_client):
    sentry_init(integrations=[FalconIntegration()])
    events = capture_events()

    client = make_client()
    response = client.simulate_get("/message")
    assert response.status == falcon.HTTP_200

    (event,) = events
    assert event["transaction"] == "/message"  # Falcon URI template
    assert "data" not in event["request"]
    assert event["request"]["url"] == "http://falconframework.org/message"


@pytest.mark.parametrize(
    "url,transaction_style,expected_transaction,expected_source",
    [
        ("/message", "uri_template", "/message", "route"),
        ("/message", "path", "/message", "url"),
        ("/message/123456", "uri_template", "/message/{message_id:int}", "route"),
        ("/message/123456", "path", "/message/123456", "url"),
    ],
)
def test_transaction_style(
    sentry_init,
    make_client,
    capture_events,
    url,
    transaction_style,
    expected_transaction,
    expected_source,
):
    integration = FalconIntegration(transaction_style=transaction_style)
    sentry_init(integrations=[integration])
    events = capture_events()

    client = make_client()
    response = client.simulate_get(url)
    assert response.status == falcon.HTTP_200

    (event,) = events
    assert event["transaction"] == expected_transaction
    assert event["transaction_info"] == {"source": expected_source}


def test_unhandled_errors(sentry_init, capture_exceptions, capture_events):
    sentry_init(integrations=[FalconIntegration()], debug=True)

    class Resource:
        def on_get(self, req, resp):
            1 / 0

    app = falcon.API()
    app.add_route("/", Resource())

    exceptions = capture_exceptions()
    events = capture_events()

    client = falcon.testing.TestClient(app)

    try:
        client.simulate_get("/")
    except ZeroDivisionError:
        pass

    (exc,) = exceptions
    assert isinstance(exc, ZeroDivisionError)

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "falcon"
    assert " by zero" in event["exception"]["values"][0]["value"]


def test_raised_5xx_errors(sentry_init, capture_exceptions, capture_events):
    sentry_init(integrations=[FalconIntegration()], debug=True)

    class Resource:
        def on_get(self, req, resp):
            raise falcon.HTTPError(falcon.HTTP_502)

    app = falcon.API()
    app.add_route("/", Resource())

    exceptions = capture_exceptions()
    events = capture_events()

    client = falcon.testing.TestClient(app)
    client.simulate_get("/")

    (exc,) = exceptions
    assert isinstance(exc, falcon.HTTPError)

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "falcon"
    assert event["exception"]["values"][0]["type"] == "HTTPError"


def test_raised_4xx_errors(sentry_init, capture_exceptions, capture_events):
    sentry_init(integrations=[FalconIntegration()], debug=True)

    class Resource:
        def on_get(self, req, resp):
            raise falcon.HTTPError(falcon.HTTP_400)

    app = falcon.API()
    app.add_route("/", Resource())

    exceptions = capture_exceptions()
    events = capture_events()

    client = falcon.testing.TestClient(app)
    client.simulate_get("/")

    assert len(exceptions) == 0
    assert len(events) == 0


def test_http_status(sentry_init, capture_exceptions, capture_events):
    """
    This just demonstrates that if Falcon raises an HTTPStatus with a 5xx code
    (instead of an HTTPError), Sentry will not capture it.
    """
    sentry_init(integrations=[FalconIntegration()], debug=True)

    class Resource:
        def on_get(self, req, resp):
            raise falcon.http_status.HTTPStatus(falcon.HTTP_508)

    app = falcon.API()
    app.add_route("/", Resource())

    exceptions = capture_exceptions()
    events = capture_events()

    client = falcon.testing.TestClient(app)
    client.simulate_get("/")

    assert len(exceptions) == 0
    assert len(events) == 0


def test_falcon_large_json_request(sentry_init, capture_events):
    sentry_init(integrations=[FalconIntegration()])

    data = {"foo": {"bar": "a" * 2000}}

    class Resource:
        def on_post(self, req, resp):
            assert req.media == data
            sentry_sdk.capture_message("hi")
            resp.media = "ok"

    app = falcon.API()
    app.add_route("/", Resource())

    events = capture_events()

    client = falcon.testing.TestClient(app)
    response = client.simulate_post("/", json=data)
    assert response.status == falcon.HTTP_200

    (event,) = events
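    # The "_meta" entry records the trimming: the original 2000-character
    # value was cut to the 1024-character limit (1021 characters plus "...").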
    assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
    }
    assert len(event["request"]["data"]["foo"]["bar"]) == 1024


@pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
def test_falcon_empty_json_request(sentry_init, capture_events, data):
    sentry_init(integrations=[FalconIntegration()])

    class Resource:
        def on_post(self, req, resp):
            assert req.media == data
            sentry_sdk.capture_message("hi")
            resp.media = "ok"

    app = falcon.API()
    app.add_route("/", Resource())

    events = capture_events()

    client = falcon.testing.TestClient(app)
    response = client.simulate_post("/", json=data)
    assert response.status == falcon.HTTP_200

    (event,) = events
    assert event["request"]["data"] == data


def test_falcon_raw_data_request(sentry_init, capture_events):
    sentry_init(integrations=[FalconIntegration()])

    class Resource:
        def on_post(self, req, resp):
            sentry_sdk.capture_message("hi")
            resp.media = "ok"

    app = falcon.API()
    app.add_route("/", Resource())

    events = capture_events()

    client = falcon.testing.TestClient(app)
    response = client.simulate_post("/", body="hi")
    assert response.status == falcon.HTTP_200

    (event,) = events
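    # A raw (non-JSON, non-form) body is not captured: only the headers are
    # reported, and the request data stays empty.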
    assert event["request"]["headers"]["Content-Length"] == "2"
    assert event["request"]["data"] == ""


def test_logging(sentry_init, capture_events):
    sentry_init(
        integrations=[FalconIntegration(), LoggingIntegration(event_level="ERROR")]
    )

    logger = logging.getLogger()

    app = falcon.API()

    class Resource:
        def on_get(self, req, resp):
            logger.error("hi")
            resp.media = "ok"

    app.add_route("/", Resource())

    events = capture_events()

    client = falcon.testing.TestClient(app)
    client.simulate_get("/")

    (event,) = events
    assert event["level"] == "error"


def test_500(sentry_init, capture_events):
    sentry_init(integrations=[FalconIntegration()])

    app = falcon.API()

    class Resource:
        def on_get(self, req, resp):
            1 / 0

    app.add_route("/", Resource())

    def http500_handler(ex, req, resp, params):
        sentry_sdk.capture_exception(ex)
        resp.media = {"message": "Sentry error: %s" % sentry_sdk.last_event_id()}

    app.add_error_handler(Exception, http500_handler)

    events = capture_events()

    client = falcon.testing.TestClient(app)
    response = client.simulate_get("/")

    (event,) = events
    assert response.json == {"message": "Sentry error: %s" % event["event_id"]}


def test_error_in_errorhandler(sentry_init, capture_events):
    sentry_init(integrations=[FalconIntegration()])

    app = falcon.API()

    class Resource:
        def on_get(self, req, resp):
            raise ValueError()

    app.add_route("/", Resource())

    def http500_handler(ex, req, resp, params):
        1 / 0

    app.add_error_handler(Exception, http500_handler)

    events = capture_events()

    client = falcon.testing.TestClient(app)

    with pytest.raises(ZeroDivisionError):
        client.simulate_get("/")

    (event,) = events

    last_ex_values = event["exception"]["values"][-1]
    assert last_ex_values["type"] == "ZeroDivisionError"
    assert last_ex_values["stacktrace"]["frames"][-1]["vars"]["ex"] == "ValueError()"


def test_bad_request_not_captured(sentry_init, capture_events):
    sentry_init(integrations=[FalconIntegration()])
    events = capture_events()

    app = falcon.API()

    class Resource:
        def on_get(self, req, resp):
            raise falcon.HTTPBadRequest()

    app.add_route("/", Resource())

    client = falcon.testing.TestClient(app)

    client.simulate_get("/")

    assert not events


def test_does_not_leak_scope(sentry_init, capture_events):
    sentry_init(integrations=[FalconIntegration()])
    events = capture_events()

    with sentry_sdk.configure_scope() as scope:
        scope.set_tag("request_data", False)

    app = falcon.API()

    class Resource:
        def on_get(self, req, resp):
            with sentry_sdk.configure_scope() as scope:
                scope.set_tag("request_data", True)

            def generator():
                for row in range(1000):
                    with sentry_sdk.configure_scope() as scope:
                        assert scope._tags["request_data"]

                    yield (str(row) + "\n").encode()

            resp.stream = generator()

    app.add_route("/", Resource())

    client = falcon.testing.TestClient(app)
    response = client.simulate_get("/")

    expected_response = "".join(str(row) + "\n" for row in range(1000))
    assert response.text == expected_response
    assert not events

    with sentry_sdk.configure_scope() as scope:
        assert not scope._tags["request_data"]


@pytest.mark.skipif(
    not hasattr(falcon, "asgi"), reason="This Falcon version lacks ASGI support."
)
def test_falcon_not_breaking_asgi(sentry_init):
    """
    This test simply verifies that the Falcon integration does not break ASGI
    Falcon apps.

    The test does not verify ASGI Falcon support, since our Falcon integration
    currently lacks support for ASGI Falcon apps.
    """
    sentry_init(integrations=[FalconIntegration()])

    asgi_app = falcon.asgi.App()

    try:
        falcon.inspect.inspect_app(asgi_app)
    except TypeError:
        pytest.fail("Falcon integration causing errors in ASGI apps.")


@pytest.mark.skipif(
    (FALCON_VERSION or ()) < (3,),
    reason="The Sentry Falcon integration only supports custom error handlers on Falcon 3+",
)
def test_falcon_custom_error_handler(sentry_init, make_app, capture_events):
    """
    When a custom error handler handles what otherwise would have resulted in a 5xx error,
    changing the HTTP status to a non-5xx status, no error event should be sent to Sentry.
    """
    sentry_init(integrations=[FalconIntegration()])
    events = capture_events()

    app = make_app()
    client = falcon.testing.TestClient(app)

    client.simulate_get("/custom-error")

    assert len(events) == 0
# File: sentry-python-1.39.2/tests/integrations/fastapi/__init__.py
import pytest

pytest.importorskip("fastapi")
# File: sentry-python-1.39.2/tests/integrations/fastapi/test_fastapi.py
import json
import logging
import threading

import pytest
from sentry_sdk.integrations.fastapi import FastApiIntegration

from fastapi import FastAPI, Request
from fastapi.testclient import TestClient
from fastapi.middleware.trustedhost import TrustedHostMiddleware
from sentry_sdk import capture_message
from sentry_sdk.integrations.starlette import StarletteIntegration
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware

try:
    from unittest import mock  # python 3.3 and above
except ImportError:
    import mock  # python < 3.3


def fastapi_app_factory():
    app = FastAPI()

    @app.get("/error")
    async def _error():
        capture_message("Hi")
        1 / 0
        return {"message": "Hi"}

    @app.get("/message")
    async def _message():
        capture_message("Hi")
        return {"message": "Hi"}

    @app.get("/message/{message_id}")
    async def _message_with_id(message_id):
        capture_message("Hi")
        return {"message": "Hi"}

    @app.get("/sync/thread_ids")
    def _thread_ids_sync():
        return {
            "main": str(threading.main_thread().ident),
            "active": str(threading.current_thread().ident),
        }

    @app.get("/async/thread_ids")
    async def _thread_ids_async():
        return {
            "main": str(threading.main_thread().ident),
            "active": str(threading.current_thread().ident),
        }

    return app


@pytest.mark.asyncio
async def test_response(sentry_init, capture_events):
    # FastAPI is heavily based on Starlette, so we also need
    # to enable StarletteIntegration.
    # In the future this will be enabled automatically.
    sentry_init(
        integrations=[StarletteIntegration(), FastApiIntegration()],
        traces_sample_rate=1.0,
        send_default_pii=True,
        debug=True,
    )

    app = fastapi_app_factory()

    events = capture_events()

    client = TestClient(app)
    response = client.get("/message")

    assert response.json() == {"message": "Hi"}

    assert len(events) == 2

    (message_event, transaction_event) = events
    assert message_event["message"] == "Hi"
    assert transaction_event["transaction"] == "/message"


@pytest.mark.parametrize(
    "url,transaction_style,expected_transaction,expected_source",
    [
        (
            "/message",
            "url",
            "/message",
            "route",
        ),
        (
            "/message",
            "endpoint",
            "tests.integrations.fastapi.test_fastapi.fastapi_app_factory.._message",
            "component",
        ),
        (
            "/message/123456",
            "url",
            "/message/{message_id}",
            "route",
        ),
        (
            "/message/123456",
            "endpoint",
            "tests.integrations.fastapi.test_fastapi.fastapi_app_factory.._message_with_id",
            "component",
        ),
    ],
)
def test_transaction_style(
    sentry_init,
    capture_events,
    url,
    transaction_style,
    expected_transaction,
    expected_source,
):
    sentry_init(
        integrations=[
            StarletteIntegration(transaction_style=transaction_style),
            FastApiIntegration(transaction_style=transaction_style),
        ],
    )
    app = fastapi_app_factory()

    events = capture_events()

    client = TestClient(app)
    client.get(url)

    (event,) = events
    assert event["transaction"] == expected_transaction
    assert event["transaction_info"] == {"source": expected_source}

    # Assert that state is not leaked
    events.clear()
    capture_message("foo")
    (event,) = events

    assert "request" not in event
    assert "transaction" not in event


def test_legacy_setup(
    sentry_init,
    capture_events,
):
    # Check that behaviour does not change
    # if the user just adds the new Integrations
    # and forgets to remove SentryAsgiMiddleware
    sentry_init()
    app = fastapi_app_factory()
    asgi_app = SentryAsgiMiddleware(app)

    events = capture_events()

    client = TestClient(asgi_app)
    client.get("/message/123456")

    (event,) = events
    assert event["transaction"] == "/message/{message_id}"


@pytest.mark.parametrize("endpoint", ["/sync/thread_ids", "/async/thread_ids"])
@mock.patch("sentry_sdk.profiler.PROFILE_MINIMUM_SAMPLES", 0)
def test_active_thread_id(sentry_init, capture_envelopes, teardown_profiling, endpoint):
    sentry_init(
        traces_sample_rate=1.0,
        _experiments={"profiles_sample_rate": 1.0},
    )
    app = fastapi_app_factory()
    asgi_app = SentryAsgiMiddleware(app)

    envelopes = capture_envelopes()

    client = TestClient(asgi_app)
    response = client.get(endpoint)
    assert response.status_code == 200

    data = json.loads(response.content)

    envelopes = [envelope for envelope in envelopes]
    assert len(envelopes) == 1

    profiles = [item for item in envelopes[0].items if item.type == "profile"]
    assert len(profiles) == 1

    for profile in profiles:
        transactions = profile.payload.json["transactions"]
        assert len(transactions) == 1
        assert str(data["active"]) == transactions[0]["active_thread_id"]


@pytest.mark.asyncio
async def test_original_request_not_scrubbed(sentry_init, capture_events):
    sentry_init(
        integrations=[StarletteIntegration(), FastApiIntegration()],
        traces_sample_rate=1.0,
        debug=True,
    )

    app = FastAPI()

    @app.post("/error")
    async def _error(request: Request):
        logging.critical("Oh no!")
        assert request.headers["Authorization"] == "Bearer ohno"
        assert await request.json() == {"password": "secret"}

        return {"error": "Oh no!"}

    events = capture_events()

    client = TestClient(app)
    client.post(
        "/error", json={"password": "secret"}, headers={"Authorization": "Bearer ohno"}
    )

    event = events[0]
    assert event["request"]["data"] == {"password": "[Filtered]"}
    assert event["request"]["headers"]["authorization"] == "[Filtered]"


@pytest.mark.asyncio
def test_response_status_code_ok_in_transaction_context(sentry_init, capture_envelopes):
    """
    Tests that the response status code is added to the transaction "response" context.
    """
    sentry_init(
        integrations=[StarletteIntegration(), FastApiIntegration()],
        traces_sample_rate=1.0,
        release="demo-release",
    )

    envelopes = capture_envelopes()

    app = fastapi_app_factory()

    client = TestClient(app)
    client.get("/message")

    (_, transaction_envelope) = envelopes
    transaction = transaction_envelope.get_transaction_event()

    assert transaction["type"] == "transaction"
    assert len(transaction["contexts"]) > 0
    assert (
        "response" in transaction["contexts"].keys()
    ), "Response context not found in transaction"
    assert transaction["contexts"]["response"]["status_code"] == 200


@pytest.mark.asyncio
def test_response_status_code_error_in_transaction_context(
    sentry_init,
    capture_envelopes,
):
    """
    Tests that the response status code is added to the transaction "response" context.
    """
    sentry_init(
        integrations=[StarletteIntegration(), FastApiIntegration()],
        traces_sample_rate=1.0,
        release="demo-release",
    )

    envelopes = capture_envelopes()

    app = fastapi_app_factory()

    client = TestClient(app)
    with pytest.raises(ZeroDivisionError):
        client.get("/error")

    (
        _,
        _,
        transaction_envelope,
    ) = envelopes
    transaction = transaction_envelope.get_transaction_event()

    assert transaction["type"] == "transaction"
    assert len(transaction["contexts"]) > 0
    assert (
        "response" in transaction["contexts"].keys()
    ), "Response context not found in transaction"
    assert transaction["contexts"]["response"]["status_code"] == 500


@pytest.mark.asyncio
def test_response_status_code_not_found_in_transaction_context(
    sentry_init,
    capture_envelopes,
):
    """
    Tests that the response status code is added to the transaction "response" context.
    """
    sentry_init(
        integrations=[StarletteIntegration(), FastApiIntegration()],
        traces_sample_rate=1.0,
        release="demo-release",
    )

    envelopes = capture_envelopes()

    app = fastapi_app_factory()

    client = TestClient(app)
    client.get("/non-existing-route-123")

    (transaction_envelope,) = envelopes
    transaction = transaction_envelope.get_transaction_event()

    assert transaction["type"] == "transaction"
    assert len(transaction["contexts"]) > 0
    assert (
        "response" in transaction["contexts"].keys()
    ), "Response context not found in transaction"
    assert transaction["contexts"]["response"]["status_code"] == 404


@pytest.mark.parametrize(
    "request_url,transaction_style,expected_transaction_name,expected_transaction_source",
    [
        (
            "/message/123456",
            "endpoint",
            "tests.integrations.fastapi.test_fastapi.fastapi_app_factory.._message_with_id",
            "component",
        ),
        (
            "/message/123456",
            "url",
            "/message/{message_id}",
            "route",
        ),
    ],
)
def test_transaction_name(
    sentry_init,
    request_url,
    transaction_style,
    expected_transaction_name,
    expected_transaction_source,
    capture_envelopes,
):
    """
    Tests that the transaction name is something meaningful.
    """
    sentry_init(
        auto_enabling_integrations=False,  # Make sure the httpx integration is not added, because it adds tracing information to the Starlette test client's requests.
        integrations=[
            StarletteIntegration(transaction_style=transaction_style),
            FastApiIntegration(transaction_style=transaction_style),
        ],
        traces_sample_rate=1.0,
        debug=True,
    )

    envelopes = capture_envelopes()

    app = fastapi_app_factory()

    client = TestClient(app)
    client.get(request_url)

    (_, transaction_envelope) = envelopes
    transaction_event = transaction_envelope.get_transaction_event()

    assert transaction_event["transaction"] == expected_transaction_name
    assert (
        transaction_event["transaction_info"]["source"] == expected_transaction_source
    )


def test_route_endpoint_equal_dependant_call(sentry_init):
    """
    Tests that the route endpoint name is equal to the wrapped dependant call name.
    """
    sentry_init(
        auto_enabling_integrations=False,  # Make sure the httpx integration is not added, because it adds tracing information to the Starlette test client's requests.
        integrations=[
            StarletteIntegration(),
            FastApiIntegration(),
        ],
        traces_sample_rate=1.0,
        debug=True,
    )

    app = fastapi_app_factory()

    for route in app.router.routes:
        if not hasattr(route, "dependant"):
            continue
        assert route.endpoint.__qualname__ == route.dependant.call.__qualname__


@pytest.mark.parametrize(
    "request_url,transaction_style,expected_transaction_name,expected_transaction_source",
    [
        (
            "/message/123456",
            "endpoint",
            "http://testserver/message/123456",
            "url",
        ),
        (
            "/message/123456",
            "url",
            "http://testserver/message/123456",
            "url",
        ),
    ],
)
def test_transaction_name_in_traces_sampler(
    sentry_init,
    request_url,
    transaction_style,
    expected_transaction_name,
    expected_transaction_source,
):
    """
    Tests that a custom traces_sampler retrieves a meaningful transaction name.
    In this case the URL or endpoint, because we do not have the route yet.
    """

    def dummy_traces_sampler(sampling_context):
        assert (
            sampling_context["transaction_context"]["name"] == expected_transaction_name
        )
        assert (
            sampling_context["transaction_context"]["source"]
            == expected_transaction_source
        )

    sentry_init(
        auto_enabling_integrations=False,  # Make sure the httpx integration is not added, because it adds tracing information to the Starlette test client's requests.
        integrations=[StarletteIntegration(transaction_style=transaction_style)],
        traces_sampler=dummy_traces_sampler,
        traces_sample_rate=1.0,
        debug=True,
    )

    app = fastapi_app_factory()

    client = TestClient(app)
    client.get(request_url)


@pytest.mark.parametrize(
    "request_url,transaction_style,expected_transaction_name,expected_transaction_source",
    [
        (
            "/message/123456",
            "endpoint",
            "starlette.middleware.trustedhost.TrustedHostMiddleware",
            "component",
        ),
        (
            "/message/123456",
            "url",
            "http://testserver/message/123456",
            "url",
        ),
    ],
)
def test_transaction_name_in_middleware(
    sentry_init,
    request_url,
    transaction_style,
    expected_transaction_name,
    expected_transaction_source,
    capture_envelopes,
):
    """
    Tests that the transaction name is something meaningful.
    """
    sentry_init(
        auto_enabling_integrations=False,  # Make sure the httpx integration is not added, because it adds tracing information to the Starlette test client's requests.
        integrations=[
            StarletteIntegration(transaction_style=transaction_style),
            FastApiIntegration(transaction_style=transaction_style),
        ],
        traces_sample_rate=1.0,
        debug=True,
    )

    envelopes = capture_envelopes()

    app = fastapi_app_factory()

    app.add_middleware(
        TrustedHostMiddleware,
        allowed_hosts=[
            "example.com",
        ],
    )

    client = TestClient(app)
    client.get(request_url)

    (transaction_envelope,) = envelopes
    transaction_event = transaction_envelope.get_transaction_event()

    assert transaction_event["contexts"]["response"]["status_code"] == 400
    assert transaction_event["transaction"] == expected_transaction_name
    assert (
        transaction_event["transaction_info"]["source"] == expected_transaction_source
    )
# File: sentry-python-1.39.2/tests/integrations/flask/__init__.py
import pytest

pytest.importorskip("flask")
# File: sentry-python-1.39.2/tests/integrations/flask/test_flask.py
import json
import re
import logging
from io import BytesIO

import pytest
from flask import (
    Flask,
    Response,
    request,
    abort,
    stream_with_context,
    render_template_string,
)
from flask.views import View
from flask_login import LoginManager, login_user

try:
    from werkzeug.wrappers.request import UnsupportedMediaType
except ImportError:
    UnsupportedMediaType = None

import sentry_sdk.integrations.flask as flask_sentry
from sentry_sdk import (
    set_tag,
    configure_scope,
    capture_message,
    capture_exception,
    last_event_id,
    Hub,
)
from sentry_sdk.integrations.logging import LoggingIntegration
from sentry_sdk.serializer import MAX_DATABAG_BREADTH


login_manager = LoginManager()


@pytest.fixture
def app():
    app = Flask(__name__)
    app.config["TESTING"] = True
    app.secret_key = "haha"

    login_manager.init_app(app)

    @app.route("/message")
    def hi():
        capture_message("hi")
        return "ok"

    @app.route("/message/")
    def hi_with_id(message_id):
        capture_message("hi again")
        return "ok"

    return app


@pytest.fixture(params=("auto", "manual"))
def integration_enabled_params(request):
    if request.param == "auto":
        return {"auto_enabling_integrations": True}
    elif request.param == "manual":
        return {"integrations": [flask_sentry.FlaskIntegration()]}
    else:
        raise ValueError(request.param)


def test_has_context(sentry_init, app, capture_events):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
    events = capture_events()

    client = app.test_client()
    response = client.get("/message")
    assert response.status_code == 200

    (event,) = events
    assert event["transaction"] == "hi"
    assert "data" not in event["request"]
    assert event["request"]["url"] == "http://localhost/message"


@pytest.mark.parametrize(
    "url,transaction_style,expected_transaction,expected_source",
    [
        ("/message", "endpoint", "hi", "component"),
        ("/message", "url", "/message", "route"),
        ("/message/123456", "endpoint", "hi_with_id", "component"),
        ("/message/123456", "url", "/message/", "route"),
    ],
)
def test_transaction_style(
    sentry_init,
    app,
    capture_events,
    url,
    transaction_style,
    expected_transaction,
    expected_source,
):
    sentry_init(
        integrations=[
            flask_sentry.FlaskIntegration(transaction_style=transaction_style)
        ]
    )
    events = capture_events()

    client = app.test_client()
    response = client.get(url)
    assert response.status_code == 200

    (event,) = events
    assert event["transaction"] == expected_transaction
    assert event["transaction_info"] == {"source": expected_source}


@pytest.mark.parametrize("debug", (True, False))
@pytest.mark.parametrize("testing", (True, False))
def test_errors(
    sentry_init,
    capture_exceptions,
    capture_events,
    app,
    debug,
    testing,
    integration_enabled_params,
):
    sentry_init(debug=True, **integration_enabled_params)

    app.debug = debug
    app.testing = testing

    @app.route("/")
    def index():
        1 / 0

    exceptions = capture_exceptions()
    events = capture_events()

    client = app.test_client()
    try:
        client.get("/")
    except ZeroDivisionError:
        pass

    (exc,) = exceptions
    assert isinstance(exc, ZeroDivisionError)

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "flask"


def test_flask_login_not_installed(
    sentry_init, app, capture_events, monkeypatch, integration_enabled_params
):
    sentry_init(**integration_enabled_params)

    monkeypatch.setattr(flask_sentry, "flask_login", None)

    events = capture_events()

    client = app.test_client()
    client.get("/message")

    (event,) = events
    assert event.get("user", {}).get("id") is None


def test_flask_login_not_configured(
    sentry_init, app, capture_events, monkeypatch, integration_enabled_params
):
    sentry_init(**integration_enabled_params)

    assert flask_sentry.flask_login

    events = capture_events()
    client = app.test_client()
    client.get("/message")

    (event,) = events
    assert event.get("user", {}).get("id") is None


def test_flask_login_partially_configured(
    sentry_init, app, capture_events, monkeypatch, integration_enabled_params
):
    sentry_init(**integration_enabled_params)

    events = capture_events()

    login_manager = LoginManager()
    login_manager.init_app(app)

    client = app.test_client()
    client.get("/message")

    (event,) = events
    assert event.get("user", {}).get("id") is None


@pytest.mark.parametrize("send_default_pii", [True, False])
@pytest.mark.parametrize("user_id", [None, "42", 3])
def test_flask_login_configured(
    send_default_pii,
    sentry_init,
    app,
    user_id,
    capture_events,
    monkeypatch,
    integration_enabled_params,
):
    sentry_init(send_default_pii=send_default_pii, **integration_enabled_params)

    class User(object):
        is_authenticated = is_active = True
        is_anonymous = user_id is not None

        def get_id(self):
            return str(user_id)

    @login_manager.user_loader
    def load_user(user_id):
        if user_id is not None:
            return User()

    @app.route("/login")
    def login():
        if user_id is not None:
            login_user(User())
        return "ok"

    events = capture_events()

    client = app.test_client()
    assert client.get("/login").status_code == 200
    assert not events

    assert client.get("/message").status_code == 200

    (event,) = events
    if user_id is None or not send_default_pii:
        assert event.get("user", {}).get("id") is None
    else:
        assert event["user"]["id"] == str(user_id)


def test_flask_large_json_request(sentry_init, capture_events, app):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])

    data = {"foo": {"bar": "a" * 2000}}

    @app.route("/", methods=["POST"])
    def index():
        assert request.get_json() == data
        assert request.get_data() == json.dumps(data).encode("ascii")
        assert not request.form
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = app.test_client()
    response = client.post("/", content_type="application/json", data=json.dumps(data))
    assert response.status_code == 200

    (event,) = events
    assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
    }
    assert len(event["request"]["data"]["foo"]["bar"]) == 1024


def test_flask_session_tracking(sentry_init, capture_envelopes, app):
    sentry_init(
        integrations=[flask_sentry.FlaskIntegration()],
        release="demo-release",
    )

    @app.route("/")
    def index():
        with configure_scope() as scope:
            scope.set_user({"ip_address": "1.2.3.4", "id": "42"})
        try:
            raise ValueError("stuff")
        except Exception:
            logging.exception("stuff happened")
        1 / 0

    envelopes = capture_envelopes()

    with app.test_client() as client:
        try:
            client.get("/", headers={"User-Agent": "blafasel/1.0"})
        except ZeroDivisionError:
            pass

    Hub.current.client.flush()

    (first_event, error_event, session) = envelopes
    first_event = first_event.get_event()
    error_event = error_event.get_event()
    session = session.items[0].payload.json
    aggregates = session["aggregates"]

    assert first_event["exception"]["values"][0]["type"] == "ValueError"
    assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"

    assert len(aggregates) == 1
    assert aggregates[0]["crashed"] == 1
    assert aggregates[0]["started"]
    assert session["attrs"]["release"] == "demo-release"


@pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
def test_flask_empty_json_request(sentry_init, capture_events, app, data):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])

    @app.route("/", methods=["POST"])
    def index():
        assert request.get_json() == data
        assert request.get_data() == json.dumps(data).encode("ascii")
        assert not request.form
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = app.test_client()
    response = client.post("/", content_type="application/json", data=json.dumps(data))
    assert response.status_code == 200

    (event,) = events
    assert event["request"]["data"] == data


def test_flask_medium_formdata_request(sentry_init, capture_events, app):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])

    data = {"foo": "a" * 2000}

    @app.route("/", methods=["POST"])
    def index():
        assert request.form["foo"] == data["foo"]
        assert not request.get_data()
        try:
            assert not request.get_json()
        except UnsupportedMediaType:
            # flask/werkzeug 3
            pass
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = app.test_client()
    response = client.post("/", data=data)
    assert response.status_code == 200

    (event,) = events
    assert event["_meta"]["request"]["data"]["foo"] == {
        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
    }
    assert len(event["request"]["data"]["foo"]) == 1024


def test_flask_formdata_request_appear_transaction_body(
    sentry_init, capture_events, app
):
    """
    Test that ensures that the transaction's request data contains the body, even if no exception was raised
    """
    sentry_init(integrations=[flask_sentry.FlaskIntegration()], traces_sample_rate=1.0)

    data = {"username": "sentry-user", "age": "26"}

    @app.route("/", methods=["POST"])
    def index():
        assert request.form["username"] == data["username"]
        assert request.form["age"] == data["age"]
        assert not request.get_data()
        try:
            assert not request.get_json()
        except UnsupportedMediaType:
            # flask/werkzeug 3
            pass
        set_tag("view", "yes")
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = app.test_client()
    response = client.post("/", data=data)
    assert response.status_code == 200

    event, transaction_event = events

    assert "request" in transaction_event
    assert "data" in transaction_event["request"]
    assert transaction_event["request"]["data"] == data


@pytest.mark.parametrize("input_char", ["a", b"a"])
def test_flask_too_large_raw_request(sentry_init, input_char, capture_events, app):
    sentry_init(
        integrations=[flask_sentry.FlaskIntegration()], max_request_body_size="small"
    )

    data = input_char * 2000

    @app.route("/", methods=["POST"])
    def index():
        assert not request.form
        if isinstance(data, bytes):
            assert request.get_data() == data
        else:
            assert request.get_data() == data.encode("ascii")
        try:
            assert not request.get_json()
        except UnsupportedMediaType:
            # flask/werkzeug 3
            pass
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = app.test_client()
    response = client.post("/", data=data)
    assert response.status_code == 200

    (event,) = events
    assert event["_meta"]["request"]["data"] == {"": {"rem": [["!config", "x"]]}}
    assert not event["request"]["data"]


def test_flask_files_and_form(sentry_init, capture_events, app):
    sentry_init(
        integrations=[flask_sentry.FlaskIntegration()], max_request_body_size="always"
    )

    data = {"foo": "a" * 2000, "file": (BytesIO(b"hello"), "hello.txt")}

    @app.route("/", methods=["POST"])
    def index():
        assert list(request.form) == ["foo"]
        assert list(request.files) == ["file"]
        try:
            assert not request.get_json()
        except UnsupportedMediaType:
            # flask/werkzeug 3
            pass
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = app.test_client()
    response = client.post("/", data=data)
    assert response.status_code == 200

    (event,) = events
    assert event["_meta"]["request"]["data"]["foo"] == {
        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
    }
    assert len(event["request"]["data"]["foo"]) == 1024

    assert event["_meta"]["request"]["data"]["file"] == {"": {"rem": [["!raw", "x"]]}}
    assert not event["request"]["data"]["file"]


def test_json_not_truncated_if_max_request_body_size_is_always(
    sentry_init, capture_events, app
):
    sentry_init(
        integrations=[flask_sentry.FlaskIntegration()], max_request_body_size="always"
    )

    data = {
        "key{}".format(i): "value{}".format(i) for i in range(MAX_DATABAG_BREADTH + 10)
    }

    @app.route("/", methods=["POST"])
    def index():
        assert request.get_json() == data
        assert request.get_data() == json.dumps(data).encode("ascii")
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = app.test_client()
    response = client.post("/", content_type="application/json", data=json.dumps(data))
    assert response.status_code == 200

    (event,) = events
    assert event["request"]["data"] == data


@pytest.mark.parametrize(
    "integrations",
    [
        [flask_sentry.FlaskIntegration()],
        [flask_sentry.FlaskIntegration(), LoggingIntegration(event_level="ERROR")],
    ],
)
def test_errors_not_reported_twice(sentry_init, integrations, capture_events, app):
    sentry_init(integrations=integrations)

    @app.route("/")
    def index():
        try:
            1 / 0
        except Exception as e:
            app.logger.exception(e)
            raise e

    events = capture_events()

    client = app.test_client()
    with pytest.raises(ZeroDivisionError):
        client.get("/")

    assert len(events) == 1


def test_logging(sentry_init, capture_events, app):
    # ensure that Flask's logger magic doesn't break ours
    sentry_init(
        integrations=[
            flask_sentry.FlaskIntegration(),
            LoggingIntegration(event_level="ERROR"),
        ]
    )

    @app.route("/")
    def index():
        app.logger.error("hi")
        return "ok"

    events = capture_events()

    client = app.test_client()
    client.get("/")

    (event,) = events
    assert event["level"] == "error"


def test_no_errors_without_request(app, sentry_init):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
    with app.app_context():
        capture_exception(ValueError())


def test_cli_commands_raise(app):
    if not hasattr(app, "cli"):
        pytest.skip("Too old flask version")

    from flask.cli import ScriptInfo

    @app.cli.command()
    def foo():
        1 / 0

    def create_app(*_):
        return app

    with pytest.raises(ZeroDivisionError):
        app.cli.main(
            args=["foo"], prog_name="myapp", obj=ScriptInfo(create_app=create_app)
        )


def test_wsgi_level_error_is_caught(
    app, capture_exceptions, capture_events, sentry_init
):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])

    def wsgi_app(environ, start_response):
        1 / 0

    app.wsgi_app = wsgi_app

    client = app.test_client()

    exceptions = capture_exceptions()
    events = capture_events()

    with pytest.raises(ZeroDivisionError) as exc:
        client.get("/")

    (error,) = exceptions

    assert error is exc.value

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "wsgi"


def test_500(sentry_init, capture_events, app):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])

    app.debug = False
    app.testing = False

    @app.route("/")
    def index():
        1 / 0

    @app.errorhandler(500)
    def error_handler(err):
        return "Sentry error: %s" % last_event_id()

    events = capture_events()

    client = app.test_client()
    response = client.get("/")

    (event,) = events
    assert response.data.decode("utf-8") == "Sentry error: %s" % event["event_id"]


def test_error_in_errorhandler(sentry_init, capture_events, app):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])

    app.debug = False
    app.testing = False

    @app.route("/")
    def index():
        raise ValueError()

    @app.errorhandler(500)
    def error_handler(err):
        1 / 0

    events = capture_events()

    client = app.test_client()

    with pytest.raises(ZeroDivisionError):
        client.get("/")

    event1, event2 = events

    (exception,) = event1["exception"]["values"]
    assert exception["type"] == "ValueError"

    exception = event2["exception"]["values"][-1]
    assert exception["type"] == "ZeroDivisionError"


def test_bad_request_not_captured(sentry_init, capture_events, app):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
    events = capture_events()

    @app.route("/")
    def index():
        abort(400)

    client = app.test_client()

    client.get("/")

    assert not events


def test_does_not_leak_scope(sentry_init, capture_events, app):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
    events = capture_events()

    with configure_scope() as scope:
        scope.set_tag("request_data", False)

    @app.route("/")
    def index():
        with configure_scope() as scope:
            scope.set_tag("request_data", True)

        def generate():
            for row in range(1000):
                with configure_scope() as scope:
                    assert scope._tags["request_data"]

                yield str(row) + "\n"

        return Response(stream_with_context(generate()), mimetype="text/csv")

    client = app.test_client()
    response = client.get("/")
    assert response.data.decode() == "".join(str(row) + "\n" for row in range(1000))
    assert not events

    with configure_scope() as scope:
        assert not scope._tags["request_data"]


def test_scoped_test_client(sentry_init, app):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])

    @app.route("/")
    def index():
        return "ok"

    with app.test_client() as client:
        response = client.get("/")
        assert response.status_code == 200


@pytest.mark.parametrize("exc_cls", [ZeroDivisionError, Exception])
def test_errorhandler_for_exception_swallows_exception(
    sentry_init, app, capture_events, exc_cls
):
    # In contrast to error handlers for a status code, error
    # handlers for exceptions can swallow the exception (this is
    # just how the Flask signal works)
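    # (Sketch of that mechanism: Flask fires got_request_exception only for
    # unhandled exceptions; once an errorhandler registered for an exception
    # class returns a response, the request counts as handled and the SDK
    # never sees the error.)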
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
    events = capture_events()

    @app.route("/")
    def index():
        1 / 0

    @app.errorhandler(exc_cls)
    def zerodivision(e):
        return "ok"

    with app.test_client() as client:
        response = client.get("/")
        assert response.status_code == 200

    assert not events


def test_tracing_success(sentry_init, capture_events, app):
    sentry_init(traces_sample_rate=1.0, integrations=[flask_sentry.FlaskIntegration()])

    @app.before_request
    def _():
        set_tag("before_request", "yes")

    @app.route("/message_tx")
    def hi_tx():
        set_tag("view", "yes")
        capture_message("hi")
        return "ok"

    events = capture_events()

    with app.test_client() as client:
        response = client.get("/message_tx")
        assert response.status_code == 200

    message_event, transaction_event = events

    assert transaction_event["type"] == "transaction"
    assert transaction_event["transaction"] == "hi_tx"
    assert transaction_event["contexts"]["trace"]["status"] == "ok"
    assert transaction_event["tags"]["view"] == "yes"
    assert transaction_event["tags"]["before_request"] == "yes"

    assert message_event["message"] == "hi"
    assert message_event["transaction"] == "hi_tx"
    assert message_event["tags"]["view"] == "yes"
    assert message_event["tags"]["before_request"] == "yes"


def test_tracing_error(sentry_init, capture_events, app):
    sentry_init(traces_sample_rate=1.0, integrations=[flask_sentry.FlaskIntegration()])

    events = capture_events()

    @app.route("/error")
    def error():
        1 / 0

    with pytest.raises(ZeroDivisionError):
        with app.test_client() as client:
            response = client.get("/error")
            assert response.status_code == 500

    error_event, transaction_event = events

    assert transaction_event["type"] == "transaction"
    assert transaction_event["transaction"] == "error"
    assert transaction_event["contexts"]["trace"]["status"] == "internal_error"

    assert error_event["transaction"] == "error"
    (exception,) = error_event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"


def test_error_has_trace_context_if_tracing_disabled(sentry_init, capture_events, app):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])

    events = capture_events()

    @app.route("/error")
    def error():
        1 / 0

    with pytest.raises(ZeroDivisionError):
        with app.test_client() as client:
            response = client.get("/error")
            assert response.status_code == 500

    (error_event,) = events

    assert error_event["contexts"]["trace"]


def test_class_based_views(sentry_init, app, capture_events):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
    events = capture_events()

    @app.route("/")
    class HelloClass(View):
        def dispatch_request(self):
            capture_message("hi")
            return "ok"

    app.add_url_rule("/hello-class/", view_func=HelloClass.as_view("hello_class"))

    with app.test_client() as client:
        response = client.get("/hello-class/")
        assert response.status_code == 200

    (event,) = events

    assert event["message"] == "hi"
    assert event["transaction"] == "hello_class"


@pytest.mark.parametrize(
    "template_string", ["{{ sentry_trace }}", "{{ sentry_trace_meta }}"]
)
def test_template_tracing_meta(sentry_init, app, capture_events, template_string):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
    events = capture_events()

    @app.route("/")
    def index():
        hub = Hub.current
        capture_message(hub.get_traceparent() + "\n" + hub.get_baggage())
        return render_template_string(template_string)

    with app.test_client() as client:
        response = client.get("/")
        assert response.status_code == 200

        rendered_meta = response.data.decode("utf-8")
        traceparent, baggage = events[0]["message"].split("\n")
        assert traceparent != ""
        assert baggage != ""

    match = re.match(
        r'^<meta name="sentry-trace" content="([^\"]*)">'
        r'<meta name="baggage" content="([^\"]*)">',
        rendered_meta,
    )
    assert match is not None
    assert match.group(1) == traceparent

    # Python 2 does not preserve sort order
    rendered_baggage = match.group(2)
    assert sorted(rendered_baggage.split(",")) == sorted(baggage.split(","))


def test_dont_override_sentry_trace_context(sentry_init, app):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])

    @app.route("/")
    def index():
        return render_template_string("{{ sentry_trace }}", sentry_trace="hi")

    with app.test_client() as client:
        response = client.get("/")
        assert response.status_code == 200
        assert response.data == b"hi"


def test_request_not_modified_by_reference(sentry_init, capture_events, app):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])

    @app.route("/", methods=["POST"])
    def index():
        logging.critical("oops")
        assert request.get_json() == {"password": "ohno"}
        assert request.headers["Authorization"] == "Bearer ohno"
        return "ok"

    events = capture_events()

    client = app.test_client()
    client.post(
        "/", json={"password": "ohno"}, headers={"Authorization": "Bearer ohno"}
    )

    (event,) = events

    assert event["request"]["data"]["password"] == "[Filtered]"
    assert event["request"]["headers"]["Authorization"] == "[Filtered]"


@pytest.mark.parametrize("traces_sample_rate", [None, 1.0])
def test_replay_event_context(sentry_init, capture_events, app, traces_sample_rate):
    """
    Tests that the replay context is added to the event context.
    This is not strictly a Flask integration test, but it's the easiest way to test this.
    """
    sentry_init(traces_sample_rate=traces_sample_rate)

    @app.route("/error")
    def error():
        return 1 / 0

    events = capture_events()

    client = app.test_client()
    headers = {
        "baggage": "other-vendor-value-1=foo;bar;baz,sentry-trace_id=771a43a4192642f0b136d5159a501700,sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie,other-vendor-value-2=foo;bar,sentry-replay_id=12312012123120121231201212312012",
        "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-1",
    }
    with pytest.raises(ZeroDivisionError):
        client.get("/error", headers=headers)

    event = events[0]

    assert event["contexts"]
    assert event["contexts"]["replay"]
    assert (
        event["contexts"]["replay"]["replay_id"] == "12312012123120121231201212312012"
    )


def test_response_status_code_ok_in_transaction_context(
    sentry_init, capture_envelopes, app
):
    """
    Tests that the response status code is added to the transaction context.
    This should also hold when an exception is raised during the request, but the Flask test app does not seem to trigger that case.
    """
    sentry_init(
        integrations=[flask_sentry.FlaskIntegration()],
        traces_sample_rate=1.0,
        release="demo-release",
    )

    envelopes = capture_envelopes()

    client = app.test_client()
    client.get("/message")

    Hub.current.client.flush()

    (_, transaction_envelope, _) = envelopes
    transaction = transaction_envelope.get_transaction_event()

    assert transaction["type"] == "transaction"
    assert len(transaction["contexts"]) > 0
    assert (
        "response" in transaction["contexts"].keys()
    ), "Response context not found in transaction"
    assert transaction["contexts"]["response"]["status_code"] == 200


def test_response_status_code_not_found_in_transaction_context(
    sentry_init, capture_envelopes, app
):
    sentry_init(
        integrations=[flask_sentry.FlaskIntegration()],
        traces_sample_rate=1.0,
        release="demo-release",
    )

    envelopes = capture_envelopes()

    client = app.test_client()
    client.get("/not-existing-route")

    Hub.current.client.flush()

    (transaction_envelope, _) = envelopes
    transaction = transaction_envelope.get_transaction_event()

    assert transaction["type"] == "transaction"
    assert len(transaction["contexts"]) > 0
    assert (
        "response" in transaction["contexts"].keys()
    ), "Response context not found in transaction"
    assert transaction["contexts"]["response"]["status_code"] == 404
# File: sentry-python-1.39.2/tests/integrations/gcp/test_gcp.py
"""
# GCP Cloud Functions unit tests

"""
import json
from textwrap import dedent
import tempfile
import sys
import subprocess

import pytest
import os.path
import os

pytestmark = pytest.mark.skipif(
    not hasattr(tempfile, "TemporaryDirectory"), reason="need Python 3.2+"
)


FUNCTIONS_PRELUDE = """
from unittest.mock import Mock
import __main__ as gcp_functions
import os

# Initializing all the necessary environment variables
os.environ["FUNCTION_TIMEOUT_SEC"] = "3"
os.environ["FUNCTION_NAME"] = "Google Cloud function"
os.environ["ENTRY_POINT"] = "cloud_function"
os.environ["FUNCTION_IDENTITY"] = "func_ID"
os.environ["FUNCTION_REGION"] = "us-central1"
os.environ["GCP_PROJECT"] = "serverless_project"

def log_return_value(func):
    def inner(*args, **kwargs):
        rv = func(*args, **kwargs)

        print("\\nRETURN VALUE: {}\\n".format(json.dumps(rv)))

        return rv

    return inner

gcp_functions.worker_v1 = Mock()
gcp_functions.worker_v1.FunctionHandler = Mock()
gcp_functions.worker_v1.FunctionHandler.invoke_user_function = log_return_value(cloud_function)


import sentry_sdk
from sentry_sdk.integrations.gcp import GcpIntegration
import json
import time

from sentry_sdk.transport import HttpTransport

def event_processor(event):
    # Adding delay which would allow us to capture events.
    time.sleep(1)
    return event

def envelope_processor(envelope):
    (item,) = envelope.items
    return item.get_bytes()

class TestTransport(HttpTransport):
    def _send_event(self, event):
        event = event_processor(event)
        # Writing a single string to stdout holds the GIL (seems like) and
        # therefore cannot be interleaved with other threads. This is why we
        # explicitly add a newline at the end even though `print` would provide
        # us one.
        print("\\nEVENT: {}\\n".format(json.dumps(event)))

    def _send_envelope(self, envelope):
        envelope = envelope_processor(envelope)
        print("\\nENVELOPE: {}\\n".format(envelope.decode(\"utf-8\")))


def init_sdk(timeout_warning=False, **extra_init_args):
    sentry_sdk.init(
        dsn="https://123abc@example.com/123",
        transport=TestTransport,
        integrations=[GcpIntegration(timeout_warning=timeout_warning)],
        shutdown_timeout=10,
        # excepthook -> dedupe -> event_processor client report gets added
        # which we don't really care about for these tests
        send_client_reports=False,
        **extra_init_args
    )

"""


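# Note: the prelude above makes the sandboxed cloud function print its results
# with "EVENT: ", "ENVELOPE: " and "RETURN VALUE: " markers; the fixture below
# runs the generated script in a subprocess and parses those markers back out
# of stdout.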
@pytest.fixture
def run_cloud_function():
    def inner(code, subprocess_kwargs=()):
        events = []
        envelopes = []
        return_value = None

        # STEP: Package the cloud function code and run it in a subprocess

        subprocess_kwargs = dict(subprocess_kwargs)

        with tempfile.TemporaryDirectory() as tmpdir:
            main_py = os.path.join(tmpdir, "main.py")
            with open(main_py, "w") as f:
                f.write(code)

            setup_cfg = os.path.join(tmpdir, "setup.cfg")

            with open(setup_cfg, "w") as f:
                f.write("[install]\nprefix=")

            subprocess.check_call(
                [sys.executable, "setup.py", "sdist", "-d", os.path.join(tmpdir, "..")],
                **subprocess_kwargs
            )

            subprocess.check_call(
                "pip install ../*.tar.gz -t .",
                cwd=tmpdir,
                shell=True,
                **subprocess_kwargs
            )

            stream = os.popen("python {}/main.py".format(tmpdir))
            stream_data = stream.read()

            stream.close()

            for line in stream_data.splitlines():
                print("GCP:", line)
                if line.startswith("EVENT: "):
                    line = line[len("EVENT: ") :]
                    events.append(json.loads(line))
                elif line.startswith("ENVELOPE: "):
                    line = line[len("ENVELOPE: ") :]
                    envelopes.append(json.loads(line))
                elif line.startswith("RETURN VALUE: "):
                    line = line[len("RETURN VALUE: ") :]
                    return_value = json.loads(line)
                else:
                    continue

        return envelopes, events, return_value

    return inner


def test_handled_exception(run_cloud_function):
    _, events, return_value = run_cloud_function(
        dedent(
            """
        functionhandler = None
        event = {}
        def cloud_function(functionhandler, event):
            raise Exception("something went wrong")
        """
        )
        + FUNCTIONS_PRELUDE
        + dedent(
            """
        init_sdk(timeout_warning=False)
        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
        """
        )
    )
    assert events[0]["level"] == "error"
    (exception,) = events[0]["exception"]["values"]

    assert exception["type"] == "Exception"
    assert exception["value"] == "something went wrong"
    assert exception["mechanism"]["type"] == "gcp"
    assert not exception["mechanism"]["handled"]


def test_unhandled_exception(run_cloud_function):
    _, events, _ = run_cloud_function(
        dedent(
            """
        functionhandler = None
        event = {}
        def cloud_function(functionhandler, event):
            x = 3/0
            return "3"
        """
        )
        + FUNCTIONS_PRELUDE
        + dedent(
            """
        init_sdk(timeout_warning=False)
        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
        """
        )
    )
    assert events[0]["level"] == "error"
    (exception,) = events[0]["exception"]["values"]

    assert exception["type"] == "ZeroDivisionError"
    assert exception["value"] == "division by zero"
    assert exception["mechanism"]["type"] == "gcp"
    assert not exception["mechanism"]["handled"]


def test_timeout_error(run_cloud_function):
    _, events, _ = run_cloud_function(
        dedent(
            """
        functionhandler = None
        event = {}
        def cloud_function(functionhandler, event):
            time.sleep(10)
            return "3"
        """
        )
        + FUNCTIONS_PRELUDE
        + dedent(
            """
        init_sdk(timeout_warning=True)
        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
        """
        )
    )
    assert events[0]["level"] == "error"
    (exception,) = events[0]["exception"]["values"]

    assert exception["type"] == "ServerlessTimeoutWarning"
    assert (
        exception["value"]
        == "WARNING : Function is expected to get timed out. Configured timeout duration = 3 seconds."
    )
    assert exception["mechanism"]["type"] == "threading"
    assert not exception["mechanism"]["handled"]


def test_performance_no_error(run_cloud_function):
    envelopes, _, _ = run_cloud_function(
        dedent(
            """
        functionhandler = None
        event = {}
        def cloud_function(functionhandler, event):
            return "test_string"
        """
        )
        + FUNCTIONS_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=1.0)
        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
        """
        )
    )

    assert envelopes[0]["type"] == "transaction"
    assert envelopes[0]["contexts"]["trace"]["op"] == "function.gcp"
    assert envelopes[0]["transaction"].startswith("Google Cloud function")
    assert envelopes[0]["transaction_info"] == {"source": "component"}
    assert envelopes[0]["transaction"] in envelopes[0]["request"]["url"]


def test_performance_error(run_cloud_function):
    envelopes, events, _ = run_cloud_function(
        dedent(
            """
        functionhandler = None
        event = {}
        def cloud_function(functionhandler, event):
            raise Exception("something went wrong")
        """
        )
        + FUNCTIONS_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=1.0)
        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
        """
        )
    )

    assert envelopes[0]["level"] == "error"
    (exception,) = envelopes[0]["exception"]["values"]

    assert exception["type"] == "Exception"
    assert exception["value"] == "something went wrong"
    assert exception["mechanism"]["type"] == "gcp"
    assert not exception["mechanism"]["handled"]

    assert envelopes[1]["type"] == "transaction"
    assert envelopes[1]["contexts"]["trace"]["op"] == "function.gcp"
    assert envelopes[1]["transaction"].startswith("Google Cloud function")
    assert envelopes[1]["transaction"] in envelopes[0]["request"]["url"]


def test_traces_sampler_gets_correct_values_in_sampling_context(
    run_cloud_function, DictionaryContaining  # noqa:N803
):
    # TODO: There are some decent sized hacks below. For more context, see the
    # long comment in the test of the same name in the AWS integration. The
    # situations there and here aren't identical, but they're similar enough
    # that solving one would probably solve both.
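    # (For context: DictionaryContaining is a helper from conftest.py whose
    # __eq__ presumably returns True when the compared dict contains at least
    # the given key/value pairs; its source is inlined into the sandboxed
    # script below via inspect.getsource.)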

    import inspect

    envelopes, events, return_value = run_cloud_function(
        dedent(
            """
            functionhandler = None
            event = {
                "type": "chase",
                "chasers": ["Maisey", "Charlie"],
                "num_squirrels": 2,
            }
            def cloud_function(functionhandler, event):
                # this runs after the transaction has started, which means we
                # can make assertions about traces_sampler
                try:
                    traces_sampler.assert_any_call(
                        DictionaryContaining({
                            "gcp_env": DictionaryContaining({
                                "function_name": "chase_into_tree",
                                "function_region": "dogpark",
                                "function_project": "SquirrelChasing",
                            }),
                            "gcp_event": {
                                "type": "chase",
                                "chasers": ["Maisey", "Charlie"],
                                "num_squirrels": 2,
                            },
                        })
                    )
                except AssertionError:
                    # catch the error and return it because the error itself will
                    # get swallowed by the SDK as an "internal exception"
                    return {"AssertionError raised": True,}

                return {"AssertionError raised": False,}
            """
        )
        + FUNCTIONS_PRELUDE
        + dedent(inspect.getsource(DictionaryContaining))
        + dedent(
            """
            os.environ["FUNCTION_NAME"] = "chase_into_tree"
            os.environ["FUNCTION_REGION"] = "dogpark"
            os.environ["GCP_PROJECT"] = "SquirrelChasing"

            def _safe_is_equal(x, y):
                # copied from conftest.py - see docstring and comments there
                try:
                    is_equal = x.__eq__(y)
                except AttributeError:
                    is_equal = NotImplemented

                if is_equal == NotImplemented:
                    return x == y

                return is_equal

            traces_sampler = Mock(return_value=True)

            init_sdk(
                traces_sampler=traces_sampler,
            )

            gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
            """
        )
    )

    assert return_value["AssertionError raised"] is False


def test_error_has_new_trace_context_performance_enabled(run_cloud_function):
    """
    Check that a 'trace' context is added to errors and transactions when performance monitoring is enabled.
    """
    envelopes, _, _ = run_cloud_function(
        dedent(
            """
        functionhandler = None
        event = {}
        def cloud_function(functionhandler, event):
            sentry_sdk.capture_message("hi")
            x = 3/0
            return "3"
        """
        )
        + FUNCTIONS_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=1.0)
        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
        """
        )
    )
    (msg_event, error_event, transaction_event) = envelopes

    assert "trace" in msg_event["contexts"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert "trace" in error_event["contexts"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert "trace" in transaction_event["contexts"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
        == transaction_event["contexts"]["trace"]["trace_id"]
    )


def test_error_has_new_trace_context_performance_disabled(run_cloud_function):
    """
    Check that a 'trace' context is added to errors and transactions when performance monitoring is disabled.
    """
    _, events, _ = run_cloud_function(
        dedent(
            """
        functionhandler = None
        event = {}
        def cloud_function(functionhandler, event):
            sentry_sdk.capture_message("hi")
            x = 3/0
            return "3"
        """
        )
        + FUNCTIONS_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=None)  # this is the default, just added for clarity
        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
        """
        )
    )

    (msg_event, error_event) = events

    assert "trace" in msg_event["contexts"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert "trace" in error_event["contexts"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
    )


def test_error_has_existing_trace_context_performance_enabled(run_cloud_function):
    """
    Check that a 'trace' context is added to errors and transactions
    from the incoming 'sentry-trace' header when performance monitoring is enabled.
    """
    trace_id = "471a43a4192642f0b136d5159a501701"
    parent_span_id = "6e8f22c393e68f19"
    parent_sampled = 1
    sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)

    envelopes, _, _ = run_cloud_function(
        dedent(
            """
        functionhandler = None

        from collections import namedtuple
        GCPEvent = namedtuple("GCPEvent", ["headers"])
        event = GCPEvent(headers={"sentry-trace": "%s"})

        def cloud_function(functionhandler, event):
            sentry_sdk.capture_message("hi")
            x = 3/0
            return "3"
        """
            % sentry_trace_header
        )
        + FUNCTIONS_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=1.0)
        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
        """
        )
    )
    (msg_event, error_event, transaction_event) = envelopes

    assert "trace" in msg_event["contexts"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert "trace" in error_event["contexts"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert "trace" in transaction_event["contexts"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
        == transaction_event["contexts"]["trace"]["trace_id"]
        == "471a43a4192642f0b136d5159a501701"
    )


def test_error_has_existing_trace_context_performance_disabled(run_cloud_function):
    """
    Check that a 'trace' context is added to errors and transactions
    from the incoming 'sentry-trace' header when performance monitoring is disabled.
    """
    trace_id = "471a43a4192642f0b136d5159a501701"
    parent_span_id = "6e8f22c393e68f19"
    parent_sampled = 1
    sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)

    _, events, _ = run_cloud_function(
        dedent(
            """
        functionhandler = None

        from collections import namedtuple
        GCPEvent = namedtuple("GCPEvent", ["headers"])
        event = GCPEvent(headers={"sentry-trace": "%s"})

        def cloud_function(functionhandler, event):
            sentry_sdk.capture_message("hi")
            x = 3/0
            return "3"
        """
            % sentry_trace_header
        )
        + FUNCTIONS_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=None)  # this is the default, just added for clarity
        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
        """
        )
    )
    (msg_event, error_event) = events

    assert "trace" in msg_event["contexts"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert "trace" in error_event["contexts"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
        == "471a43a4192642f0b136d5159a501701"
    )
# File: sentry-python-1.39.2/tests/integrations/gql/__init__.py
import pytest

pytest.importorskip("gql")
# File: sentry-python-1.39.2/tests/integrations/gql/test_gql.py
import pytest

import responses
from gql import gql
from gql import Client
from gql.transport.exceptions import TransportQueryError
from gql.transport.requests import RequestsHTTPTransport
from graphql import DocumentNode
from sentry_sdk.integrations.gql import GQLIntegration
from unittest.mock import MagicMock, patch


class _MockClientBase(MagicMock):
    """
    Mocked version of GQL Client class, following same spec as GQL Client.
    """

    def __init__(self, *args, **kwargs):
        kwargs["spec"] = Client
        super().__init__(*args, **kwargs)

    transport = MagicMock()


@responses.activate
def _execute_mock_query(response_json):
    url = "http://example.com/graphql"
    query_string = """
        query Example {
            example
        }
    """

    # Mock the GraphQL server response
    responses.add(
        method=responses.POST,
        url=url,
        json=response_json,
        status=200,
    )

    transport = RequestsHTTPTransport(url=url)
    client = Client(transport=transport)
    query = gql(query_string)

    return client.execute(query)


def _make_erroneous_query(capture_events):
    """
    Make an erroneous GraphQL query, and assert that the error was reraised, that
    exactly one event was recorded, and that the exception recorded was a
    TransportQueryError. Then, return the event to allow further verifications.
    """
    events = capture_events()
    response_json = {"errors": ["something bad happened"]}

    with pytest.raises(TransportQueryError):
        _execute_mock_query(response_json)

    assert (
        len(events) == 1
    ), "the sdk captured %d events, but 1 event was expected" % len(events)

    (event,) = events
    (exception,) = event["exception"]["values"]

    assert (
        exception["type"] == "TransportQueryError"
    ), "%s was captured, but we expected a TransportQueryError" % exception(type)

    assert "request" in event

    return event


def test_gql_init(sentry_init):
    """
    Integration test to ensure we can initialize the SDK with the GQL Integration
    """
    sentry_init(integrations=[GQLIntegration()])


@patch("sentry_sdk.integrations.gql.Hub")
def test_setup_once_patches_execute_and_patched_function_calls_original(_):
    """
    Unit test which ensures the following:
        1. The GQLIntegration setup_once function patches the gql.Client.execute method
        2. The patched gql.Client.execute method still calls the original method, and it
           forwards its arguments to the original method.
        3. The patched gql.Client.execute method returns the same value that the original
           method returns.
    """
    original_method_return_value = MagicMock()

    class OriginalMockClient(_MockClientBase):
        """
        This mock client always returns the mock original_method_return_value when a query
        is executed. This can be used to simulate successful GraphQL queries.
        """

        execute = MagicMock(
            spec=Client.execute, return_value=original_method_return_value
        )

    original_execute_method = OriginalMockClient.execute

    with patch(
        "sentry_sdk.integrations.gql.gql.Client", new=OriginalMockClient
    ) as PatchedMockClient:  # noqa: N806
        # Below line should patch the PatchedMockClient with Sentry SDK magic
        GQLIntegration.setup_once()

        # We expect GQLIntegration.setup_once to patch the execute method.
        assert (
            PatchedMockClient.execute is not original_execute_method
        ), "execute method not patched"

        # Now, let's instantiate a client and send it a query. Original execute still should get called.
        mock_query = MagicMock(spec=DocumentNode)
        client_instance = PatchedMockClient()
        patched_method_return_value = client_instance.execute(mock_query)

    # Here, we check that the original execute was called
    original_execute_method.assert_called_once_with(client_instance, mock_query)

    # Also, let's verify that the patched execute returns the expected value.
    assert (
        patched_method_return_value is original_method_return_value
    ), "pathced execute method returns a different value than the original execute method"


@patch("sentry_sdk.integrations.gql.event_from_exception")
@patch("sentry_sdk.integrations.gql.Hub")
def test_patched_gql_execute_captures_and_reraises_graphql_exception(
    mock_hub, mock_event_from_exception
):
    """
    Unit test which ensures that in the case that calling the execute method results in a
    TransportQueryError (which gql raises when a GraphQL error occurs), the patched method
    captures the event on the current Hub and it reraises the error.
    """
    mock_event_from_exception.return_value = (dict(), MagicMock())

    class OriginalMockClient(_MockClientBase):
        """
        This mock client always raises a TransportQueryError when a GraphQL query is attempted.
        This simulates a GraphQL query which results in errors.
        """

        execute = MagicMock(
            spec=Client.execute, side_effect=TransportQueryError("query failed")
        )

    with patch(
        "sentry_sdk.integrations.gql.gql.Client", new=OriginalMockClient
    ) as PatchedMockClient:  # noqa: N806
        # Below line should patch the PatchedMockClient with Sentry SDK magic
        GQLIntegration.setup_once()

        mock_query = MagicMock(spec=DocumentNode)
        client_instance = PatchedMockClient()

        # The error should still get raised even though we have instrumented the execute method.
        with pytest.raises(TransportQueryError):
            client_instance.execute(mock_query)

    # However, we should have also captured the error on the hub.
    mock_capture_event = mock_hub.current.capture_event
    mock_capture_event.assert_called_once()


def test_real_gql_request_no_error(sentry_init, capture_events):
    """
    Integration test verifying that the GQLIntegration works as expected with successful query.
    """
    sentry_init(integrations=[GQLIntegration()])
    events = capture_events()

    response_data = {"example": "This is the example"}
    response_json = {"data": response_data}

    result = _execute_mock_query(response_json)

    assert (
        result == response_data
    ), "client.execute returned a different value from what it received from the server"
    assert (
        len(events) == 0
    ), "the sdk captured an event, even though the query was successful"


def test_real_gql_request_with_error_no_pii(sentry_init, capture_events):
    """
    Integration test verifying that the GQLIntegration works as expected with query resulting
    in a GraphQL error, and that PII is not sent.
    """
    sentry_init(integrations=[GQLIntegration()])

    event = _make_erroneous_query(capture_events)

    assert "data" not in event["request"]
    assert "response" not in event["contexts"]


def test_real_gql_request_with_error_with_pii(sentry_init, capture_events):
    """
    Integration test verifying that the GQLIntegration works as expected with query resulting
    in a GraphQL error, and that PII is sent when send_default_pii is enabled.
    """
    sentry_init(integrations=[GQLIntegration()], send_default_pii=True)

    event = _make_erroneous_query(capture_events)

    assert "data" in event["request"]
    assert "response" in event["contexts"]
# File: sentry-python-1.39.2/tests/integrations/graphene/__init__.py
import pytest

pytest.importorskip("graphene")
pytest.importorskip("fastapi")
pytest.importorskip("flask")
# File: sentry-python-1.39.2/tests/integrations/graphene/test_graphene_py3.py
from fastapi import FastAPI, Request
from fastapi.testclient import TestClient
from flask import Flask, request, jsonify
from graphene import ObjectType, String, Schema

from sentry_sdk.integrations.fastapi import FastApiIntegration
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.graphene import GrapheneIntegration
from sentry_sdk.integrations.starlette import StarletteIntegration


class Query(ObjectType):
    hello = String(first_name=String(default_value="stranger"))
    goodbye = String()

    def resolve_hello(root, info, first_name):  # noqa: N805
        return "Hello {}!".format(first_name)

    def resolve_goodbye(root, info):  # noqa: N805
        raise RuntimeError("oh no!")


def test_capture_request_if_available_and_send_pii_is_on_async(
    sentry_init, capture_events
):
    sentry_init(
        send_default_pii=True,
        integrations=[
            GrapheneIntegration(),
            FastApiIntegration(),
            StarletteIntegration(),
        ],
    )
    events = capture_events()

    schema = Schema(query=Query)

    async_app = FastAPI()

    @async_app.post("/graphql")
    async def graphql_server_async(request: Request):
        data = await request.json()
        result = await schema.execute_async(data["query"])
        return result.data

    query = {"query": "query ErrorQuery {goodbye}"}
    client = TestClient(async_app)
    client.post("/graphql", json=query)

    assert len(events) == 1

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "graphene"
    assert event["request"]["api_target"] == "graphql"
    assert event["request"]["data"] == query


def test_capture_request_if_available_and_send_pii_is_on_sync(
    sentry_init, capture_events
):
    sentry_init(
        send_default_pii=True,
        integrations=[GrapheneIntegration(), FlaskIntegration()],
    )
    events = capture_events()

    schema = Schema(query=Query)

    sync_app = Flask(__name__)

    @sync_app.route("/graphql", methods=["POST"])
    def graphql_server_sync():
        data = request.get_json()
        result = schema.execute(data["query"])
        return jsonify(result.data), 200

    query = {"query": "query ErrorQuery {goodbye}"}
    client = sync_app.test_client()
    client.post("/graphql", json=query)

    assert len(events) == 1

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "graphene"
    assert event["request"]["api_target"] == "graphql"
    assert event["request"]["data"] == query


def test_do_not_capture_request_if_send_pii_is_off_async(sentry_init, capture_events):
    sentry_init(
        integrations=[
            GrapheneIntegration(),
            FastApiIntegration(),
            StarletteIntegration(),
        ],
    )
    events = capture_events()

    schema = Schema(query=Query)

    async_app = FastAPI()

    @async_app.post("/graphql")
    async def graphql_server_async(request: Request):
        data = await request.json()
        result = await schema.execute_async(data["query"])
        return result.data

    query = {"query": "query ErrorQuery {goodbye}"}
    client = TestClient(async_app)
    client.post("/graphql", json=query)

    assert len(events) == 1

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "graphene"
    assert "data" not in event["request"]
    assert "response" not in event["contexts"]


def test_do_not_capture_request_if_send_pii_is_off_sync(sentry_init, capture_events):
    sentry_init(
        integrations=[GrapheneIntegration(), FlaskIntegration()],
    )
    events = capture_events()

    schema = Schema(query=Query)

    sync_app = Flask(__name__)

    @sync_app.route("/graphql", methods=["POST"])
    def graphql_server_sync():
        data = request.get_json()
        result = schema.execute(data["query"])
        return jsonify(result.data), 200

    query = {"query": "query ErrorQuery {goodbye}"}
    client = sync_app.test_client()
    client.post("/graphql", json=query)

    assert len(events) == 1

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "graphene"
    assert "data" not in event["request"]
    assert "response" not in event["contexts"]


def test_no_event_if_no_errors_async(sentry_init, capture_events):
    sentry_init(
        integrations=[
            GrapheneIntegration(),
            FastApiIntegration(),
            StarletteIntegration(),
        ],
    )
    events = capture_events()

    schema = Schema(query=Query)

    async_app = FastAPI()

    @async_app.post("/graphql")
    async def graphql_server_async(request: Request):
        data = await request.json()
        result = await schema.execute_async(data["query"])
        return result.data

    query = {
        "query": "query GreetingQuery { hello }",
    }
    client = TestClient(async_app)
    client.post("/graphql", json=query)

    assert len(events) == 0


def test_no_event_if_no_errors_sync(sentry_init, capture_events):
    sentry_init(
        integrations=[
            GrapheneIntegration(),
            FlaskIntegration(),
        ],
    )
    events = capture_events()

    schema = Schema(query=Query)

    sync_app = Flask(__name__)

    @sync_app.route("/graphql", methods=["POST"])
    def graphql_server_sync():
        data = request.get_json()
        result = schema.execute(data["query"])
        return jsonify(result.data), 200

    query = {
        "query": "query GreetingQuery { hello }",
    }
    client = sync_app.test_client()
    client.post("/graphql", json=query)

    assert len(events) == 0
sentry-python-1.39.2/tests/integrations/grpc/000077500000000000000000000000001454744723200212705ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/grpc/__init__.py000066400000000000000000000002611454744723200234000ustar00rootroot00000000000000import sys
from pathlib import Path

import pytest

# Make the autogenerated gRPC modules importable: the generated
# grpc_test_service_pb2_grpc module imports grpc_test_service_pb2 as a
# top-level module, so its directory must be on sys.path.
sys.path.append(str(Path(__file__).parent))
pytest.importorskip("grpc")
sentry-python-1.39.2/tests/integrations/grpc/compile_test_services.sh000077500000000000000000000006231454744723200262220ustar00rootroot00000000000000#!/usr/bin/env bash

# Run this script from the project root to generate the python code

TARGET_PATH=./tests/integrations/grpc

# Create python file
python -m grpc_tools.protoc \
    --proto_path=$TARGET_PATH/protos/ \
    --python_out=$TARGET_PATH/ \
    --pyi_out=$TARGET_PATH/ \
    --grpc_python_out=$TARGET_PATH/ \
    $TARGET_PATH/protos/grpc_test_service.proto

echo Code generation successful
sentry-python-1.39.2/tests/integrations/grpc/grpc_test_service_pb2.py000066400000000000000000000031101454744723200261120ustar00rootroot00000000000000# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler.  DO NOT EDIT!
# source: grpc_test_service.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17grpc_test_service.proto\x12\x10grpc_test_server\"\x1f\n\x0fgRPCTestMessage\x12\x0c\n\x04text\x18\x01 \x01(\t2\xf8\x02\n\x0fgRPCTestService\x12Q\n\tTestServe\x12!.grpc_test_server.gRPCTestMessage\x1a!.grpc_test_server.gRPCTestMessage\x12Y\n\x0fTestUnaryStream\x12!.grpc_test_server.gRPCTestMessage\x1a!.grpc_test_server.gRPCTestMessage0\x01\x12\\\n\x10TestStreamStream\x12!.grpc_test_server.gRPCTestMessage\x1a!.grpc_test_server.gRPCTestMessage(\x01\x30\x01\x12Y\n\x0fTestStreamUnary\x12!.grpc_test_server.gRPCTestMessage\x1a!.grpc_test_server.gRPCTestMessage(\x01\x62\x06proto3')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'grpc_test_service_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
  DESCRIPTOR._options = None
  _globals['_GRPCTESTMESSAGE']._serialized_start=45
  _globals['_GRPCTESTMESSAGE']._serialized_end=76
  _globals['_GRPCTESTSERVICE']._serialized_start=79
  _globals['_GRPCTESTSERVICE']._serialized_end=455
# @@protoc_insertion_point(module_scope)
sentry-python-1.39.2/tests/integrations/grpc/grpc_test_service_pb2.pyi000066400000000000000000000006051454744723200262710ustar00rootroot00000000000000from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from typing import ClassVar as _ClassVar, Optional as _Optional

DESCRIPTOR: _descriptor.FileDescriptor

class gRPCTestMessage(_message.Message):
    __slots__ = ["text"]
    TEXT_FIELD_NUMBER: _ClassVar[int]
    text: str
    def __init__(self, text: _Optional[str] = ...) -> None: ...
sentry-python-1.39.2/tests/integrations/grpc/grpc_test_service_pb2_grpc.py000066400000000000000000000166051454744723200271420ustar00rootroot00000000000000# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc

import grpc_test_service_pb2 as grpc__test__service__pb2


class gRPCTestServiceStub(object):
    """Missing associated documentation comment in .proto file."""

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        self.TestServe = channel.unary_unary(
                '/grpc_test_server.gRPCTestService/TestServe',
                request_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
                response_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
                )
        self.TestUnaryStream = channel.unary_stream(
                '/grpc_test_server.gRPCTestService/TestUnaryStream',
                request_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
                response_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
                )
        self.TestStreamStream = channel.stream_stream(
                '/grpc_test_server.gRPCTestService/TestStreamStream',
                request_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
                response_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
                )
        self.TestStreamUnary = channel.stream_unary(
                '/grpc_test_server.gRPCTestService/TestStreamUnary',
                request_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
                response_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
                )


class gRPCTestServiceServicer(object):
    """Missing associated documentation comment in .proto file."""

    def TestServe(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def TestUnaryStream(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def TestStreamStream(self, request_iterator, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def TestStreamUnary(self, request_iterator, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')


def add_gRPCTestServiceServicer_to_server(servicer, server):
    rpc_method_handlers = {
            'TestServe': grpc.unary_unary_rpc_method_handler(
                    servicer.TestServe,
                    request_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
                    response_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
            ),
            'TestUnaryStream': grpc.unary_stream_rpc_method_handler(
                    servicer.TestUnaryStream,
                    request_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
                    response_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
            ),
            'TestStreamStream': grpc.stream_stream_rpc_method_handler(
                    servicer.TestStreamStream,
                    request_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
                    response_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
            ),
            'TestStreamUnary': grpc.stream_unary_rpc_method_handler(
                    servicer.TestStreamUnary,
                    request_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
                    response_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'grpc_test_server.gRPCTestService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))


 # This class is part of an EXPERIMENTAL API.
class gRPCTestService(object):
    """Missing associated documentation comment in .proto file."""

    @staticmethod
    def TestServe(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/grpc_test_server.gRPCTestService/TestServe',
            grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
            grpc__test__service__pb2.gRPCTestMessage.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def TestUnaryStream(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_stream(request, target, '/grpc_test_server.gRPCTestService/TestUnaryStream',
            grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
            grpc__test__service__pb2.gRPCTestMessage.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def TestStreamStream(request_iterator,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.stream_stream(request_iterator, target, '/grpc_test_server.gRPCTestService/TestStreamStream',
            grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
            grpc__test__service__pb2.gRPCTestMessage.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def TestStreamUnary(request_iterator,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.stream_unary(request_iterator, target, '/grpc_test_server.gRPCTestService/TestStreamUnary',
            grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
            grpc__test__service__pb2.gRPCTestMessage.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
sentry-python-1.39.2/tests/integrations/grpc/protos/000077500000000000000000000000001454744723200226165ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/grpc/protos/grpc_test_service.proto000066400000000000000000000006311454744723200274150ustar00rootroot00000000000000syntax = "proto3";

package grpc_test_server;

service gRPCTestService{
  rpc TestServe(gRPCTestMessage) returns (gRPCTestMessage);
  rpc TestUnaryStream(gRPCTestMessage) returns (stream gRPCTestMessage);
  rpc TestStreamStream(stream gRPCTestMessage) returns (stream gRPCTestMessage);
  rpc TestStreamUnary(stream gRPCTestMessage) returns (gRPCTestMessage);
}

message gRPCTestMessage {
  string text = 1;
}
sentry-python-1.39.2/tests/integrations/grpc/test_grpc.py000066400000000000000000000242341454744723200236410ustar00rootroot00000000000000from __future__ import absolute_import

import os
from typing import List, Optional
from concurrent import futures
from unittest.mock import Mock

import grpc
import pytest

from sentry_sdk import Hub, start_transaction
from sentry_sdk.consts import OP
from sentry_sdk.integrations.grpc import GRPCIntegration
from tests.integrations.grpc.grpc_test_service_pb2 import gRPCTestMessage
from tests.integrations.grpc.grpc_test_service_pb2_grpc import (
    gRPCTestServiceServicer,
    add_gRPCTestServiceServicer_to_server,
    gRPCTestServiceStub,
)

PORT = 50051
PORT += os.getpid() % 100  # avoid port conflicts when running tests in parallel


@pytest.mark.forked
def test_grpc_server_starts_transaction(sentry_init, capture_events_forksafe):
    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
    events = capture_events_forksafe()

    server = _set_up()

    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
        stub = gRPCTestServiceStub(channel)
        stub.TestServe(gRPCTestMessage(text="test"))

    _tear_down(server=server)

    events.write_file.close()
    event = events.read_event()
    span = event["spans"][0]

    assert event["type"] == "transaction"
    assert event["transaction_info"] == {
        "source": "custom",
    }
    assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER
    assert span["op"] == "test"


@pytest.mark.forked
def test_grpc_server_other_interceptors(sentry_init, capture_events_forksafe):
    """Ensure compatibility with additional server interceptors."""
    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
    events = capture_events_forksafe()
    mock_intercept = lambda continuation, handler_call_details: continuation(
        handler_call_details
    )
    mock_interceptor = Mock()
    mock_interceptor.intercept_service.side_effect = mock_intercept

    server = _set_up(interceptors=[mock_interceptor])

    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
        stub = gRPCTestServiceStub(channel)
        stub.TestServe(gRPCTestMessage(text="test"))

    _tear_down(server=server)

    mock_interceptor.intercept_service.assert_called_once()

    events.write_file.close()
    event = events.read_event()
    span = event["spans"][0]

    assert event["type"] == "transaction"
    assert event["transaction_info"] == {
        "source": "custom",
    }
    assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER
    assert span["op"] == "test"


@pytest.mark.forked
def test_grpc_server_continues_transaction(sentry_init, capture_events_forksafe):
    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
    events = capture_events_forksafe()

    server = _set_up()

    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
        stub = gRPCTestServiceStub(channel)

        with start_transaction() as transaction:
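            # Propagate tracing information by hand via gRPC metadata:
            # "sentry-trace" has the form "<trace_id>-<span_id>-<sampled>" and
            # "baggage" carries comma-separated sentry-* key/value pairs, so the
            # server can continue the client's trace.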
            metadata = (
                (
                    "baggage",
                    "sentry-trace_id={trace_id},sentry-environment=test,"
                    "sentry-transaction=test-transaction,sentry-sample_rate=1.0".format(
                        trace_id=transaction.trace_id
                    ),
                ),
                (
                    "sentry-trace",
                    "{trace_id}-{parent_span_id}-{sampled}".format(
                        trace_id=transaction.trace_id,
                        parent_span_id=transaction.span_id,
                        sampled=1,
                    ),
                ),
            )
            stub.TestServe(gRPCTestMessage(text="test"), metadata=metadata)

    _tear_down(server=server)

    events.write_file.close()
    event = events.read_event()
    span = event["spans"][0]

    assert event["type"] == "transaction"
    assert event["transaction_info"] == {
        "source": "custom",
    }
    assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER
    assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id
    assert span["op"] == "test"


@pytest.mark.forked
def test_grpc_client_starts_span(sentry_init, capture_events_forksafe):
    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
    events = capture_events_forksafe()

    server = _set_up()

    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
        stub = gRPCTestServiceStub(channel)

        with start_transaction():
            stub.TestServe(gRPCTestMessage(text="test"))

    _tear_down(server=server)

    events.write_file.close()
    events.read_event()
    local_transaction = events.read_event()
    span = local_transaction["spans"][0]

    assert len(local_transaction["spans"]) == 1
    assert span["op"] == OP.GRPC_CLIENT
    assert (
        span["description"]
        == "unary unary call to /grpc_test_server.gRPCTestService/TestServe"
    )
    assert span["data"] == {
        "type": "unary unary",
        "method": "/grpc_test_server.gRPCTestService/TestServe",
        "code": "OK",
    }


@pytest.mark.forked
def test_grpc_client_unary_stream_starts_span(sentry_init, capture_events_forksafe):
    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
    events = capture_events_forksafe()

    server = _set_up()

    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
        stub = gRPCTestServiceStub(channel)

        with start_transaction():
            [el for el in stub.TestUnaryStream(gRPCTestMessage(text="test"))]

    _tear_down(server=server)

    events.write_file.close()
    local_transaction = events.read_event()
    span = local_transaction["spans"][0]

    assert len(local_transaction["spans"]) == 1
    assert span["op"] == OP.GRPC_CLIENT
    assert (
        span["description"]
        == "unary stream call to /grpc_test_server.gRPCTestService/TestUnaryStream"
    )
    assert span["data"] == {
        "type": "unary stream",
        "method": "/grpc_test_server.gRPCTestService/TestUnaryStream",
    }


# Using unittest.mock.Mock is not possible here because grpc verifies
# that the interceptor is of the correct type.
class MockClientInterceptor(grpc.UnaryUnaryClientInterceptor):
    call_counter = 0

    def intercept_unary_unary(self, continuation, client_call_details, request):
        self.__class__.call_counter += 1
        return continuation(client_call_details, request)


@pytest.mark.forked
def test_grpc_client_other_interceptor(sentry_init, capture_events_forksafe):
    """Ensure compatibility with additional client interceptors."""
    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
    events = capture_events_forksafe()

    server = _set_up()

    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
        channel = grpc.intercept_channel(channel, MockClientInterceptor())
        stub = gRPCTestServiceStub(channel)

        with start_transaction():
            stub.TestServe(gRPCTestMessage(text="test"))

    _tear_down(server=server)

    assert MockClientInterceptor.call_counter == 1

    events.write_file.close()
    events.read_event()
    local_transaction = events.read_event()
    span = local_transaction["spans"][0]

    assert len(local_transaction["spans"]) == 1
    assert span["op"] == OP.GRPC_CLIENT
    assert (
        span["description"]
        == "unary unary call to /grpc_test_server.gRPCTestService/TestServe"
    )
    assert span["data"] == {
        "type": "unary unary",
        "method": "/grpc_test_server.gRPCTestService/TestServe",
        "code": "OK",
    }


@pytest.mark.forked
def test_grpc_client_and_servers_interceptors_integration(
    sentry_init, capture_events_forksafe
):
    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
    events = capture_events_forksafe()

    server = _set_up()

    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
        stub = gRPCTestServiceStub(channel)

        with start_transaction():
            stub.TestServe(gRPCTestMessage(text="test"))

    _tear_down(server=server)

    events.write_file.close()
    server_transaction = events.read_event()
    local_transaction = events.read_event()

    assert (
        server_transaction["contexts"]["trace"]["trace_id"]
        == local_transaction["contexts"]["trace"]["trace_id"]
    )


@pytest.mark.forked
def test_stream_stream(sentry_init):
    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
    _set_up()
    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
        stub = gRPCTestServiceStub(channel)
        response_iterator = stub.TestStreamStream(iter((gRPCTestMessage(text="test"),)))
        for response in response_iterator:
            assert response.text == "test"


def test_stream_unary(sentry_init):
    """Test to verify stream-stream works.
    Tracing not supported for it yet.
    """
    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
    _set_up()
    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
        stub = gRPCTestServiceStub(channel)
        response = stub.TestStreamUnary(iter((gRPCTestMessage(text="test"),)))
        assert response.text == "test"


def _set_up(interceptors: Optional[List[grpc.ServerInterceptor]] = None):
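    """Start an insecure gRPC server on PORT with the test servicer registered,
    optionally wrapped in additional server interceptors."""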
    server = grpc.server(
        futures.ThreadPoolExecutor(max_workers=2),
        interceptors=interceptors,
    )

    add_gRPCTestServiceServicer_to_server(TestService(), server)
    server.add_insecure_port("[::]:{}".format(PORT))
    server.start()

    return server


def _tear_down(server: grpc.Server):
    server.stop(None)


def _find_name(request):
    return request.__class__


class TestService(gRPCTestServiceServicer):
    events = []

    @staticmethod
    def TestServe(request, context):  # noqa: N802
        hub = Hub.current
        with hub.start_span(op="test", description="test"):
            pass

        return gRPCTestMessage(text=request.text)

    @staticmethod
    def TestUnaryStream(request, context):  # noqa: N802
        for _ in range(3):
            yield gRPCTestMessage(text=request.text)

    @staticmethod
    def TestStreamStream(request, context):  # noqa: N802
        for r in request:
            yield r

    @staticmethod
    def TestStreamUnary(request, context):  # noqa: N802
        requests = [r for r in request]
        return requests.pop()
sentry-python-1.39.2/tests/integrations/grpc/test_grpc_aio.py000066400000000000000000000176031454744723200244730ustar00rootroot00000000000000from __future__ import absolute_import

import asyncio
import os

import grpc
import pytest
import pytest_asyncio
import sentry_sdk

from sentry_sdk import Hub, start_transaction
from sentry_sdk.consts import OP
from sentry_sdk.integrations.grpc import GRPCIntegration
from tests.integrations.grpc.grpc_test_service_pb2 import gRPCTestMessage
from tests.integrations.grpc.grpc_test_service_pb2_grpc import (
    gRPCTestServiceServicer,
    add_gRPCTestServiceServicer_to_server,
    gRPCTestServiceStub,
)

AIO_PORT = 50052
AIO_PORT += os.getpid() % 100  # avoid port conflicts when running tests in parallel


@pytest.fixture(scope="function")
def event_loop(request):
    """Create an instance of the default event loop for each test case."""
    loop = asyncio.new_event_loop()
    yield loop
    loop.close()


@pytest_asyncio.fixture(scope="function")
async def grpc_server(sentry_init, event_loop):
    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
    server = grpc.aio.server()
    server.add_insecure_port("[::]:{}".format(AIO_PORT))
    add_gRPCTestServiceServicer_to_server(TestService, server)

    await event_loop.create_task(server.start())

    try:
        yield server
    finally:
        await server.stop(None)


@pytest.mark.asyncio
async def test_grpc_server_starts_transaction(capture_events, grpc_server):
    events = capture_events()

    async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
        stub = gRPCTestServiceStub(channel)
        await stub.TestServe(gRPCTestMessage(text="test"))

    (event,) = events
    span = event["spans"][0]

    assert event["type"] == "transaction"
    assert event["transaction_info"] == {
        "source": "custom",
    }
    assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER
    assert span["op"] == "test"


@pytest.mark.asyncio
async def test_grpc_server_continues_transaction(capture_events, grpc_server):
    events = capture_events()

    async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
        stub = gRPCTestServiceStub(channel)

        with sentry_sdk.start_transaction() as transaction:
            metadata = (
                (
                    "baggage",
                    "sentry-trace_id={trace_id},sentry-environment=test,"
                    "sentry-transaction=test-transaction,sentry-sample_rate=1.0".format(
                        trace_id=transaction.trace_id
                    ),
                ),
                (
                    "sentry-trace",
                    "{trace_id}-{parent_span_id}-{sampled}".format(
                        trace_id=transaction.trace_id,
                        parent_span_id=transaction.span_id,
                        sampled=1,
                    ),
                ),
            )

            await stub.TestServe(gRPCTestMessage(text="test"), metadata=metadata)

    (event, _) = events
    span = event["spans"][0]

    assert event["type"] == "transaction"
    assert event["transaction_info"] == {
        "source": "custom",
    }
    assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER
    assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id
    assert span["op"] == "test"


@pytest.mark.asyncio
async def test_grpc_server_exception(capture_events, grpc_server):
    events = capture_events()

    async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
        stub = gRPCTestServiceStub(channel)
        try:
            await stub.TestServe(gRPCTestMessage(text="exception"))
            raise AssertionError()
        except Exception:
            pass

    (event, _) = events

    assert event["exception"]["values"][0]["type"] == "TestService.TestException"
    assert event["exception"]["values"][0]["value"] == "test"
    assert event["exception"]["values"][0]["mechanism"]["handled"] is False
    assert event["exception"]["values"][0]["mechanism"]["type"] == "grpc"


@pytest.mark.asyncio
async def test_grpc_server_abort(capture_events, grpc_server):
    events = capture_events()

    async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
        stub = gRPCTestServiceStub(channel)
        try:
            await stub.TestServe(gRPCTestMessage(text="abort"))
            raise AssertionError()
        except Exception:
            pass

    assert len(events) == 1


@pytest.mark.asyncio
async def test_grpc_client_starts_span(
    grpc_server, sentry_init, capture_events_forksafe
):
    events = capture_events_forksafe()

    async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
        stub = gRPCTestServiceStub(channel)
        with start_transaction():
            await stub.TestServe(gRPCTestMessage(text="test"))

    events.write_file.close()
    events.read_event()
    local_transaction = events.read_event()
    span = local_transaction["spans"][0]

    assert len(local_transaction["spans"]) == 1
    assert span["op"] == OP.GRPC_CLIENT
    assert (
        span["description"]
        == "unary unary call to /grpc_test_server.gRPCTestService/TestServe"
    )
    assert span["data"] == {
        "type": "unary unary",
        "method": "/grpc_test_server.gRPCTestService/TestServe",
        "code": "OK",
    }


@pytest.mark.asyncio
async def test_grpc_client_unary_stream_starts_span(
    grpc_server, capture_events_forksafe
):
    events = capture_events_forksafe()

    async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
        stub = gRPCTestServiceStub(channel)
        with start_transaction():
            response = stub.TestUnaryStream(gRPCTestMessage(text="test"))
            [_ async for _ in response]

    events.write_file.close()
    local_transaction = events.read_event()
    span = local_transaction["spans"][0]

    assert len(local_transaction["spans"]) == 1
    assert span["op"] == OP.GRPC_CLIENT
    assert (
        span["description"]
        == "unary stream call to /grpc_test_server.gRPCTestService/TestUnaryStream"
    )
    assert span["data"] == {
        "type": "unary stream",
        "method": "/grpc_test_server.gRPCTestService/TestUnaryStream",
    }


@pytest.mark.asyncio
async def test_stream_stream(grpc_server):
    """Test to verify stream-stream works.
    Tracing not supported for it yet.
    """
    async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
        stub = gRPCTestServiceStub(channel)
        response = stub.TestStreamStream((gRPCTestMessage(text="test"),))
        async for r in response:
            assert r.text == "test"


@pytest.mark.asyncio
async def test_stream_unary(grpc_server):
    """Test to verify stream-stream works.
    Tracing not supported for it yet.
    """
    async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
        stub = gRPCTestServiceStub(channel)
        response = await stub.TestStreamUnary((gRPCTestMessage(text="test"),))
        assert response.text == "test"


class TestService(gRPCTestServiceServicer):
    class TestException(Exception):
        def __init__(self):
            super().__init__("test")

    @classmethod
    async def TestServe(cls, request, context):  # noqa: N802
        hub = Hub.current
        with hub.start_span(op="test", description="test"):
            pass

        if request.text == "exception":
            raise cls.TestException()

        if request.text == "abort":
            await context.abort(grpc.StatusCode.ABORTED)

        return gRPCTestMessage(text=request.text)

    @classmethod
    async def TestUnaryStream(cls, request, context):  # noqa: N802
        for _ in range(3):
            yield gRPCTestMessage(text=request.text)

    @classmethod
    async def TestStreamStream(cls, request, context):  # noqa: N802
        async for r in request:
            yield r

    @classmethod
    async def TestStreamUnary(cls, request, context):  # noqa: N802
        requests = [r async for r in request]
        return requests.pop()
sentry-python-1.39.2/tests/integrations/httpx/000077500000000000000000000000001454744723200215045ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/httpx/__init__.py000066400000000000000000000000541454744723200236140ustar00rootroot00000000000000import pytest

pytest.importorskip("httpx")
sentry-python-1.39.2/tests/integrations/httpx/test_httpx.py000066400000000000000000000206111454744723200242640ustar00rootroot00000000000000import asyncio

import pytest
import httpx
import responses

from sentry_sdk import capture_message, start_transaction
from sentry_sdk.consts import MATCH_ALL, SPANDATA
from sentry_sdk.integrations.httpx import HttpxIntegration

try:
    from unittest import mock  # python 3.3 and above
except ImportError:
    import mock  # python < 3.3


@pytest.mark.parametrize(
    "httpx_client",
    (httpx.Client(), httpx.AsyncClient()),
)
def test_crumb_capture_and_hint(sentry_init, capture_events, httpx_client):
    def before_breadcrumb(crumb, hint):
        crumb["data"]["extra"] = "foo"
        return crumb

    sentry_init(integrations=[HttpxIntegration()], before_breadcrumb=before_breadcrumb)

    url = "http://example.com/"
    responses.add(responses.GET, url, status=200)

    with start_transaction():
        events = capture_events()

        if asyncio.iscoroutinefunction(httpx_client.get):
            response = asyncio.get_event_loop().run_until_complete(
                httpx_client.get(url)
            )
        else:
            response = httpx_client.get(url)

        assert response.status_code == 200
        capture_message("Testing!")

        (event,) = events

        crumb = event["breadcrumbs"]["values"][0]
        assert crumb["type"] == "http"
        assert crumb["category"] == "httplib"
        assert crumb["data"] == {
            "url": url,
            SPANDATA.HTTP_METHOD: "GET",
            SPANDATA.HTTP_FRAGMENT: "",
            SPANDATA.HTTP_QUERY: "",
            SPANDATA.HTTP_STATUS_CODE: 200,
            "reason": "OK",
            "extra": "foo",
        }


@pytest.mark.parametrize(
    "httpx_client",
    (httpx.Client(), httpx.AsyncClient()),
)
def test_outgoing_trace_headers(sentry_init, httpx_client):
    sentry_init(traces_sample_rate=1.0, integrations=[HttpxIntegration()])

    url = "http://example.com/"
    responses.add(responses.GET, url, status=200)

    with start_transaction(
        name="/interactions/other-dogs/new-dog",
        op="greeting.sniff",
        trace_id="01234567890123456789012345678901",
    ) as transaction:
        if asyncio.iscoroutinefunction(httpx_client.get):
            response = asyncio.get_event_loop().run_until_complete(
                httpx_client.get(url)
            )
        else:
            response = httpx_client.get(url)

        request_span = transaction._span_recorder.spans[-1]
        assert response.request.headers[
            "sentry-trace"
        ] == "{trace_id}-{parent_span_id}-{sampled}".format(
            trace_id=transaction.trace_id,
            parent_span_id=request_span.span_id,
            sampled=1,
        )


@pytest.mark.parametrize(
    "httpx_client",
    (httpx.Client(), httpx.AsyncClient()),
)
def test_outgoing_trace_headers_append_to_baggage(sentry_init, httpx_client):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[HttpxIntegration()],
        release="d08ebdb9309e1b004c6f52202de58a09c2268e42",
    )

    url = "http://example.com/"
    responses.add(responses.GET, url, status=200)

    with start_transaction(
        name="/interactions/other-dogs/new-dog",
        op="greeting.sniff",
        trace_id="01234567890123456789012345678901",
    ) as transaction:
        if asyncio.iscoroutinefunction(httpx_client.get):
            response = asyncio.get_event_loop().run_until_complete(
                httpx_client.get(url, headers={"baGGage": "custom=data"})
            )
        else:
            response = httpx_client.get(url, headers={"baGGage": "custom=data"})

        request_span = transaction._span_recorder.spans[-1]
        assert response.request.headers[
            "sentry-trace"
        ] == "{trace_id}-{parent_span_id}-{sampled}".format(
            trace_id=transaction.trace_id,
            parent_span_id=request_span.span_id,
            sampled=1,
        )
        assert (
            response.request.headers["baggage"]
            == "custom=data,sentry-trace_id=01234567890123456789012345678901,sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true"
        )


@pytest.mark.parametrize(
    "httpx_client,trace_propagation_targets,url,trace_propagated",
    [
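        # Each target is matched as a regular expression anywhere in the URL
        # (substring matching); hence "https://example.com/" does not match the
        # bare URL "https://example.com", while "https://example.com" matches both.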
        [
            httpx.Client(),
            None,
            "https://example.com/",
            False,
        ],
        [
            httpx.Client(),
            [],
            "https://example.com/",
            False,
        ],
        [
            httpx.Client(),
            [MATCH_ALL],
            "https://example.com/",
            True,
        ],
        [
            httpx.Client(),
            ["https://example.com/"],
            "https://example.com/",
            True,
        ],
        [
            httpx.Client(),
            ["https://example.com/"],
            "https://example.com",
            False,
        ],
        [
            httpx.Client(),
            ["https://example.com"],
            "https://example.com",
            True,
        ],
        [
            httpx.Client(),
            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
            "https://example.net",
            False,
        ],
        [
            httpx.Client(),
            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
            "https://good.example.net",
            True,
        ],
        [
            httpx.Client(),
            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
            "https://good.example.net/some/thing",
            True,
        ],
        [
            httpx.AsyncClient(),
            None,
            "https://example.com/",
            False,
        ],
        [
            httpx.AsyncClient(),
            [],
            "https://example.com/",
            False,
        ],
        [
            httpx.AsyncClient(),
            [MATCH_ALL],
            "https://example.com/",
            True,
        ],
        [
            httpx.AsyncClient(),
            ["https://example.com/"],
            "https://example.com/",
            True,
        ],
        [
            httpx.AsyncClient(),
            ["https://example.com/"],
            "https://example.com",
            False,
        ],
        [
            httpx.AsyncClient(),
            ["https://example.com"],
            "https://example.com",
            True,
        ],
        [
            httpx.AsyncClient(),
            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
            "https://example.net",
            False,
        ],
        [
            httpx.AsyncClient(),
            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
            "https://good.example.net",
            True,
        ],
        [
            httpx.AsyncClient(),
            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
            "https://good.example.net/some/thing",
            True,
        ],
    ],
)
def test_option_trace_propagation_targets(
    sentry_init,
    httpx_client,
    httpx_mock,  # this comes from pytest-httpx
    trace_propagation_targets,
    url,
    trace_propagated,
):
    httpx_mock.add_response()

    sentry_init(
        release="test",
        trace_propagation_targets=trace_propagation_targets,
        traces_sample_rate=1.0,
        integrations=[HttpxIntegration()],
    )

    if asyncio.iscoroutinefunction(httpx_client.get):
        asyncio.get_event_loop().run_until_complete(httpx_client.get(url))
    else:
        httpx_client.get(url)

    request_headers = httpx_mock.get_request().headers

    if trace_propagated:
        assert "sentry-trace" in request_headers
    else:
        assert "sentry-trace" not in request_headers


@pytest.mark.tests_internal_exceptions
def test_omit_url_data_if_parsing_fails(sentry_init, capture_events):
    sentry_init(integrations=[HttpxIntegration()])

    httpx_client = httpx.Client()
    url = "http://example.com"
    responses.add(responses.GET, url, status=200)

    events = capture_events()
    with mock.patch(
        "sentry_sdk.integrations.httpx.parse_url",
        side_effect=ValueError,
    ):
        response = httpx_client.get(url)

    assert response.status_code == 200
    capture_message("Testing!")

    (event,) = events
    assert event["breadcrumbs"]["values"][0]["data"] == {
        SPANDATA.HTTP_METHOD: "GET",
        SPANDATA.HTTP_STATUS_CODE: 200,
        "reason": "OK",
        # no url related data
    }
sentry-python-1.39.2/tests/integrations/huey/000077500000000000000000000000001454744723200213075ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/huey/__init__.py000066400000000000000000000000531454744723200234160ustar00rootroot00000000000000import pytest

pytest.importorskip("huey")
sentry-python-1.39.2/tests/integrations/huey/test_huey.py000066400000000000000000000107631454744723200237010ustar00rootroot00000000000000import pytest
from decimal import DivisionByZero

from sentry_sdk import start_transaction
from sentry_sdk.integrations.huey import HueyIntegration
from sentry_sdk.utils import parse_version

from huey import __version__ as HUEY_VERSION
from huey.api import MemoryHuey, Result
from huey.exceptions import RetryTask


HUEY_VERSION = parse_version(HUEY_VERSION)


@pytest.fixture
def init_huey(sentry_init):
    def inner():
        sentry_init(
            integrations=[HueyIntegration()],
            traces_sample_rate=1.0,
            send_default_pii=True,
            debug=True,
        )

        return MemoryHuey(name="sentry_sdk")

    return inner


@pytest.fixture(autouse=True)
def flush_huey_tasks(init_huey):
    huey = init_huey()
    huey.flush()


def execute_huey_task(huey, func, *args, **kwargs):
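    """Enqueue ``func``, then dequeue and execute the resulting task immediately,
    swallowing any exception types passed via the ``exceptions`` kwarg."""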
    exceptions = kwargs.pop("exceptions", None)
    result = func(*args, **kwargs)
    task = huey.dequeue()
    if exceptions is not None:
        try:
            huey.execute(task)
        except exceptions:
            pass
    else:
        huey.execute(task)
    return result


def test_task_result(init_huey):
    huey = init_huey()

    @huey.task()
    def increase(num):
        return num + 1

    result = increase(3)

    assert isinstance(result, Result)
    assert len(huey) == 1
    task = huey.dequeue()
    assert huey.execute(task) == 4
    assert result.get() == 4


@pytest.mark.parametrize("task_fails", [True, False], ids=["error", "success"])
def test_task_transaction(capture_events, init_huey, task_fails):
    huey = init_huey()

    @huey.task()
    def division(a, b):
        return a / b

    events = capture_events()
    execute_huey_task(
        huey, division, 1, int(not task_fails), exceptions=(DivisionByZero,)
    )

    if task_fails:
        error_event = events.pop(0)
        assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
        assert error_event["exception"]["values"][0]["mechanism"]["type"] == "huey"

    (event,) = events
    assert event["type"] == "transaction"
    assert event["transaction"] == "division"
    assert event["transaction_info"] == {"source": "task"}

    if task_fails:
        assert event["contexts"]["trace"]["status"] == "internal_error"
    else:
        assert event["contexts"]["trace"]["status"] == "ok"

    assert "huey_task_id" in event["tags"]
    assert "huey_task_retry" in event["tags"]


def test_task_retry(capture_events, init_huey):
    huey = init_huey()
    context = {"retry": True}
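    # The first execution raises RetryTask, so huey re-enqueues the task once;
    # the second execution succeeds and drains the queue.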

    @huey.task()
    def retry_task(context):
        if context["retry"]:
            context["retry"] = False
            raise RetryTask()

    events = capture_events()
    result = execute_huey_task(huey, retry_task, context)
    (event,) = events

    assert event["transaction"] == "retry_task"
    assert event["tags"]["huey_task_id"] == result.task.id
    assert len(huey) == 1

    task = huey.dequeue()
    huey.execute(task)
    (event, _) = events

    assert event["transaction"] == "retry_task"
    assert event["tags"]["huey_task_id"] == result.task.id
    assert len(huey) == 0


@pytest.mark.parametrize("lock_name", ["lock.a", "lock.b"], ids=["locked", "unlocked"])
@pytest.mark.skipif(HUEY_VERSION < (2, 5), reason="is_locked was added in 2.5")
def test_task_lock(capture_events, init_huey, lock_name):
    huey = init_huey()

    task_lock_name = "lock.a"
    should_be_locked = task_lock_name == lock_name

    @huey.task()
    @huey.lock_task(task_lock_name)
    def maybe_locked_task():
        pass

    events = capture_events()

    with huey.lock_task(lock_name):
        assert huey.is_locked(task_lock_name) == should_be_locked
        result = execute_huey_task(huey, maybe_locked_task)

    (event,) = events

    assert event["transaction"] == "maybe_locked_task"
    assert event["tags"]["huey_task_id"] == result.task.id
    assert event["contexts"]["trace"]["status"] == (
        "aborted" if should_be_locked else "ok"
    )
    assert len(huey) == 0


def test_huey_enqueue(init_huey, capture_events):
    huey = init_huey()

    @huey.task(name="different_task_name")
    def dummy_task():
        pass

    events = capture_events()

    with start_transaction() as transaction:
        dummy_task()

    (event,) = events

    assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id
    assert event["contexts"]["trace"]["span_id"] == transaction.span_id

    assert len(event["spans"])
    assert event["spans"][0]["op"] == "queue.submit.huey"
    assert event["spans"][0]["description"] == "different_task_name"
sentry-python-1.39.2/tests/integrations/logging/000077500000000000000000000000001454744723200217635ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/logging/test_logging.py000066400000000000000000000154001454744723200250220ustar00rootroot00000000000000# coding: utf-8
import sys

import pytest
import logging
import warnings

from sentry_sdk.integrations.logging import LoggingIntegration, ignore_logger

other_logger = logging.getLogger("testfoo")
logger = logging.getLogger(__name__)


@pytest.fixture(autouse=True)
def reset_level():
    other_logger.setLevel(logging.DEBUG)
    logger.setLevel(logging.DEBUG)


@pytest.mark.parametrize("logger", [logger, other_logger])
def test_logging_works_with_many_loggers(sentry_init, capture_events, logger):
    sentry_init(integrations=[LoggingIntegration(event_level="ERROR")])
    events = capture_events()

    logger.info("bread")
    logger.critical("LOL")
    (event,) = events
    assert event["level"] == "fatal"
    assert not event["logentry"]["params"]
    assert event["logentry"]["message"] == "LOL"
    assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"]["values"])


@pytest.mark.parametrize("integrations", [None, [], [LoggingIntegration()]])
@pytest.mark.parametrize(
    "kwargs", [{"exc_info": None}, {}, {"exc_info": 0}, {"exc_info": False}]
)
def test_logging_defaults(integrations, sentry_init, capture_events, kwargs):
    sentry_init(integrations=integrations)
    events = capture_events()

    logger.info("bread")
    logger.critical("LOL", **kwargs)
    (event,) = events

    assert event["level"] == "fatal"
    assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"]["values"])
    assert not any(
        crumb["message"] == "LOL" for crumb in event["breadcrumbs"]["values"]
    )
    assert "threads" not in event


def test_logging_extra_data(sentry_init, capture_events):
    sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
    events = capture_events()

    logger.info("bread", extra=dict(foo=42))
    logger.critical("lol", extra=dict(bar=69))

    (event,) = events

    assert event["level"] == "fatal"
    assert event["extra"] == {"bar": 69}
    assert any(
        crumb["message"] == "bread" and crumb["data"] == {"foo": 42}
        for crumb in event["breadcrumbs"]["values"]
    )


def test_logging_extra_data_integer_keys(sentry_init, capture_events):
    sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
    events = capture_events()

    logger.critical("integer in extra keys", extra={1: 1})

    (event,) = events

    assert event["extra"] == {"1": 1}


@pytest.mark.xfail(sys.version_info[:2] == (3, 4), reason="buggy logging module")
def test_logging_stack(sentry_init, capture_events):
    sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
    events = capture_events()

    logger.error("first", exc_info=True)
    logger.error("second")

    (
        event_with,
        event_without,
    ) = events

    assert event_with["level"] == "error"
    assert event_with["threads"]["values"][0]["stacktrace"]["frames"]

    assert event_without["level"] == "error"
    assert "threads" not in event_without


def test_logging_level(sentry_init, capture_events):
    sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
    events = capture_events()

    logger.setLevel(logging.WARNING)
    logger.error("hi")
    (event,) = events
    assert event["level"] == "error"
    assert event["logentry"]["message"] == "hi"

    del events[:]

    logger.setLevel(logging.ERROR)
    logger.warning("hi")
    assert not events


def test_custom_log_level_names(sentry_init, capture_events):
    levels = {
        logging.DEBUG: "debug",
        logging.INFO: "info",
        logging.WARN: "warning",
        logging.WARNING: "warning",
        logging.ERROR: "error",
        logging.CRITICAL: "fatal",
        logging.FATAL: "fatal",
    }

    # set custom log level names
    # fmt: off
    logging.addLevelName(logging.DEBUG, u"custom level debüg: ")
    # fmt: on
    logging.addLevelName(logging.INFO, "")
    logging.addLevelName(logging.WARN, "custom level warn: ")
    logging.addLevelName(logging.WARNING, "custom level warning: ")
    logging.addLevelName(logging.ERROR, None)
    logging.addLevelName(logging.CRITICAL, "custom level critical: ")
    logging.addLevelName(logging.FATAL, "custom level 🔥: ")

    for logging_level, sentry_level in levels.items():
        logger.setLevel(logging_level)
        sentry_init(
            integrations=[LoggingIntegration(event_level=logging_level)],
            default_integrations=False,
        )
        events = capture_events()

        logger.log(logging_level, "Trying level %s", logging_level)
        assert events
        assert events[0]["level"] == sentry_level
        assert events[0]["logentry"]["message"] == "Trying level %s"
        assert events[0]["logentry"]["params"] == [logging_level]

        del events[:]


def test_logging_filters(sentry_init, capture_events):
    sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
    events = capture_events()

    should_log = False

    class MyFilter(logging.Filter):
        def filter(self, record):
            return should_log

    logger.addFilter(MyFilter())
    logger.error("hi")

    assert not events

    should_log = True
    logger.error("hi")

    (event,) = events
    assert event["logentry"]["message"] == "hi"


def test_logging_captured_warnings(sentry_init, capture_events, recwarn):
    sentry_init(
        integrations=[LoggingIntegration(event_level="WARNING")],
        default_integrations=False,
    )
    events = capture_events()

    logging.captureWarnings(True)
    warnings.warn("first", stacklevel=2)
    warnings.warn("second", stacklevel=2)
    logging.captureWarnings(False)

    warnings.warn("third", stacklevel=2)

    assert len(events) == 2

    assert events[0]["level"] == "warning"
    # Captured warnings start with the path where the warning was raised
    assert "UserWarning: first" in events[0]["logentry"]["message"]
    assert events[0]["logentry"]["params"] == []

    assert events[1]["level"] == "warning"
    assert "UserWarning: second" in events[1]["logentry"]["message"]
    assert events[1]["logentry"]["params"] == []

    # Using recwarn suppresses the "third" warning in the test output
    assert len(recwarn) == 1
    assert str(recwarn[0].message) == "third"


def test_ignore_logger(sentry_init, capture_events):
    sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
    events = capture_events()

    ignore_logger("testfoo")

    other_logger.error("hi")

    assert not events


def test_ignore_logger_wildcard(sentry_init, capture_events):
    sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
    events = capture_events()

    ignore_logger("testfoo.*")

    nested_logger = logging.getLogger("testfoo.submodule")

    logger.error("hi")

    nested_logger.error("bye")

    (event,) = events
    assert event["logentry"]["message"] == "hi"
sentry-python-1.39.2/tests/integrations/loguru/000077500000000000000000000000001454744723200216525ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/loguru/__init__.py000066400000000000000000000000551454744723200237630ustar00rootroot00000000000000import pytest

pytest.importorskip("loguru")
sentry-python-1.39.2/tests/integrations/loguru/test_loguru.py000066400000000000000000000063021454744723200246010ustar00rootroot00000000000000import pytest
from loguru import logger

import sentry_sdk
from sentry_sdk.integrations.loguru import LoguruIntegration, LoggingLevels

logger.remove(0)  # don't print to console


@pytest.mark.parametrize(
    "level,created_event",
    [
        # None - no breadcrumb
        # False - no event
        # True - event created
        (LoggingLevels.TRACE, None),
        (LoggingLevels.DEBUG, None),
        (LoggingLevels.INFO, False),
        (LoggingLevels.SUCCESS, False),
        (LoggingLevels.WARNING, False),
        (LoggingLevels.ERROR, True),
        (LoggingLevels.CRITICAL, True),
    ],
)
@pytest.mark.parametrize("disable_breadcrumbs", [True, False])
@pytest.mark.parametrize("disable_events", [True, False])
def test_just_log(
    sentry_init,
    capture_events,
    level,
    created_event,
    disable_breadcrumbs,
    disable_events,
):
    sentry_init(
        integrations=[
            LoguruIntegration(
                level=None if disable_breadcrumbs else LoggingLevels.INFO.value,
                event_level=None if disable_events else LoggingLevels.ERROR.value,
            )
        ],
        default_integrations=False,
    )
    events = capture_events()

    getattr(logger, level.name.lower())("test")

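    # Loguru's default format starts with a 23-character timestamp
    # ("YYYY-MM-DD HH:mm:ss.SSS"); the [23:] slices below drop it so the
    # assertions are independent of the current time.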
    formatted_message = (
        " | "
        + "{:9}".format(level.name.upper())
        + "| tests.integrations.loguru.test_loguru:test_just_log:46 - test"
    )
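    # Loguru's default format starts with a 23-character timestamp
    # ("YYYY-MM-DD HH:mm:ss.SSS"), so assertions below compare message[23:].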

    if not created_event:
        assert not events

        breadcrumbs = sentry_sdk.Hub.current.scope._breadcrumbs
        if (
            not disable_breadcrumbs and created_event is not None
        ):  # "is not None" excludes TRACE/DEBUG, which are below the breadcrumb level
            (breadcrumb,) = breadcrumbs
            assert breadcrumb["level"] == level.name.lower()
            assert breadcrumb["category"] == "tests.integrations.loguru.test_loguru"
            assert breadcrumb["message"][23:] == formatted_message
        else:
            assert not breadcrumbs

        return

    if disable_events:
        assert not events
        return

    (event,) = events
    assert event["level"] == (level.name.lower())
    assert event["logger"] == "tests.integrations.loguru.test_loguru"
    assert event["logentry"]["message"][23:] == formatted_message


def test_breadcrumb_format(sentry_init, capture_events):
    sentry_init(
        integrations=[
            LoguruIntegration(
                level=LoggingLevels.INFO.value,
                event_level=None,
                breadcrumb_format="{message}",
            )
        ],
        default_integrations=False,
    )

    logger.info("test")
    formatted_message = "test"

    breadcrumbs = sentry_sdk.Hub.current.scope._breadcrumbs
    (breadcrumb,) = breadcrumbs
    assert breadcrumb["message"] == formatted_message


def test_event_format(sentry_init, capture_events):
    sentry_init(
        integrations=[
            LoguruIntegration(
                level=None,
                event_level=LoggingLevels.ERROR.value,
                event_format="{message}",
            )
        ],
        default_integrations=False,
    )
    events = capture_events()

    logger.error("test")
    formatted_message = "test"

    (event,) = events
    assert event["logentry"]["message"] == formatted_message
sentry-python-1.39.2/tests/integrations/modules/000077500000000000000000000000001454744723200220055ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/modules/test_modules.py000066400000000000000000000005571454744723200250750ustar00rootroot00000000000000import sentry_sdk

from sentry_sdk.integrations.modules import ModulesIntegration


def test_basic(sentry_init, capture_events):
    sentry_init(integrations=[ModulesIntegration()])
    events = capture_events()

    sentry_sdk.capture_exception(ValueError())

    (event,) = events
    assert "sentry-sdk" in event["modules"]
    assert "pytest" in event["modules"]
sentry-python-1.39.2/tests/integrations/opentelemetry/000077500000000000000000000000001454744723200232315ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/opentelemetry/__init__.py000066400000000000000000000000641454744723200253420ustar00rootroot00000000000000import pytest

pytest.importorskip("opentelemetry")
sentry-python-1.39.2/tests/integrations/opentelemetry/test_experimental.py000066400000000000000000000017411454744723200273420ustar00rootroot00000000000000try:
    # python 3.3 and above
    from unittest.mock import MagicMock
except ImportError:
    # python < 3.3
    from mock import MagicMock

from sentry_sdk.integrations.opentelemetry.integration import OpenTelemetryIntegration


def test_integration_enabled_if_option_is_on(sentry_init):
    OpenTelemetryIntegration.setup_once = MagicMock()
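    # Replace setup_once with a mock so the test can observe whether enabling
    # the experimental flag wires up the integration, without real OTel setup.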
    sentry_init(
        _experiments={
            "otel_powered_performance": True,
        }
    )
    OpenTelemetryIntegration.setup_once.assert_called_once()


def test_integration_not_enabled_if_option_is_off(sentry_init):
    OpenTelemetryIntegration.setup_once = MagicMock()
    sentry_init(
        _experiments={
            "otel_powered_performance": False,
        }
    )
    OpenTelemetryIntegration.setup_once.assert_not_called()


def test_integration_not_enabled_if_option_is_missing(sentry_init):
    OpenTelemetryIntegration.setup_once = MagicMock()
    sentry_init()
    OpenTelemetryIntegration.setup_once.assert_not_called()
sentry-python-1.39.2/tests/integrations/opentelemetry/test_propagator.py000066400000000000000000000174171454744723200270320ustar00rootroot00000000000000try:
    from unittest import mock  # python 3.3 and above
    from unittest.mock import MagicMock
except ImportError:
    import mock  # python < 3.3
    from mock import MagicMock

from opentelemetry.context import get_current
from opentelemetry.trace.propagation import get_current_span
from opentelemetry.trace import (
    set_span_in_context,
    TraceFlags,
    SpanContext,
)
from sentry_sdk.integrations.opentelemetry.consts import (
    SENTRY_BAGGAGE_KEY,
    SENTRY_TRACE_KEY,
)

from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator
from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor
from sentry_sdk.tracing_utils import Baggage


def test_extract_no_context_no_sentry_trace_header():
    """
    No context and NO Sentry trace data in getter.
    Extract should return empty context.
    """
    carrier = None
    context = None
    getter = MagicMock()
    getter.get.return_value = None

    modified_context = SentryPropagator().extract(carrier, context, getter)

    assert modified_context == {}


def test_extract_context_no_sentry_trace_header():
    """
    Context but NO Sentry trace data in getter.
    Extract should return context as is.
    """
    carrier = None
    context = {"some": "value"}
    getter = MagicMock()
    getter.get.return_value = None

    modified_context = SentryPropagator().extract(carrier, context, getter)

    assert modified_context == context


def test_extract_empty_context_sentry_trace_header_no_baggage():
    """
    Empty context with Sentry trace data but NO baggage in getter.
    Extract should return a context that has empty baggage in it and also a NoopSpan with span_id and trace_id.
    """
    carrier = None
    context = {}
    getter = MagicMock()
    getter.get.side_effect = [
        ["1234567890abcdef1234567890abcdef-1234567890abcdef-1"],
        None,
    ]
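    # get() is called twice: first for the sentry-trace header
    # ("<trace_id>-<span_id>-<sampled flag>"), then for baggage (None here).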

    modified_context = SentryPropagator().extract(carrier, context, getter)

    assert len(modified_context.keys()) == 3

    assert modified_context[SENTRY_TRACE_KEY] == {
        "trace_id": "1234567890abcdef1234567890abcdef",
        "parent_span_id": "1234567890abcdef",
        "parent_sampled": True,
    }
    assert modified_context[SENTRY_BAGGAGE_KEY].serialize() == ""

    span_context = get_current_span(modified_context).get_span_context()
    assert span_context.span_id == int("1234567890abcdef", 16)
    assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16)


def test_extract_context_sentry_trace_header_baggage():
    """
    Non-empty context with Sentry trace data and Baggage in getter.
    Extract should return a context that has baggage in it and also a NoopSpan with span_id and trace_id.
    """
    baggage_header = (
        "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, "
        "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, "
        "sentry-user_id=Am%C3%A9lie, other-vendor-value-2=foo;bar;"
    )

    carrier = None
    context = {"some": "value"}
    getter = MagicMock()
    getter.get.side_effect = [
        ["1234567890abcdef1234567890abcdef-1234567890abcdef-1"],
        [baggage_header],
    ]

    modified_context = SentryPropagator().extract(carrier, context, getter)

    assert len(modified_context.keys()) == 4

    assert modified_context[SENTRY_TRACE_KEY] == {
        "trace_id": "1234567890abcdef1234567890abcdef",
        "parent_span_id": "1234567890abcdef",
        "parent_sampled": True,
    }

    assert modified_context[SENTRY_BAGGAGE_KEY].serialize() == (
        "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
        "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
        "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie"
    )

    span_context = get_current_span(modified_context).get_span_context()
    assert span_context.span_id == int("1234567890abcdef", 16)
    assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16)


def test_inject_empty_otel_span_map():
    """
    With an empty otel_span_map there is no sentry_span to be found in
    inject(), so the function returns early and no setters are called.
    """
    carrier = None
    context = get_current()
    setter = MagicMock()
    setter.set = MagicMock()

    span_context = SpanContext(
        trace_id=int("1234567890abcdef1234567890abcdef", 16),
        span_id=int("1234567890abcdef", 16),
        trace_flags=TraceFlags(TraceFlags.SAMPLED),
        is_remote=True,
    )
    span = MagicMock()
    span.get_span_context.return_value = span_context

    with mock.patch(
        "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span",
        return_value=span,
    ):
        full_context = set_span_in_context(span, context)
        SentryPropagator().inject(carrier, full_context, setter)

        setter.set.assert_not_called()


def test_inject_sentry_span_no_baggage():
    """
    Inject a sentry span with no baggage.
    """
    carrier = None
    context = get_current()
    setter = MagicMock()
    setter.set = MagicMock()

    trace_id = "1234567890abcdef1234567890abcdef"
    span_id = "1234567890abcdef"

    span_context = SpanContext(
        trace_id=int(trace_id, 16),
        span_id=int(span_id, 16),
        trace_flags=TraceFlags(TraceFlags.SAMPLED),
        is_remote=True,
    )
    span = MagicMock()
    span.get_span_context.return_value = span_context

    sentry_span = MagicMock()
    sentry_span.to_traceparent = mock.Mock(
        return_value="1234567890abcdef1234567890abcdef-1234567890abcdef-1"
    )
    sentry_span.containing_transaction.get_baggage = mock.Mock(return_value=None)

    span_processor = SentrySpanProcessor()
    span_processor.otel_span_map[span_id] = sentry_span
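    # The processor's otel_span_map maps OTel span ids to Sentry spans;
    # registering the span here is what lets inject() find it below.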

    with mock.patch(
        "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span",
        return_value=span,
    ):
        full_context = set_span_in_context(span, context)
        SentryPropagator().inject(carrier, full_context, setter)

        setter.set.assert_called_once_with(
            carrier,
            "sentry-trace",
            "1234567890abcdef1234567890abcdef-1234567890abcdef-1",
        )


def test_inject_sentry_span_baggage():
    """
    Inject a sentry span with baggage.
    """
    carrier = None
    context = get_current()
    setter = MagicMock()
    setter.set = MagicMock()

    trace_id = "1234567890abcdef1234567890abcdef"
    span_id = "1234567890abcdef"

    span_context = SpanContext(
        trace_id=int(trace_id, 16),
        span_id=int(span_id, 16),
        trace_flags=TraceFlags(TraceFlags.SAMPLED),
        is_remote=True,
    )
    span = MagicMock()
    span.get_span_context.return_value = span_context

    sentry_span = MagicMock()
    sentry_span.to_traceparent = mock.Mock(
        return_value="1234567890abcdef1234567890abcdef-1234567890abcdef-1"
    )
    sentry_items = {
        "sentry-trace_id": "771a43a4192642f0b136d5159a501700",
        "sentry-public_key": "49d0f7386ad645858ae85020e393bef3",
        "sentry-sample_rate": 0.01337,
        "sentry-user_id": "Amélie",
    }
    baggage = Baggage(sentry_items=sentry_items)
    sentry_span.containing_transaction.get_baggage = MagicMock(return_value=baggage)

    span_processor = SentrySpanProcessor()
    span_processor.otel_span_map[span_id] = sentry_span

    with mock.patch(
        "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span",
        return_value=span,
    ):
        full_context = set_span_in_context(span, context)
        SentryPropagator().inject(carrier, full_context, setter)

        setter.set.assert_any_call(
            carrier,
            "sentry-trace",
            "1234567890abcdef1234567890abcdef-1234567890abcdef-1",
        )

        setter.set.assert_any_call(
            carrier,
            "baggage",
            baggage.serialize(),
        )
sentry-python-1.39.2/tests/integrations/opentelemetry/test_span_processor.py000066400000000000000000000432431454744723200277100ustar00rootroot00000000000000from datetime import datetime
from datetime import timezone
import time
import pytest

try:
    from unittest import mock  # python 3.3 and above
    from unittest.mock import MagicMock
except ImportError:
    import mock  # python < 3.3
    from mock import MagicMock

from sentry_sdk.integrations.opentelemetry.span_processor import (
    SentrySpanProcessor,
    link_trace_context_to_error_event,
)
from sentry_sdk.tracing import Span, Transaction

from opentelemetry.trace import SpanKind, SpanContext, Status, StatusCode
from sentry_sdk.tracing_utils import extract_sentrytrace_data


def test_is_sentry_span():
    otel_span = MagicMock()

    hub = MagicMock()
    hub.client = None

    span_processor = SentrySpanProcessor()
    assert not span_processor._is_sentry_span(hub, otel_span)

    client = MagicMock()
    client.options = {"instrumenter": "otel"}
    client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"

    hub.client = client
    assert not span_processor._is_sentry_span(hub, otel_span)

    otel_span.attributes = {
        "http.url": "https://example.com",
    }
    assert not span_processor._is_sentry_span(hub, otel_span)

    otel_span.attributes = {
        "http.url": "https://o123456.ingest.sentry.io/api/123/envelope",
    }
    assert span_processor._is_sentry_span(hub, otel_span)
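    # Requests to the SDK's own ingest endpoint are flagged as Sentry spans
    # and skipped, so instrumenting the transport cannot recurse.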


def test_get_otel_context():
    otel_span = MagicMock()
    otel_span.attributes = {"foo": "bar"}
    otel_span.resource = MagicMock()
    otel_span.resource.attributes = {"baz": "qux"}

    span_processor = SentrySpanProcessor()
    otel_context = span_processor._get_otel_context(otel_span)

    assert otel_context == {
        "attributes": {"foo": "bar"},
        "resource": {"baz": "qux"},
    }


def test_get_trace_data_with_span_and_trace():
    otel_span = MagicMock()
    span_context = SpanContext(
        trace_id=int("1234567890abcdef1234567890abcdef", 16),
        span_id=int("1234567890abcdef", 16),
        is_remote=True,
    )
    otel_span.get_span_context.return_value = span_context
    otel_span.parent = None

    parent_context = {}

    span_processor = SentrySpanProcessor()
    sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
    assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
    assert sentry_trace_data["span_id"] == "1234567890abcdef"
    assert sentry_trace_data["parent_span_id"] is None
    assert sentry_trace_data["parent_sampled"] is None
    assert sentry_trace_data["baggage"] is None


def test_get_trace_data_with_span_and_trace_and_parent():
    otel_span = MagicMock()
    span_context = SpanContext(
        trace_id=int("1234567890abcdef1234567890abcdef", 16),
        span_id=int("1234567890abcdef", 16),
        is_remote=True,
    )
    otel_span.get_span_context.return_value = span_context
    otel_span.parent = MagicMock()
    otel_span.parent.span_id = int("abcdef1234567890", 16)

    parent_context = {}

    span_processor = SentrySpanProcessor()
    sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
    assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
    assert sentry_trace_data["span_id"] == "1234567890abcdef"
    assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
    assert sentry_trace_data["parent_sampled"] is None
    assert sentry_trace_data["baggage"] is None


def test_get_trace_data_with_sentry_trace():
    otel_span = MagicMock()
    span_context = SpanContext(
        trace_id=int("1234567890abcdef1234567890abcdef", 16),
        span_id=int("1234567890abcdef", 16),
        is_remote=True,
    )
    otel_span.get_span_context.return_value = span_context
    otel_span.parent = MagicMock()
    otel_span.parent.span_id = int("abcdef1234567890", 16)

    parent_context = {}

    with mock.patch(
        "sentry_sdk.integrations.opentelemetry.span_processor.get_value",
        side_effect=[
            extract_sentrytrace_data(
                "1234567890abcdef1234567890abcdef-1234567890abcdef-1"
            ),
            None,
        ],
    ):
        span_processor = SentrySpanProcessor()
        sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
        assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
        assert sentry_trace_data["span_id"] == "1234567890abcdef"
        assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
        assert sentry_trace_data["parent_sampled"] is True
        assert sentry_trace_data["baggage"] is None

    with mock.patch(
        "sentry_sdk.integrations.opentelemetry.span_processor.get_value",
        side_effect=[
            extract_sentrytrace_data(
                "1234567890abcdef1234567890abcdef-1234567890abcdef-0"
            ),
            None,
        ],
    ):
        span_processor = SentrySpanProcessor()
        sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
        assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
        assert sentry_trace_data["span_id"] == "1234567890abcdef"
        assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
        assert sentry_trace_data["parent_sampled"] is False
        assert sentry_trace_data["baggage"] is None


def test_get_trace_data_with_sentry_trace_and_baggage():
    otel_span = MagicMock()
    span_context = SpanContext(
        trace_id=int("1234567890abcdef1234567890abcdef", 16),
        span_id=int("1234567890abcdef", 16),
        is_remote=True,
    )
    otel_span.get_span_context.return_value = span_context
    otel_span.parent = MagicMock()
    otel_span.parent.span_id = int("abcdef1234567890", 16)

    parent_context = {}

    baggage = (
        "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
        "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
        "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie"
    )

    with mock.patch(
        "sentry_sdk.integrations.opentelemetry.span_processor.get_value",
        side_effect=[
            extract_sentrytrace_data(
                "1234567890abcdef1234567890abcdef-1234567890abcdef-1"
            ),
            baggage,
        ],
    ):
        span_processor = SentrySpanProcessor()
        sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
        assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
        assert sentry_trace_data["span_id"] == "1234567890abcdef"
        assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
        assert sentry_trace_data["parent_sampled"]
        assert sentry_trace_data["baggage"] == baggage


def test_update_span_with_otel_data_http_method():
    sentry_span = Span()

    otel_span = MagicMock()
    otel_span.name = "Test OTel Span"
    otel_span.kind = SpanKind.CLIENT
    otel_span.attributes = {
        "http.method": "GET",
        "http.status_code": 429,
        "http.status_text": "xxx",
        "http.user_agent": "curl/7.64.1",
        "net.peer.name": "example.com",
        "http.target": "/",
    }

    span_processor = SentrySpanProcessor()
    span_processor._update_span_with_otel_data(sentry_span, otel_span)

    assert sentry_span.op == "http.client"
    assert sentry_span.description == "GET example.com /"
    assert sentry_span.status == "resource_exhausted"

    assert sentry_span._data["http.method"] == "GET"
    assert sentry_span._data["http.response.status_code"] == 429
    assert sentry_span._data["http.status_text"] == "xxx"
    assert sentry_span._data["http.user_agent"] == "curl/7.64.1"
    assert sentry_span._data["net.peer.name"] == "example.com"
    assert sentry_span._data["http.target"] == "/"


@pytest.mark.parametrize(
    "otel_status, expected_status",
    [
        pytest.param(Status(StatusCode.UNSET), None, id="unset"),
        pytest.param(Status(StatusCode.OK), "ok", id="ok"),
        pytest.param(Status(StatusCode.ERROR), "internal_error", id="error"),
    ],
)
def test_update_span_with_otel_status(otel_status, expected_status):
    sentry_span = Span()

    otel_span = MagicMock()
    otel_span.name = "Test OTel Span"
    otel_span.kind = SpanKind.INTERNAL
    otel_span.status = otel_status

    span_processor = SentrySpanProcessor()
    span_processor._update_span_with_otel_status(sentry_span, otel_span)

    assert sentry_span.get_trace_context().get("status") == expected_status


def test_update_span_with_otel_data_http_method2():
    sentry_span = Span()

    otel_span = MagicMock()
    otel_span.name = "Test OTel Span"
    otel_span.kind = SpanKind.SERVER
    otel_span.attributes = {
        "http.method": "GET",
        "http.status_code": 429,
        "http.status_text": "xxx",
        "http.user_agent": "curl/7.64.1",
        "http.url": "https://example.com/status/403?password=123&username=test@example.com&author=User123&auth=1234567890abcdef",
    }

    span_processor = SentrySpanProcessor()
    span_processor._update_span_with_otel_data(sentry_span, otel_span)

    assert sentry_span.op == "http.server"
    assert sentry_span.description == "GET https://example.com/status/403"
    assert sentry_span.status == "resource_exhausted"

    assert sentry_span._data["http.method"] == "GET"
    assert sentry_span._data["http.response.status_code"] == 429
    assert sentry_span._data["http.status_text"] == "xxx"
    assert sentry_span._data["http.user_agent"] == "curl/7.64.1"
    assert (
        sentry_span._data["http.url"]
        == "https://example.com/status/403?password=123&username=test@example.com&author=User123&auth=1234567890abcdef"
    )


def test_update_span_with_otel_data_db_query():
    sentry_span = Span()

    otel_span = MagicMock()
    otel_span.name = "Test OTel Span"
    otel_span.attributes = {
        "db.system": "postgresql",
        "db.statement": "SELECT * FROM table where pwd = '123456'",
    }

    span_processor = SentrySpanProcessor()
    span_processor._update_span_with_otel_data(sentry_span, otel_span)

    assert sentry_span.op == "db"
    assert sentry_span.description == "SELECT * FROM table where pwd = '123456'"

    assert sentry_span._data["db.system"] == "postgresql"
    assert (
        sentry_span._data["db.statement"] == "SELECT * FROM table where pwd = '123456'"
    )


def test_on_start_transaction():
    otel_span = MagicMock()
    otel_span.name = "Sample OTel Span"
    otel_span.start_time = time.time_ns()
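    # OTel stores timestamps as integer nanoseconds since the epoch; the
    # processor converts them to UTC datetimes (note the 1e9 division below).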
    span_context = SpanContext(
        trace_id=int("1234567890abcdef1234567890abcdef", 16),
        span_id=int("1234567890abcdef", 16),
        is_remote=True,
    )
    otel_span.get_span_context.return_value = span_context
    otel_span.parent = MagicMock()
    otel_span.parent.span_id = int("abcdef1234567890", 16)

    parent_context = {}

    fake_client = MagicMock()
    fake_client.options = {"instrumenter": "otel"}
    fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"

    current_hub = MagicMock()
    current_hub.client = fake_client

    fake_hub = MagicMock()
    fake_hub.current = current_hub

    with mock.patch(
        "sentry_sdk.integrations.opentelemetry.span_processor.Hub", fake_hub
    ):
        span_processor = SentrySpanProcessor()
        span_processor.on_start(otel_span, parent_context)

        fake_hub.current.start_transaction.assert_called_once_with(
            name="Sample OTel Span",
            span_id="1234567890abcdef",
            parent_span_id="abcdef1234567890",
            trace_id="1234567890abcdef1234567890abcdef",
            baggage=None,
            start_timestamp=datetime.fromtimestamp(
                otel_span.start_time / 1e9, timezone.utc
            ),
            instrumenter="otel",
        )

        assert len(span_processor.otel_span_map.keys()) == 1
        assert list(span_processor.otel_span_map.keys())[0] == "1234567890abcdef"


def test_on_start_child():
    otel_span = MagicMock()
    otel_span.name = "Sample OTel Span"
    otel_span.start_time = time.time_ns()
    span_context = SpanContext(
        trace_id=int("1234567890abcdef1234567890abcdef", 16),
        span_id=int("1234567890abcdef", 16),
        is_remote=True,
    )
    otel_span.get_span_context.return_value = span_context
    otel_span.parent = MagicMock()
    otel_span.parent.span_id = int("abcdef1234567890", 16)

    parent_context = {}

    fake_client = MagicMock()
    fake_client.options = {"instrumenter": "otel"}
    fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"

    current_hub = MagicMock()
    current_hub.client = fake_client

    fake_hub = MagicMock()
    fake_hub.current = current_hub

    with mock.patch(
        "sentry_sdk.integrations.opentelemetry.span_processor.Hub", fake_hub
    ):
        fake_span = MagicMock()

        span_processor = SentrySpanProcessor()
        span_processor.otel_span_map["abcdef1234567890"] = fake_span
        span_processor.on_start(otel_span, parent_context)

        fake_span.start_child.assert_called_once_with(
            span_id="1234567890abcdef",
            description="Sample OTel Span",
            start_timestamp=datetime.fromtimestamp(
                otel_span.start_time / 1e9, timezone.utc
            ),
            instrumenter="otel",
        )

        assert len(span_processor.otel_span_map.keys()) == 2
        assert "abcdef1234567890" in span_processor.otel_span_map.keys()
        assert "1234567890abcdef" in span_processor.otel_span_map.keys()


def test_on_end_no_sentry_span():
    """
    If on_end is called on a span that is not in the otel_span_map, it should be a no-op.
    """
    otel_span = MagicMock()
    otel_span.name = "Sample OTel Span"
    otel_span.end_time = time.time_ns()
    span_context = SpanContext(
        trace_id=int("1234567890abcdef1234567890abcdef", 16),
        span_id=int("1234567890abcdef", 16),
        is_remote=True,
    )
    otel_span.get_span_context.return_value = span_context

    span_processor = SentrySpanProcessor()
    span_processor.otel_span_map = {}
    span_processor._get_otel_context = MagicMock()
    span_processor._update_span_with_otel_data = MagicMock()

    span_processor.on_end(otel_span)

    span_processor._get_otel_context.assert_not_called()
    span_processor._update_span_with_otel_data.assert_not_called()


def test_on_end_sentry_transaction():
    """
    Test on_end for a sentry Transaction.
    """
    otel_span = MagicMock()
    otel_span.name = "Sample OTel Span"
    otel_span.end_time = time.time_ns()
    otel_span.status = Status(StatusCode.OK)
    span_context = SpanContext(
        trace_id=int("1234567890abcdef1234567890abcdef", 16),
        span_id=int("1234567890abcdef", 16),
        is_remote=True,
    )
    otel_span.get_span_context.return_value = span_context

    fake_sentry_span = MagicMock(spec=Transaction)
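    # spec=Transaction makes the processor's isinstance() check treat this
    # mock as a Transaction, taking the set_context() code path below.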
    fake_sentry_span.set_context = MagicMock()
    fake_sentry_span.finish = MagicMock()

    span_processor = SentrySpanProcessor()
    span_processor._get_otel_context = MagicMock()
    span_processor._update_span_with_otel_data = MagicMock()
    span_processor.otel_span_map["1234567890abcdef"] = fake_sentry_span

    span_processor.on_end(otel_span)

    fake_sentry_span.set_context.assert_called_once()
    span_processor._update_span_with_otel_data.assert_not_called()
    fake_sentry_span.set_status.assert_called_once_with("ok")
    fake_sentry_span.finish.assert_called_once()


def test_on_end_sentry_span():
    """
    Test on_end for a sentry Span.
    """
    otel_span = MagicMock()
    otel_span.name = "Sample OTel Span"
    otel_span.end_time = time.time_ns()
    otel_span.status = Status(StatusCode.OK)
    span_context = SpanContext(
        trace_id=int("1234567890abcdef1234567890abcdef", 16),
        span_id=int("1234567890abcdef", 16),
        is_remote=True,
    )
    otel_span.get_span_context.return_value = span_context

    fake_sentry_span = MagicMock(spec=Span)
    fake_sentry_span.set_context = MagicMock()
    fake_sentry_span.finish = MagicMock()

    span_processor = SentrySpanProcessor()
    span_processor._get_otel_context = MagicMock()
    span_processor._update_span_with_otel_data = MagicMock()
    span_processor.otel_span_map["1234567890abcdef"] = fake_sentry_span

    span_processor.on_end(otel_span)

    fake_sentry_span.set_context.assert_not_called()
    span_processor._update_span_with_otel_data.assert_called_once_with(
        fake_sentry_span, otel_span
    )
    fake_sentry_span.set_status.assert_called_once_with("ok")
    fake_sentry_span.finish.assert_called_once()


def test_link_trace_context_to_error_event():
    """
    Test that the trace context is added to the error event.
    """
    fake_client = MagicMock()
    fake_client.options = {"instrumenter": "otel"}

    current_hub = MagicMock()
    current_hub.client = fake_client

    fake_hub = MagicMock()
    fake_hub.current = current_hub

    span_id = "1234567890abcdef"
    trace_id = "1234567890abcdef1234567890abcdef"

    fake_trace_context = {
        "bla": "blub",
        "foo": "bar",
        "baz": 123,
    }

    sentry_span = MagicMock()
    sentry_span.get_trace_context = MagicMock(return_value=fake_trace_context)

    otel_span_map = {
        span_id: sentry_span,
    }

    span_context = SpanContext(
        trace_id=int(trace_id, 16),
        span_id=int(span_id, 16),
        is_remote=True,
    )
    otel_span = MagicMock()
    otel_span.get_span_context = MagicMock(return_value=span_context)

    fake_event = {"event_id": "1234567890abcdef1234567890abcdef"}

    with mock.patch(
        "sentry_sdk.integrations.opentelemetry.span_processor.get_current_span",
        return_value=otel_span,
    ):
        event = link_trace_context_to_error_event(fake_event, otel_span_map)

        assert event
        assert event == fake_event  # the event is changed in place inside the function
        assert "contexts" in event
        assert "trace" in event["contexts"]
        assert event["contexts"]["trace"] == fake_trace_context
sentry-python-1.39.2/tests/integrations/pure_eval/000077500000000000000000000000001454744723200223175ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/pure_eval/__init__.py000066400000000000000000000000601454744723200244240ustar00rootroot00000000000000import pytest

pytest.importorskip("pure_eval")
sentry-python-1.39.2/tests/integrations/pure_eval/test_pure_eval.py000066400000000000000000000046151454744723200257200ustar00rootroot00000000000000import sys
from types import SimpleNamespace

import pytest

from sentry_sdk import capture_exception, serializer
from sentry_sdk.integrations.pure_eval import PureEvalIntegration


@pytest.mark.parametrize("integrations", [[], [PureEvalIntegration()]])
def test_include_local_variables_enabled(sentry_init, capture_events, integrations):
    sentry_init(include_local_variables=True, integrations=integrations)
    events = capture_events()

    def foo():
        namespace = SimpleNamespace()
        q = 1
        w = 2
        e = 3
        r = 4
        t = 5
        y = 6
        u = 7
        i = 8
        o = 9
        p = 10
        a = 11
        s = 12
        str((q, w, e, r, t, y, u, i, o, p, a, s))  # use variables for linter
        namespace.d = {1: 2}
        print(namespace.d[1] / 0)
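        # The ZeroDivisionError is raised on this line, so pure_eval ranks the
        # expressions used here (namespace.d, namespace.d[1]) first in "vars".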

        # Appearances of variables after the main statement don't affect order
        print(q)
        print(s)
        print(events)

    try:
        foo()
    except Exception:
        capture_exception()

    (event,) = events

    assert all(
        frame["vars"]
        for frame in event["exception"]["values"][0]["stacktrace"]["frames"]
    )

    frame_vars = event["exception"]["values"][0]["stacktrace"]["frames"][-1]["vars"]

    if integrations:
        # Values closest to the exception line appear first
        # Test this order if possible given the Python version and dict order
        expected_keys = [
            "namespace",
            "namespace.d",
            "namespace.d[1]",
            "s",
            "a",
            "p",
            "o",
            "i",
            "u",
            "y",
        ]
        if sys.version_info[:2] == (3, 5):
            assert frame_vars.keys() == set(expected_keys)
        else:
            assert list(frame_vars.keys()) == expected_keys
        assert frame_vars["namespace.d"] == {"1": "2"}
        assert frame_vars["namespace.d[1]"] == "2"
    else:
        # Without pure_eval, which variables get captured is unpredictable.
        # On Python versions with insertion-ordered dicts, the variables
        # defined first appear first and are thus the ones included.
        assert frame_vars.keys() <= {
            "namespace",
            "q",
            "w",
            "e",
            "r",
            "t",
            "y",
            "u",
            "i",
            "o",
            "p",
            "a",
            "s",
            "events",
        }
        assert len(frame_vars) == serializer.MAX_DATABAG_BREADTH
sentry-python-1.39.2/tests/integrations/pymongo/000077500000000000000000000000001454744723200220255ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/pymongo/__init__.py000066400000000000000000000000561454744723200241370ustar00rootroot00000000000000import pytest

pytest.importorskip("pymongo")
sentry-python-1.39.2/tests/integrations/pymongo/test_pymongo.py000066400000000000000000000341311454744723200251300ustar00rootroot00000000000000from sentry_sdk import capture_message, start_transaction
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.pymongo import PyMongoIntegration, _strip_pii

from mockupdb import MockupDB, OpQuery
from pymongo import MongoClient
import pytest


@pytest.fixture(scope="session")
def mongo_server():
    server = MockupDB(verbose=True)
    server.autoresponds("ismaster", maxWireVersion=6)
    server.run()
    server.autoresponds(
        {"find": "test_collection"}, cursor={"id": 123, "firstBatch": []}
    )
    # The wire format of "find" queries changed somewhere between PyMongo 3.1 and 3.12.
    # This autoresponder answers legacy OpQuery-style "find" requests from older
    # PyMongo versions the same way as the responder above.
    server.autoresponds(OpQuery({"foobar": 1}), cursor={"id": 123, "firstBatch": []})
    server.autoresponds({"insert": "test_collection"}, ok=1)
    server.autoresponds({"insert": "erroneous"}, ok=0, errmsg="test error")
    yield server
    server.stop()


@pytest.mark.parametrize("with_pii", [False, True])
def test_transactions(sentry_init, capture_events, mongo_server, with_pii):
    sentry_init(
        integrations=[PyMongoIntegration()],
        traces_sample_rate=1.0,
        send_default_pii=with_pii,
    )
    events = capture_events()

    connection = MongoClient(mongo_server.uri)

    with start_transaction():
        list(
            connection["test_db"]["test_collection"].find({"foobar": 1})
        )  # force query execution
        connection["test_db"]["test_collection"].insert_one({"foo": 2})
        try:
            connection["test_db"]["erroneous"].insert_many([{"bar": 3}, {"baz": 4}])
            pytest.fail("Request should raise")
        except Exception:
            pass

    (event,) = events
    (find, insert_success, insert_fail) = event["spans"]

    common_tags = {
        "db.name": "test_db",
        "db.system": "mongodb",
        "net.peer.name": mongo_server.host,
        "net.peer.port": str(mongo_server.port),
    }
    for span in find, insert_success, insert_fail:
        assert span["data"][SPANDATA.DB_SYSTEM] == "mongodb"
        assert span["data"][SPANDATA.DB_NAME] == "test_db"
        assert span["data"][SPANDATA.SERVER_ADDRESS] == "localhost"
        assert span["data"][SPANDATA.SERVER_PORT] == mongo_server.port
        for field, value in common_tags.items():
            assert span["tags"][field] == value

    assert find["op"] == "db.query"
    assert insert_success["op"] == "db.query"
    assert insert_fail["op"] == "db.query"

    assert find["tags"]["db.operation"] == "find"
    assert insert_success["tags"]["db.operation"] == "insert"
    assert insert_fail["tags"]["db.operation"] == "insert"

    assert find["description"].startswith("find {")
    assert insert_success["description"].startswith("insert {")
    assert insert_fail["description"].startswith("insert {")
    if with_pii:
        assert "1" in find["description"]
        assert "2" in insert_success["description"]
        assert "3" in insert_fail["description"] and "4" in insert_fail["description"]
    else:
        # All values in filter replaced by "%s"
        assert "1" not in find["description"]
        # All keys below top level replaced by "%s"
        assert "2" not in insert_success["description"]
        assert (
            "3" not in insert_fail["description"]
            and "4" not in insert_fail["description"]
        )

    assert find["tags"]["status"] == "ok"
    assert insert_success["tags"]["status"] == "ok"
    assert insert_fail["tags"]["status"] == "internal_error"


@pytest.mark.parametrize("with_pii", [False, True])
def test_breadcrumbs(sentry_init, capture_events, mongo_server, with_pii):
    sentry_init(
        integrations=[PyMongoIntegration()],
        traces_sample_rate=1.0,
        send_default_pii=with_pii,
    )
    events = capture_events()

    connection = MongoClient(mongo_server.uri)

    list(
        connection["test_db"]["test_collection"].find({"foobar": 1})
    )  # force query execution
    capture_message("hi")

    (event,) = events
    (crumb,) = event["breadcrumbs"]["values"]

    assert crumb["category"] == "query"
    assert crumb["message"].startswith("find {")
    if with_pii:
        assert "1" in crumb["message"]
    else:
        assert "1" not in crumb["message"]
    assert crumb["type"] == "db.query"
    assert crumb["data"] == {
        "db.name": "test_db",
        "db.system": "mongodb",
        "db.operation": "find",
        "net.peer.name": mongo_server.host,
        "net.peer.port": str(mongo_server.port),
    }


@pytest.mark.parametrize(
    "testcase",
    [
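        # Each testcase pairs a raw command document with its expected
        # PII-stripped form: values are replaced with "%s" while the
        # structural keys of the command are preserved.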
        {
            "command": {
                "insert": "my_collection",
                "ordered": True,
                "documents": [
                    {
                        "username": "anton2",
                        "email": "anton@somewhere.io",
                        "password": "c4e86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf0175",
                        "_id": "635bc7403cb4f8a736f61cf2",
                    }
                ],
            },
            "command_stripped": {
                "insert": "my_collection",
                "ordered": True,
                "documents": [
                    {"username": "%s", "email": "%s", "password": "%s", "_id": "%s"}
                ],
            },
        },
        {
            "command": {
                "insert": "my_collection",
                "ordered": True,
                "documents": [
                    {
                        "username": "indiana4",
                        "email": "indy@jones.org",
                        "password": "63e86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf016b",
                        "_id": "635bc7403cb4f8a736f61cf3",
                    }
                ],
            },
            "command_stripped": {
                "insert": "my_collection",
                "ordered": True,
                "documents": [
                    {"username": "%s", "email": "%s", "password": "%s", "_id": "%s"}
                ],
            },
        },
        {
            "command": {
                "find": "my_collection",
                "filter": {},
                "limit": 1,
                "singleBatch": True,
            },
            "command_stripped": {
                "find": "my_collection",
                "filter": {},
                "limit": 1,
                "singleBatch": True,
            },
        },
        {
            "command": {
                "find": "my_collection",
                "filter": {"username": "notthere"},
                "limit": 1,
                "singleBatch": True,
            },
            "command_stripped": {
                "find": "my_collection",
                "filter": {"username": "%s"},
                "limit": 1,
                "singleBatch": True,
            },
        },
        {
            "command": {
                "insert": "my_collection",
                "ordered": True,
                "documents": [
                    {
                        "username": "userx1",
                        "email": "x@somewhere.io",
                        "password": "ccc86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf0175",
                        "_id": "635bc7403cb4f8a736f61cf4",
                    },
                    {
                        "username": "userx2",
                        "email": "x@somewhere.io",
                        "password": "xxx86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf0175",
                        "_id": "635bc7403cb4f8a736f61cf5",
                    },
                ],
            },
            "command_stripped": {
                "insert": "my_collection",
                "ordered": True,
                "documents": [
                    {"username": "%s", "email": "%s", "password": "%s", "_id": "%s"},
                    {"username": "%s", "email": "%s", "password": "%s", "_id": "%s"},
                ],
            },
        },
        {
            "command": {
                "find": "my_collection",
                "filter": {"email": "ada@lovelace.com"},
            },
            "command_stripped": {"find": "my_collection", "filter": {"email": "%s"}},
        },
        {
            "command": {
                "aggregate": "my_collection",
                "pipeline": [{"$match": {}}, {"$group": {"_id": 1, "n": {"$sum": 1}}}],
                "cursor": {},
            },
            "command_stripped": {
                "aggregate": "my_collection",
                "pipeline": [{"$match": {}}, {"$group": {"_id": 1, "n": {"$sum": 1}}}],
                "cursor": "%s",
            },
        },
        {
            "command": {
                "aggregate": "my_collection",
                "pipeline": [
                    {"$match": {"email": "x@somewhere.io"}},
                    {"$group": {"_id": 1, "n": {"$sum": 1}}},
                ],
                "cursor": {},
            },
            "command_stripped": {
                "aggregate": "my_collection",
                "pipeline": [
                    {"$match": {"email": "%s"}},
                    {"$group": {"_id": 1, "n": {"$sum": 1}}},
                ],
                "cursor": "%s",
            },
        },
        {
            "command": {
                "createIndexes": "my_collection",
                "indexes": [{"name": "username_1", "key": [("username", 1)]}],
            },
            "command_stripped": {
                "createIndexes": "my_collection",
                "indexes": [{"name": "username_1", "key": [("username", 1)]}],
            },
        },
        {
            "command": {
                "update": "my_collection",
                "ordered": True,
                "updates": [
                    ("q", {"email": "anton@somewhere.io"}),
                    (
                        "u",
                        {
                            "email": "anton2@somwehre.io",
                            "extra_field": "extra_content",
                            "new": "bla",
                        },
                    ),
                    ("multi", False),
                    ("upsert", False),
                ],
            },
            "command_stripped": {
                "update": "my_collection",
                "ordered": True,
                "updates": "%s",
            },
        },
        {
            "command": {
                "update": "my_collection",
                "ordered": True,
                "updates": [
                    ("q", {"email": "anton2@somwehre.io"}),
                    ("u", {"$rename": {"new": "new_field"}}),
                    ("multi", False),
                    ("upsert", False),
                ],
            },
            "command_stripped": {
                "update": "my_collection",
                "ordered": True,
                "updates": "%s",
            },
        },
        {
            "command": {
                "update": "my_collection",
                "ordered": True,
                "updates": [
                    ("q", {"email": "x@somewhere.io"}),
                    ("u", {"$rename": {"password": "pwd"}}),
                    ("multi", True),
                    ("upsert", False),
                ],
            },
            "command_stripped": {
                "update": "my_collection",
                "ordered": True,
                "updates": "%s",
            },
        },
        {
            "command": {
                "delete": "my_collection",
                "ordered": True,
                "deletes": [("q", {"username": "userx2"}), ("limit", 1)],
            },
            "command_stripped": {
                "delete": "my_collection",
                "ordered": True,
                "deletes": "%s",
            },
        },
        {
            "command": {
                "delete": "my_collection",
                "ordered": True,
                "deletes": [("q", {"email": "xplus@somewhere.io"}), ("limit", 0)],
            },
            "command_stripped": {
                "delete": "my_collection",
                "ordered": True,
                "deletes": "%s",
            },
        },
        {
            "command": {
                "findAndModify": "my_collection",
                "query": {"email": "ada@lovelace.com"},
                "new": False,
                "remove": True,
            },
            "command_stripped": {
                "findAndModify": "my_collection",
                "query": {"email": "%s"},
                "new": "%s",
                "remove": "%s",
            },
        },
        {
            "command": {
                "findAndModify": "my_collection",
                "query": {"email": "anton2@somewhere.io"},
                "new": False,
                "update": {"email": "anton3@somwehre.io", "extra_field": "xxx"},
                "upsert": False,
            },
            "command_stripped": {
                "findAndModify": "my_collection",
                "query": {"email": "%s"},
                "new": "%s",
                "update": {"email": "%s", "extra_field": "%s"},
                "upsert": "%s",
            },
        },
        {
            "command": {
                "findAndModify": "my_collection",
                "query": {"email": "anton3@somewhere.io"},
                "new": False,
                "update": {"$rename": {"extra_field": "extra_field2"}},
                "upsert": False,
            },
            "command_stripped": {
                "findAndModify": "my_collection",
                "query": {"email": "%s"},
                "new": "%s",
                "update": {"$rename": "%s"},
                "upsert": "%s",
            },
        },
        {
            "command": {
                "renameCollection": "test.my_collection",
                "to": "test.new_collection",
            },
            "command_stripped": {
                "renameCollection": "test.my_collection",
                "to": "test.new_collection",
            },
        },
        {
            "command": {"drop": "new_collection"},
            "command_stripped": {"drop": "new_collection"},
        },
    ],
)
def test_strip_pii(testcase):
    assert _strip_pii(testcase["command"]) == testcase["command_stripped"]
sentry-python-1.39.2/tests/integrations/pyramid/000077500000000000000000000000001454744723200220025ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/pyramid/__init__.py000066400000000000000000000000561454744723200241140ustar00rootroot00000000000000import pytest

pytest.importorskip("pyramid")
sentry-python-1.39.2/tests/integrations/pyramid/test_pyramid.py000066400000000000000000000255001454744723200250620ustar00rootroot00000000000000import json
import logging
from io import BytesIO

import pyramid.testing
import pytest
from pyramid.authorization import ACLAuthorizationPolicy
from pyramid.response import Response
from werkzeug.test import Client

from sentry_sdk import capture_message, add_breadcrumb
from sentry_sdk.integrations.pyramid import PyramidIntegration
from sentry_sdk.serializer import MAX_DATABAG_BREADTH
from tests.conftest import unpack_werkzeug_response


try:
    from importlib.metadata import version

    PYRAMID_VERSION = tuple(map(int, version("pyramid").split(".")))

except ImportError:
    # < py3.8
    import pkg_resources

    PYRAMID_VERSION = tuple(
        map(int, pkg_resources.get_distribution("pyramid").version.split("."))
    )


def hi(request):
    capture_message("hi")
    return Response("hi")


def hi_with_id(request):
    capture_message("hi with id")
    return Response("hi with id")


@pytest.fixture
def pyramid_config():
    config = pyramid.testing.setUp()
    try:
        config.add_route("hi", "/message")
        config.add_view(hi, route_name="hi")
        config.add_route("hi_with_id", "/message/{message_id}")
        config.add_view(hi_with_id, route_name="hi_with_id")
        yield config
    finally:
        pyramid.testing.tearDown()


@pytest.fixture
def route(pyramid_config):
    def inner(url):
        def wrapper(f):
            pyramid_config.add_route(f.__name__, url)
            pyramid_config.add_view(f, route_name=f.__name__)
            return f

        return wrapper

    return inner


@pytest.fixture
def get_client(pyramid_config):
    def inner():
        return Client(pyramid_config.make_wsgi_app())

    return inner


def test_view_exceptions(
    get_client, route, sentry_init, capture_events, capture_exceptions
):
    sentry_init(integrations=[PyramidIntegration()])
    events = capture_events()
    exceptions = capture_exceptions()

    add_breadcrumb({"message": "hi"})

    @route("/errors")
    def errors(request):
        add_breadcrumb({"message": "hi2"})
        1 / 0

    client = get_client()
    with pytest.raises(ZeroDivisionError):
        client.get("/errors")

    (error,) = exceptions
    assert isinstance(error, ZeroDivisionError)

    (event,) = events
    (breadcrumb,) = event["breadcrumbs"]["values"]
    assert breadcrumb["message"] == "hi2"
    # Check only the last value in the exceptions list, because Pyramid >= 1.9
    # returns a chained exception while older versions return a single exception.
    assert event["exception"]["values"][-1]["mechanism"]["type"] == "pyramid"
    assert event["exception"]["values"][-1]["type"] == "ZeroDivisionError"


def test_has_context(route, get_client, sentry_init, capture_events):
    sentry_init(integrations=[PyramidIntegration()])
    events = capture_events()

    @route("/context_message/{msg}")
    def hi2(request):
        capture_message(request.matchdict["msg"])
        return Response("hi")

    client = get_client()
    client.get("/context_message/yoo")

    (event,) = events
    assert event["message"] == "yoo"
    assert event["request"] == {
        "env": {"SERVER_NAME": "localhost", "SERVER_PORT": "80"},
        "headers": {"Host": "localhost"},
        "method": "GET",
        "query_string": "",
        "url": "http://localhost/context_message/yoo",
    }
    assert event["transaction"] == "hi2"


@pytest.mark.parametrize(
    "url,transaction_style,expected_transaction,expected_source",
    [
        ("/message", "route_name", "hi", "component"),
        ("/message", "route_pattern", "/message", "route"),
        ("/message/123456", "route_name", "hi_with_id", "component"),
        ("/message/123456", "route_pattern", "/message/{message_id}", "route"),
    ],
)
def test_transaction_style(
    sentry_init,
    get_client,
    capture_events,
    url,
    transaction_style,
    expected_transaction,
    expected_source,
):
    sentry_init(integrations=[PyramidIntegration(transaction_style=transaction_style)])

    events = capture_events()
    client = get_client()
    client.get(url)

    (event,) = events
    assert event["transaction"] == expected_transaction
    assert event["transaction_info"] == {"source": expected_source}


def test_large_json_request(sentry_init, capture_events, route, get_client):
    sentry_init(integrations=[PyramidIntegration()])

    data = {"foo": {"bar": "a" * 2000}}

    @route("/")
    def index(request):
        assert request.json == data
        assert request.text == json.dumps(data)
        assert not request.POST
        capture_message("hi")
        return Response("ok")

    events = capture_events()

    client = get_client()
    client.post("/", content_type="application/json", data=json.dumps(data))

    (event,) = events
    assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
    }
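    # The "!limit" remark records that the serializer truncated the value;
    # only the first 1024 characters of the 2000-char string are kept.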
    assert len(event["request"]["data"]["foo"]["bar"]) == 1024


@pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
def test_pyramid_empty_json_request(sentry_init, capture_events, route, get_client, data):
    sentry_init(integrations=[PyramidIntegration()])

    @route("/")
    def index(request):
        assert request.json == data
        assert request.text == json.dumps(data)
        assert not request.POST
        capture_message("hi")
        return Response("ok")

    events = capture_events()

    client = get_client()
    response = client.post("/", content_type="application/json", data=json.dumps(data))
    assert response[1] == "200 OK"

    (event,) = events
    assert event["request"]["data"] == data


def test_json_not_truncated_if_max_request_body_size_is_always(
    sentry_init, capture_events, route, get_client
):
    sentry_init(integrations=[PyramidIntegration()], max_request_body_size="always")

    data = {
        "key{}".format(i): "value{}".format(i) for i in range(MAX_DATABAG_BREADTH + 10)
    }

    @route("/")
    def index(request):
        assert request.json == data
        assert request.text == json.dumps(data)
        capture_message("hi")
        return Response("ok")

    events = capture_events()

    client = get_client()
    client.post("/", content_type="application/json", data=json.dumps(data))

    (event,) = events
    assert event["request"]["data"] == data


def test_files_and_form(sentry_init, capture_events, route, get_client):
    sentry_init(integrations=[PyramidIntegration()], max_request_body_size="always")

    data = {"foo": "a" * 2000, "file": (BytesIO(b"hello"), "hello.txt")}

    @route("/")
    def index(request):
        capture_message("hi")
        return Response("ok")

    events = capture_events()

    client = get_client()
    client.post("/", data=data)

    (event,) = events
    assert event["_meta"]["request"]["data"]["foo"] == {
        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
    }
    assert len(event["request"]["data"]["foo"]) == 1024

    assert event["_meta"]["request"]["data"]["file"] == {"": {"rem": [["!raw", "x"]]}}
    assert not event["request"]["data"]["file"]


def test_bad_request_not_captured(
    sentry_init, pyramid_config, capture_events, route, get_client
):
    import pyramid.httpexceptions as exc

    sentry_init(integrations=[PyramidIntegration()])
    events = capture_events()

    @route("/")
    def index(request):
        raise exc.HTTPBadRequest()

    def errorhandler(exc, request):
        return Response("bad request")

    pyramid_config.add_view(errorhandler, context=exc.HTTPBadRequest)

    client = get_client()
    client.get("/")

    assert not events


def test_errorhandler_ok(
    sentry_init, pyramid_config, capture_exceptions, route, get_client
):
    sentry_init(integrations=[PyramidIntegration()])
    errors = capture_exceptions()

    @route("/")
    def index(request):
        raise Exception()

    def errorhandler(exc, request):
        return Response("bad request")

    pyramid_config.add_view(errorhandler, context=Exception)

    client = get_client()
    client.get("/")

    assert not errors


@pytest.mark.skipif(
    PYRAMID_VERSION < (1, 9),
    reason="We don't have the right hooks in older Pyramid versions",
)
def test_errorhandler_500(
    sentry_init, pyramid_config, capture_exceptions, route, get_client
):
    sentry_init(integrations=[PyramidIntegration()])
    errors = capture_exceptions()

    @route("/")
    def index(request):
        1 / 0

    def errorhandler(exc, request):
        return Response("bad request", status=500)

    pyramid_config.add_view(errorhandler, context=Exception)

    client = get_client()
    app_iter, status, headers = unpack_werkzeug_response(client.get("/"))
    assert app_iter == b"bad request"
    assert status.lower() == "500 internal server error"

    (error,) = errors

    assert isinstance(error, ZeroDivisionError)


def test_error_in_errorhandler(
    sentry_init, pyramid_config, capture_events, route, get_client
):
    sentry_init(integrations=[PyramidIntegration()])

    @route("/")
    def index(request):
        raise ValueError()

    def error_handler(err, request):
        1 / 0

    pyramid_config.add_view(error_handler, context=ValueError)

    events = capture_events()

    client = get_client()

    with pytest.raises(ZeroDivisionError):
        client.get("/")

    (event,) = events

    exception = event["exception"]["values"][-1]
    assert exception["type"] == "ZeroDivisionError"


def test_error_in_authenticated_userid(
    sentry_init, pyramid_config, capture_events, route, get_client
):
    from sentry_sdk.integrations.logging import LoggingIntegration

    sentry_init(
        send_default_pii=True,
        integrations=[
            PyramidIntegration(),
            LoggingIntegration(event_level=logging.ERROR),
        ],
    )
    logger = logging.getLogger("test_pyramid")

    class AuthenticationPolicy(object):
        def authenticated_userid(self, request):
            logger.error("failed to identify user")

    pyramid_config.set_authorization_policy(ACLAuthorizationPolicy())
    pyramid_config.set_authentication_policy(AuthenticationPolicy())

    events = capture_events()

    client = get_client()
    client.get("/message")

    assert len(events) == 1


def tween_factory(handler, registry):
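    # A Pyramid tween wraps the request handler much like WSGI middleware;
    # this one converts exceptions raised by the "index" route into a plain
    # 400 response.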
    def tween(request):
        try:
            response = handler(request)
        except Exception:
            mroute = request.matched_route
            if mroute and mroute.name in ("index",):
                return Response("bad request", status_code=400)
            raise
        return response

    return tween


def test_tween_ok(sentry_init, pyramid_config, capture_exceptions, route, get_client):
    sentry_init(integrations=[PyramidIntegration()])
    errors = capture_exceptions()

    @route("/")
    def index(request):
        raise Exception()

    pyramid_config.add_tween(
        "tests.integrations.pyramid.test_pyramid.tween_factory",
        under=pyramid.tweens.INGRESS,
    )

    client = get_client()
    client.get("/")

    assert not errors
sentry-python-1.39.2/tests/integrations/quart/000077500000000000000000000000001454744723200214715ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/quart/__init__.py000066400000000000000000000000541454744723200236010ustar00rootroot00000000000000import pytest

pytest.importorskip("quart")
sentry-python-1.39.2/tests/integrations/quart/test_quart.py000066400000000000000000000343141454744723200242430ustar00rootroot00000000000000import json
import threading

import pytest
import pytest_asyncio

from sentry_sdk import (
    set_tag,
    configure_scope,
    capture_message,
    capture_exception,
    last_event_id,
)
from sentry_sdk.integrations.logging import LoggingIntegration
import sentry_sdk.integrations.quart as quart_sentry

from quart import Quart, Response, abort, stream_with_context
from quart.views import View

from quart_auth import AuthUser, login_user

try:
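    # quart-auth renamed AuthManager to QuartAuth in newer releases; support both.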
    from quart_auth import QuartAuth

    auth_manager = QuartAuth()
except ImportError:
    from quart_auth import AuthManager

    auth_manager = AuthManager()


@pytest_asyncio.fixture
async def app():
    app = Quart(__name__)
    app.debug = False
    app.config["TESTING"] = False
    app.secret_key = "haha"

    auth_manager.init_app(app)

    @app.route("/message")
    async def hi():
        capture_message("hi")
        return "ok"

    @app.route("/message/")
    async def hi_with_id(message_id):
        capture_message("hi with id")
        return "ok with id"

    @app.get("/sync/thread_ids")
    def _thread_ids_sync():
        return {
            "main": str(threading.main_thread().ident),
            "active": str(threading.current_thread().ident),
        }

    @app.get("/async/thread_ids")
    async def _thread_ids_async():
        return {
            "main": str(threading.main_thread().ident),
            "active": str(threading.current_thread().ident),
        }

    return app


@pytest.fixture(params=("manual",))
def integration_enabled_params(request):
    if request.param == "manual":
        return {"integrations": [quart_sentry.QuartIntegration()]}
    else:
        raise ValueError(request.param)


@pytest.mark.asyncio
async def test_has_context(sentry_init, app, capture_events):
    sentry_init(integrations=[quart_sentry.QuartIntegration()])
    events = capture_events()

    client = app.test_client()
    response = await client.get("/message")
    assert response.status_code == 200

    (event,) = events
    assert event["transaction"] == "hi"
    assert "data" not in event["request"]
    assert event["request"]["url"] == "http://localhost/message"


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "url,transaction_style,expected_transaction,expected_source",
    [
        ("/message", "endpoint", "hi", "component"),
        ("/message", "url", "/message", "route"),
        ("/message/123456", "endpoint", "hi_with_id", "component"),
        ("/message/123456", "url", "/message/", "route"),
    ],
)
async def test_transaction_style(
    sentry_init,
    app,
    capture_events,
    url,
    transaction_style,
    expected_transaction,
    expected_source,
):
    sentry_init(
        integrations=[
            quart_sentry.QuartIntegration(transaction_style=transaction_style)
        ]
    )
    events = capture_events()

    client = app.test_client()
    response = await client.get(url)
    assert response.status_code == 200

    (event,) = events
    assert event["transaction"] == expected_transaction


@pytest.mark.asyncio
async def test_errors(
    sentry_init,
    capture_exceptions,
    capture_events,
    app,
    integration_enabled_params,
):
    sentry_init(debug=True, **integration_enabled_params)

    @app.route("/")
    async def index():
        1 / 0

    exceptions = capture_exceptions()
    events = capture_events()

    client = app.test_client()
    try:
        await client.get("/")
    except ZeroDivisionError:
        pass

    (exc,) = exceptions
    assert isinstance(exc, ZeroDivisionError)

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "quart"


@pytest.mark.asyncio
async def test_quart_auth_not_installed(
    sentry_init, app, capture_events, monkeypatch, integration_enabled_params
):
    sentry_init(**integration_enabled_params)

    monkeypatch.setattr(quart_sentry, "quart_auth", None)

    events = capture_events()

    client = app.test_client()
    await client.get("/message")

    (event,) = events
    assert event.get("user", {}).get("id") is None


@pytest.mark.asyncio
async def test_quart_auth_not_configured(
    sentry_init, app, capture_events, monkeypatch, integration_enabled_params
):
    sentry_init(**integration_enabled_params)

    assert quart_sentry.quart_auth

    events = capture_events()
    client = app.test_client()
    await client.get("/message")

    (event,) = events
    assert event.get("user", {}).get("id") is None


@pytest.mark.asyncio
async def test_quart_auth_partially_configured(
    sentry_init, app, capture_events, monkeypatch, integration_enabled_params
):
    sentry_init(**integration_enabled_params)

    events = capture_events()

    client = app.test_client()
    await client.get("/message")

    (event,) = events
    assert event.get("user", {}).get("id") is None


@pytest.mark.asyncio
@pytest.mark.parametrize("send_default_pii", [True, False])
@pytest.mark.parametrize("user_id", [None, "42", "3"])
async def test_quart_auth_configured(
    send_default_pii,
    sentry_init,
    app,
    user_id,
    capture_events,
    monkeypatch,
    integration_enabled_params,
):
    sentry_init(send_default_pii=send_default_pii, **integration_enabled_params)

    @app.route("/login")
    async def login():
        if user_id is not None:
            login_user(AuthUser(user_id))
        return "ok"

    events = capture_events()

    client = app.test_client()
    assert (await client.get("/login")).status_code == 200
    assert not events

    assert (await client.get("/message")).status_code == 200

    (event,) = events
    if user_id is None or not send_default_pii:
        assert event.get("user", {}).get("id") is None
    else:
        assert event["user"]["id"] == str(user_id)


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "integrations",
    [
        [quart_sentry.QuartIntegration()],
        [quart_sentry.QuartIntegration(), LoggingIntegration(event_level="ERROR")],
    ],
)
async def test_errors_not_reported_twice(
    sentry_init, integrations, capture_events, app
):
    sentry_init(integrations=integrations)

    @app.route("/")
    async def index():
        try:
            1 / 0
        except Exception as e:
            app.logger.exception(e)
            raise e

    events = capture_events()

    client = app.test_client()
    # Quart handles the exception through its signal machinery, so the request
    # completes instead of raising ZeroDivisionError here.
    await client.get("/")

    assert len(events) == 1


@pytest.mark.asyncio
async def test_logging(sentry_init, capture_events, app):
    # ensure that Quart's logger magic doesn't break ours
    sentry_init(
        integrations=[
            quart_sentry.QuartIntegration(),
            LoggingIntegration(event_level="ERROR"),
        ]
    )

    @app.route("/")
    async def index():
        app.logger.error("hi")
        return "ok"

    events = capture_events()

    client = app.test_client()
    await client.get("/")

    (event,) = events
    assert event["level"] == "error"


@pytest.mark.asyncio
async def test_no_errors_without_request(app, sentry_init):
    sentry_init(integrations=[quart_sentry.QuartIntegration()])
    async with app.app_context():
        capture_exception(ValueError())


def test_cli_commands_raise(app):
    if not hasattr(app, "cli"):
        pytest.skip("Too old quart version")

    from quart.cli import ScriptInfo

    @app.cli.command()
    def foo():
        1 / 0

    with pytest.raises(ZeroDivisionError):
        app.cli.main(
            args=["foo"], prog_name="myapp", obj=ScriptInfo(create_app=lambda _: app)
        )


@pytest.mark.asyncio
async def test_500(sentry_init, capture_events, app):
    sentry_init(integrations=[quart_sentry.QuartIntegration()])

    @app.route("/")
    async def index():
        1 / 0

    @app.errorhandler(500)
    async def error_handler(err):
        return "Sentry error: %s" % last_event_id()

    events = capture_events()

    client = app.test_client()
    response = await client.get("/")

    (event,) = events
    assert (await response.get_data(as_text=True)) == "Sentry error: %s" % event[
        "event_id"
    ]


@pytest.mark.asyncio
async def test_error_in_errorhandler(sentry_init, capture_events, app):
    sentry_init(integrations=[quart_sentry.QuartIntegration()])

    @app.route("/")
    async def index():
        raise ValueError()

    @app.errorhandler(500)
    async def error_handler(err):
        1 / 0

    events = capture_events()

    client = app.test_client()

    with pytest.raises(ZeroDivisionError):
        await client.get("/")

    event1, event2 = events

    (exception,) = event1["exception"]["values"]
    assert exception["type"] == "ValueError"

    exception = event2["exception"]["values"][-1]
    assert exception["type"] == "ZeroDivisionError"


@pytest.mark.asyncio
async def test_bad_request_not_captured(sentry_init, capture_events, app):
    sentry_init(integrations=[quart_sentry.QuartIntegration()])
    events = capture_events()

    @app.route("/")
    async def index():
        abort(400)

    client = app.test_client()

    await client.get("/")

    assert not events


@pytest.mark.asyncio
async def test_does_not_leak_scope(sentry_init, capture_events, app):
    sentry_init(integrations=[quart_sentry.QuartIntegration()])
    events = capture_events()

    with configure_scope() as scope:
        scope.set_tag("request_data", False)

    @app.route("/")
    async def index():
        with configure_scope() as scope:
            scope.set_tag("request_data", True)

        async def generate():
            for row in range(1000):
                with configure_scope() as scope:
                    assert scope._tags["request_data"]

                yield str(row) + "\n"

        return Response(stream_with_context(generate)(), mimetype="text/csv")

    client = app.test_client()
    response = await client.get("/")
    assert (await response.get_data(as_text=True)) == "".join(
        str(row) + "\n" for row in range(1000)
    )
    assert not events

    with configure_scope() as scope:
        assert not scope._tags["request_data"]


@pytest.mark.asyncio
async def test_scoped_test_client(sentry_init, app):
    sentry_init(integrations=[quart_sentry.QuartIntegration()])

    @app.route("/")
    async def index():
        return "ok"

    async with app.test_client() as client:
        response = await client.get("/")
        assert response.status_code == 200


@pytest.mark.asyncio
@pytest.mark.parametrize("exc_cls", [ZeroDivisionError, Exception])
async def test_errorhandler_for_exception_swallows_exception(
    sentry_init, app, capture_events, exc_cls
):
    # In contrast to error handlers for a status code, error
    # handlers for exceptions can swallow the exception (this is
    # just how the Quart signal works)
    sentry_init(integrations=[quart_sentry.QuartIntegration()])
    events = capture_events()

    @app.route("/")
    async def index():
        1 / 0

    @app.errorhandler(exc_cls)
    async def zerodivision(e):
        return "ok"

    async with app.test_client() as client:
        response = await client.get("/")
        assert response.status_code == 200

    assert not events


@pytest.mark.asyncio
async def test_tracing_success(sentry_init, capture_events, app):
    sentry_init(traces_sample_rate=1.0, integrations=[quart_sentry.QuartIntegration()])

    @app.before_request
    async def _():
        set_tag("before_request", "yes")

    @app.route("/message_tx")
    async def hi_tx():
        set_tag("view", "yes")
        capture_message("hi")
        return "ok"

    events = capture_events()

    async with app.test_client() as client:
        response = await client.get("/message_tx")
        assert response.status_code == 200

    message_event, transaction_event = events

    assert transaction_event["type"] == "transaction"
    assert transaction_event["transaction"] == "hi_tx"
    assert transaction_event["tags"]["view"] == "yes"
    assert transaction_event["tags"]["before_request"] == "yes"

    assert message_event["message"] == "hi"
    assert message_event["transaction"] == "hi_tx"
    assert message_event["tags"]["view"] == "yes"
    assert message_event["tags"]["before_request"] == "yes"


@pytest.mark.asyncio
async def test_tracing_error(sentry_init, capture_events, app):
    sentry_init(traces_sample_rate=1.0, integrations=[quart_sentry.QuartIntegration()])

    events = capture_events()

    @app.route("/error")
    async def error():
        1 / 0

    async with app.test_client() as client:
        response = await client.get("/error")
        assert response.status_code == 500

    error_event, transaction_event = events

    assert transaction_event["type"] == "transaction"
    assert transaction_event["transaction"] == "error"

    assert error_event["transaction"] == "error"
    (exception,) = error_event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"


@pytest.mark.asyncio
async def test_class_based_views(sentry_init, app, capture_events):
    sentry_init(integrations=[quart_sentry.QuartIntegration()])
    events = capture_events()

    @app.route("/")
    class HelloClass(View):
        methods = ["GET"]

        async def dispatch_request(self):
            capture_message("hi")
            return "ok"

    app.add_url_rule("/hello-class/", view_func=HelloClass.as_view("hello_class"))

    async with app.test_client() as client:
        response = await client.get("/hello-class/")
        assert response.status_code == 200

    (event,) = events

    assert event["message"] == "hi"
    assert event["transaction"] == "hello_class"


@pytest.mark.parametrize("endpoint", ["/sync/thread_ids", "/async/thread_ids"])
async def test_active_thread_id(sentry_init, capture_envelopes, endpoint, app):
    sentry_init(
        traces_sample_rate=1.0,
        _experiments={"profiles_sample_rate": 1.0},
    )

    envelopes = capture_envelopes()

    async with app.test_client() as client:
        response = await client.get(endpoint)
        assert response.status_code == 200

    data = json.loads(await response.get_data(as_text=True))

    envelopes = [envelope for envelope in envelopes]
    assert len(envelopes) == 1

    profiles = [item for item in envelopes[0].items if item.type == "profile"]
    assert len(profiles) == 1

    for profile in profiles:
        transactions = profile.payload.json["transactions"]
        assert len(transactions) == 1
        assert str(data["active"]) == transactions[0]["active_thread_id"]
sentry-python-1.39.2/tests/integrations/redis/000077500000000000000000000000001454744723200214435ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/redis/__init__.py000066400000000000000000000000541454744723200235530ustar00rootroot00000000000000import pytest

pytest.importorskip("redis")
sentry-python-1.39.2/tests/integrations/redis/asyncio/000077500000000000000000000000001454744723200231105ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/redis/asyncio/__init__.py000066400000000000000000000000711454744723200252170ustar00rootroot00000000000000import pytest

pytest.importorskip("fakeredis.aioredis")
sentry-python-1.39.2/tests/integrations/redis/asyncio/test_redis_asyncio.py000066400000000000000000000044131454744723200273560ustar00rootroot00000000000000import pytest

from sentry_sdk import capture_message, start_transaction
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.redis import RedisIntegration

from fakeredis.aioredis import FakeRedis


@pytest.mark.asyncio
async def test_async_basic(sentry_init, capture_events):
    sentry_init(integrations=[RedisIntegration()])
    events = capture_events()

    connection = FakeRedis()

    await connection.get("foobar")
    capture_message("hi")

    (event,) = events
    (crumb,) = event["breadcrumbs"]["values"]

    assert crumb == {
        "category": "redis",
        "message": "GET 'foobar'",
        "data": {
            "db.operation": "GET",
            "redis.key": "foobar",
            "redis.command": "GET",
            "redis.is_cluster": False,
        },
        "timestamp": crumb["timestamp"],
        "type": "redis",
    }


@pytest.mark.parametrize(
    "is_transaction, send_default_pii, expected_first_ten",
    [
        (False, False, ["GET 'foo'", "SET 'bar' [Filtered]", "SET 'baz' [Filtered]"]),
        (True, True, ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"]),
    ],
)
@pytest.mark.asyncio
async def test_async_redis_pipeline(
    sentry_init, capture_events, is_transaction, send_default_pii, expected_first_ten
):
    sentry_init(
        integrations=[RedisIntegration()],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    connection = FakeRedis()
    with start_transaction():
        pipeline = connection.pipeline(transaction=is_transaction)
        pipeline.get("foo")
        pipeline.set("bar", 1)
        pipeline.set("baz", 2)
        await pipeline.execute()

    (event,) = events
    (span,) = event["spans"]
    assert span["op"] == "db.redis"
    assert span["description"] == "redis.pipeline.execute"
    assert span["data"] == {
        "redis.commands": {
            "count": 3,
            "first_ten": expected_first_ten,
        },
        SPANDATA.DB_SYSTEM: "redis",
        SPANDATA.DB_NAME: "0",
        SPANDATA.SERVER_ADDRESS: connection.connection_pool.connection_kwargs.get(
            "host"
        ),
        SPANDATA.SERVER_PORT: 6379,
    }
    assert span["tags"] == {
        "redis.transaction": is_transaction,
        "redis.is_cluster": False,
    }
sentry-python-1.39.2/tests/integrations/redis/cluster/000077500000000000000000000000001454744723200231245ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/redis/cluster/__init__.py000066400000000000000000000000641454744723200252350ustar00rootroot00000000000000import pytest

pytest.importorskip("redis.cluster")
sentry-python-1.39.2/tests/integrations/redis/cluster/test_redis_cluster.py000066400000000000000000000103071454744723200274050ustar00rootroot00000000000000import pytest
from sentry_sdk import capture_message
from sentry_sdk.consts import SPANDATA
from sentry_sdk.api import start_transaction
from sentry_sdk.integrations.redis import RedisIntegration

import redis


@pytest.fixture(autouse=True)
def monkeypatch_rediscluster_class(reset_integrations):
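    # Stub out the network-facing methods so no real cluster is needed; the
    # integration only inspects command names/args and the default node info.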
    pipeline_cls = redis.cluster.ClusterPipeline
    redis.cluster.NodesManager.initialize = lambda *_, **__: None
    redis.RedisCluster.command = lambda *_: []
    redis.RedisCluster.pipeline = lambda *_, **__: pipeline_cls(None, None)
    redis.RedisCluster.get_default_node = lambda *_, **__: redis.cluster.ClusterNode(
        "localhost", 6379
    )
    pipeline_cls.execute = lambda *_, **__: None
    redis.RedisCluster.execute_command = lambda *_, **__: []


def test_rediscluster_breadcrumb(sentry_init, capture_events):
    sentry_init(integrations=[RedisIntegration()])
    events = capture_events()

    rc = redis.RedisCluster(host="localhost", port=6379)
    rc.get("foobar")
    capture_message("hi")

    (event,) = events
    crumbs = event["breadcrumbs"]["values"]

    # on initializing a RedisCluster, a COMMAND call is made - this is not important for the test
    # but must be accounted for
    assert len(crumbs) in (1, 2)
    assert len(crumbs) == 1 or crumbs[0]["message"] == "COMMAND"

    crumb = crumbs[-1]

    assert crumb == {
        "category": "redis",
        "message": "GET 'foobar'",
        "data": {
            "db.operation": "GET",
            "redis.key": "foobar",
            "redis.command": "GET",
            "redis.is_cluster": True,
        },
        "timestamp": crumb["timestamp"],
        "type": "redis",
    }


@pytest.mark.parametrize(
    "send_default_pii, description",
    [
        (False, "SET 'bar' [Filtered]"),
        (True, "SET 'bar' 1"),
    ],
)
def test_rediscluster_basic(sentry_init, capture_events, send_default_pii, description):
    sentry_init(
        integrations=[RedisIntegration()],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    with start_transaction():
        rc = redis.RedisCluster(host="localhost", port=6379)
        rc.set("bar", 1)

    (event,) = events
    spans = event["spans"]

    # on initializing a RedisCluster, a COMMAND call is made - this is not important for the test
    # but must be accounted for
    assert len(spans) in (1, 2)
    assert len(spans) == 1 or spans[0]["description"] == "COMMAND"

    span = spans[-1]
    assert span["op"] == "db.redis"
    assert span["description"] == description
    assert span["data"] == {
        SPANDATA.DB_SYSTEM: "redis",
        # ClusterNode converts localhost to 127.0.0.1
        SPANDATA.SERVER_ADDRESS: "127.0.0.1",
        SPANDATA.SERVER_PORT: 6379,
    }
    assert span["tags"] == {
        "db.operation": "SET",
        "redis.command": "SET",
        "redis.is_cluster": True,
        "redis.key": "bar",
    }


@pytest.mark.parametrize(
    "send_default_pii, expected_first_ten",
    [
        (False, ["GET 'foo'", "SET 'bar' [Filtered]", "SET 'baz' [Filtered]"]),
        (True, ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"]),
    ],
)
def test_rediscluster_pipeline(
    sentry_init, capture_events, send_default_pii, expected_first_ten
):
    sentry_init(
        integrations=[RedisIntegration()],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    rc = redis.RedisCluster(host="localhost", port=6379)
    with start_transaction():
        pipeline = rc.pipeline()
        pipeline.get("foo")
        pipeline.set("bar", 1)
        pipeline.set("baz", 2)
        pipeline.execute()

    (event,) = events
    (span,) = event["spans"]
    assert span["op"] == "db.redis"
    assert span["description"] == "redis.pipeline.execute"
    assert span["data"] == {
        "redis.commands": {
            "count": 3,
            "first_ten": expected_first_ten,
        },
        SPANDATA.DB_SYSTEM: "redis",
        # ClusterNode converts localhost to 127.0.0.1
        SPANDATA.SERVER_ADDRESS: "127.0.0.1",
        SPANDATA.SERVER_PORT: 6379,
    }
    assert span["tags"] == {
        "redis.transaction": False,  # For Cluster, this is always False
        "redis.is_cluster": True,
    }
sentry-python-1.39.2/tests/integrations/redis/cluster_asyncio/000077500000000000000000000000001454744723200246515ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/redis/cluster_asyncio/__init__.py000066400000000000000000000000741454744723200267630ustar00rootroot00000000000000import pytest

pytest.importorskip("redis.asyncio.cluster")
sentry-python-1.39.2/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py000066400000000000000000000076241454744723200326670ustar00rootroot00000000000000import pytest

from sentry_sdk import capture_message, start_transaction
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.redis import RedisIntegration

from redis.asyncio import cluster


async def fake_initialize(*_, **__):
    return None


async def fake_execute_command(*_, **__):
    return []


async def fake_execute(*_, **__):
    return None


@pytest.fixture(autouse=True)
def monkeypatch_rediscluster_asyncio_class(reset_integrations):
    pipeline_cls = cluster.ClusterPipeline
    cluster.NodesManager.initialize = fake_initialize
    cluster.RedisCluster.get_default_node = lambda *_, **__: cluster.ClusterNode(
        "localhost", 6379
    )
    cluster.RedisCluster.pipeline = lambda self, *_, **__: pipeline_cls(self)
    pipeline_cls.execute = fake_execute
    cluster.RedisCluster.execute_command = fake_execute_command


@pytest.mark.asyncio
async def test_async_breadcrumb(sentry_init, capture_events):
    sentry_init(integrations=[RedisIntegration()])
    events = capture_events()

    connection = cluster.RedisCluster(host="localhost", port=6379)

    await connection.get("foobar")
    capture_message("hi")

    (event,) = events
    (crumb,) = event["breadcrumbs"]["values"]

    assert crumb == {
        "category": "redis",
        "message": "GET 'foobar'",
        "data": {
            "db.operation": "GET",
            "redis.key": "foobar",
            "redis.command": "GET",
            "redis.is_cluster": True,
        },
        "timestamp": crumb["timestamp"],
        "type": "redis",
    }


@pytest.mark.parametrize(
    "send_default_pii, description",
    [
        (False, "SET 'bar' [Filtered]"),
        (True, "SET 'bar' 1"),
    ],
)
@pytest.mark.asyncio
async def test_async_basic(sentry_init, capture_events, send_default_pii, description):
    sentry_init(
        integrations=[RedisIntegration()],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    connection = cluster.RedisCluster(host="localhost", port=6379)
    with start_transaction():
        await connection.set("bar", 1)

    (event,) = events
    (span,) = event["spans"]
    assert span["op"] == "db.redis"
    assert span["description"] == description
    assert span["data"] == {
        SPANDATA.DB_SYSTEM: "redis",
        # ClusterNode converts localhost to 127.0.0.1
        SPANDATA.SERVER_ADDRESS: "127.0.0.1",
        SPANDATA.SERVER_PORT: 6379,
    }
    assert span["tags"] == {
        "redis.is_cluster": True,
        "db.operation": "SET",
        "redis.command": "SET",
        "redis.key": "bar",
    }


@pytest.mark.parametrize(
    "send_default_pii, expected_first_ten",
    [
        (False, ["GET 'foo'", "SET 'bar' [Filtered]", "SET 'baz' [Filtered]"]),
        (True, ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"]),
    ],
)
@pytest.mark.asyncio
async def test_async_redis_pipeline(
    sentry_init, capture_events, send_default_pii, expected_first_ten
):
    sentry_init(
        integrations=[RedisIntegration()],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    connection = cluster.RedisCluster(host="localhost", port=6379)
    with start_transaction():
        pipeline = connection.pipeline()
        pipeline.get("foo")
        pipeline.set("bar", 1)
        pipeline.set("baz", 2)
        await pipeline.execute()

    (event,) = events
    (span,) = event["spans"]
    assert span["op"] == "db.redis"
    assert span["description"] == "redis.pipeline.execute"
    assert span["data"] == {
        "redis.commands": {
            "count": 3,
            "first_ten": expected_first_ten,
        },
        SPANDATA.DB_SYSTEM: "redis",
        # ClusterNode converts localhost to 127.0.0.1
        SPANDATA.SERVER_ADDRESS: "127.0.0.1",
        SPANDATA.SERVER_PORT: 6379,
    }
    assert span["tags"] == {
        "redis.transaction": False,
        "redis.is_cluster": True,
    }
sentry-python-1.39.2/tests/integrations/redis/test_redis.py000066400000000000000000000214761454744723200241740ustar00rootroot00000000000000import pytest

from sentry_sdk import capture_message, start_transaction
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.redis import RedisIntegration

from fakeredis import FakeStrictRedis

try:
    from unittest import mock  # python 3.3 and above
except ImportError:
    import mock  # python < 3.3


MOCK_CONNECTION_POOL = mock.MagicMock()
MOCK_CONNECTION_POOL.connection_kwargs = {
    "host": "localhost",
    "port": 63791,
    "db": 1,
}
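# The Redis integration reads host/port/db from the pool's connection_kwargs
# to fill SPANDATA.SERVER_ADDRESS, SERVER_PORT and DB_NAME (asserted below).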


def test_basic(sentry_init, capture_events):
    sentry_init(integrations=[RedisIntegration()])
    events = capture_events()

    connection = FakeStrictRedis()

    connection.get("foobar")
    capture_message("hi")

    (event,) = events
    (crumb,) = event["breadcrumbs"]["values"]

    assert crumb == {
        "category": "redis",
        "message": "GET 'foobar'",
        "data": {
            "redis.key": "foobar",
            "redis.command": "GET",
            "redis.is_cluster": False,
            "db.operation": "GET",
        },
        "timestamp": crumb["timestamp"],
        "type": "redis",
    }


@pytest.mark.parametrize(
    "is_transaction, send_default_pii, expected_first_ten",
    [
        (False, False, ["GET 'foo'", "SET 'bar' [Filtered]", "SET 'baz' [Filtered]"]),
        (True, True, ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"]),
    ],
)
def test_redis_pipeline(
    sentry_init, capture_events, is_transaction, send_default_pii, expected_first_ten
):
    sentry_init(
        integrations=[RedisIntegration()],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    connection = FakeStrictRedis()
    with start_transaction():
        pipeline = connection.pipeline(transaction=is_transaction)
        pipeline.get("foo")
        pipeline.set("bar", 1)
        pipeline.set("baz", 2)
        pipeline.execute()

    (event,) = events
    (span,) = event["spans"]
    assert span["op"] == "db.redis"
    assert span["description"] == "redis.pipeline.execute"
    assert span["data"][SPANDATA.DB_SYSTEM] == "redis"
    assert span["data"]["redis.commands"] == {
        "count": 3,
        "first_ten": expected_first_ten,
    }
    assert span["tags"] == {
        "redis.transaction": is_transaction,
        "redis.is_cluster": False,
    }


def test_sensitive_data(sentry_init, capture_events):
    # fakeredis does not support the AUTH command, so we need to mock it
    with mock.patch(
        "sentry_sdk.integrations.redis._COMMANDS_INCLUDING_SENSITIVE_DATA", ["get"]
    ):
        sentry_init(
            integrations=[RedisIntegration()],
            traces_sample_rate=1.0,
            send_default_pii=True,
        )
        events = capture_events()

        connection = FakeStrictRedis()
        with start_transaction():
            connection.get(
                "this is super secret"
            )  # because fakeredis does not support AUTH we use GET instead

        (event,) = events
        spans = event["spans"]
        assert spans[0]["op"] == "db.redis"
        assert spans[0]["description"] == "GET [Filtered]"


def test_pii_data_redacted(sentry_init, capture_events):
    sentry_init(
        integrations=[RedisIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    connection = FakeStrictRedis()
    with start_transaction():
        connection.set("somekey1", "my secret string1")
        connection.set("somekey2", "my secret string2")
        connection.get("somekey2")
        connection.delete("somekey1", "somekey2")

    (event,) = events
    spans = event["spans"]
    assert spans[0]["op"] == "db.redis"
    assert spans[0]["description"] == "SET 'somekey1' [Filtered]"
    assert spans[1]["description"] == "SET 'somekey2' [Filtered]"
    assert spans[2]["description"] == "GET 'somekey2'"
    assert spans[3]["description"] == "DEL 'somekey1' [Filtered]"


def test_pii_data_sent(sentry_init, capture_events):
    sentry_init(
        integrations=[RedisIntegration()],
        traces_sample_rate=1.0,
        send_default_pii=True,
    )
    events = capture_events()

    connection = FakeStrictRedis()
    with start_transaction():
        connection.set("somekey1", "my secret string1")
        connection.set("somekey2", "my secret string2")
        connection.get("somekey2")
        connection.delete("somekey1", "somekey2")

    (event,) = events
    spans = event["spans"]
    assert spans[0]["op"] == "db.redis"
    assert spans[0]["description"] == "SET 'somekey1' 'my secret string1'"
    assert spans[1]["description"] == "SET 'somekey2' 'my secret string2'"
    assert spans[2]["description"] == "GET 'somekey2'"
    assert spans[3]["description"] == "DEL 'somekey1' 'somekey2'"


def test_data_truncation(sentry_init, capture_events):
    sentry_init(
        integrations=[RedisIntegration()],
        traces_sample_rate=1.0,
        send_default_pii=True,
    )
    events = capture_events()

    connection = FakeStrictRedis()
    with start_transaction():
        long_string = "a" * 100000
        connection.set("somekey1", long_string)
        short_string = "b" * 10
        connection.set("somekey2", short_string)

    (event,) = events
    spans = event["spans"]
    assert spans[0]["op"] == "db.redis"
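    # The default max_data_size is 1024, so the rendered command is truncated
    # to 1024 characters, the last three of which become "...".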
    assert spans[0]["description"] == "SET 'somekey1' '%s..." % (
        long_string[: 1024 - len("...") - len("SET 'somekey1' '")],
    )
    assert spans[1]["description"] == "SET 'somekey2' '%s'" % (short_string,)


def test_data_truncation_custom(sentry_init, capture_events):
    sentry_init(
        integrations=[RedisIntegration(max_data_size=30)],
        traces_sample_rate=1.0,
        send_default_pii=True,
    )
    events = capture_events()

    connection = FakeStrictRedis()
    with start_transaction():
        long_string = "a" * 100000
        connection.set("somekey1", long_string)
        short_string = "b" * 10
        connection.set("somekey2", short_string)

    (event,) = events
    spans = event["spans"]
    assert spans[0]["op"] == "db.redis"
    assert spans[0]["description"] == "SET 'somekey1' '%s..." % (
        long_string[: 30 - len("...") - len("SET 'somekey1' '")],
    )
    assert spans[1]["description"] == "SET 'somekey2' '%s'" % (short_string,)


def test_breadcrumbs(sentry_init, capture_events):
    sentry_init(
        integrations=[RedisIntegration(max_data_size=30)],
        send_default_pii=True,
    )
    events = capture_events()

    connection = FakeStrictRedis()

    long_string = "a" * 100000
    connection.set("somekey1", long_string)
    short_string = "b" * 10
    connection.set("somekey2", short_string)

    capture_message("hi")

    (event,) = events
    crumbs = event["breadcrumbs"]["values"]

    assert crumbs[0] == {
        "message": "SET 'somekey1' 'aaaaaaaaaaa...",
        "type": "redis",
        "category": "redis",
        "data": {
            "db.operation": "SET",
            "redis.is_cluster": False,
            "redis.command": "SET",
            "redis.key": "somekey1",
        },
        "timestamp": crumbs[0]["timestamp"],
    }
    assert crumbs[1] == {
        "message": "SET 'somekey2' 'bbbbbbbbbb'",
        "type": "redis",
        "category": "redis",
        "data": {
            "db.operation": "SET",
            "redis.is_cluster": False,
            "redis.command": "SET",
            "redis.key": "somekey2",
        },
        "timestamp": crumbs[1]["timestamp"],
    }


def test_db_connection_attributes_client(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[RedisIntegration()],
    )
    events = capture_events()

    with start_transaction():
        connection = FakeStrictRedis(connection_pool=MOCK_CONNECTION_POOL)
        connection.get("foobar")

    (event,) = events
    (span,) = event["spans"]

    assert span["op"] == "db.redis"
    assert span["description"] == "GET 'foobar'"
    assert span["data"][SPANDATA.DB_SYSTEM] == "redis"
    assert span["data"][SPANDATA.DB_NAME] == "1"
    assert span["data"][SPANDATA.SERVER_ADDRESS] == "localhost"
    assert span["data"][SPANDATA.SERVER_PORT] == 63791


def test_db_connection_attributes_pipeline(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[RedisIntegration()],
    )
    events = capture_events()

    with start_transaction():
        connection = FakeStrictRedis(connection_pool=MOCK_CONNECTION_POOL)
        pipeline = connection.pipeline(transaction=False)
        pipeline.get("foo")
        pipeline.set("bar", 1)
        pipeline.set("baz", 2)
        pipeline.execute()

    (event,) = events
    (span,) = event["spans"]

    assert span["op"] == "db.redis"
    assert span["description"] == "redis.pipeline.execute"
    assert span["data"][SPANDATA.DB_SYSTEM] == "redis"
    assert span["data"][SPANDATA.DB_NAME] == "1"
    assert span["data"][SPANDATA.SERVER_ADDRESS] == "localhost"
    assert span["data"][SPANDATA.SERVER_PORT] == 63791
sentry-python-1.39.2/tests/integrations/rediscluster/000077500000000000000000000000001454744723200230455ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/rediscluster/__init__.py000066400000000000000000000000631454744723200251550ustar00rootroot00000000000000import pytest

pytest.importorskip("rediscluster")
sentry-python-1.39.2/tests/integrations/rediscluster/test_rediscluster.py000066400000000000000000000112221454744723200271640ustar00rootroot00000000000000import pytest

from sentry_sdk import capture_message
from sentry_sdk.api import start_transaction
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.redis import RedisIntegration

try:
    from unittest import mock
except ImportError:
    import mock

import rediscluster


MOCK_CONNECTION_POOL = mock.MagicMock()
MOCK_CONNECTION_POOL.connection_kwargs = {
    "host": "localhost",
    "port": 63791,
    "db": 1,
}


rediscluster_classes = [rediscluster.RedisCluster]

if hasattr(rediscluster, "StrictRedisCluster"):
    rediscluster_classes.append(rediscluster.StrictRedisCluster)


@pytest.fixture(autouse=True)
def monkeypatch_rediscluster_classes(reset_integrations):
    try:
        pipeline_cls = rediscluster.pipeline.ClusterPipeline
    except AttributeError:
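        # Older rediscluster releases expose StrictClusterPipeline instead.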
        pipeline_cls = rediscluster.StrictClusterPipeline
    rediscluster.RedisCluster.pipeline = lambda *_, **__: pipeline_cls(
        connection_pool=MOCK_CONNECTION_POOL
    )
    pipeline_cls.execute = lambda *_, **__: None
    for cls in rediscluster_classes:
        cls.execute_command = lambda *_, **__: None


@pytest.mark.parametrize("rediscluster_cls", rediscluster_classes)
def test_rediscluster_basic(rediscluster_cls, sentry_init, capture_events):
    sentry_init(integrations=[RedisIntegration()])
    events = capture_events()

    rc = rediscluster_cls(connection_pool=MOCK_CONNECTION_POOL)
    rc.get("foobar")
    capture_message("hi")

    (event,) = events
    (crumb,) = event["breadcrumbs"]["values"]

    assert crumb == {
        "category": "redis",
        "message": "GET 'foobar'",
        "data": {
            "db.operation": "GET",
            "redis.key": "foobar",
            "redis.command": "GET",
            "redis.is_cluster": True,
        },
        "timestamp": crumb["timestamp"],
        "type": "redis",
    }


@pytest.mark.parametrize(
    "send_default_pii, expected_first_ten",
    [
        (False, ["GET 'foo'", "SET 'bar' [Filtered]", "SET 'baz' [Filtered]"]),
        (True, ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"]),
    ],
)
def test_rediscluster_pipeline(
    sentry_init, capture_events, send_default_pii, expected_first_ten
):
    sentry_init(
        integrations=[RedisIntegration()],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    rc = rediscluster.RedisCluster(connection_pool=MOCK_CONNECTION_POOL)
    with start_transaction():
        pipeline = rc.pipeline()
        pipeline.get("foo")
        pipeline.set("bar", 1)
        pipeline.set("baz", 2)
        pipeline.execute()

    (event,) = events
    (span,) = event["spans"]
    assert span["op"] == "db.redis"
    assert span["description"] == "redis.pipeline.execute"
    assert span["data"] == {
        "redis.commands": {
            "count": 3,
            "first_ten": expected_first_ten,
        },
        SPANDATA.DB_SYSTEM: "redis",
        SPANDATA.DB_NAME: "1",
        SPANDATA.SERVER_ADDRESS: "localhost",
        SPANDATA.SERVER_PORT: 63791,
    }
    assert span["tags"] == {
        "redis.transaction": False,  # For Cluster, this is always False
        "redis.is_cluster": True,
    }


@pytest.mark.parametrize("rediscluster_cls", rediscluster_classes)
def test_db_connection_attributes_client(sentry_init, capture_events, rediscluster_cls):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[RedisIntegration()],
    )
    events = capture_events()

    rc = rediscluster_cls(connection_pool=MOCK_CONNECTION_POOL)
    with start_transaction():
        rc.get("foobar")

    (event,) = events
    (span,) = event["spans"]

    assert span["data"] == {
        SPANDATA.DB_SYSTEM: "redis",
        SPANDATA.DB_NAME: "1",
        SPANDATA.SERVER_ADDRESS: "localhost",
        SPANDATA.SERVER_PORT: 63791,
    }


@pytest.mark.parametrize("rediscluster_cls", rediscluster_classes)
def test_db_connection_attributes_pipeline(
    sentry_init, capture_events, rediscluster_cls
):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[RedisIntegration()],
    )
    events = capture_events()

    rc = rediscluster.RedisCluster(connection_pool=MOCK_CONNECTION_POOL)
    with start_transaction():
        pipeline = rc.pipeline()
        pipeline.get("foo")
        pipeline.execute()

    (event,) = events
    (span,) = event["spans"]
    assert span["op"] == "db.redis"
    assert span["description"] == "redis.pipeline.execute"
    assert span["data"] == {
        "redis.commands": {
            "count": 1,
            "first_ten": ["GET 'foo'"],
        },
        SPANDATA.DB_SYSTEM: "redis",
        SPANDATA.DB_NAME: "1",
        SPANDATA.SERVER_ADDRESS: "localhost",
        SPANDATA.SERVER_PORT: 63791,
    }
sentry-python-1.39.2/tests/integrations/requests/000077500000000000000000000000001454744723200222105ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/requests/__init__.py000066400000000000000000000000571454744723200243230ustar00rootroot00000000000000import pytest

pytest.importorskip("requests")
sentry-python-1.39.2/tests/integrations/requests/test_requests.py000066400000000000000000000032351454744723200254770ustar00rootroot00000000000000import requests
import responses

import pytest

from sentry_sdk import capture_message
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.stdlib import StdlibIntegration

try:
    from unittest import mock  # python 3.3 and above
except ImportError:
    import mock  # python < 3.3


def test_crumb_capture(sentry_init, capture_events):
    sentry_init(integrations=[StdlibIntegration()])

    url = "http://example.com/"
    responses.add(responses.GET, url, status=200)

    events = capture_events()

    response = requests.get(url)
    capture_message("Testing!")

    (event,) = events
    (crumb,) = event["breadcrumbs"]["values"]
    assert crumb["type"] == "http"
    assert crumb["category"] == "httplib"
    assert crumb["data"] == {
        "url": url,
        SPANDATA.HTTP_METHOD: "GET",
        SPANDATA.HTTP_FRAGMENT: "",
        SPANDATA.HTTP_QUERY: "",
        SPANDATA.HTTP_STATUS_CODE: response.status_code,
        "reason": response.reason,
    }


@pytest.mark.tests_internal_exceptions
def test_omit_url_data_if_parsing_fails(sentry_init, capture_events):
    sentry_init(integrations=[StdlibIntegration()])

    url = "https://example.com"
    responses.add(responses.GET, url, status=200)

    events = capture_events()

    with mock.patch(
        "sentry_sdk.integrations.stdlib.parse_url",
        side_effect=ValueError,
    ):
        response = requests.get(url)

    capture_message("Testing!")

    (event,) = events
    assert event["breadcrumbs"]["values"][0]["data"] == {
        SPANDATA.HTTP_METHOD: "GET",
        SPANDATA.HTTP_STATUS_CODE: response.status_code,
        "reason": response.reason,
        # no url related data
    }
sentry-python-1.39.2/tests/integrations/rq/000077500000000000000000000000001454744723200207575ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/rq/__init__.py000066400000000000000000000000511454744723200230640ustar00rootroot00000000000000import pytest

pytest.importorskip("rq")
sentry-python-1.39.2/tests/integrations/rq/test_rq.py000066400000000000000000000206061454744723200230160ustar00rootroot00000000000000import pytest
from fakeredis import FakeStrictRedis
from sentry_sdk import configure_scope, start_transaction
from sentry_sdk.integrations.rq import RqIntegration
from sentry_sdk.utils import parse_version

import rq

try:
    from unittest import mock  # python 3.3 and above
except ImportError:
    import mock  # python < 3.3


@pytest.fixture(autouse=True)
def _patch_rq_get_server_version(monkeypatch):
    """
    Patch RQ 1.5.1 and lower to work with fakeredis.

    https://github.com/jamesls/fakeredis/issues/273
    """

    from distutils.version import StrictVersion

    if parse_version(rq.VERSION) <= (1, 5, 1):
        for k in (
            "rq.job.Job.get_redis_server_version",
            "rq.worker.Worker.get_redis_server_version",
        ):
            try:
                monkeypatch.setattr(k, lambda _: StrictVersion("4.0.0"))
            except AttributeError:
                # old RQ Job/Worker doesn't have a get_redis_server_version attr
                pass


def crashing_job(foo):
    1 / 0


def chew_up_shoes(dog, human, shoes):
    raise Exception("{}!! Why did you eat {}'s {}??".format(dog, human, shoes))


def do_trick(dog, trick):
    return "{}, can you {}? Good dog!".format(dog, trick)


def test_basic(sentry_init, capture_events):
    sentry_init(integrations=[RqIntegration()])
    events = capture_events()

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.SimpleWorker([queue], connection=queue.connection)

    queue.enqueue(crashing_job, foo=42)
    worker.work(burst=True)

    (event,) = events

    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    assert exception["mechanism"]["type"] == "rq"
    assert exception["stacktrace"]["frames"][-1]["vars"]["foo"] == "42"

    assert event["transaction"] == "tests.integrations.rq.test_rq.crashing_job"

    extra = event["extra"]["rq-job"]
    assert extra["args"] == []
    assert extra["kwargs"] == {"foo": 42}
    assert extra["description"] == "tests.integrations.rq.test_rq.crashing_job(foo=42)"
    assert extra["func"] == "tests.integrations.rq.test_rq.crashing_job"
    assert "job_id" in extra
    assert "enqueued_at" in extra

    # older versions don't persist started_at correctly
    if parse_version(rq.VERSION) >= (0, 9):
        assert "started_at" in extra


def test_transport_shutdown(sentry_init, capture_events_forksafe):
    sentry_init(integrations=[RqIntegration()])

    events = capture_events_forksafe()

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.Worker([queue], connection=queue.connection)

    queue.enqueue(crashing_job, foo=42)
    worker.work(burst=True)

    event = events.read_event()
    events.read_flush()

    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"


def test_transaction_with_error(
    sentry_init, capture_events, DictionaryContaining  # noqa:N803
):
    sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0)
    events = capture_events()

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.SimpleWorker([queue], connection=queue.connection)

    queue.enqueue(chew_up_shoes, "Charlie", "Katie", shoes="flip-flops")
    worker.work(burst=True)

    error_event, envelope = events

    assert error_event["transaction"] == "tests.integrations.rq.test_rq.chew_up_shoes"
    assert error_event["contexts"]["trace"]["op"] == "queue.task.rq"
    assert error_event["exception"]["values"][0]["type"] == "Exception"
    assert (
        error_event["exception"]["values"][0]["value"]
        == "Charlie!! Why did you eat Katie's flip-flops??"
    )

    assert envelope["type"] == "transaction"
    assert envelope["contexts"]["trace"] == error_event["contexts"]["trace"]
    assert envelope["transaction"] == error_event["transaction"]
    assert envelope["extra"]["rq-job"] == DictionaryContaining(
        {
            "args": ["Charlie", "Katie"],
            "kwargs": {"shoes": "flip-flops"},
            "func": "tests.integrations.rq.test_rq.chew_up_shoes",
            "description": "tests.integrations.rq.test_rq.chew_up_shoes('Charlie', 'Katie', shoes='flip-flops')",
        }
    )


def test_error_has_trace_context_if_tracing_disabled(
    sentry_init,
    capture_events,
):
    sentry_init(integrations=[RqIntegration()])
    events = capture_events()

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.SimpleWorker([queue], connection=queue.connection)

    queue.enqueue(crashing_job, foo=None)
    worker.work(burst=True)

    (error_event,) = events

    assert error_event["contexts"]["trace"]


def test_tracing_enabled(
    sentry_init,
    capture_events,
):
    sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0)
    events = capture_events()

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.SimpleWorker([queue], connection=queue.connection)

    with start_transaction(op="rq transaction") as transaction:
        queue.enqueue(crashing_job, foo=None)
        worker.work(burst=True)

    error_event, envelope, _ = events

    assert error_event["transaction"] == "tests.integrations.rq.test_rq.crashing_job"
    assert error_event["contexts"]["trace"]["trace_id"] == transaction.trace_id

    assert envelope["contexts"]["trace"] == error_event["contexts"]["trace"]


def test_tracing_disabled(
    sentry_init,
    capture_events,
):
    sentry_init(integrations=[RqIntegration()])
    events = capture_events()

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.SimpleWorker([queue], connection=queue.connection)

    with configure_scope() as scope:
        queue.enqueue(crashing_job, foo=None)
        worker.work(burst=True)

        (error_event,) = events

        assert (
            error_event["transaction"] == "tests.integrations.rq.test_rq.crashing_job"
        )
        assert (
            error_event["contexts"]["trace"]["trace_id"]
            == scope._propagation_context["trace_id"]
        )


def test_transaction_no_error(
    sentry_init, capture_events, DictionaryContaining  # noqa:N803
):
    sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0)
    events = capture_events()

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.SimpleWorker([queue], connection=queue.connection)

    queue.enqueue(do_trick, "Maisey", trick="kangaroo")
    worker.work(burst=True)

    envelope = events[0]

    assert envelope["type"] == "transaction"
    assert envelope["contexts"]["trace"]["op"] == "queue.task.rq"
    assert envelope["transaction"] == "tests.integrations.rq.test_rq.do_trick"
    assert envelope["extra"]["rq-job"] == DictionaryContaining(
        {
            "args": ["Maisey"],
            "kwargs": {"trick": "kangaroo"},
            "func": "tests.integrations.rq.test_rq.do_trick",
            "description": "tests.integrations.rq.test_rq.do_trick('Maisey', trick='kangaroo')",
        }
    )


def test_traces_sampler_gets_correct_values_in_sampling_context(
    sentry_init, DictionaryContaining, ObjectDescribedBy  # noqa:N803
):
    traces_sampler = mock.Mock(return_value=True)
    sentry_init(integrations=[RqIntegration()], traces_sampler=traces_sampler)

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.SimpleWorker([queue], connection=queue.connection)

    queue.enqueue(do_trick, "Bodhi", trick="roll over")
    worker.work(burst=True)

    traces_sampler.assert_any_call(
        DictionaryContaining(
            {
                "rq_job": ObjectDescribedBy(
                    type=rq.job.Job,
                    attrs={
                        "description": "tests.integrations.rq.test_rq.do_trick('Bodhi', trick='roll over')",
                        "result": "Bodhi, can you roll over? Good dog!",
                        "func_name": "tests.integrations.rq.test_rq.do_trick",
                        "args": ("Bodhi",),
                        "kwargs": {"trick": "roll over"},
                    },
                ),
            }
        )
    )


@pytest.mark.skipif(
    parse_version(rq.__version__) < (1, 5), reason="At least rq-1.5 required"
)
def test_job_with_retries(sentry_init, capture_events):
    sentry_init(integrations=[RqIntegration()])
    events = capture_events()

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.SimpleWorker([queue], connection=queue.connection)

    queue.enqueue(crashing_job, foo=42, retry=rq.Retry(max=1))
    worker.work(burst=True)

    assert len(events) == 1
sentry-python-1.39.2/tests/integrations/sanic/000077500000000000000000000000001454744723200214325ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/sanic/__init__.py000066400000000000000000000000541454744723200235420ustar00rootroot00000000000000import pytest

pytest.importorskip("sanic")
sentry-python-1.39.2/tests/integrations/sanic/test_sanic.py000066400000000000000000000323021454744723200241400ustar00rootroot00000000000000import asyncio
import contextlib
import os
import random
import sys
from unittest.mock import Mock

import pytest

from sentry_sdk import capture_message, configure_scope
from sentry_sdk.integrations.sanic import SanicIntegration
from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT, TRANSACTION_SOURCE_URL

from sanic import Sanic, request, response, __version__ as SANIC_VERSION_RAW
from sanic.response import HTTPResponse
from sanic.exceptions import SanicException

try:
    from sanic_testing import TestManager
except ImportError:
    TestManager = None

try:
    from sanic_testing.reusable import ReusableClient
except ImportError:
    ReusableClient = None

from sentry_sdk._types import TYPE_CHECKING

if TYPE_CHECKING:
    from collections.abc import Iterable, Container
    from typing import Any, Optional

SANIC_VERSION = tuple(map(int, SANIC_VERSION_RAW.split(".")))
PERFORMANCE_SUPPORTED = SANIC_VERSION >= (21, 9)


@pytest.fixture
def app():
    if SANIC_VERSION < (19,):
        """
        Older Sanic versions 0.8 and 18 bind to the same fixed port which
        creates problems when we run tests concurrently.
        """
        old_test_client = Sanic.test_client.__get__

        def new_test_client(self):
            client = old_test_client(self, Sanic)
            client.port += os.getpid() % 100
            return client

        Sanic.test_client = property(new_test_client)

    if SANIC_VERSION >= (20, 12) and SANIC_VERSION < (22, 6):
        # Sanic versions 20.12.0 (which introduced the feature) through 22.5.x
        # (22.6.0 removed it again) store the instance in an internal class
        # registry for later retrieval, so pass register=False to disable that.
        sanic_app = Sanic("Test", register=False)
    else:
        sanic_app = Sanic("Test")

    if TestManager is not None:
        TestManager(sanic_app)

    @sanic_app.route("/message")
    def hi(request):
        capture_message("hi")
        return response.text("ok")

    @sanic_app.route("/message/")
    def hi_with_id(request, message_id):
        capture_message("hi with id")
        return response.text("ok with id")

    @sanic_app.route("/500")
    def fivehundred(_):
        1 / 0

    return sanic_app


def get_client(app):
    @contextlib.contextmanager
    def simple_client(app):
        yield app.test_client

    if ReusableClient is not None:
        return ReusableClient(app)
    else:
        return simple_client(app)


def test_request_data(sentry_init, app, capture_events):
    sentry_init(integrations=[SanicIntegration()])
    events = capture_events()

    c = get_client(app)
    with c as client:
        _, response = client.get("/message?foo=bar")
        assert response.status == 200

    (event,) = events
    assert event["transaction"] == "hi"
    assert event["request"]["env"] == {"REMOTE_ADDR": ""}
    assert set(event["request"]["headers"]) >= {
        "accept",
        "accept-encoding",
        "host",
        "user-agent",
    }
    assert event["request"]["query_string"] == "foo=bar"
    assert event["request"]["url"].endswith("/message")
    assert event["request"]["method"] == "GET"

    # Assert that state is not leaked
    events.clear()
    capture_message("foo")
    (event,) = events

    assert "request" not in event
    assert "transaction" not in event


@pytest.mark.parametrize(
    "url,expected_transaction,expected_source",
    [
        ("/message", "hi", "component"),
        ("/message/123456", "hi_with_id", "component"),
    ],
)
def test_transaction_name(
    sentry_init, app, capture_events, url, expected_transaction, expected_source
):
    sentry_init(integrations=[SanicIntegration()])
    events = capture_events()

    c = get_client(app)
    with c as client:
        _, response = client.get(url)
        assert response.status == 200

    (event,) = events
    assert event["transaction"] == expected_transaction
    assert event["transaction_info"] == {"source": expected_source}


def test_errors(sentry_init, app, capture_events):
    sentry_init(integrations=[SanicIntegration()])
    events = capture_events()

    @app.route("/error")
    def myerror(request):
        raise ValueError("oh no")

    c = get_client(app)
    with c as client:
        _, response = client.get("/error")
        assert response.status == 500

    (event,) = events
    assert event["transaction"] == "myerror"
    (exception,) = event["exception"]["values"]

    assert exception["type"] == "ValueError"
    assert exception["value"] == "oh no"
    assert any(
        frame["filename"].endswith("test_sanic.py")
        for frame in exception["stacktrace"]["frames"]
    )


def test_bad_request_not_captured(sentry_init, app, capture_events):
    sentry_init(integrations=[SanicIntegration()])
    events = capture_events()

    @app.route("/")
    def index(request):
        raise SanicException("...", status_code=400)

    c = get_client(app)
    with c as client:
        _, response = client.get("/")
        assert response.status == 400

    assert not events


def test_error_in_errorhandler(sentry_init, app, capture_events):
    sentry_init(integrations=[SanicIntegration()])
    events = capture_events()

    @app.route("/error")
    def myerror(request):
        raise ValueError("oh no")

    @app.exception(ValueError)
    def myhandler(request, exception):
        1 / 0

    c = get_client(app)
    with c as client:
        _, response = client.get("/error")
        assert response.status == 500

    event1, event2 = events

    (exception,) = event1["exception"]["values"]
    assert exception["type"] == "ValueError"
    assert any(
        frame["filename"].endswith("test_sanic.py")
        for frame in exception["stacktrace"]["frames"]
    )

    exception = event2["exception"]["values"][-1]
    assert exception["type"] == "ZeroDivisionError"
    assert any(
        frame["filename"].endswith("test_sanic.py")
        for frame in exception["stacktrace"]["frames"]
    )


def test_concurrency(sentry_init, app):
    """
    Make sure we instrument Sanic in a way where request data does not leak
    between request handlers. This test also implicitly tests our concept of
    how async code should be instrumented, so if it breaks it likely has
    ramifications for other async integrations and async usercode.

    We directly call the request handler instead of using Sanic's test client
    because that's the only way we could reproduce leakage with such a low
    amount of concurrent tasks.
    """
    sentry_init(integrations=[SanicIntegration()])

    @app.route("/context-check/")
    async def context_check(request, i):
        with configure_scope() as scope:
            scope.set_tag("i", i)

        await asyncio.sleep(random.random())

        with configure_scope() as scope:
            assert scope._tags["i"] == i

        return response.text("ok")

    async def task(i):
        responses = []

        kwargs = {
            "url_bytes": "http://localhost/context-check/{i}".format(i=i).encode(
                "ascii"
            ),
            "headers": {},
            "version": "1.1",
            "method": "GET",
            "transport": None,
        }

        if SANIC_VERSION >= (19,):
            kwargs["app"] = app

        if SANIC_VERSION >= (21, 3):

            class MockAsyncStreamer:
                def __init__(self, request_body):
                    self.request_body = request_body
                    self.iter = iter(self.request_body)

                    if SANIC_VERSION >= (21, 12):
                        self.response = None
                        self.stage = Mock()
                    else:
                        self.response = b"success"

                def respond(self, response):
                    responses.append(response)
                    patched_response = HTTPResponse()
                    return patched_response

                def __aiter__(self):
                    return self

                async def __anext__(self):
                    try:
                        return next(self.iter)
                    except StopIteration:
                        raise StopAsyncIteration

            patched_request = request.Request(**kwargs)
            patched_request.stream = MockAsyncStreamer([b"hello", b"foo"])

            if SANIC_VERSION >= (21, 9):
                await app.dispatch(
                    "http.lifecycle.request",
                    context={"request": patched_request},
                    inline=True,
                )

            await app.handle_request(
                patched_request,
            )
        else:
            await app.handle_request(
                request.Request(**kwargs),
                write_callback=responses.append,
                stream_callback=responses.append,
            )

        (r,) = responses
        assert r.status == 200

    async def runner():
        if SANIC_VERSION >= (21, 3):
            if SANIC_VERSION >= (21, 9):
                await app._startup()
            else:
                try:
                    app.router.reset()
                    app.router.finalize()
                except AttributeError:
                    ...
        await asyncio.gather(*(task(i) for i in range(1000)))

    if sys.version_info < (3, 7):
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        loop.run_until_complete(runner())
    else:
        asyncio.run(runner())

    with configure_scope() as scope:
        assert not scope._tags


class TransactionTestConfig:
    """
    Data class to store configurations for each performance transaction test run, including
    both the inputs and relevant expected results.
    """

    def __init__(
        self,
        integration_args,
        url,
        expected_status,
        expected_transaction_name,
        expected_source=None,
    ):
        # type: (Iterable[Optional[Container[int]]], str, int, Optional[str], Optional[str]) -> None
        """
        An expected_transaction_name of None indicates that no transaction is expected.
        """
        self.integration_args = integration_args
        self.url = url
        self.expected_status = expected_status
        self.expected_transaction_name = expected_transaction_name
        self.expected_source = expected_source


@pytest.mark.skipif(
    not PERFORMANCE_SUPPORTED, reason="Performance not supported on this Sanic version"
)
@pytest.mark.parametrize(
    "test_config",
    [
        TransactionTestConfig(
            # Transaction for successful page load
            integration_args=(),
            url="/message",
            expected_status=200,
            expected_transaction_name="hi",
            expected_source=TRANSACTION_SOURCE_COMPONENT,
        ),
        TransactionTestConfig(
            # Transaction still recorded when we have an internal server error
            integration_args=(),
            url="/500",
            expected_status=500,
            expected_transaction_name="fivehundred",
            expected_source=TRANSACTION_SOURCE_COMPONENT,
        ),
        TransactionTestConfig(
            # By default, no transaction when we have a 404 error
            integration_args=(),
            url="/404",
            expected_status=404,
            expected_transaction_name=None,
        ),
        TransactionTestConfig(
            # With no ignored HTTP statuses, we should get transactions for 404 errors
            integration_args=(None,),
            url="/404",
            expected_status=404,
            expected_transaction_name="/404",
            expected_source=TRANSACTION_SOURCE_URL,
        ),
        TransactionTestConfig(
            # Transaction can be suppressed for other HTTP statuses, too, by passing config to the integration
            integration_args=({200},),
            url="/message",
            expected_status=200,
            expected_transaction_name=None,
        ),
    ],
)
def test_transactions(test_config, sentry_init, app, capture_events):
    # type: (TransactionTestConfig, Any, Any, Any) -> None

    # Init the SanicIntegration with the desired arguments
    sentry_init(
        integrations=[SanicIntegration(*test_config.integration_args)],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    # Make request to the desired URL
    c = get_client(app)
    with c as client:
        _, response = client.get(test_config.url)
        assert response.status == test_config.expected_status

    # Extract the transaction events by inspecting the event types.
    # We should have at most one transaction event.
    transaction_events = [
        e for e in events if "type" in e and e["type"] == "transaction"
    ]
    assert len(transaction_events) <= 1

    # Get the only transaction event, or set to None if there are no transaction events.
    (transaction_event, *_) = [*transaction_events, None]

    # We should have no transaction event if and only if we expect no transactions
    assert (transaction_event is None) == (
        test_config.expected_transaction_name is None
    )

    # If a transaction was expected, ensure it is correct
    assert (
        transaction_event is None
        or transaction_event["transaction"] == test_config.expected_transaction_name
    )
    assert (
        transaction_event is None
        or transaction_event["transaction_info"]["source"]
        == test_config.expected_source
    )
sentry-python-1.39.2/tests/integrations/serverless/test_serverless.py
import pytest

from sentry_sdk.integrations.serverless import serverless_function


def test_basic(sentry_init, capture_exceptions, monkeypatch):
    sentry_init()
    exceptions = capture_exceptions()

    flush_calls = []

    @serverless_function
    def foo():
        monkeypatch.setattr(
            "sentry_sdk.Hub.current.flush", lambda: flush_calls.append(1)
        )
        1 / 0

    with pytest.raises(ZeroDivisionError):
        foo()

    (exception,) = exceptions
    assert isinstance(exception, ZeroDivisionError)

    assert flush_calls == [1]


def test_flush_disabled(sentry_init, capture_exceptions, monkeypatch):
    sentry_init()
    exceptions = capture_exceptions()

    flush_calls = []

    monkeypatch.setattr("sentry_sdk.Hub.current.flush", lambda: flush_calls.append(1))

    @serverless_function(flush=False)
    def foo():
        1 / 0

    with pytest.raises(ZeroDivisionError):
        foo()

    (exception,) = exceptions
    assert isinstance(exception, ZeroDivisionError)

    assert flush_calls == []
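

# For illustration: typical application of `serverless_function`, mirroring
# the behavior asserted above. A minimal sketch; the handler name and its
# event argument are hypothetical.
@serverless_function
def _example_handler(event):
    # Any exception raised in here is captured by Sentry and re-raised;
    # by default the client is flushed before the exception propagates
    # (pass flush=False to disable that, as tested above).
    return {"status": "ok", "received": event}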
sentry-python-1.39.2/tests/integrations/socket/__init__.py
import pytest

pytest.importorskip("socket")
sentry-python-1.39.2/tests/integrations/socket/test_socket.py
import socket

from sentry_sdk import start_transaction
from sentry_sdk.integrations.socket import SocketIntegration


def test_getaddrinfo_trace(sentry_init, capture_events):
    sentry_init(integrations=[SocketIntegration()], traces_sample_rate=1.0)
    events = capture_events()

    with start_transaction():
        socket.getaddrinfo("example.com", 443)

    (event,) = events
    (span,) = event["spans"]

    assert span["op"] == "socket.dns"
    assert span["description"] == "example.com:443"
    assert span["data"] == {
        "host": "example.com",
        "port": 443,
    }


def test_create_connection_trace(sentry_init, capture_events):
    timeout = 10

    sentry_init(integrations=[SocketIntegration()], traces_sample_rate=1.0)
    events = capture_events()

    with start_transaction():
        socket.create_connection(("example.com", 443), timeout, None)

    (event,) = events
    (connect_span, dns_span) = event["spans"]
    # as getaddrinfo gets called in create_connection it should also contain a dns span

    assert connect_span["op"] == "socket.connection"
    assert connect_span["description"] == "example.com:443"
    assert connect_span["data"] == {
        "address": ["example.com", 443],
        "timeout": timeout,
        "source_address": None,
    }

    assert dns_span["op"] == "socket.dns"
    assert dns_span["description"] == "example.com:443"
    assert dns_span["data"] == {
        "host": "example.com",
        "port": 443,
    }
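

def _example_enable_socket_integration():
    # For illustration only (not executed by the tests): enabling the
    # integration exercised above in an application. A minimal sketch;
    # the DSN is a placeholder.
    import sentry_sdk

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",
        integrations=[SocketIntegration()],
        traces_sample_rate=1.0,  # needed so the socket spans are recorded
    )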
sentry-python-1.39.2/tests/integrations/spark/__init__.py
import pytest

pytest.importorskip("pyspark")
pytest.importorskip("py4j")
sentry-python-1.39.2/tests/integrations/spark/test_spark.py
import pytest
import sys
from sentry_sdk.integrations.spark.spark_driver import (
    _set_app_properties,
    _start_sentry_listener,
    SentryListener,
)

from sentry_sdk.integrations.spark.spark_worker import SparkWorkerIntegration

from pyspark import SparkContext

from py4j.protocol import Py4JJavaError

################
# DRIVER TESTS #
################


def test_set_app_properties():
    spark_context = SparkContext(appName="Testing123")
    _set_app_properties()

    assert spark_context.getLocalProperty("sentry_app_name") == "Testing123"
    # applicationId generated by sparkContext init
    assert (
        spark_context.getLocalProperty("sentry_application_id")
        == spark_context.applicationId
    )


def test_start_sentry_listener():
    spark_context = SparkContext.getOrCreate()

    gateway = spark_context._gateway
    assert gateway._callback_server is None

    _start_sentry_listener(spark_context)

    assert gateway._callback_server is not None


@pytest.fixture
def sentry_listener(monkeypatch):
    class MockHub:
        def __init__(self):
            self.args = []
            self.kwargs = {}

        def add_breadcrumb(self, *args, **kwargs):
            self.args = args
            self.kwargs = kwargs

    listener = SentryListener()
    mock_hub = MockHub()

    monkeypatch.setattr(listener, "hub", mock_hub)

    return listener, mock_hub


def test_sentry_listener_on_job_start(sentry_listener):
    listener, mock_hub = sentry_listener

    class MockJobStart:
        def jobId(self):  # noqa: N802
            return "sample-job-id-start"

    mock_job_start = MockJobStart()
    listener.onJobStart(mock_job_start)

    assert mock_hub.kwargs["level"] == "info"
    assert "sample-job-id-start" in mock_hub.kwargs["message"]


@pytest.mark.parametrize(
    "job_result, level", [("JobSucceeded", "info"), ("JobFailed", "warning")]
)
def test_sentry_listener_on_job_end(sentry_listener, job_result, level):
    listener, mock_hub = sentry_listener

    class MockJobResult:
        def toString(self):  # noqa: N802
            return job_result

    class MockJobEnd:
        def jobId(self):  # noqa: N802
            return "sample-job-id-end"

        def jobResult(self):  # noqa: N802
            result = MockJobResult()
            return result

    mock_job_end = MockJobEnd()
    listener.onJobEnd(mock_job_end)

    assert mock_hub.kwargs["level"] == level
    assert mock_hub.kwargs["data"]["result"] == job_result
    assert "sample-job-id-end" in mock_hub.kwargs["message"]


def test_sentry_listener_on_stage_submitted(sentry_listener):
    listener, mock_hub = sentry_listener

    class StageInfo:
        def stageId(self):  # noqa: N802
            return "sample-stage-id-submit"

        def name(self):
            return "run-job"

        def attemptId(self):  # noqa: N802
            return 14

    class MockStageSubmitted:
        def stageInfo(self):  # noqa: N802
            stageinf = StageInfo()
            return stageinf

    mock_stage_submitted = MockStageSubmitted()
    listener.onStageSubmitted(mock_stage_submitted)

    assert mock_hub.kwargs["level"] == "info"
    assert "sample-stage-id-submit" in mock_hub.kwargs["message"]
    assert mock_hub.kwargs["data"]["attemptId"] == 14
    assert mock_hub.kwargs["data"]["name"] == "run-job"


@pytest.fixture
def get_mock_stage_completed():
    def _inner(failure_reason):
        class JavaException:
            def __init__(self):
                self._target_id = "id"

        class FailureReason:
            def get(self):
                if failure_reason:
                    return "failure-reason"
                else:
                    raise Py4JJavaError("msg", JavaException())

        class StageInfo:
            def stageId(self):  # noqa: N802
                return "sample-stage-id-submit"

            def name(self):
                return "run-job"

            def attemptId(self):  # noqa: N802
                return 14

            def failureReason(self):  # noqa: N802
                return FailureReason()

        class MockStageCompleted:
            def stageInfo(self):  # noqa: N802
                return StageInfo()

        return MockStageCompleted()

    return _inner


def test_sentry_listener_on_stage_completed_success(
    sentry_listener, get_mock_stage_completed
):
    listener, mock_hub = sentry_listener

    mock_stage_completed = get_mock_stage_completed(failure_reason=False)
    listener.onStageCompleted(mock_stage_completed)

    assert mock_hub.kwargs["level"] == "info"
    assert "sample-stage-id-submit" in mock_hub.kwargs["message"]
    assert mock_hub.kwargs["data"]["attemptId"] == 14
    assert mock_hub.kwargs["data"]["name"] == "run-job"
    assert "reason" not in mock_hub.kwargs["data"]


def test_sentry_listener_on_stage_completed_failure(
    sentry_listener, get_mock_stage_completed
):
    listener, mock_hub = sentry_listener

    mock_stage_completed = get_mock_stage_completed(failure_reason=True)
    listener.onStageCompleted(mock_stage_completed)

    assert mock_hub.kwargs["level"] == "warning"
    assert "sample-stage-id-submit" in mock_hub.kwargs["message"]
    assert mock_hub.kwargs["data"]["attemptId"] == 14
    assert mock_hub.kwargs["data"]["name"] == "run-job"
    assert mock_hub.kwargs["data"]["reason"] == "failure-reason"


################
# WORKER TESTS #
################


def test_spark_worker(monkeypatch, sentry_init, capture_events, capture_exceptions):
    import pyspark.worker as original_worker
    import pyspark.daemon as original_daemon

    from pyspark.taskcontext import TaskContext

    task_context = TaskContext._getOrCreate()

    def mock_main():
        task_context._stageId = 0
        task_context._attemptNumber = 1
        task_context._partitionId = 2
        task_context._taskAttemptId = 3

        try:
            raise ZeroDivisionError
        except ZeroDivisionError:
            sys.exit(-1)

    monkeypatch.setattr(original_worker, "main", mock_main)

    sentry_init(integrations=[SparkWorkerIntegration()])

    events = capture_events()
    exceptions = capture_exceptions()

    original_daemon.worker_main()

    # SystemExit called, but not recorded as part of event
    assert type(exceptions.pop()) == SystemExit
    assert len(events[0]["exception"]["values"]) == 1
    assert events[0]["exception"]["values"][0]["type"] == "ZeroDivisionError"

    assert events[0]["tags"] == {
        "stageId": "0",
        "attemptNumber": "1",
        "partitionId": "2",
        "taskAttemptId": "3",
    }
sentry-python-1.39.2/tests/integrations/sqlalchemy/000077500000000000000000000000001454744723200224775ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/sqlalchemy/__init__.py000066400000000000000000000000611454744723200246050ustar00rootroot00000000000000import pytest

pytest.importorskip("sqlalchemy")
sentry-python-1.39.2/tests/integrations/sqlalchemy/test_sqlalchemy.py
import os
import pytest
import sys

from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker
from sqlalchemy import text

from sentry_sdk import capture_message, start_transaction, configure_scope
from sentry_sdk.consts import DEFAULT_MAX_VALUE_LENGTH, SPANDATA
from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
from sentry_sdk.serializer import MAX_EVENT_BYTES
from sentry_sdk.utils import json_dumps


def test_orm_queries(sentry_init, capture_events):
    sentry_init(
        integrations=[SqlalchemyIntegration()], _experiments={"record_sql_params": True}
    )
    events = capture_events()

    Base = declarative_base()  # noqa: N806

    class Person(Base):
        __tablename__ = "person"
        id = Column(Integer, primary_key=True)
        name = Column(String(250), nullable=False)

    class Address(Base):
        __tablename__ = "address"
        id = Column(Integer, primary_key=True)
        street_name = Column(String(250))
        street_number = Column(String(250))
        post_code = Column(String(250), nullable=False)
        person_id = Column(Integer, ForeignKey("person.id"))
        person = relationship(Person)

    engine = create_engine("sqlite:///:memory:")
    Base.metadata.create_all(engine)

    Session = sessionmaker(bind=engine)  # noqa: N806
    session = Session()

    bob = Person(name="Bob")
    session.add(bob)

    assert session.query(Person).first() == bob

    capture_message("hi")

    (event,) = events

    for crumb in event["breadcrumbs"]["values"]:
        del crumb["timestamp"]

    assert event["breadcrumbs"]["values"][-2:] == [
        {
            "category": "query",
            "data": {"db.params": ["Bob"], "db.paramstyle": "qmark"},
            "message": "INSERT INTO person (name) VALUES (?)",
            "type": "default",
        },
        {
            "category": "query",
            "data": {"db.params": [1, 0], "db.paramstyle": "qmark"},
            "message": "SELECT person.id AS person_id, person.name AS person_name \n"
            "FROM person\n"
            " LIMIT ? OFFSET ?",
            "type": "default",
        },
    ]


@pytest.mark.skipif(
    sys.version_info < (3,), reason="This sqla usage seems to be broken on Py2"
)
def test_transactions(sentry_init, capture_events, render_span_tree):
    sentry_init(
        integrations=[SqlalchemyIntegration()],
        _experiments={"record_sql_params": True},
        traces_sample_rate=1.0,
    )
    events = capture_events()

    Base = declarative_base()  # noqa: N806

    class Person(Base):
        __tablename__ = "person"
        id = Column(Integer, primary_key=True)
        name = Column(String(250), nullable=False)

    class Address(Base):
        __tablename__ = "address"
        id = Column(Integer, primary_key=True)
        street_name = Column(String(250))
        street_number = Column(String(250))
        post_code = Column(String(250), nullable=False)
        person_id = Column(Integer, ForeignKey("person.id"))
        person = relationship(Person)

    engine = create_engine("sqlite:///:memory:")
    Base.metadata.create_all(engine)

    Session = sessionmaker(bind=engine)  # noqa: N806
    session = Session()

    with start_transaction(name="test_transaction", sampled=True):
        with session.begin_nested():
            session.query(Person).first()

        for _ in range(2):
            with pytest.raises(IntegrityError):
                with session.begin_nested():
                    session.add(Person(id=1, name="bob"))
                    session.add(Person(id=1, name="bob"))

        with session.begin_nested():
            session.query(Person).first()

    (event,) = events

    for span in event["spans"]:
        assert span["data"][SPANDATA.DB_SYSTEM] == "sqlite"
        assert span["data"][SPANDATA.DB_NAME] == ":memory:"
        assert SPANDATA.SERVER_ADDRESS not in span["data"]
        assert SPANDATA.SERVER_PORT not in span["data"]

    assert (
        render_span_tree(event)
        == """\
- op=null: description=null
  - op="db": description="SAVEPOINT sa_savepoint_1"
  - op="db": description="SELECT person.id AS person_id, person.name AS person_name \\nFROM person\\n LIMIT ? OFFSET ?"
  - op="db": description="RELEASE SAVEPOINT sa_savepoint_1"
  - op="db": description="SAVEPOINT sa_savepoint_2"
  - op="db": description="INSERT INTO person (id, name) VALUES (?, ?)"
  - op="db": description="ROLLBACK TO SAVEPOINT sa_savepoint_2"
  - op="db": description="SAVEPOINT sa_savepoint_3"
  - op="db": description="INSERT INTO person (id, name) VALUES (?, ?)"
  - op="db": description="ROLLBACK TO SAVEPOINT sa_savepoint_3"
  - op="db": description="SAVEPOINT sa_savepoint_4"
  - op="db": description="SELECT person.id AS person_id, person.name AS person_name \\nFROM person\\n LIMIT ? OFFSET ?"
  - op="db": description="RELEASE SAVEPOINT sa_savepoint_4"\
"""
    )


def test_long_sql_query_preserved(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1,
        integrations=[SqlalchemyIntegration()],
    )
    events = capture_events()

    engine = create_engine("sqlite:///:memory:")
    with start_transaction(name="test"):
        with engine.connect() as con:
            con.execute(text(" UNION ".join("SELECT {}".format(i) for i in range(100))))

    (event,) = events
    description = event["spans"][0]["description"]
    assert description.startswith("SELECT 0 UNION SELECT 1")
    assert description.endswith("SELECT 98 UNION SELECT 99")


def test_large_event_not_truncated(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1,
        integrations=[SqlalchemyIntegration()],
    )
    events = capture_events()

    long_str = "x" * (DEFAULT_MAX_VALUE_LENGTH + 10)

    with configure_scope() as scope:

        @scope.add_event_processor
        def processor(event, hint):
            event["message"] = long_str
            return event

    engine = create_engine("sqlite:///:memory:")
    with start_transaction(name="test"):
        with engine.connect() as con:
            for _ in range(1500):
                con.execute(
                    text(" UNION ".join("SELECT {}".format(i) for i in range(100)))
                )

    (event,) = events

    assert len(json_dumps(event)) > MAX_EVENT_BYTES

    # Some spans are discarded.
    assert len(event["spans"]) == 1000

    # Span descriptions are not truncated.
    description = event["spans"][0]["description"]
    assert len(description) == 1583
    assert description.startswith("SELECT 0")
    assert description.endswith("SELECT 98 UNION SELECT 99")

    description = event["spans"][999]["description"]
    assert len(description) == 1583
    assert description.startswith("SELECT 0")
    assert description.endswith("SELECT 98 UNION SELECT 99")

    # Smoke check that truncation of other fields has not changed.
    assert len(event["message"]) == DEFAULT_MAX_VALUE_LENGTH

    # The _meta for other truncated fields should be there as well.
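    # "len" is the pre-truncation length (DEFAULT_MAX_VALUE_LENGTH + 10) and the
    # "!limit" remark records the character range affected by the truncation.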
    assert event["_meta"]["message"] == {
        "": {"len": 1034, "rem": [["!limit", "x", 1021, 1024]]}
    }


def test_engine_name_not_string(sentry_init):
    sentry_init(
        integrations=[SqlalchemyIntegration()],
    )

    engine = create_engine("sqlite:///:memory:")
    engine.dialect.name = b"sqlite"

    with engine.connect() as con:
        con.execute(text("SELECT 0"))


@pytest.mark.parametrize("enable_db_query_source", [None, False])
def test_query_source_disabled(sentry_init, capture_events, enable_db_query_source):
    sentry_options = {
        "integrations": [SqlalchemyIntegration()],
        "enable_tracing": True,
    }
    if enable_db_query_source is not None:
        sentry_options["enable_db_query_source"] = enable_db_query_source
        sentry_options["db_query_source_threshold_ms"] = 0

    sentry_init(**sentry_options)

    events = capture_events()

    with start_transaction(name="test_transaction", sampled=True):
        Base = declarative_base()  # noqa: N806

        class Person(Base):
            __tablename__ = "person"
            id = Column(Integer, primary_key=True)
            name = Column(String(250), nullable=False)

        engine = create_engine("sqlite:///:memory:")
        Base.metadata.create_all(engine)

        Session = sessionmaker(bind=engine)  # noqa: N806
        session = Session()

        bob = Person(name="Bob")
        session.add(bob)

        assert session.query(Person).first() == bob

    (event,) = events

    for span in event["spans"]:
        if span.get("op") == "db" and span.get("description").startswith(
            "SELECT person"
        ):
            data = span.get("data", {})

            assert SPANDATA.CODE_LINENO not in data
            assert SPANDATA.CODE_NAMESPACE not in data
            assert SPANDATA.CODE_FILEPATH not in data
            assert SPANDATA.CODE_FUNCTION not in data
            break
    else:
        raise AssertionError("No db span found")


def test_query_source(sentry_init, capture_events):
    sentry_init(
        integrations=[SqlalchemyIntegration()],
        enable_tracing=True,
        enable_db_query_source=True,
        db_query_source_threshold_ms=0,
    )
    events = capture_events()

    with start_transaction(name="test_transaction", sampled=True):
        Base = declarative_base()  # noqa: N806

        class Person(Base):
            __tablename__ = "person"
            id = Column(Integer, primary_key=True)
            name = Column(String(250), nullable=False)

        engine = create_engine("sqlite:///:memory:")
        Base.metadata.create_all(engine)

        Session = sessionmaker(bind=engine)  # noqa: N806
        session = Session()

        bob = Person(name="Bob")
        session.add(bob)

        assert session.query(Person).first() == bob

    (event,) = events

    for span in event["spans"]:
        if span.get("op") == "db" and span.get("description").startswith(
            "SELECT person"
        ):
            data = span.get("data", {})

            assert SPANDATA.CODE_LINENO in data
            assert SPANDATA.CODE_NAMESPACE in data
            assert SPANDATA.CODE_FILEPATH in data
            assert SPANDATA.CODE_FUNCTION in data

            assert type(data.get(SPANDATA.CODE_LINENO)) == int
            assert data.get(SPANDATA.CODE_LINENO) > 0
            assert (
                data.get(SPANDATA.CODE_NAMESPACE)
                == "tests.integrations.sqlalchemy.test_sqlalchemy"
            )
            assert data.get(SPANDATA.CODE_FILEPATH).endswith(
                "tests/integrations/sqlalchemy/test_sqlalchemy.py"
            )

            is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
            assert is_relative_path

            assert data.get(SPANDATA.CODE_FUNCTION) == "test_query_source"
            break
    else:
        raise AssertionError("No db span found")
sentry-python-1.39.2/tests/integrations/starlette/__init__.py
import pytest

pytest.importorskip("starlette")
sentry-python-1.39.2/tests/integrations/starlette/photo.jpg
[binary JPEG test fixture; contents omitted]
ks{:gpgCW'EP%33fوNP	"tq!eSDy6V,GBLđHdi1Ȫ>qGMܨFBKU48?{\✤W雷 |`L|QYYl̡0L%nx^~b&,*&cɻ._xy<߫_/_fn 7//o}sentry-python-1.39.2/tests/integrations/starlette/templates/000077500000000000000000000000001454744723200243425ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/starlette/templates/trace_meta.html000066400000000000000000000000301454744723200273250ustar00rootroot00000000000000{{ sentry_trace_meta }}
sentry-python-1.39.2/tests/integrations/starlette/test_starlette.py
import asyncio
import base64
import functools
import json
import logging
import os
import re
import threading

import pytest

from sentry_sdk import last_event_id, capture_exception
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
from sentry_sdk.utils import parse_version

try:
    from unittest import mock  # python 3.3 and above
except ImportError:
    import mock  # python < 3.3

from sentry_sdk import capture_message
from sentry_sdk.integrations.starlette import (
    StarletteIntegration,
    StarletteRequestExtractor,
)

import starlette
from starlette.authentication import (
    AuthCredentials,
    AuthenticationBackend,
    AuthenticationError,
    SimpleUser,
)
from starlette.middleware import Middleware
from starlette.middleware.authentication import AuthenticationMiddleware
from starlette.middleware.trustedhost import TrustedHostMiddleware
from starlette.testclient import TestClient


STARLETTE_VERSION = parse_version(starlette.__version__)

PICTURE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "photo.jpg")

BODY_JSON = {"some": "json", "for": "testing", "nested": {"numbers": 123}}

BODY_FORM = """--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="username"\r\n\r\nJane\r\n--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="password"\r\n\r\nhello123\r\n--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="photo"; filename="photo.jpg"\r\nContent-Type: image/jpg\r\nContent-Transfer-Encoding: base64\r\n\r\n{{image_data}}\r\n--fd721ef49ea403a6--\r\n""".replace(
    "{{image_data}}", str(base64.b64encode(open(PICTURE, "rb").read()))
)

FORM_RECEIVE_MESSAGES = [
    {"type": "http.request", "body": BODY_FORM.encode("utf-8")},
    {"type": "http.disconnect"},
]

JSON_RECEIVE_MESSAGES = [
    {"type": "http.request", "body": json.dumps(BODY_JSON).encode("utf-8")},
    {"type": "http.disconnect"},
]

PARSED_FORM = starlette.datastructures.FormData(
    [
        ("username", "Jane"),
        ("password", "hello123"),
        (
            "photo",
            starlette.datastructures.UploadFile(
                filename="photo.jpg",
                file=open(PICTURE, "rb"),
            ),
        ),
    ]
)

# Dummy ASGI scope for creating mock Starlette requests
SCOPE = {
    "client": ("172.29.0.10", 34784),
    "headers": [
        [b"host", b"example.com"],
        [b"user-agent", b"Mozilla/5.0 Gecko/20100101 Firefox/60.0"],
        [b"content-type", b"application/json"],
        [b"accept-language", b"en-US,en;q=0.5"],
        [b"accept-encoding", b"gzip, deflate, br"],
        [b"upgrade-insecure-requests", b"1"],
        [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"],
    ],
    "http_version": "0.0",
    "method": "GET",
    "path": "/path",
    "query_string": b"qs=hello",
    "scheme": "http",
    "server": ("172.28.0.10", 8000),
    "type": "http",
}


async def _mock_receive(msg):
    return msg


from sentry_sdk import Hub
from starlette.templating import Jinja2Templates


def starlette_app_factory(middleware=None, debug=True):
    template_dir = os.path.join(
        os.getcwd(), "tests", "integrations", "starlette", "templates"
    )
    templates = Jinja2Templates(directory=template_dir)

    async def _homepage(request):
        1 / 0
        return starlette.responses.JSONResponse({"status": "ok"})

    async def _custom_error(request):
        raise Exception("Too Hot")

    async def _message(request):
        capture_message("hi")
        return starlette.responses.JSONResponse({"status": "ok"})

    async def _message_with_id(request):
        capture_message("hi")
        return starlette.responses.JSONResponse({"status": "ok"})

    def _thread_ids_sync(request):
        return starlette.responses.JSONResponse(
            {
                "main": threading.main_thread().ident,
                "active": threading.current_thread().ident,
            }
        )

    async def _thread_ids_async(request):
        return starlette.responses.JSONResponse(
            {
                "main": threading.main_thread().ident,
                "active": threading.current_thread().ident,
            }
        )

    async def _render_template(request):
        hub = Hub.current
        capture_message(hub.get_traceparent() + "\n" + hub.get_baggage())

        template_context = {
            "request": request,
            "msg": "Hello Template World!",
        }
        return templates.TemplateResponse("trace_meta.html", template_context)

    app = starlette.applications.Starlette(
        debug=debug,
        routes=[
            starlette.routing.Route("/some_url", _homepage),
            starlette.routing.Route("/custom_error", _custom_error),
            starlette.routing.Route("/message", _message),
            starlette.routing.Route("/message/{message_id}", _message_with_id),
            starlette.routing.Route("/sync/thread_ids", _thread_ids_sync),
            starlette.routing.Route("/async/thread_ids", _thread_ids_async),
            starlette.routing.Route("/render_template", _render_template),
        ],
        middleware=middleware,
    )

    return app


def async_return(result):
    f = asyncio.Future()
    f.set_result(result)
    return f


class BasicAuthBackend(AuthenticationBackend):
    async def authenticate(self, conn):
        if "Authorization" not in conn.headers:
            return

        auth = conn.headers["Authorization"]
        try:
            scheme, credentials = auth.split()
            if scheme.lower() != "basic":
                return
            decoded = base64.b64decode(credentials).decode("ascii")
        except (ValueError, UnicodeDecodeError):
            raise AuthenticationError("Invalid basic auth credentials")

        username, _, password = decoded.partition(":")

        # TODO: You'd want to verify the username and password here.

        return AuthCredentials(["authenticated"]), SimpleUser(username)


class AsyncIterator:
    def __init__(self, data):
        self.iter = iter(bytes(data, "utf-8"))

    def __aiter__(self):
        return self

    async def __anext__(self):
        try:
            return bytes([next(self.iter)])
        except StopIteration:
            raise StopAsyncIteration


class SampleMiddleware:
    def __init__(self, app):
        self.app = app

    async def __call__(self, scope, receive, send):
        # only handle http requests
        if scope["type"] != "http":
            await self.app(scope, receive, send)
            return

        async def do_stuff(message):
            if message["type"] == "http.response.start":
                # do something here.
                pass

            await send(message)

        await self.app(scope, receive, do_stuff)


class SampleReceiveSendMiddleware:
    def __init__(self, app):
        self.app = app

    async def __call__(self, scope, receive, send):
        message = await receive()
        assert message
        assert message["type"] == "http.request"

        send_output = await send({"type": "something-unimportant"})
        assert send_output is None

        await self.app(scope, receive, send)


class SamplePartialReceiveSendMiddleware:
    def __init__(self, app):
        self.app = app

    async def __call__(self, scope, receive, send):
        message = await receive()
        assert message
        assert message["type"] == "http.request"

        send_output = await send({"type": "something-unimportant"})
        assert send_output is None

        async def my_receive(*args, **kwargs):
            pass

        async def my_send(*args, **kwargs):
            pass

        partial_receive = functools.partial(my_receive)
        partial_send = functools.partial(my_send)

        await self.app(scope, partial_receive, partial_send)


@pytest.mark.asyncio
async def test_starlettrequestextractor_content_length(sentry_init):
    scope = SCOPE.copy()
    scope["headers"] = [
        [b"content-length", str(len(json.dumps(BODY_JSON))).encode()],
    ]
    starlette_request = starlette.requests.Request(scope)
    extractor = StarletteRequestExtractor(starlette_request)

    assert await extractor.content_length() == len(json.dumps(BODY_JSON))


@pytest.mark.asyncio
async def test_starlettrequestextractor_cookies(sentry_init):
    starlette_request = starlette.requests.Request(SCOPE)
    extractor = StarletteRequestExtractor(starlette_request)

    assert extractor.cookies() == {
        "tasty_cookie": "strawberry",
        "yummy_cookie": "choco",
    }


@pytest.mark.asyncio
async def test_starlettrequestextractor_json(sentry_init):
    starlette_request = starlette.requests.Request(SCOPE)

    # Mocking async `_receive()` that works in Python 3.7+
    side_effect = [_mock_receive(msg) for msg in JSON_RECEIVE_MESSAGES]
    starlette_request._receive = mock.Mock(side_effect=side_effect)

    extractor = StarletteRequestExtractor(starlette_request)

    assert extractor.is_json()
    assert await extractor.json() == BODY_JSON


@pytest.mark.asyncio
async def test_starlettrequestextractor_form(sentry_init):
    scope = SCOPE.copy()
    scope["headers"] = [
        [b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"],
    ]
    # TODO add test for content-type: "application/x-www-form-urlencoded"

    starlette_request = starlette.requests.Request(scope)

    # Mocking async `_receive()` that works in Python 3.7+
    side_effect = [_mock_receive(msg) for msg in FORM_RECEIVE_MESSAGES]
    starlette_request._receive = mock.Mock(side_effect=side_effect)

    extractor = StarletteRequestExtractor(starlette_request)

    form_data = await extractor.form()
    assert form_data.keys() == PARSED_FORM.keys()
    assert form_data["username"] == PARSED_FORM["username"]
    assert form_data["password"] == PARSED_FORM["password"]
    assert form_data["photo"].filename == PARSED_FORM["photo"].filename

    # Make sure we can still read the body
    # after already reading it with extractor.form() above.
    body = await extractor.request.body()
    assert body


@pytest.mark.asyncio
async def test_starlettrequestextractor_body_consumed_twice(
    sentry_init, capture_events
):
    """
    Starlette caches the request data when you read it via `request.json()`
    or `request.body()`, but NOT when you read it via `request.form()`.
    This creates an edge case: the Sentry Starlette integration reads the body
    using `.form()`, while the user may also want to read it using `.body()`,
    and the underlying stream can neither be consumed twice nor is it cached
    by `.form()`.

    We fixed this in `StarletteRequestExtractor.form()` by consuming the body
    first with `.body()` (to put it into the `_body` cache) and only then
    consuming it with `.form()`. A minimal sketch of this workaround follows
    this test.

    If this behavior is changed in Starlette and `request.form()` starts
    caching the body as well, this test will fail.

    See also https://github.com/encode/starlette/discussions/1933
    """
    scope = SCOPE.copy()
    scope["headers"] = [
        [b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"],
    ]

    starlette_request = starlette.requests.Request(scope)

    # Mocking async `_receive()` that works in Python 3.7+
    side_effect = [_mock_receive(msg) for msg in FORM_RECEIVE_MESSAGES]
    starlette_request._receive = mock.Mock(side_effect=side_effect)

    extractor = StarletteRequestExtractor(starlette_request)

    await extractor.request.form()

    with pytest.raises(RuntimeError):
        await extractor.request.body()
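

# For illustration: the workaround described in the docstring above, as a
# minimal sketch with a hypothetical helper name. Reading `.body()` first
# populates Starlette's internal `_body` cache, after which `.form()` can
# parse from the cached bytes instead of the already-consumed stream.
async def _example_safe_form(request):
    await request.body()  # populate the `_body` cache first
    return await request.form()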


@pytest.mark.asyncio
async def test_starlettrequestextractor_extract_request_info_too_big(sentry_init):
    sentry_init(
        send_default_pii=True,
        integrations=[StarletteIntegration()],
    )
    scope = SCOPE.copy()
    scope["headers"] = [
        [b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"],
        [b"content-length", str(len(BODY_FORM)).encode()],
        [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"],
    ]
    starlette_request = starlette.requests.Request(scope)

    # Mocking async `_receive()` that works in Python 3.7+
    side_effect = [_mock_receive(msg) for msg in FORM_RECEIVE_MESSAGES]
    starlette_request._receive = mock.Mock(side_effect=side_effect)

    extractor = StarletteRequestExtractor(starlette_request)

    request_info = await extractor.extract_request_info()

    assert request_info
    assert request_info["cookies"] == {
        "tasty_cookie": "strawberry",
        "yummy_cookie": "choco",
    }
    # Because the request is too big, only the AnnotatedValue is extracted.
    assert request_info["data"].metadata == {"rem": [["!config", "x"]]}


@pytest.mark.asyncio
async def test_starlettrequestextractor_extract_request_info(sentry_init):
    sentry_init(
        send_default_pii=True,
        integrations=[StarletteIntegration()],
    )
    scope = SCOPE.copy()
    scope["headers"] = [
        [b"content-type", b"application/json"],
        [b"content-length", str(len(json.dumps(BODY_JSON))).encode()],
        [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"],
    ]

    starlette_request = starlette.requests.Request(scope)

    # Mocking async `_receive()` that works in Python 3.7+
    side_effect = [_mock_receive(msg) for msg in JSON_RECEIVE_MESSAGES]
    starlette_request._receive = mock.Mock(side_effect=side_effect)

    extractor = StarletteRequestExtractor(starlette_request)

    request_info = await extractor.extract_request_info()

    assert request_info
    assert request_info["cookies"] == {
        "tasty_cookie": "strawberry",
        "yummy_cookie": "choco",
    }
    assert request_info["data"] == BODY_JSON


@pytest.mark.asyncio
async def test_starlettrequestextractor_extract_request_info_no_pii(sentry_init):
    sentry_init(
        send_default_pii=False,
        integrations=[StarletteIntegration()],
    )
    scope = SCOPE.copy()
    scope["headers"] = [
        [b"content-type", b"application/json"],
        [b"content-length", str(len(json.dumps(BODY_JSON))).encode()],
        [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"],
    ]

    starlette_request = starlette.requests.Request(scope)

    # Mocking async `_receive()` that works in Python 3.7+
    side_effect = [_mock_receive(msg) for msg in JSON_RECEIVE_MESSAGES]
    starlette_request._receive = mock.Mock(side_effect=side_effect)

    extractor = StarletteRequestExtractor(starlette_request)

    request_info = await extractor.extract_request_info()

    assert request_info
    assert "cookies" not in request_info
    assert request_info["data"] == BODY_JSON


@pytest.mark.parametrize(
    "url,transaction_style,expected_transaction,expected_source",
    [
        (
            "/message",
            "url",
            "/message",
            "route",
        ),
        (
            "/message",
            "endpoint",
            "tests.integrations.starlette.test_starlette.starlette_app_factory.._message",
            "component",
        ),
        (
            "/message/123456",
            "url",
            "/message/{message_id}",
            "route",
        ),
        (
            "/message/123456",
            "endpoint",
            "tests.integrations.starlette.test_starlette.starlette_app_factory.._message_with_id",
            "component",
        ),
    ],
)
def test_transaction_style(
    sentry_init,
    capture_events,
    url,
    transaction_style,
    expected_transaction,
    expected_source,
):
    sentry_init(
        integrations=[StarletteIntegration(transaction_style=transaction_style)],
    )
    starlette_app = starlette_app_factory()

    events = capture_events()

    client = TestClient(starlette_app)
    client.get(url)

    (event,) = events
    assert event["transaction"] == expected_transaction
    assert event["transaction_info"] == {"source": expected_source}


@pytest.mark.parametrize(
    "test_url,expected_error,expected_message",
    [
        ("/some_url", ZeroDivisionError, "division by zero"),
        ("/custom_error", Exception, "Too Hot"),
    ],
)
def test_catch_exceptions(
    sentry_init,
    capture_exceptions,
    capture_events,
    test_url,
    expected_error,
    expected_message,
):
    sentry_init(integrations=[StarletteIntegration()])
    starlette_app = starlette_app_factory()
    exceptions = capture_exceptions()
    events = capture_events()

    client = TestClient(starlette_app)
    try:
        client.get(test_url)
    except Exception:
        pass

    (exc,) = exceptions
    assert isinstance(exc, expected_error)
    assert str(exc) == expected_message

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "starlette"


def test_user_information_error(sentry_init, capture_events):
    sentry_init(
        send_default_pii=True,
        integrations=[StarletteIntegration()],
    )
    starlette_app = starlette_app_factory(
        middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())]
    )
    events = capture_events()

    client = TestClient(starlette_app, raise_server_exceptions=False)
    try:
        client.get("/custom_error", auth=("Gabriela", "hello123"))
    except Exception:
        pass

    (event,) = events
    user = event.get("user", None)
    assert user
    assert "username" in user
    assert user["username"] == "Gabriela"


def test_user_information_error_no_pii(sentry_init, capture_events):
    sentry_init(
        send_default_pii=False,
        integrations=[StarletteIntegration()],
    )
    starlette_app = starlette_app_factory(
        middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())]
    )
    events = capture_events()

    client = TestClient(starlette_app, raise_server_exceptions=False)
    try:
        client.get("/custom_error", auth=("Gabriela", "hello123"))
    except Exception:
        pass

    (event,) = events
    assert "user" not in event


def test_user_information_transaction(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1.0,
        send_default_pii=True,
        integrations=[StarletteIntegration()],
    )
    starlette_app = starlette_app_factory(
        middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())]
    )
    events = capture_events()

    client = TestClient(starlette_app, raise_server_exceptions=False)
    client.get("/message", auth=("Gabriela", "hello123"))

    (_, transaction_event) = events
    user = transaction_event.get("user", None)
    assert user
    assert "username" in user
    assert user["username"] == "Gabriela"


def test_user_information_transaction_no_pii(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1.0,
        send_default_pii=False,
        integrations=[StarletteIntegration()],
    )
    starlette_app = starlette_app_factory(
        middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())]
    )
    events = capture_events()

    client = TestClient(starlette_app, raise_server_exceptions=False)
    client.get("/message", auth=("Gabriela", "hello123"))

    (_, transaction_event) = events
    assert "user" not in transaction_event


def test_middleware_spans(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[StarletteIntegration()],
    )
    starlette_app = starlette_app_factory(
        middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())]
    )
    events = capture_events()

    client = TestClient(starlette_app, raise_server_exceptions=False)
    try:
        client.get("/message", auth=("Gabriela", "hello123"))
    except Exception:
        pass

    (_, transaction_event) = events

    expected = [
        "ServerErrorMiddleware",
        "AuthenticationMiddleware",
        "ExceptionMiddleware",
    ]

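    # Middleware spans must appear in the order in which the middlewares wrap the app.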
    idx = 0
    for span in transaction_event["spans"]:
        if span["op"] == "middleware.starlette":
            assert span["description"] == expected[idx]
            assert span["tags"]["starlette.middleware_name"] == expected[idx]
            idx += 1


def test_middleware_callback_spans(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[StarletteIntegration()],
    )
    starlette_app = starlette_app_factory(middleware=[Middleware(SampleMiddleware)])
    events = capture_events()

    client = TestClient(starlette_app, raise_server_exceptions=False)
    try:
        client.get("/message", auth=("Gabriela", "hello123"))
    except Exception:
        pass

    (_, transaction_event) = events

    expected = [
        {
            "op": "middleware.starlette",
            "description": "ServerErrorMiddleware",
            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
        },
        {
            "op": "middleware.starlette",
            "description": "SampleMiddleware",
            "tags": {"starlette.middleware_name": "SampleMiddleware"},
        },
        {
            "op": "middleware.starlette",
            "description": "ExceptionMiddleware",
            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
        },
        {
            "op": "middleware.starlette.send",
            "description": "SampleMiddleware.__call__..do_stuff",
            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
        },
        {
            "op": "middleware.starlette.send",
            "description": "ServerErrorMiddleware.__call__.._send",
            "tags": {"starlette.middleware_name": "SampleMiddleware"},
        },
        {
            "op": "middleware.starlette.send",
            "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send",
            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
        },
        {
            "op": "middleware.starlette.send",
            "description": "SampleMiddleware.__call__..do_stuff",
            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
        },
        {
            "op": "middleware.starlette.send",
            "description": "ServerErrorMiddleware.__call__.._send",
            "tags": {"starlette.middleware_name": "SampleMiddleware"},
        },
        {
            "op": "middleware.starlette.send",
            "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send",
            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
        },
    ]

    for idx, span in enumerate(transaction_event["spans"]):
        assert span["op"] == expected[idx]["op"]
        assert span["description"] == expected[idx]["description"]
        assert span["tags"] == expected[idx]["tags"]


def test_middleware_receive_send(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[StarletteIntegration()],
    )
    starlette_app = starlette_app_factory(
        middleware=[Middleware(SampleReceiveSendMiddleware)]
    )

    client = TestClient(starlette_app, raise_server_exceptions=False)
    try:
        # NOTE: the assert statements checking
        # for correct behaviour are in `SampleReceiveSendMiddleware`!
        client.get("/message", auth=("Gabriela", "hello123"))
    except Exception:
        pass


def test_middleware_partial_receive_send(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[StarletteIntegration()],
    )
    starlette_app = starlette_app_factory(
        middleware=[Middleware(SamplePartialReceiveSendMiddleware)]
    )
    events = capture_events()

    client = TestClient(starlette_app, raise_server_exceptions=False)
    try:
        client.get("/message", auth=("Gabriela", "hello123"))
    except Exception:
        pass

    (_, transaction_event) = events

    expected = [
        {
            "op": "middleware.starlette",
            "description": "ServerErrorMiddleware",
            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
        },
        {
            "op": "middleware.starlette",
            "description": "SamplePartialReceiveSendMiddleware",
            "tags": {"starlette.middleware_name": "SamplePartialReceiveSendMiddleware"},
        },
        {
            "op": "middleware.starlette.receive",
            "description": "_ASGIAdapter.send..receive"
            if STARLETTE_VERSION < (0, 21)
            else "_TestClientTransport.handle_request..receive",
            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
        },
        {
            "op": "middleware.starlette.send",
            "description": "ServerErrorMiddleware.__call__.._send",
            "tags": {"starlette.middleware_name": "SamplePartialReceiveSendMiddleware"},
        },
        {
            "op": "middleware.starlette.send",
            "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send",
            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
        },
        {
            "op": "middleware.starlette",
            "description": "ExceptionMiddleware",
            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
        },
        {
            "op": "middleware.starlette.send",
            "description": "functools.partial(.my_send at ",
            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
        },
        {
            "op": "middleware.starlette.send",
            "description": "functools.partial(.my_send at ",
            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
        },
    ]

    for idx, span in enumerate(transaction_event["spans"]):
        assert span["op"] == expected[idx]["op"]
        assert span["description"].startswith(expected[idx]["description"])
        assert span["tags"] == expected[idx]["tags"]


def test_last_event_id(sentry_init, capture_events):
    sentry_init(
        integrations=[StarletteIntegration()],
    )
    events = capture_events()

    def handler(request, exc):
        capture_exception(exc)
        return starlette.responses.PlainTextResponse(last_event_id(), status_code=500)

    app = starlette_app_factory(debug=False)
    app.add_exception_handler(500, handler)

    client = TestClient(SentryAsgiMiddleware(app), raise_server_exceptions=False)
    response = client.get("/custom_error")
    assert response.status_code == 500

    event = events[0]
    assert response.content.strip().decode("ascii") == event["event_id"]
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "Exception"
    assert exception["value"] == "Too Hot"


def test_legacy_setup(
    sentry_init,
    capture_events,
):
    # Check that behaviour does not change
    # if the user just adds the new Integration
    # and forgets to remove SentryAsgiMiddleware
    sentry_init()
    app = starlette_app_factory()
    asgi_app = SentryAsgiMiddleware(app)

    events = capture_events()

    client = TestClient(asgi_app)
    client.get("/message/123456")

    (event,) = events
    assert event["transaction"] == "/message/{message_id}"


@pytest.mark.parametrize("endpoint", ["/sync/thread_ids", "/async/thread_ids"])
@mock.patch("sentry_sdk.profiler.PROFILE_MINIMUM_SAMPLES", 0)
def test_active_thread_id(sentry_init, capture_envelopes, teardown_profiling, endpoint):
    sentry_init(
        traces_sample_rate=1.0,
        _experiments={"profiles_sample_rate": 1.0},
    )
    app = starlette_app_factory()
    asgi_app = SentryAsgiMiddleware(app)

    envelopes = capture_envelopes()

    client = TestClient(asgi_app)
    response = client.get(endpoint)
    assert response.status_code == 200

    data = json.loads(response.content)

    assert len(envelopes) == 1

    profiles = [item for item in envelopes[0].items if item.type == "profile"]
    assert len(profiles) == 1

    for profile in profiles:
        transactions = profile.payload.json["transactions"]
        assert len(transactions) == 1
        assert str(data["active"]) == transactions[0]["active_thread_id"]


def test_original_request_not_scrubbed(sentry_init, capture_events):
    sentry_init(integrations=[StarletteIntegration()])

    events = capture_events()

    async def _error(request):
        logging.critical("Oh no!")
        assert request.headers["Authorization"] == "Bearer ohno"
        assert await request.json() == {"password": "ohno"}
        return starlette.responses.JSONResponse({"status": "Oh no!"})

    app = starlette.applications.Starlette(
        routes=[
            starlette.routing.Route("/error", _error, methods=["POST"]),
        ],
    )

    client = TestClient(app)
    client.post(
        "/error",
        json={"password": "ohno"},
        headers={"Authorization": "Bearer ohno"},
    )

    event = events[0]
    assert event["request"]["data"] == {"password": "[Filtered]"}
    assert event["request"]["headers"]["authorization"] == "[Filtered]"


@pytest.mark.skipif(STARLETTE_VERSION < (0, 24), reason="Requires Starlette >= 0.24")
def test_template_tracing_meta(sentry_init, capture_events):
    sentry_init(
        auto_enabling_integrations=False,  # Make sure the httpx integration is not added, because it adds tracing information to the Starlette test client's requests.
        integrations=[StarletteIntegration()],
    )
    events = capture_events()

    app = starlette_app_factory()

    client = TestClient(app)
    response = client.get("/render_template")
    assert response.status_code == 200

    rendered_meta = response.text
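    # The /render_template endpoint (see starlette_app_factory) captures
    # "traceparent\nbaggage" as a message and renders the same values into
    # the tracing <meta> tags checked below.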
    traceparent, baggage = events[0]["message"].split("\n")
    assert traceparent != ""
    assert baggage != ""

    match = re.match(
        r'^<meta name="sentry-trace" content="([^\"]*)">'
        r'<meta name="baggage" content="([^\"]*)">',
        rendered_meta,
    )
    assert match is not None
    assert match.group(1) == traceparent

    # Baggage items may be rendered in any order, so compare the sorted items.
    rendered_baggage = match.group(2)
    assert sorted(rendered_baggage.split(",")) == sorted(baggage.split(","))


@pytest.mark.parametrize(
    "request_url,transaction_style,expected_transaction_name,expected_transaction_source",
    [
        (
            "/message/123456",
            "endpoint",
            "tests.integrations.starlette.test_starlette.starlette_app_factory.._message_with_id",
            "component",
        ),
        (
            "/message/123456",
            "url",
            "/message/{message_id}",
            "route",
        ),
    ],
)
def test_transaction_name(
    sentry_init,
    request_url,
    transaction_style,
    expected_transaction_name,
    expected_transaction_source,
    capture_envelopes,
):
    """
    Tests that the transaction name is something meaningful.
    """
    sentry_init(
        auto_enabling_integrations=False,  # Make sure the httpx integration is not added, because it adds tracing information to the Starlette test client's requests.
        integrations=[StarletteIntegration(transaction_style=transaction_style)],
        traces_sample_rate=1.0,
        debug=True,
    )

    envelopes = capture_envelopes()

    app = starlette_app_factory()
    client = TestClient(app)
    client.get(request_url)

    (_, transaction_envelope) = envelopes
    transaction_event = transaction_envelope.get_transaction_event()

    assert transaction_event["transaction"] == expected_transaction_name
    assert (
        transaction_event["transaction_info"]["source"] == expected_transaction_source
    )


@pytest.mark.parametrize(
    "request_url,transaction_style,expected_transaction_name,expected_transaction_source",
    [
        (
            "/message/123456",
            "endpoint",
            "http://testserver/message/123456",
            "url",
        ),
        (
            "/message/123456",
            "url",
            "http://testserver/message/123456",
            "url",
        ),
    ],
)
def test_transaction_name_in_traces_sampler(
    sentry_init,
    request_url,
    transaction_style,
    expected_transaction_name,
    expected_transaction_source,
):
    """
    Tests that a custom traces_sampler has a meaningful transaction name.
    In this case the URL or endpoint, because we do not have the route yet.
    """

    def dummy_traces_sampler(sampling_context):
        assert (
            sampling_context["transaction_context"]["name"] == expected_transaction_name
        )
        assert (
            sampling_context["transaction_context"]["source"]
            == expected_transaction_source
        )

    sentry_init(
        auto_enabling_integrations=False,  # Make sure the httpx integration is not added, because it adds tracing information to the Starlette test client's requests.
        integrations=[StarletteIntegration(transaction_style=transaction_style)],
        traces_sampler=dummy_traces_sampler,
        traces_sample_rate=1.0,
        debug=True,
    )

    app = starlette_app_factory()
    client = TestClient(app)
    client.get(request_url)


@pytest.mark.parametrize(
    "request_url,transaction_style,expected_transaction_name,expected_transaction_source",
    [
        (
            "/message/123456",
            "endpoint",
            "starlette.middleware.trustedhost.TrustedHostMiddleware",
            "component",
        ),
        (
            "/message/123456",
            "url",
            "http://testserver/message/123456",
            "url",
        ),
    ],
)
def test_transaction_name_in_middleware(
    sentry_init,
    request_url,
    transaction_style,
    expected_transaction_name,
    expected_transaction_source,
    capture_envelopes,
):
    """
    Tests that the transaction name is something meaningful.
    """
    sentry_init(
        auto_enabling_integrations=False,  # Make sure the httpx integration is not added, because it adds tracing information to the Starlette test client's requests.
        integrations=[
            StarletteIntegration(transaction_style=transaction_style),
        ],
        traces_sample_rate=1.0,
        debug=True,
    )

    envelopes = capture_envelopes()

    middleware = [
        Middleware(
            TrustedHostMiddleware,
            allowed_hosts=["example.com", "*.example.com"],
        ),
    ]

    app = starlette_app_factory(middleware=middleware)
    client = TestClient(app)
    client.get(request_url)

    (transaction_envelope,) = envelopes
    transaction_event = transaction_envelope.get_transaction_event()

    assert transaction_event["contexts"]["response"]["status_code"] == 400
    assert transaction_event["transaction"] == expected_transaction_name
    assert (
        transaction_event["transaction_info"]["source"] == expected_transaction_source
    )

sentry-python-1.39.2/tests/integrations/starlite/__init__.py
import pytest

pytest.importorskip("starlite")

sentry-python-1.39.2/tests/integrations/starlite/test_starlite.py
import functools

import pytest

from sentry_sdk import capture_exception, capture_message, last_event_id
from sentry_sdk.integrations.starlite import StarliteIntegration

from typing import Any, Dict

import starlite
from starlite import AbstractMiddleware, LoggingConfig, Starlite, get, Controller
from starlite.middleware import LoggingMiddlewareConfig, RateLimitConfig
from starlite.middleware.session.memory_backend import MemoryBackendConfig
from starlite.status_codes import HTTP_500_INTERNAL_SERVER_ERROR
from starlite.testing import TestClient


class SampleMiddleware(AbstractMiddleware):
    async def __call__(self, scope, receive, send) -> None:
        async def do_stuff(message):
            if message["type"] == "http.response.start":
                # do something here.
                pass
            await send(message)

        await self.app(scope, receive, do_stuff)


class SampleReceiveSendMiddleware(AbstractMiddleware):
    async def __call__(self, scope, receive, send):
        message = await receive()
        assert message
        assert message["type"] == "http.request"

        send_output = await send({"type": "something-unimportant"})
        assert send_output is None

        await self.app(scope, receive, send)


class SamplePartialReceiveSendMiddleware(AbstractMiddleware):
    async def __call__(self, scope, receive, send):
        message = await receive()
        assert message
        assert message["type"] == "http.request"

        send_output = await send({"type": "something-unimportant"})
        assert send_output is None

        async def my_receive(*args, **kwargs):
            pass

        async def my_send(*args, **kwargs):
            pass

        partial_receive = functools.partial(my_receive)
        partial_send = functools.partial(my_send)

        await self.app(scope, partial_receive, partial_send)


def starlite_app_factory(middleware=None, debug=True, exception_handlers=None):
    class MyController(Controller):
        path = "/controller"

        @get("/error")
        async def controller_error(self) -> None:
            raise Exception("Whoa")

    @get("/some_url")
    async def homepage_handler() -> Dict[str, Any]:
        1 / 0
        return {"status": "ok"}

    @get("/custom_error", name="custom_name")
    async def custom_error() -> Any:
        raise Exception("Too Hot")

    @get("/message")
    async def message() -> Dict[str, Any]:
        capture_message("hi")
        return {"status": "ok"}

    @get("/message/{message_id:str}")
    async def message_with_id() -> Dict[str, Any]:
        capture_message("hi")
        return {"status": "ok"}

    logging_config = LoggingConfig()

    app = Starlite(
        route_handlers=[
            homepage_handler,
            custom_error,
            message,
            message_with_id,
            MyController,
        ],
        debug=debug,
        middleware=middleware,
        logging_config=logging_config,
        exception_handlers=exception_handlers,
    )

    return app


@pytest.mark.parametrize(
    "test_url,expected_error,expected_message,expected_tx_name",
    [
        (
            "/some_url",
            ZeroDivisionError,
            "division by zero",
            "tests.integrations.starlite.test_starlite.starlite_app_factory..homepage_handler",
        ),
        (
            "/custom_error",
            Exception,
            "Too Hot",
            "custom_name",
        ),
        (
            "/controller/error",
            Exception,
            "Whoa",
            "partial(.MyController.controller_error>)",
        ),
    ],
)
def test_catch_exceptions(
    sentry_init,
    capture_exceptions,
    capture_events,
    test_url,
    expected_error,
    expected_message,
    expected_tx_name,
):
    sentry_init(integrations=[StarliteIntegration()])
    starlite_app = starlite_app_factory()
    exceptions = capture_exceptions()
    events = capture_events()

    client = TestClient(starlite_app)
    try:
        client.get(test_url)
    except Exception:
        pass

    (exc,) = exceptions
    assert isinstance(exc, expected_error)
    assert str(exc) == expected_message

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "starlite"
    assert event["transaction"] == expected_tx_name


def test_middleware_spans(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[StarliteIntegration()],
    )

    logging_config = LoggingMiddlewareConfig()
    session_config = MemoryBackendConfig()
    rate_limit_config = RateLimitConfig(rate_limit=("hour", 5))

    starlite_app = starlite_app_factory(
        middleware=[
            session_config.middleware,
            logging_config.middleware,
            rate_limit_config.middleware,
        ]
    )
    events = capture_events()

    client = TestClient(
        starlite_app, raise_server_exceptions=False, base_url="http://testserver.local"
    )
    try:
        client.get("/message")
    except Exception:
        pass

    (_, transaction_event) = events

    expected = ["SessionMiddleware", "LoggingMiddleware", "RateLimitMiddleware"]

    idx = 0
    for span in transaction_event["spans"]:
        if span["op"] == "middleware.starlite":
            assert span["description"] == expected[idx]
            assert span["tags"]["starlite.middleware_name"] == expected[idx]
            idx += 1


def test_middleware_callback_spans(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[StarliteIntegration()],
    )
    starlite_app = starlite_app_factory(middleware=[SampleMiddleware])
    events = capture_events()

    client = TestClient(starlite_app, raise_server_exceptions=False)
    try:
        client.get("/message")
    except Exception:
        pass

    (_, transaction_event) = events

    expected = [
        {
            "op": "middleware.starlite",
            "description": "SampleMiddleware",
            "tags": {"starlite.middleware_name": "SampleMiddleware"},
        },
        {
            "op": "middleware.starlite.send",
            "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send",
            "tags": {"starlite.middleware_name": "SampleMiddleware"},
        },
        {
            "op": "middleware.starlite.send",
            "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send",
            "tags": {"starlite.middleware_name": "SampleMiddleware"},
        },
    ]
    for idx, span in enumerate(transaction_event["spans"]):
        assert span["op"] == expected[idx]["op"]
        assert span["description"] == expected[idx]["description"]
        assert span["tags"] == expected[idx]["tags"]


def test_middleware_receive_send(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[StarliteIntegration()],
    )
    starlite_app = starlite_app_factory(middleware=[SampleReceiveSendMiddleware])

    client = TestClient(starlite_app, raise_server_exceptions=False)
    try:
        # NOTE: the assert statements checking
        # for correct behaviour are in `SampleReceiveSendMiddleware`!
        client.get("/message")
    except Exception:
        pass


def test_middleware_partial_receive_send(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[StarliteIntegration()],
    )
    starlite_app = starlite_app_factory(
        middleware=[SamplePartialReceiveSendMiddleware]
    )
    events = capture_events()

    client = TestClient(starlite_app, raise_server_exceptions=False)
    try:
        client.get("/message")
    except Exception:
        pass

    (_, transaction_event) = events

    expected = [
        {
            "op": "middleware.starlite",
            "description": "SamplePartialReceiveSendMiddleware",
            "tags": {"starlite.middleware_name": "SamplePartialReceiveSendMiddleware"},
        },
        {
            "op": "middleware.starlite.receive",
            "description": "TestClientTransport.create_receive..receive",
            "tags": {"starlite.middleware_name": "SamplePartialReceiveSendMiddleware"},
        },
        {
            "op": "middleware.starlite.send",
            "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send",
            "tags": {"starlite.middleware_name": "SamplePartialReceiveSendMiddleware"},
        },
    ]

    for idx, span in enumerate(transaction_event["spans"]):
        assert span["op"] == expected[idx]["op"]
        assert span["description"].startswith(expected[idx]["description"])
        assert span["tags"] == expected[idx]["tags"]


def test_last_event_id(sentry_init, capture_events):
    sentry_init(
        integrations=[StarliteIntegration()],
    )
    events = capture_events()

    def handler(request, exc):
        capture_exception(exc)
        return starlite.response.Response(last_event_id(), status_code=500)

    app = starlite_app_factory(
        debug=False, exception_handlers={HTTP_500_INTERNAL_SERVER_ERROR: handler}
    )

    client = TestClient(app, raise_server_exceptions=False)
    response = client.get("/custom_error")
    assert response.status_code == 500
    event = events[-1]
    assert response.content.strip().decode("ascii").strip('"') == event["event_id"]
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "Exception"
    assert exception["value"] == "Too Hot"

sentry-python-1.39.2/tests/integrations/stdlib/test_httplib.py
import random

import pytest

try:
    # py3
    from urllib.request import urlopen
except ImportError:
    # py2
    from urllib import urlopen

try:
    # py2
    from httplib import HTTPConnection, HTTPSConnection
except ImportError:
    # py3
    from http.client import HTTPConnection, HTTPSConnection

try:
    from unittest import mock  # python 3.3 and above
except ImportError:
    import mock  # python < 3.3


from sentry_sdk import capture_message, start_transaction
from sentry_sdk.consts import MATCH_ALL, SPANDATA
from sentry_sdk.tracing import Transaction
from sentry_sdk.integrations.stdlib import StdlibIntegration

from tests.conftest import create_mock_http_server

PORT = create_mock_http_server()
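# Local mock HTTP server (from tests.conftest); the breadcrumb tests below
# issue real requests against http://localhost:PORT.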


def test_crumb_capture(sentry_init, capture_events):
    sentry_init(integrations=[StdlibIntegration()])
    events = capture_events()

    url = "http://localhost:{}/some/random/url".format(PORT)
    urlopen(url)

    capture_message("Testing!")

    (event,) = events
    (crumb,) = event["breadcrumbs"]["values"]

    assert crumb["type"] == "http"
    assert crumb["category"] == "httplib"
    assert crumb["data"] == {
        "url": url,
        SPANDATA.HTTP_METHOD: "GET",
        SPANDATA.HTTP_STATUS_CODE: 200,
        "reason": "OK",
        SPANDATA.HTTP_FRAGMENT: "",
        SPANDATA.HTTP_QUERY: "",
    }


def test_crumb_capture_hint(sentry_init, capture_events):
    def before_breadcrumb(crumb, hint):
        crumb["data"]["extra"] = "foo"
        return crumb

    sentry_init(integrations=[StdlibIntegration()], before_breadcrumb=before_breadcrumb)
    events = capture_events()

    url = "http://localhost:{}/some/random/url".format(PORT)
    urlopen(url)

    capture_message("Testing!")

    (event,) = events
    (crumb,) = event["breadcrumbs"]["values"]
    assert crumb["type"] == "http"
    assert crumb["category"] == "httplib"
    assert crumb["data"] == {
        "url": url,
        SPANDATA.HTTP_METHOD: "GET",
        SPANDATA.HTTP_STATUS_CODE: 200,
        "reason": "OK",
        "extra": "foo",
        SPANDATA.HTTP_FRAGMENT: "",
        SPANDATA.HTTP_QUERY: "",
    }


def test_empty_realurl(sentry_init):
    """
    Ensure that after using sentry_sdk.init you can putrequest a
    None url.
    """

    sentry_init(dsn="")
    HTTPConnection("example.com", port=443).putrequest("POST", None)


def test_httplib_misuse(sentry_init, capture_events, request):
    """HTTPConnection.getresponse must be called after every call to
    HTTPConnection.request. However, if somebody does not abide by
    this contract, we should still handle it gracefully and not
    send mixed breadcrumbs.

    Test whether our breadcrumbs are coherent when somebody uses HTTPConnection
    wrongly.
    """

    sentry_init()
    events = capture_events()

    conn = HTTPConnection("localhost", PORT)

    # make sure we release the resource, even if the test fails
    request.addfinalizer(conn.close)

    conn.request("GET", "/200")

    with pytest.raises(Exception):  # noqa: B017
        # This raises an exception, because we didn't call `getresponse` for
        # the previous request yet.
        #
        # This call should not affect our breadcrumb.
        conn.request("POST", "/200")

    response = conn.getresponse()
    assert response._method == "GET"

    capture_message("Testing!")

    (event,) = events
    (crumb,) = event["breadcrumbs"]["values"]

    assert crumb["type"] == "http"
    assert crumb["category"] == "httplib"
    assert crumb["data"] == {
        "url": "http://localhost:{}/200".format(PORT),
        SPANDATA.HTTP_METHOD: "GET",
        SPANDATA.HTTP_STATUS_CODE: 200,
        "reason": "OK",
        SPANDATA.HTTP_FRAGMENT: "",
        SPANDATA.HTTP_QUERY: "",
    }


def test_outgoing_trace_headers(sentry_init, monkeypatch):
    # HTTPSConnection.send is passed a string containing (among other things)
    # the headers on the request. Mock it so we can check the headers, and also
    # so it doesn't try to actually talk to the internet.
    mock_send = mock.Mock()
    monkeypatch.setattr(HTTPSConnection, "send", mock_send)

    sentry_init(traces_sample_rate=1.0)

    headers = {}
    headers["baggage"] = (
        "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, "
        "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, "
        "sentry-user_id=Am%C3%A9lie, other-vendor-value-2=foo;bar;"
    )

    transaction = Transaction.continue_from_headers(headers)

    with start_transaction(
        transaction=transaction,
        name="/interactions/other-dogs/new-dog",
        op="greeting.sniff",
        trace_id="12312012123120121231201212312012",
    ) as transaction:
        HTTPSConnection("www.squirrelchasers.com").request("GET", "/top-chasers")

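        # Parse the raw request bytes back into a header dict, skipping the
        # request line.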
        (request_str,) = mock_send.call_args[0]
        request_headers = {}
        for line in request_str.decode("utf-8").split("\r\n")[1:]:
            if line:
                key, val = line.split(": ")
                request_headers[key] = val

        request_span = transaction._span_recorder.spans[-1]
        expected_sentry_trace = "{trace_id}-{parent_span_id}-{sampled}".format(
            trace_id=transaction.trace_id,
            parent_span_id=request_span.span_id,
            sampled=1,
        )
        assert request_headers["sentry-trace"] == expected_sentry_trace

        expected_outgoing_baggage_items = [
            "sentry-trace_id=771a43a4192642f0b136d5159a501700",
            "sentry-public_key=49d0f7386ad645858ae85020e393bef3",
            "sentry-sample_rate=0.01337",
            "sentry-user_id=Am%C3%A9lie",
        ]

        assert sorted(request_headers["baggage"].split(",")) == sorted(
            expected_outgoing_baggage_items
        )


def test_outgoing_trace_headers_head_sdk(sentry_init, monkeypatch):
    # HTTPSConnection.send is passed a string containing (among other things)
    # the headers on the request. Mock it so we can check the headers, and also
    # so it doesn't try to actually talk to the internet.
    mock_send = mock.Mock()
    monkeypatch.setattr(HTTPSConnection, "send", mock_send)

    # make sure transaction is always sampled
    monkeypatch.setattr(random, "random", lambda: 0.1)
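    # (0.1 is below the traces_sample_rate of 0.5 set below, so the sampling
    # decision is deterministic)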

    sentry_init(traces_sample_rate=0.5, release="foo")
    transaction = Transaction.continue_from_headers({})

    with start_transaction(transaction=transaction, name="Head SDK tx") as transaction:
        HTTPSConnection("www.squirrelchasers.com").request("GET", "/top-chasers")

        (request_str,) = mock_send.call_args[0]
        request_headers = {}
        for line in request_str.decode("utf-8").split("\r\n")[1:]:
            if line:
                key, val = line.split(": ")
                request_headers[key] = val

        request_span = transaction._span_recorder.spans[-1]
        expected_sentry_trace = "{trace_id}-{parent_span_id}-{sampled}".format(
            trace_id=transaction.trace_id,
            parent_span_id=request_span.span_id,
            sampled=1,
        )
        assert request_headers["sentry-trace"] == expected_sentry_trace

        expected_outgoing_baggage_items = [
            "sentry-trace_id=%s" % transaction.trace_id,
            "sentry-sample_rate=0.5",
            "sentry-sampled=%s" % "true" if transaction.sampled else "false",
            "sentry-release=foo",
            "sentry-environment=production",
        ]

        assert sorted(request_headers["baggage"].split(",")) == sorted(
            expected_outgoing_baggage_items
        )


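# Each case lists the configured trace_propagation_targets, the request host
# and path, and whether the outgoing request is expected to carry trace headers.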
@pytest.mark.parametrize(
    "trace_propagation_targets,host,path,trace_propagated",
    [
        [
            [],
            "example.com",
            "/",
            False,
        ],
        [
            None,
            "example.com",
            "/",
            False,
        ],
        [
            [MATCH_ALL],
            "example.com",
            "/",
            True,
        ],
        [
            ["https://example.com/"],
            "example.com",
            "/",
            True,
        ],
        [
            ["https://example.com/"],
            "example.com",
            "",
            False,
        ],
        [
            ["https://example.com"],
            "example.com",
            "",
            True,
        ],
        [
            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
            "example.net",
            "",
            False,
        ],
        [
            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
            "good.example.net",
            "",
            True,
        ],
        [
            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
            "good.example.net",
            "/some/thing",
            True,
        ],
    ],
)
def test_option_trace_propagation_targets(
    sentry_init, monkeypatch, trace_propagation_targets, host, path, trace_propagated
):
    # HTTPSConnection.send is passed a string containing (among other things)
    # the headers on the request. Mock it so we can check the headers, and also
    # so it doesn't try to actually talk to the internet.
    mock_send = mock.Mock()
    monkeypatch.setattr(HTTPSConnection, "send", mock_send)

    sentry_init(
        trace_propagation_targets=trace_propagation_targets,
        traces_sample_rate=1.0,
    )

    headers = {
        "baggage": (
            "sentry-trace_id=771a43a4192642f0b136d5159a501700, "
            "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, "
        )
    }

    transaction = Transaction.continue_from_headers(headers)

    with start_transaction(
        transaction=transaction,
        name="/interactions/other-dogs/new-dog",
        op="greeting.sniff",
        trace_id="12312012123120121231201212312012",
    ) as transaction:
        HTTPSConnection(host).request("GET", path)

        (request_str,) = mock_send.call_args[0]
        request_headers = {}
        for line in request_str.decode("utf-8").split("\r\n")[1:]:
            if line:
                key, val = line.split(": ")
                request_headers[key] = val

        if trace_propagated:
            assert "sentry-trace" in request_headers
            assert "baggage" in request_headers
        else:
            assert "sentry-trace" not in request_headers
            assert "baggage" not in request_headers

sentry-python-1.39.2/tests/integrations/stdlib/test_subprocess.py
import os
import platform
import subprocess
import sys

import pytest

from sentry_sdk import capture_message, start_transaction
from sentry_sdk._compat import PY2
from sentry_sdk.integrations.stdlib import StdlibIntegration


if PY2:
    from collections import Mapping
else:
    from collections.abc import Mapping


class ImmutableDict(Mapping):
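    """Read-only mapping, used to verify that the subprocess patch copes with
    env objects that are not plain dicts."""
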
    def __init__(self, inner):
        self.inner = inner

    def __getitem__(self, key):
        return self.inner[key]

    def __iter__(self):
        return iter(self.inner)

    def __len__(self):
        return len(self.inner)


@pytest.mark.parametrize("positional_args", [True, False])
@pytest.mark.parametrize(
    "iterator",
    [
        pytest.param(
            True,
            marks=pytest.mark.skipif(
                platform.python_implementation() == "PyPy",
                reason="https://bitbucket.org/pypy/pypy/issues/3050/subprocesspopen-only-accepts-sequences",
            ),
        ),
        False,
    ],
    ids=("as_iterator", "as_list"),
)
@pytest.mark.parametrize("env_mapping", [None, os.environ, ImmutableDict(os.environ)])
@pytest.mark.parametrize("with_cwd", [True, False])
def test_subprocess_basic(
    sentry_init,
    capture_events,
    monkeypatch,
    positional_args,
    iterator,
    env_mapping,
    with_cwd,
):
    monkeypatch.setenv("FOO", "bar")

    old_environ = dict(os.environ)

    sentry_init(integrations=[StdlibIntegration()], traces_sample_rate=1.0)
    events = capture_events()

    with start_transaction(name="foo") as transaction:
        args = [
            sys.executable,
            "-c",
            "import os; "
            "import sentry_sdk; "
            "from sentry_sdk.integrations.stdlib import get_subprocess_traceparent_headers; "
            "sentry_sdk.init(); "
            "assert os.environ['FOO'] == 'bar'; "
            "print(dict(get_subprocess_traceparent_headers()))",
        ]

        if iterator:
            args = iter(args)

        if positional_args:
            a = (
                args,
                0,  # bufsize
                None,  # executable
                None,  # stdin
                subprocess.PIPE,  # stdout
                None,  # stderr
                None,  # preexec_fn
                False,  # close_fds
                False,  # shell
                os.getcwd() if with_cwd else None,  # cwd
            )

            if env_mapping is not None:
                a += (env_mapping,)

            popen = subprocess.Popen(*a)

        else:
            kw = {"args": args, "stdout": subprocess.PIPE}

            if with_cwd:
                kw["cwd"] = os.getcwd()

            if env_mapping is not None:
                kw["env"] = env_mapping

            popen = subprocess.Popen(**kw)

        output, unused_err = popen.communicate()
        retcode = popen.poll()
        assert not retcode

    assert os.environ == old_environ

    assert transaction.trace_id in str(output)

    capture_message("hi")

    (
        transaction_event,
        message_event,
    ) = events

    assert message_event["message"] == "hi"

    data = {"subprocess.cwd": os.getcwd()} if with_cwd else {}

    (crumb,) = message_event["breadcrumbs"]["values"]
    assert crumb == {
        "category": "subprocess",
        "data": data,
        "message": crumb["message"],
        "timestamp": crumb["timestamp"],
        "type": "subprocess",
    }

    if not iterator:
        assert crumb["message"].startswith(sys.executable + " ")

    assert transaction_event["type"] == "transaction"

    (
        subprocess_init_span,
        subprocess_communicate_span,
        subprocess_wait_span,
    ) = transaction_event["spans"]

    assert (
        subprocess_init_span["op"],
        subprocess_communicate_span["op"],
        subprocess_wait_span["op"],
    ) == ("subprocess", "subprocess.communicate", "subprocess.wait")

    # span hierarchy
    assert (
        subprocess_wait_span["parent_span_id"] == subprocess_communicate_span["span_id"]
    )
    assert (
        subprocess_communicate_span["parent_span_id"]
        == subprocess_init_span["parent_span_id"]
        == transaction_event["contexts"]["trace"]["span_id"]
    )

    # common data
    assert (
        subprocess_init_span["tags"]["subprocess.pid"]
        == subprocess_wait_span["tags"]["subprocess.pid"]
        == subprocess_communicate_span["tags"]["subprocess.pid"]
    )

    # data of init span
    assert subprocess_init_span.get("data", {}) == data
    if iterator:
        assert "iterator" in subprocess_init_span["description"]
        assert subprocess_init_span["description"].startswith("<")
    else:
        assert sys.executable + " -c" in subprocess_init_span["description"]


def test_subprocess_invalid_args(sentry_init):
    sentry_init(integrations=[StdlibIntegration()])

    with pytest.raises(TypeError) as excinfo:
        subprocess.Popen(1)

    assert "'int' object is not iterable" in str(excinfo.value)

sentry-python-1.39.2/tests/integrations/strawberry/__init__.py

sentry-python-1.39.2/tests/integrations/strawberry/test_strawberry_py3.py
import pytest

strawberry = pytest.importorskip("strawberry")
pytest.importorskip("fastapi")
pytest.importorskip("flask")

from unittest import mock

from fastapi import FastAPI
from fastapi.testclient import TestClient
from flask import Flask
from strawberry.extensions.tracing import (
    SentryTracingExtension,
    SentryTracingExtensionSync,
)
from strawberry.fastapi import GraphQLRouter
from strawberry.flask.views import GraphQLView

from sentry_sdk.consts import OP
from sentry_sdk.integrations.fastapi import FastApiIntegration
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.starlette import StarletteIntegration
from sentry_sdk.integrations.strawberry import (
    StrawberryIntegration,
    SentryAsyncExtension,
    SentrySyncExtension,
)


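# Run every test against both an async app (FastAPI/Starlette) and a sync app
# (Flask), with the matching framework integrations enabled.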
parameterize_strawberry_test = pytest.mark.parametrize(
    "client_factory,async_execution,framework_integrations",
    (
        (
            "async_app_client_factory",
            True,
            [FastApiIntegration(), StarletteIntegration()],
        ),
        ("sync_app_client_factory", False, [FlaskIntegration()]),
    ),
)


@strawberry.type
class Query:
    @strawberry.field
    def hello(self) -> str:
        return "Hello World"

    @strawberry.field
    def error(self) -> int:
        return 1 / 0


@strawberry.type
class Mutation:
    @strawberry.mutation
    def change(self, attribute: str) -> str:
        return attribute


@pytest.fixture
def async_app_client_factory():
    def create_app(schema):
        async_app = FastAPI()
        async_app.include_router(GraphQLRouter(schema), prefix="/graphql")
        return TestClient(async_app)

    return create_app


@pytest.fixture
def sync_app_client_factory():
    def create_app(schema):
        sync_app = Flask(__name__)
        sync_app.add_url_rule(
            "/graphql",
            view_func=GraphQLView.as_view("graphql_view", schema=schema),
        )
        return sync_app.test_client()

    return create_app


def test_async_execution_uses_async_extension(sentry_init):
    sentry_init(integrations=[StrawberryIntegration(async_execution=True)])

    with mock.patch(
        "sentry_sdk.integrations.strawberry._get_installed_modules",
        return_value={"flask": "2.3.3"},
    ):
        # actual installed modules should not matter, the explicit option takes
        # precedence
        schema = strawberry.Schema(Query)
        assert SentryAsyncExtension in schema.extensions


def test_sync_execution_uses_sync_extension(sentry_init):
    sentry_init(integrations=[StrawberryIntegration(async_execution=False)])

    with mock.patch(
        "sentry_sdk.integrations.strawberry._get_installed_modules",
        return_value={"fastapi": "0.103.1", "starlette": "0.27.0"},
    ):
        # actual installed modules should not matter, the explicit option takes
        # precedence
        schema = strawberry.Schema(Query)
        assert SentrySyncExtension in schema.extensions


def test_infer_execution_type_from_installed_packages_async(sentry_init):
    sentry_init(integrations=[StrawberryIntegration()])

    with mock.patch(
        "sentry_sdk.integrations.strawberry._get_installed_modules",
        return_value={"fastapi": "0.103.1", "starlette": "0.27.0"},
    ):
        schema = strawberry.Schema(Query)
        assert SentryAsyncExtension in schema.extensions


def test_infer_execution_type_from_installed_packages_sync(sentry_init):
    sentry_init(integrations=[StrawberryIntegration()])

    with mock.patch(
        "sentry_sdk.integrations.strawberry._get_installed_modules",
        return_value={"flask": "2.3.3"},
    ):
        schema = strawberry.Schema(Query)
        assert SentrySyncExtension in schema.extensions


def test_replace_existing_sentry_async_extension(sentry_init):
    sentry_init(integrations=[StrawberryIntegration()])

    schema = strawberry.Schema(Query, extensions=[SentryTracingExtension])
    assert SentryTracingExtension not in schema.extensions
    assert SentrySyncExtension not in schema.extensions
    assert SentryAsyncExtension in schema.extensions


def test_replace_existing_sentry_sync_extension(sentry_init):
    sentry_init(integrations=[StrawberryIntegration()])

    schema = strawberry.Schema(Query, extensions=[SentryTracingExtensionSync])
    assert SentryTracingExtensionSync not in schema.extensions
    assert SentryAsyncExtension not in schema.extensions
    assert SentrySyncExtension in schema.extensions


@parameterize_strawberry_test
def test_capture_request_if_available_and_send_pii_is_on(
    request,
    sentry_init,
    capture_events,
    client_factory,
    async_execution,
    framework_integrations,
):
    sentry_init(
        send_default_pii=True,
        integrations=[
            StrawberryIntegration(async_execution=async_execution),
        ]
        + framework_integrations,
    )
    events = capture_events()

    schema = strawberry.Schema(Query)

    client_factory = request.getfixturevalue(client_factory)
    client = client_factory(schema)

    query = "query ErrorQuery { error }"
    client.post("/graphql", json={"query": query, "operationName": "ErrorQuery"})

    assert len(events) == 1

    (error_event,) = events

    assert error_event["exception"]["values"][0]["mechanism"]["type"] == "strawberry"
    assert error_event["request"]["api_target"] == "graphql"
    assert error_event["request"]["data"] == {
        "query": query,
        "operationName": "ErrorQuery",
    }
    assert error_event["contexts"]["response"] == {
        "data": {
            "data": None,
            "errors": [
                {
                    "message": "division by zero",
                    "locations": [{"line": 1, "column": 20}],
                    "path": ["error"],
                }
            ],
        }
    }
    assert len(error_event["breadcrumbs"]["values"]) == 1
    assert error_event["breadcrumbs"]["values"][0]["category"] == "graphql.operation"
    assert error_event["breadcrumbs"]["values"][0]["data"] == {
        "operation_name": "ErrorQuery",
        "operation_type": "query",
    }


@parameterize_strawberry_test
def test_do_not_capture_request_if_send_pii_is_off(
    request,
    sentry_init,
    capture_events,
    client_factory,
    async_execution,
    framework_integrations,
):
    sentry_init(
        integrations=[
            StrawberryIntegration(async_execution=async_execution),
        ]
        + framework_integrations,
    )
    events = capture_events()

    schema = strawberry.Schema(Query)

    client_factory = request.getfixturevalue(client_factory)
    client = client_factory(schema)

    query = "query ErrorQuery { error }"
    client.post("/graphql", json={"query": query, "operationName": "ErrorQuery"})

    assert len(events) == 1

    (error_event,) = events
    assert error_event["exception"]["values"][0]["mechanism"]["type"] == "strawberry"
    assert "data" not in error_event["request"]
    assert "response" not in error_event["contexts"]

    assert len(error_event["breadcrumbs"]["values"]) == 1
    assert error_event["breadcrumbs"]["values"][0]["category"] == "graphql.operation"
    assert error_event["breadcrumbs"]["values"][0]["data"] == {
        "operation_name": "ErrorQuery",
        "operation_type": "query",
    }


@parameterize_strawberry_test
def test_breadcrumb_no_operation_name(
    request,
    sentry_init,
    capture_events,
    client_factory,
    async_execution,
    framework_integrations,
):
    sentry_init(
        integrations=[
            StrawberryIntegration(async_execution=async_execution),
        ]
        + framework_integrations,
    )
    events = capture_events()

    schema = strawberry.Schema(Query)

    client_factory = request.getfixturevalue(client_factory)
    client = client_factory(schema)

    query = "{ error }"
    client.post("/graphql", json={"query": query})

    assert len(events) == 1

    (error_event,) = events

    assert len(error_event["breadcrumbs"]["values"]) == 1
    assert error_event["breadcrumbs"]["values"][0]["category"] == "graphql.operation"
    assert error_event["breadcrumbs"]["values"][0]["data"] == {
        "operation_name": None,
        "operation_type": "query",
    }


@parameterize_strawberry_test
def test_capture_transaction_on_error(
    request,
    sentry_init,
    capture_events,
    client_factory,
    async_execution,
    framework_integrations,
):
    sentry_init(
        send_default_pii=True,
        integrations=[
            StrawberryIntegration(async_execution=async_execution),
        ]
        + framework_integrations,
        traces_sample_rate=1,
    )
    events = capture_events()

    schema = strawberry.Schema(Query)

    client_factory = request.getfixturevalue(client_factory)
    client = client_factory(schema)

    query = "query ErrorQuery { error }"
    client.post("/graphql", json={"query": query, "operationName": "ErrorQuery"})

    assert len(events) == 2
    (_, transaction_event) = events

    if async_execution:
        assert transaction_event["transaction"] == "/graphql"
    else:
        assert transaction_event["transaction"] == "graphql_view"

    assert transaction_event["spans"]

    query_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_QUERY
    ]
    assert len(query_spans) == 1, "exactly one query span expected"
    query_span = query_spans[0]
    assert query_span["description"] == "query ErrorQuery"
    assert query_span["data"]["graphql.operation.type"] == "query"
    assert query_span["data"]["graphql.operation.name"] == "ErrorQuery"
    assert query_span["data"]["graphql.document"] == query
    assert query_span["data"]["graphql.resource_name"]

    parse_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_PARSE
    ]
    assert len(parse_spans) == 1, "exactly one parse span expected"
    parse_span = parse_spans[0]
    assert parse_span["parent_span_id"] == query_span["span_id"]
    assert parse_span["description"] == "parsing"

    validate_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_VALIDATE
    ]
    assert len(validate_spans) == 1, "exactly one validate span expected"
    validate_span = validate_spans[0]
    assert validate_span["parent_span_id"] == query_span["span_id"]
    assert validate_span["description"] == "validation"

    resolve_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_RESOLVE
    ]
    assert len(resolve_spans) == 1, "exactly one resolve span expected"
    resolve_span = resolve_spans[0]
    assert resolve_span["parent_span_id"] == query_span["span_id"]
    assert resolve_span["description"] == "resolving Query.error"
    assert resolve_span["data"] == {
        "graphql.field_name": "error",
        "graphql.parent_type": "Query",
        "graphql.field_path": "Query.error",
        "graphql.path": "error",
    }


@parameterize_strawberry_test
def test_capture_transaction_on_success(
    request,
    sentry_init,
    capture_events,
    client_factory,
    async_execution,
    framework_integrations,
):
    sentry_init(
        integrations=[
            StrawberryIntegration(async_execution=async_execution),
        ]
        + framework_integrations,
        traces_sample_rate=1,
    )
    events = capture_events()

    schema = strawberry.Schema(Query)

    client_factory = request.getfixturevalue(client_factory)
    client = client_factory(schema)

    query = "query GreetingQuery { hello }"
    client.post("/graphql", json={"query": query, "operationName": "GreetingQuery"})

    assert len(events) == 1
    (transaction_event,) = events

    if async_execution:
        assert transaction_event["transaction"] == "/graphql"
    else:
        assert transaction_event["transaction"] == "graphql_view"

    assert transaction_event["spans"]

    query_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_QUERY
    ]
    assert len(query_spans) == 1, "exactly one query span expected"
    query_span = query_spans[0]
    assert query_span["description"] == "query GreetingQuery"
    assert query_span["data"]["graphql.operation.type"] == "query"
    assert query_span["data"]["graphql.operation.name"] == "GreetingQuery"
    assert query_span["data"]["graphql.document"] == query
    assert query_span["data"]["graphql.resource_name"]

    parse_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_PARSE
    ]
    assert len(parse_spans) == 1, "exactly one parse span expected"
    parse_span = parse_spans[0]
    assert parse_span["parent_span_id"] == query_span["span_id"]
    assert parse_span["description"] == "parsing"

    validate_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_VALIDATE
    ]
    assert len(validate_spans) == 1, "exactly one validate span expected"
    validate_span = validate_spans[0]
    assert validate_span["parent_span_id"] == query_span["span_id"]
    assert validate_span["description"] == "validation"

    resolve_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_RESOLVE
    ]
    assert len(resolve_spans) == 1, "exactly one resolve span expected"
    resolve_span = resolve_spans[0]
    assert resolve_span["parent_span_id"] == query_span["span_id"]
    assert resolve_span["description"] == "resolving Query.hello"
    assert resolve_span["data"] == {
        "graphql.field_name": "hello",
        "graphql.parent_type": "Query",
        "graphql.field_path": "Query.hello",
        "graphql.path": "hello",
    }


@parameterize_strawberry_test
def test_transaction_no_operation_name(
    request,
    sentry_init,
    capture_events,
    client_factory,
    async_execution,
    framework_integrations,
):
    sentry_init(
        integrations=[
            StrawberryIntegration(async_execution=async_execution),
        ]
        + framework_integrations,
        traces_sample_rate=1,
    )
    events = capture_events()

    schema = strawberry.Schema(Query)

    client_factory = request.getfixturevalue(client_factory)
    client = client_factory(schema)

    query = "{ hello }"
    client.post("/graphql", json={"query": query})

    assert len(events) == 1
    (transaction_event,) = events

    if async_execution:
        assert transaction_event["transaction"] == "/graphql"
    else:
        assert transaction_event["transaction"] == "graphql_view"

    assert transaction_event["spans"]

    query_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_QUERY
    ]
    assert len(query_spans) == 1, "exactly one query span expected"
    query_span = query_spans[0]
    assert query_span["description"] == "query"
    assert query_span["data"]["graphql.operation.type"] == "query"
    assert query_span["data"]["graphql.operation.name"] is None
    assert query_span["data"]["graphql.document"] == query
    assert query_span["data"]["graphql.resource_name"]

    parse_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_PARSE
    ]
    assert len(parse_spans) == 1, "exactly one parse span expected"
    parse_span = parse_spans[0]
    assert parse_span["parent_span_id"] == query_span["span_id"]
    assert parse_span["description"] == "parsing"

    validate_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_VALIDATE
    ]
    assert len(validate_spans) == 1, "exactly one validate span expected"
    validate_span = validate_spans[0]
    assert validate_span["parent_span_id"] == query_span["span_id"]
    assert validate_span["description"] == "validation"

    resolve_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_RESOLVE
    ]
    assert len(resolve_spans) == 1, "exactly one resolve span expected"
    resolve_span = resolve_spans[0]
    assert resolve_span["parent_span_id"] == query_span["span_id"]
    assert resolve_span["description"] == "resolving Query.hello"
    assert resolve_span["data"] == {
        "graphql.field_name": "hello",
        "graphql.parent_type": "Query",
        "graphql.field_path": "Query.hello",
        "graphql.path": "hello",
    }


@parameterize_strawberry_test
def test_transaction_mutation(
    request,
    sentry_init,
    capture_events,
    client_factory,
    async_execution,
    framework_integrations,
):
    sentry_init(
        integrations=[
            StrawberryIntegration(async_execution=async_execution),
        ]
        + framework_integrations,
        traces_sample_rate=1,
    )
    events = capture_events()

    schema = strawberry.Schema(Query, mutation=Mutation)

    client_factory = request.getfixturevalue(client_factory)
    client = client_factory(schema)

    query = 'mutation Change { change(attribute: "something") }'
    client.post("/graphql", json={"query": query})

    assert len(events) == 1
    (transaction_event,) = events

    if async_execution:
        assert transaction_event["transaction"] == "/graphql"
    else:
        assert transaction_event["transaction"] == "graphql_view"

    assert transaction_event["spans"]

    query_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_MUTATION
    ]
    assert len(query_spans) == 1, "exactly one mutation span expected"
    query_span = query_spans[0]
    assert query_span["description"] == "mutation"
    assert query_span["data"]["graphql.operation.type"] == "mutation"
    assert query_span["data"]["graphql.operation.name"] is None
    assert query_span["data"]["graphql.document"] == query
    assert query_span["data"]["graphql.resource_name"]

    parse_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_PARSE
    ]
    assert len(parse_spans) == 1, "exactly one parse span expected"
    parse_span = parse_spans[0]
    assert parse_span["parent_span_id"] == query_span["span_id"]
    assert parse_span["description"] == "parsing"

    validate_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_VALIDATE
    ]
    assert len(validate_spans) == 1, "exactly one validate span expected"
    validate_span = validate_spans[0]
    assert validate_span["parent_span_id"] == query_span["span_id"]
    assert validate_span["description"] == "validation"

    resolve_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_RESOLVE
    ]
    assert len(resolve_spans) == 1, "exactly one resolve span expected"
    resolve_span = resolve_spans[0]
    assert resolve_span["parent_span_id"] == query_span["span_id"]
    assert resolve_span["description"] == "resolving Mutation.change"
    assert resolve_span["data"] == {
        "graphql.field_name": "change",
        "graphql.parent_type": "Mutation",
        "graphql.field_path": "Mutation.change",
        "graphql.path": "change",
    }
sentry-python-1.39.2/tests/integrations/test_gnu_backtrace.py000066400000000000000000000252011454744723200245360ustar00rootroot00000000000000import pytest

from sentry_sdk import capture_exception
from sentry_sdk.integrations.gnu_backtrace import GnuBacktraceIntegration

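# Sample GNU backtrace lines as emitted by ClickHouse: a frame index, the
# binary or shared object, an optional symbol with offset, and a hex return
# address in brackets, e.g.:
#   0. clickhouse-server(StackTrace::StackTrace()+0x16) [0x99d31a6]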
LINES = r"""
0. clickhouse-server(StackTrace::StackTrace()+0x16) [0x99d31a6]
1. clickhouse-server(DB::Exception::Exception(std::__cxx11::basic_string, std::allocator > const&, int)+0x22) [0x372c822]
10. clickhouse-server(DB::ActionsVisitor::visit(std::shared_ptr const&)+0x1a12) [0x6ae45d2]
10. clickhouse-server(DB::InterpreterSelectQuery::executeImpl(DB::InterpreterSelectQuery::Pipeline&, std::shared_ptr const&, bool)+0x11af) [0x75c68ff]
10. clickhouse-server(ThreadPoolImpl::worker(std::_List_iterator)+0x1ab) [0x6f90c1b]
11. clickhouse-server() [0xae06ddf]
11. clickhouse-server(DB::ExpressionAnalyzer::getRootActions(std::shared_ptr const&, bool, std::shared_ptr&, bool)+0xdb) [0x6a0a63b]
11. clickhouse-server(DB::InterpreterSelectQuery::InterpreterSelectQuery(std::shared_ptr const&, DB::Context const&, std::shared_ptr const&, std::shared_ptr const&, std::vector, std::allocator >, std::allocator, std::allocator > > > const&, DB::QueryProcessingStage::Enum, unsigned long, bool)+0x5e6) [0x75c7516]
12. /lib/x86_64-linux-gnu/libpthread.so.0(+0x8184) [0x7f3bbc568184]
12. clickhouse-server(DB::ExpressionAnalyzer::getConstActions()+0xc9) [0x6a0b059]
12. clickhouse-server(DB::InterpreterSelectQuery::InterpreterSelectQuery(std::shared_ptr const&, DB::Context const&, std::vector, std::allocator >, std::allocator, std::allocator > > > const&, DB::QueryProcessingStage::Enum, unsigned long, bool)+0x56) [0x75c8276]
13. /lib/x86_64-linux-gnu/libc.so.6(clone+0x6d) [0x7f3bbbb8303d]
13. clickhouse-server(DB::InterpreterSelectWithUnionQuery::InterpreterSelectWithUnionQuery(std::shared_ptr const&, DB::Context const&, std::vector, std::allocator >, std::allocator, std::allocator > > > const&, DB::QueryProcessingStage::Enum, unsigned long, bool)+0x7e7) [0x75d4067]
13. clickhouse-server(DB::evaluateConstantExpression(std::shared_ptr const&, DB::Context const&)+0x3ed) [0x656bfdd]
14. clickhouse-server(DB::InterpreterFactory::get(std::shared_ptr&, DB::Context&, DB::QueryProcessingStage::Enum)+0x3a8) [0x75b0298]
14. clickhouse-server(DB::makeExplicitSet(DB::ASTFunction const*, DB::Block const&, bool, DB::Context const&, DB::SizeLimits const&, std::unordered_map, DB::PreparedSetKey::Hash, std::equal_to, std::allocator > > >&)+0x382) [0x6adf692]
15. clickhouse-server() [0x7664c79]
15. clickhouse-server(DB::ActionsVisitor::makeSet(DB::ASTFunction const*, DB::Block const&)+0x2a7) [0x6ae2227]
16. clickhouse-server(DB::ActionsVisitor::visit(std::shared_ptr const&)+0x1973) [0x6ae4533]
16. clickhouse-server(DB::executeQuery(std::__cxx11::basic_string, std::allocator > const&, DB::Context&, bool, DB::QueryProcessingStage::Enum)+0x8a) [0x76669fa]
17. clickhouse-server(DB::ActionsVisitor::visit(std::shared_ptr const&)+0x1324) [0x6ae3ee4]
17. clickhouse-server(DB::TCPHandler::runImpl()+0x4b9) [0x30973c9]
18. clickhouse-server(DB::ExpressionAnalyzer::getRootActions(std::shared_ptr const&, bool, std::shared_ptr&, bool)+0xdb) [0x6a0a63b]
18. clickhouse-server(DB::TCPHandler::run()+0x2b) [0x30985ab]
19. clickhouse-server(DB::ExpressionAnalyzer::appendGroupBy(DB::ExpressionActionsChain&, bool)+0x100) [0x6a0b4f0]
19. clickhouse-server(Poco::Net::TCPServerConnection::start()+0xf) [0x9b53e4f]
2. clickhouse-server(DB::FunctionTuple::getReturnTypeImpl(std::vector, std::allocator > > const&) const+0x122) [0x3a2a0f2]
2. clickhouse-server(DB::readException(DB::Exception&, DB::ReadBuffer&, std::__cxx11::basic_string, std::allocator > const&)+0x21f) [0x6fb253f]
2. clickhouse-server(void DB::readDateTimeTextFallback(long&, DB::ReadBuffer&, DateLUTImpl const&)+0x318) [0x99ffed8]
20. clickhouse-server(DB::InterpreterSelectQuery::analyzeExpressions(DB::QueryProcessingStage::Enum, bool)+0x364) [0x6437fa4]
20. clickhouse-server(Poco::Net::TCPServerDispatcher::run()+0x16a) [0x9b5422a]
21. clickhouse-server(DB::InterpreterSelectQuery::executeImpl(DB::InterpreterSelectQuery::Pipeline&, std::shared_ptr const&, bool)+0x36d) [0x643c28d]
21. clickhouse-server(Poco::PooledThread::run()+0x77) [0x9c70f37]
22. clickhouse-server(DB::InterpreterSelectQuery::executeWithMultipleStreams()+0x50) [0x643ecd0]
22. clickhouse-server(Poco::ThreadImpl::runnableEntry(void*)+0x38) [0x9c6caa8]
23. clickhouse-server() [0xa3c68cf]
23. clickhouse-server(DB::InterpreterSelectWithUnionQuery::executeWithMultipleStreams()+0x6c) [0x644805c]
24. /lib/x86_64-linux-gnu/libpthread.so.0(+0x8184) [0x7fe839d2d184]
24. clickhouse-server(DB::InterpreterSelectWithUnionQuery::execute()+0x38) [0x6448658]
25. /lib/x86_64-linux-gnu/libc.so.6(clone+0x6d) [0x7fe83934803d]
25. clickhouse-server() [0x65744ef]
26. clickhouse-server(DB::executeQuery(std::__cxx11::basic_string, std::allocator > const&, DB::Context&, bool, DB::QueryProcessingStage::Enum, bool)+0x81) [0x6576141]
27. clickhouse-server(DB::TCPHandler::runImpl()+0x752) [0x3739f82]
28. clickhouse-server(DB::TCPHandler::run()+0x2b) [0x373a5cb]
29. clickhouse-server(Poco::Net::TCPServerConnection::start()+0xf) [0x708e63f]
3. clickhouse-server(DB::Connection::receiveException()+0x81) [0x67d3ad1]
3. clickhouse-server(DB::DefaultFunctionBuilder::getReturnTypeImpl(std::vector > const&) const+0x223) [0x38ac3b3]
3. clickhouse-server(DB::FunctionComparison::executeDateOrDateTimeOrEnumOrUUIDWithConstString(DB::Block&, unsigned long, DB::IColumn const*, DB::IColumn const*, std::shared_ptr const&, std::shared_ptr const&, bool, unsigned long)+0xbb3) [0x411dee3]
30. clickhouse-server(Poco::Net::TCPServerDispatcher::run()+0xe9) [0x708ed79]
31. clickhouse-server(Poco::PooledThread::run()+0x81) [0x7142011]
4. clickhouse-server(DB::Connection::receivePacket()+0x767) [0x67d9cd7]
4. clickhouse-server(DB::FunctionBuilderImpl::getReturnTypeWithoutLowCardinality(std::vector > const&) const+0x75) [0x6869635]
4. clickhouse-server(DB::FunctionComparison::executeImpl(DB::Block&, std::vector > const&, unsigned long, unsigned long)+0x576) [0x41ab006]
5. clickhouse-server(DB::FunctionBuilderImpl::getReturnType(std::vector > const&) const+0x350) [0x6869f10]
5. clickhouse-server(DB::MultiplexedConnections::receivePacket()+0x7e) [0x67e7ede]
5. clickhouse-server(DB::PreparedFunctionImpl::execute(DB::Block&, std::vector > const&, unsigned long, unsigned long)+0x3e2) [0x7933492]
6. clickhouse-server(DB::ExpressionAction::execute(DB::Block&, std::unordered_map, std::allocator >, unsigned long, std::hash, std::allocator > >, std::equal_to, std::allocator > >, std::allocator, std::allocator > const, unsigned long> > >&) const+0x61a) [0x7ae093a]
6. clickhouse-server(DB::FunctionBuilderImpl::build(std::vector > const&) const+0x3c) [0x38accfc]
6. clickhouse-server(DB::RemoteBlockInputStream::readImpl()+0x87) [0x631da97]
7. clickhouse-server(DB::ExpressionActions::addImpl(DB::ExpressionAction, std::vector, std::allocator >, std::allocator, std::allocator > > >&)+0x552) [0x6a00052]
7. clickhouse-server(DB::ExpressionActions::execute(DB::Block&) const+0xe6) [0x7ae1e06]
7. clickhouse-server(DB::IBlockInputStream::read()+0x178) [0x63075e8]
8. clickhouse-server(DB::ExpressionActions::add(DB::ExpressionAction const&, std::vector, std::allocator >, std::allocator, std::allocator > > >&)+0x42) [0x6a00422]
8. clickhouse-server(DB::FilterBlockInputStream::FilterBlockInputStream(std::shared_ptr const&, std::shared_ptr const&, std::__cxx11::basic_string, std::allocator > const&, bool)+0x711) [0x79970d1]
8. clickhouse-server(DB::ParallelInputsProcessor::thread(std::shared_ptr, unsigned long)+0x2f1) [0x64467c1]
9. clickhouse-server() [0x75bd5a3]
9. clickhouse-server(DB::ScopeStack::addAction(DB::ExpressionAction const&)+0xd2) [0x6ae04d2]
9. clickhouse-server(ThreadFromGlobalPool::ThreadFromGlobalPool::process()::{lambda()#1}>(DB::ParallelInputsProcessor::process()::{lambda()#1}&&)::{lambda()#1}::operator()() const+0x6d) [0x644722d]
"""


@pytest.mark.parametrize("input", LINES.strip().splitlines())
def test_basic(sentry_init, capture_events, input):
    sentry_init(integrations=[GnuBacktraceIntegration()])
    events = capture_events()

    try:
        raise ValueError(input)
    except ValueError:
        capture_exception()

    (event,) = events
    (exception,) = event["exception"]["values"]

    assert (
        exception["value"]
        == "<stripped GNU backtrace>"
    )
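    # frames[0] is the genuine Python frame from the `raise` above; the
    # integration appends one synthetic frame parsed from the backtrace line.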
    (frame,) = exception["stacktrace"]["frames"][1:]

    if frame.get("function") is None:
        assert "clickhouse-server()" in input or "pthread" in input
    else:
        assert ")" not in frame["function"] and "(" not in frame["function"]
        assert frame["function"] in input
sentry-python-1.39.2/tests/integrations/threading/000077500000000000000000000000001454744723200223025ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/threading/test_threading.py000066400000000000000000000131321454744723200256600ustar00rootroot00000000000000import gc
import sys
from threading import Thread

try:
    from concurrent import futures
except ImportError:
    futures = None

import pytest

import sentry_sdk
from sentry_sdk import configure_scope, capture_message
from sentry_sdk.integrations.threading import ThreadingIntegration

original_start = Thread.start
original_run = Thread.run


@pytest.mark.forked
@pytest.mark.parametrize("integrations", [[ThreadingIntegration()], []])
def test_handles_exceptions(sentry_init, capture_events, integrations):
    sentry_init(default_integrations=False, integrations=integrations)
    events = capture_events()

    def crash():
        1 / 0

    t = Thread(target=crash)
    t.start()
    t.join()

    if integrations:
        (event,) = events

        (exception,) = event["exception"]["values"]
        assert exception["type"] == "ZeroDivisionError"
        assert exception["mechanism"]["type"] == "threading"
        assert not exception["mechanism"]["handled"]
    else:
        assert not events


@pytest.mark.forked
@pytest.mark.parametrize("propagate_hub", (True, False))
def test_propagates_hub(sentry_init, capture_events, propagate_hub):
    sentry_init(
        default_integrations=False,
        integrations=[ThreadingIntegration(propagate_hub=propagate_hub)],
    )
    events = capture_events()

    def stage1():
        with configure_scope() as scope:
            scope.set_tag("stage1", "true")

        t = Thread(target=stage2)
        t.start()
        t.join()

    def stage2():
        1 / 0

    t = Thread(target=stage1)
    t.start()
    t.join()

    (event,) = events

    (exception,) = event["exception"]["values"]

    assert exception["type"] == "ZeroDivisionError"
    assert exception["mechanism"]["type"] == "threading"
    assert not exception["mechanism"]["handled"]

    if propagate_hub:
        assert event["tags"]["stage1"] == "true"
    else:
        assert "stage1" not in event.get("tags", {})


@pytest.mark.skipif(
    futures is None,
    reason="ThreadPool was added in 3.2",
)
@pytest.mark.parametrize("propagate_hub", (True, False))
def test_propagates_threadpool_hub(sentry_init, capture_events, propagate_hub):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[ThreadingIntegration(propagate_hub=propagate_hub)],
    )
    events = capture_events()

    def double(number):
        with sentry_sdk.start_span(op="task", description=str(number)):
            return number * 2

    with sentry_sdk.start_transaction(name="test_handles_threadpool"):
        with futures.ThreadPoolExecutor(max_workers=1) as executor:
            tasks = [executor.submit(double, number) for number in [1, 2, 3, 4]]
            for future in futures.as_completed(tasks):
                print("Getting future value!", future.result())

    sentry_sdk.flush()

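    # With hub propagation the four worker spans are recorded on the
    # submitting transaction and share its trace_id; without it the worker
    # threads have no active transaction, so no spans are collected.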
    if propagate_hub:
        assert len(events) == 1
        (event,) = events
        assert event["spans"][0]["trace_id"] == event["spans"][1]["trace_id"]
        assert event["spans"][1]["trace_id"] == event["spans"][2]["trace_id"]
        assert event["spans"][2]["trace_id"] == event["spans"][3]["trace_id"]
        assert event["spans"][3]["trace_id"] == event["spans"][0]["trace_id"]
    else:
        (event,) = events
        assert len(event["spans"]) == 0


def test_circular_references(sentry_init, request):
    sentry_init(default_integrations=False, integrations=[ThreadingIntegration()])

    gc.collect()
    gc.disable()
    request.addfinalizer(gc.enable)

    class MyThread(Thread):
        def run(self):
            pass

    t = MyThread()
    t.start()
    t.join()
    del t

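    # gc.collect() returns the number of unreachable objects found; a non-zero
    # count here would mean the integration's Thread wrapping introduced a
    # reference cycle.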
    assert not gc.collect()


@pytest.mark.forked
def test_double_patching(sentry_init, capture_events):
    sentry_init(default_integrations=False, integrations=[ThreadingIntegration()])
    events = capture_events()

    # XXX: Workaround for race condition in the py library's magic import
    # system (py is a dependency of pytest)
    capture_message("hi")
    del events[:]

    class MyThread(Thread):
        def run(self):
            1 / 0

    ts = []
    for _ in range(10):
        t = MyThread()
        t.start()
        ts.append(t)

    for t in ts:
        t.join()

    assert len(events) == 10
    for event in events:
        (exception,) = event["exception"]["values"]
        assert exception["type"] == "ZeroDivisionError"


@pytest.mark.skipif(sys.version_info < (3, 2), reason="no __qualname__ in older python")
def test_wrapper_attributes(sentry_init):
    sentry_init(default_integrations=False, integrations=[ThreadingIntegration()])

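    # The integration monkeypatches Thread.start and Thread.run; these checks
    # ensure the wrappers preserve the original metadata, functools.wraps-style.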
    def target():
        assert t.run.__name__ == "run"
        assert t.run.__qualname__ == original_run.__qualname__

    t = Thread(target=target)
    t.start()
    t.join()

    assert Thread.start.__name__ == "start"
    assert Thread.start.__qualname__ == original_start.__qualname__
    assert t.start.__name__ == "start"
    assert t.start.__qualname__ == original_start.__qualname__

    assert Thread.run.__name__ == "run"
    assert Thread.run.__qualname__ == original_run.__qualname__
    assert t.run.__name__ == "run"
    assert t.run.__qualname__ == original_run.__qualname__


@pytest.mark.skipif(
    sys.version_info >= (3, 0),
    reason="simpler test for py2.7 without py3 only __qualname__",
)
def test_wrapper_attributes_no_qualname(sentry_init):
    sentry_init(default_integrations=False, integrations=[ThreadingIntegration()])

    def target():
        assert t.run.__name__ == "run"

    t = Thread(target=target)
    t.start()
    t.join()

    assert Thread.start.__name__ == "start"
    assert t.start.__name__ == "start"

    assert Thread.run.__name__ == "run"
    assert t.run.__name__ == "run"
sentry-python-1.39.2/tests/integrations/tornado/000077500000000000000000000000001454744723200220035ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/tornado/__init__.py000066400000000000000000000000561454744723200241150ustar00rootroot00000000000000import pytest

pytest.importorskip("tornado")
sentry-python-1.39.2/tests/integrations/tornado/test_tornado.py000066400000000000000000000320441454744723200250650ustar00rootroot00000000000000import json

import pytest

from sentry_sdk import configure_scope, start_transaction, capture_message
from sentry_sdk.integrations.tornado import TornadoIntegration

from tornado.web import RequestHandler, Application, HTTPError
from tornado.testing import AsyncHTTPTestCase


@pytest.fixture
def tornado_testcase(request):
    # Take the unittest class provided by tornado and manually call its setUp
    # and tearDown.
    #
    # The pytest plugins for tornado seem too complicated to use, as they for
    # some reason assume I want to write my tests in async code.
    def inner(app):
        class TestBogus(AsyncHTTPTestCase):
            def get_app(self):
                return app

            def bogustest(self):
                # We need to pass a valid test method name to the ctor, so this
                # is the method. It does nothing.
                pass

        self = TestBogus("bogustest")
        self.setUp()
        request.addfinalizer(self.tearDown)
        return self

    return inner


class CrashingHandler(RequestHandler):
    def get(self):
        with configure_scope() as scope:
            scope.set_tag("foo", "42")
        1 / 0

    def post(self):
        with configure_scope() as scope:
            scope.set_tag("foo", "43")
        1 / 0


class CrashingWithMessageHandler(RequestHandler):
    def get(self):
        capture_message("hi")
        1 / 0


class HelloHandler(RequestHandler):
    async def get(self):
        with configure_scope() as scope:
            scope.set_tag("foo", "42")

        return b"hello"

    async def post(self):
        with configure_scope() as scope:
            scope.set_tag("foo", "43")

        return b"hello"


def test_basic(tornado_testcase, sentry_init, capture_events):
    sentry_init(integrations=[TornadoIntegration()], send_default_pii=True)
    events = capture_events()
    client = tornado_testcase(Application([(r"/hi", CrashingHandler)]))

    response = client.fetch(
        "/hi?foo=bar", headers={"Cookie": "name=value; name2=value2; name3=value3"}
    )
    assert response.code == 500

    (event,) = events
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    assert exception["mechanism"]["type"] == "tornado"

    request = event["request"]
    host = request["headers"]["Host"]
    assert event["request"] == {
        "env": {"REMOTE_ADDR": "127.0.0.1"},
        "headers": {
            "Accept-Encoding": "gzip",
            "Connection": "close",
            "Cookie": "name=value; name2=value2; name3=value3",
            **request["headers"],
        },
        "cookies": {"name": "value", "name2": "value2", "name3": "value3"},
        "method": "GET",
        "query_string": "foo=bar",
        "url": "http://{host}/hi".format(host=host),
    }

    assert event["tags"] == {"foo": "42"}
    assert (
        event["transaction"]
        == "tests.integrations.tornado.test_tornado.CrashingHandler.get"
    )
    assert event["transaction_info"] == {"source": "component"}

    with configure_scope() as scope:
        assert not scope._tags


@pytest.mark.parametrize(
    "handler,code",
    [
        (CrashingHandler, 500),
        (HelloHandler, 200),
    ],
)
def test_transactions(tornado_testcase, sentry_init, capture_events, handler, code):
    sentry_init(integrations=[TornadoIntegration()], traces_sample_rate=1.0, debug=True)
    events = capture_events()
    client = tornado_testcase(Application([(r"/hi", handler)]))

    with start_transaction(name="client") as span:
        pass

    response = client.fetch(
        "/hi", method="POST", body=b"heyoo", headers=dict(span.iter_headers())
    )
    assert response.code == code

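    # On success, events holds the client transaction and the server
    # transaction; on a crash, the server error event arrives in between.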
    if code == 200:
        client_tx, server_tx = events
        server_error = None
    else:
        client_tx, server_error, server_tx = events

    assert client_tx["type"] == "transaction"
    assert client_tx["transaction"] == "client"
    assert client_tx["transaction_info"] == {
        "source": "custom"
    }  # because this is just the start_transaction() above.

    if server_error is not None:
        assert server_error["exception"]["values"][0]["type"] == "ZeroDivisionError"
        assert (
            server_error["transaction"]
            == "tests.integrations.tornado.test_tornado.CrashingHandler.post"
        )
        assert server_error["transaction_info"] == {"source": "component"}

    if code == 200:
        assert (
            server_tx["transaction"]
            == "tests.integrations.tornado.test_tornado.HelloHandler.post"
        )
    else:
        assert (
            server_tx["transaction"]
            == "tests.integrations.tornado.test_tornado.CrashingHandler.post"
        )

    assert server_tx["transaction_info"] == {"source": "component"}
    assert server_tx["type"] == "transaction"

    request = server_tx["request"]
    host = request["headers"]["Host"]
    assert server_tx["request"] == {
        "env": {"REMOTE_ADDR": "127.0.0.1"},
        "headers": {
            "Accept-Encoding": "gzip",
            "Connection": "close",
            **request["headers"],
        },
        "method": "POST",
        "query_string": "",
        "data": {"heyoo": [""]},
        "url": "http://{host}/hi".format(host=host),
    }

    assert (
        client_tx["contexts"]["trace"]["trace_id"]
        == server_tx["contexts"]["trace"]["trace_id"]
    )

    if server_error is not None:
        assert (
            server_error["contexts"]["trace"]["trace_id"]
            == server_tx["contexts"]["trace"]["trace_id"]
        )


def test_400_not_logged(tornado_testcase, sentry_init, capture_events):
    sentry_init(integrations=[TornadoIntegration()])
    events = capture_events()

    class CrashingHandler(RequestHandler):
        def get(self):
            raise HTTPError(400, "Oops")

    client = tornado_testcase(Application([(r"/", CrashingHandler)]))

    response = client.fetch("/")
    assert response.code == 400

    assert not events


def test_user_auth(tornado_testcase, sentry_init, capture_events):
    sentry_init(integrations=[TornadoIntegration()], send_default_pii=True)
    events = capture_events()

    class UserHandler(RequestHandler):
        def get(self):
            1 / 0

        def get_current_user(self):
            return 42

    class NoUserHandler(RequestHandler):
        def get(self):
            1 / 0

    client = tornado_testcase(
        Application([(r"/auth", UserHandler), (r"/noauth", NoUserHandler)])
    )

    # has user
    response = client.fetch("/auth")
    assert response.code == 500

    (event,) = events
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"

    assert event["user"] == {"is_authenticated": True}

    events.clear()

    # has no user
    response = client.fetch("/noauth")
    assert response.code == 500

    (event,) = events
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"

    assert "user" not in event


def test_formdata(tornado_testcase, sentry_init, capture_events):
    sentry_init(integrations=[TornadoIntegration()], send_default_pii=True)
    events = capture_events()

    class FormdataHandler(RequestHandler):
        def post(self):
            raise ValueError(json.dumps(sorted(self.request.body_arguments)))

    client = tornado_testcase(Application([(r"/form", FormdataHandler)]))

    response = client.fetch(
        "/form?queryarg=1",
        method="POST",
        headers={"Content-Type": "application/x-www-form-urlencoded"},
        body=b"field1=value1&field2=value2",
    )

    assert response.code == 500

    (event,) = events
    (exception,) = event["exception"]["values"]
    assert exception["value"] == '["field1", "field2"]'
    assert event["request"]["data"] == {"field1": ["value1"], "field2": ["value2"]}


def test_json(tornado_testcase, sentry_init, capture_events):
    sentry_init(integrations=[TornadoIntegration()], send_default_pii=True)
    events = capture_events()

    class FormdataHandler(RequestHandler):
        def post(self):
            raise ValueError(json.dumps(sorted(self.request.body_arguments)))

    client = tornado_testcase(Application([(r"/form", FormdataHandler)]))

    response = client.fetch(
        "/form?queryarg=1",
        method="POST",
        headers={"Content-Type": "application/json"},
        body=b"""
        {"foo": {"bar": 42}}
        """,
    )

    assert response.code == 500

    (event,) = events
    (exception,) = event["exception"]["values"]
    assert exception["value"] == "[]"
    assert event
    assert event["request"]["data"] == {"foo": {"bar": 42}}


def test_error_has_new_trace_context_performance_enabled(
    tornado_testcase, sentry_init, capture_events
):
    """
    Check that a 'trace' context is added to errors and transactions when performance monitoring is enabled.
    """
    sentry_init(
        integrations=[TornadoIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client = tornado_testcase(Application([(r"/hi", CrashingWithMessageHandler)]))
    client.fetch("/hi")

    (msg_event, error_event, transaction_event) = events

    assert "trace" in msg_event["contexts"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert "trace" in error_event["contexts"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert "trace" in transaction_event["contexts"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
        == transaction_event["contexts"]["trace"]["trace_id"]
    )


def test_error_has_new_trace_context_performance_disabled(
    tornado_testcase, sentry_init, capture_events
):
    """
    Check that a 'trace' context is added to errors when performance monitoring is disabled.
    """
    sentry_init(
        integrations=[TornadoIntegration()],
        traces_sample_rate=None,  # this is the default, just added for clarity
    )
    events = capture_events()

    client = tornado_testcase(Application([(r"/hi", CrashingWithMessageHandler)]))
    client.fetch("/hi")

    (msg_event, error_event) = events

    assert "trace" in msg_event["contexts"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert "trace" in error_event["contexts"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
    )


def test_error_has_existing_trace_context_performance_enabled(
    tornado_testcase, sentry_init, capture_events
):
    """
    Check that the 'trace' context on errors and transactions is taken from
    the incoming 'sentry-trace' header when performance monitoring is enabled.
    """
    sentry_init(
        integrations=[TornadoIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

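    # An incoming sentry-trace header has the form
    # "<trace_id>-<parent_span_id>-<sampled>".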
    trace_id = "471a43a4192642f0b136d5159a501701"
    parent_span_id = "6e8f22c393e68f19"
    parent_sampled = 1
    sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)

    headers = {"sentry-trace": sentry_trace_header}

    client = tornado_testcase(Application([(r"/hi", CrashingWithMessageHandler)]))
    client.fetch("/hi", headers=headers)

    (msg_event, error_event, transaction_event) = events

    assert "trace" in msg_event["contexts"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert "trace" in error_event["contexts"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert "trace" in transaction_event["contexts"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
        == transaction_event["contexts"]["trace"]["trace_id"]
        == "471a43a4192642f0b136d5159a501701"
    )


def test_error_has_existing_trace_context_performance_disabled(
    tornado_testcase, sentry_init, capture_events
):
    """
    Check that the 'trace' context on errors is taken from the incoming
    'sentry-trace' header when performance monitoring is disabled.
    """
    sentry_init(
        integrations=[TornadoIntegration()],
        traces_sample_rate=None,  # this is the default, just added for clarity
    )
    events = capture_events()

    trace_id = "471a43a4192642f0b136d5159a501701"
    parent_span_id = "6e8f22c393e68f19"
    parent_sampled = 1
    sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)

    headers = {"sentry-trace": sentry_trace_header}

    client = tornado_testcase(Application([(r"/hi", CrashingWithMessageHandler)]))
    client.fetch("/hi", headers=headers)

    (msg_event, error_event) = events

    assert "trace" in msg_event["contexts"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert "trace" in error_event["contexts"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
        == "471a43a4192642f0b136d5159a501701"
    )
sentry-python-1.39.2/tests/integrations/trytond/000077500000000000000000000000001454744723200220405ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/trytond/__init__.py000066400000000000000000000000561454744723200241520ustar00rootroot00000000000000import pytest

pytest.importorskip("trytond")
sentry-python-1.39.2/tests/integrations/trytond/test_trytond.py000066400000000000000000000070001454744723200251510ustar00rootroot00000000000000import json
import unittest.mock

import pytest

import trytond
from trytond.exceptions import TrytonException as TrytondBaseException
from trytond.exceptions import UserError as TrytondUserError
from trytond.exceptions import UserWarning as TrytondUserWarning
from trytond.exceptions import LoginException
from trytond.wsgi import app as trytond_app

from werkzeug.test import Client
from sentry_sdk import last_event_id
from sentry_sdk.integrations.trytond import TrytondWSGIIntegration


@pytest.fixture(scope="function")
def app(sentry_init):
    yield trytond_app


@pytest.fixture
def get_client(app):
    def inner():
        return Client(app)

    return inner


@pytest.mark.parametrize(
    "exception", [Exception("foo"), type("FooException", (Exception,), {})("bar")]
)
def test_exceptions_captured(
    sentry_init, app, capture_exceptions, get_client, exception
):
    sentry_init(integrations=[TrytondWSGIIntegration()])
    exceptions = capture_exceptions()

    unittest.mock.sentinel.exception = exception

    @app.route("/exception")
    def _(request):
        raise unittest.mock.sentinel.exception

    client = get_client()
    _ = client.get("/exception")

    (e,) = exceptions
    assert e is exception


@pytest.mark.parametrize(
    "exception",
    [
        TrytondUserError("title"),
        TrytondUserWarning("title", "details"),
        LoginException("title", "details"),
    ],
)
def test_trytonderrors_not_captured(
    sentry_init, app, capture_exceptions, get_client, exception
):
    sentry_init(integrations=[TrytondWSGIIntegration()])
    exceptions = capture_exceptions()

    unittest.mock.sentinel.exception = exception

    @app.route("/usererror")
    def _(request):
        raise unittest.mock.sentinel.exception

    client = get_client()
    _ = client.get("/usererror")

    assert not exceptions


@pytest.mark.skipif(
    tuple(int(x) for x in trytond.__version__.split(".")[:2]) < (5, 4),
    reason="At least Trytond-5.4 required",
)
def test_rpc_error_page(sentry_init, app, capture_events, get_client):
    """Test that, after initializing the Trytond-SentrySDK integration
    a custom error handler can be registered to the Trytond WSGI app so as to
    inform the event identifiers to the Tryton RPC client"""

    sentry_init(integrations=[TrytondWSGIIntegration()])
    events = capture_events()

    @app.route("/rpcerror", methods=["POST"])
    def _(request):
        raise Exception("foo")

    @app.error_handler
    def _(app, request, e):
        if isinstance(e, TrytondBaseException):
            return
        else:
            event_id = last_event_id()
            data = TrytondUserError(str(event_id), str(e))
            return app.make_response(request, data)

    client = get_client()

    # This would look like a natural Tryton RPC call
    _data = dict(
        id=42,  # request sequence
        method="class.method",  # rpc call
        params=[
            [1234],  # ids
            ["bar", "baz"],  # values
            dict(  # context
                client="12345678-9abc-def0-1234-56789abc",
                groups=[1],
                language="ca",
                language_direction="ltr",
            ),
        ],
    )
    response = client.post(
        "/rpcerror", content_type="application/json", data=json.dumps(_data)
    )

    (event,) = events
    (content, status, headers) = response
    data = json.loads(next(content))
    assert status == "200 OK"
    assert headers.get("Content-Type") == "application/json"
    assert data == dict(id=42, error=["UserError", [event["event_id"], "foo", None]])
sentry-python-1.39.2/tests/integrations/wsgi/000077500000000000000000000000001454744723200213065ustar00rootroot00000000000000sentry-python-1.39.2/tests/integrations/wsgi/test_wsgi.py000066400000000000000000000316651454744723200237030ustar00rootroot00000000000000import sys

from werkzeug.test import Client

import pytest

import sentry_sdk
from sentry_sdk import capture_message
from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
from collections import Counter

try:
    from unittest import mock  # python 3.3 and above
except ImportError:
    import mock  # python < 3.3


@pytest.fixture
def crashing_app():
    def app(environ, start_response):
        1 / 0

    return app


class IterableApp(object):
    def __init__(self, iterable):
        self.iterable = iterable

    def __call__(self, environ, start_response):
        return self.iterable


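# Iterable that raises the exception produced by `exc_func` on the first
# iteration step. Wrapped in IterableApp, it simulates a WSGI app whose
# response body blows up while being consumed, which the middleware must
# still capture.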
class ExitingIterable(object):
    def __init__(self, exc_func):
        self._exc_func = exc_func

    def __iter__(self):
        return self

    def __next__(self):
        raise self._exc_func()

    def next(self):
        return type(self).__next__(self)


def test_basic(sentry_init, crashing_app, capture_events):
    sentry_init(send_default_pii=True)
    app = SentryWsgiMiddleware(crashing_app)
    client = Client(app)
    events = capture_events()

    with pytest.raises(ZeroDivisionError):
        client.get("/")

    (event,) = events

    assert event["transaction"] == "generic WSGI request"

    assert event["request"] == {
        "env": {"SERVER_NAME": "localhost", "SERVER_PORT": "80"},
        "headers": {"Host": "localhost"},
        "method": "GET",
        "query_string": "",
        "url": "http://localhost/",
    }


@pytest.mark.parametrize("zero_code", [0, None])
def test_systemexit_zero_is_ignored(sentry_init, capture_events, zero_code):
    sentry_init(send_default_pii=True)
    iterable = ExitingIterable(lambda: SystemExit(zero_code))
    app = SentryWsgiMiddleware(IterableApp(iterable))
    client = Client(app)
    events = capture_events()

    with pytest.raises(SystemExit):
        client.get("/")

    assert len(events) == 0


@pytest.mark.parametrize("nonzero_code", ["", "foo", 1, 2])
def test_systemexit_nonzero_is_captured(sentry_init, capture_events, nonzero_code):
    sentry_init(send_default_pii=True)
    iterable = ExitingIterable(lambda: SystemExit(nonzero_code))
    app = SentryWsgiMiddleware(IterableApp(iterable))
    client = Client(app)
    events = capture_events()

    with pytest.raises(SystemExit):
        client.get("/")

    (event,) = events

    assert "exception" in event
    exc = event["exception"]["values"][-1]
    assert exc["type"] == "SystemExit"
    assert exc["value"] == str(nonzero_code)
    assert event["level"] == "error"


def test_keyboard_interrupt_is_captured(sentry_init, capture_events):
    sentry_init(send_default_pii=True)
    iterable = ExitingIterable(lambda: KeyboardInterrupt())
    app = SentryWsgiMiddleware(IterableApp(iterable))
    client = Client(app)
    events = capture_events()

    with pytest.raises(KeyboardInterrupt):
        client.get("/")

    (event,) = events

    assert "exception" in event
    exc = event["exception"]["values"][-1]
    assert exc["type"] == "KeyboardInterrupt"
    assert exc["value"] == ""
    assert event["level"] == "error"


def test_transaction_with_error(
    sentry_init, crashing_app, capture_events, DictionaryContaining  # noqa:N803
):
    def dogpark(environ, start_response):
        raise ValueError("Fetch aborted. The ball was not returned.")

    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
    app = SentryWsgiMiddleware(dogpark)
    client = Client(app)
    events = capture_events()

    with pytest.raises(ValueError):
        client.get("http://dogs.are.great/sit/stay/rollover/")

    error_event, envelope = events

    assert error_event["transaction"] == "generic WSGI request"
    assert error_event["contexts"]["trace"]["op"] == "http.server"
    assert error_event["exception"]["values"][0]["type"] == "ValueError"
    assert error_event["exception"]["values"][0]["mechanism"]["type"] == "wsgi"
    assert error_event["exception"]["values"][0]["mechanism"]["handled"] is False
    assert (
        error_event["exception"]["values"][0]["value"]
        == "Fetch aborted. The ball was not returned."
    )

    assert envelope["type"] == "transaction"

    # event trace context is a subset of envelope trace context
    assert envelope["contexts"]["trace"] == DictionaryContaining(
        error_event["contexts"]["trace"]
    )
    assert envelope["contexts"]["trace"]["status"] == "internal_error"
    assert envelope["transaction"] == error_event["transaction"]
    assert envelope["request"] == error_event["request"]


def test_transaction_no_error(
    sentry_init, capture_events, DictionaryContaining  # noqa:N803
):
    def dogpark(environ, start_response):
        start_response("200 OK", [])
        return ["Go get the ball! Good dog!"]

    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
    app = SentryWsgiMiddleware(dogpark)
    client = Client(app)
    events = capture_events()

    client.get("/dogs/are/great/")

    envelope = events[0]

    assert envelope["type"] == "transaction"
    assert envelope["transaction"] == "generic WSGI request"
    assert envelope["contexts"]["trace"]["op"] == "http.server"
    assert envelope["request"] == DictionaryContaining(
        {"method": "GET", "url": "http://localhost/dogs/are/great/"}
    )


def test_has_trace_if_performance_enabled(
    sentry_init,
    capture_events,
):
    def dogpark(environ, start_response):
        capture_message("Attempting to fetch the ball")
        raise ValueError("Fetch aborted. The ball was not returned.")

    sentry_init(traces_sample_rate=1.0)
    app = SentryWsgiMiddleware(dogpark)
    client = Client(app)
    events = capture_events()

    with pytest.raises(ValueError):
        client.get("http://dogs.are.great/sit/stay/rollover/")

    msg_event, error_event, transaction_event = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert transaction_event["contexts"]["trace"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
        == transaction_event["contexts"]["trace"]["trace_id"]
    )


def test_has_trace_if_performance_disabled(
    sentry_init,
    capture_events,
):
    def dogpark(environ, start_response):
        capture_message("Attempting to fetch the ball")
        raise ValueError("Fetch aborted. The ball was not returned.")

    sentry_init()
    app = SentryWsgiMiddleware(dogpark)
    client = Client(app)
    events = capture_events()

    with pytest.raises(ValueError):
        client.get("http://dogs.are.great/sit/stay/rollover/")

    msg_event, error_event = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]


def test_trace_from_headers_if_performance_enabled(
    sentry_init,
    capture_events,
):
    def dogpark(environ, start_response):
        capture_message("Attempting to fetch the ball")
        raise ValueError("Fetch aborted. The ball was not returned.")

    sentry_init(traces_sample_rate=1.0)
    app = SentryWsgiMiddleware(dogpark)
    client = Client(app)
    events = capture_events()

    trace_id = "582b43a4192642f0b136d5159a501701"
    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)

    with pytest.raises(ValueError):
        client.get(
            "http://dogs.are.great/sit/stay/rollover/",
            headers={"sentry-trace": sentry_trace_header},
        )

    msg_event, error_event, transaction_event = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert transaction_event["contexts"]["trace"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
    assert transaction_event["contexts"]["trace"]["trace_id"] == trace_id


def test_trace_from_headers_if_performance_disabled(
    sentry_init,
    capture_events,
):
    def dogpark(environ, start_response):
        capture_message("Attempting to fetch the ball")
        raise ValueError("Fetch aborted. The ball was not returned.")

    sentry_init()
    app = SentryWsgiMiddleware(dogpark)
    client = Client(app)
    events = capture_events()

    trace_id = "582b43a4192642f0b136d5159a501701"
    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)

    with pytest.raises(ValueError):
        client.get(
            "http://dogs.are.great/sit/stay/rollover/",
            headers={"sentry-trace": sentry_trace_header},
        )

    msg_event, error_event = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]
    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]
    assert error_event["contexts"]["trace"]["trace_id"] == trace_id


def test_traces_sampler_gets_correct_values_in_sampling_context(
    sentry_init,
    DictionaryContaining,  # noqa:N803
):
    def app(environ, start_response):
        start_response("200 OK", [])
        return ["Go get the ball! Good dog!"]

    traces_sampler = mock.Mock(return_value=True)
    sentry_init(send_default_pii=True, traces_sampler=traces_sampler)
    app = SentryWsgiMiddleware(app)
    client = Client(app)

    client.get("/dogs/are/great/")

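    # The WSGI middleware exposes the raw environ to traces_sampler under the
    # "wsgi_environ" key, so sampling decisions can inspect path, method, etc.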
    traces_sampler.assert_any_call(
        DictionaryContaining(
            {
                "wsgi_environ": DictionaryContaining(
                    {
                        "PATH_INFO": "/dogs/are/great/",
                        "REQUEST_METHOD": "GET",
                    },
                ),
            }
        )
    )


def test_session_mode_defaults_to_request_mode_in_wsgi_handler(
    capture_envelopes, sentry_init
):
    """
    Ensure that even though the default `session_mode` for
    auto_session_tracking is `application`, it flips to `request` inside the
    WSGI handler.
    """

    def app(environ, start_response):
        start_response("200 OK", [])
        return ["Go get the ball! Good dog!"]

    traces_sampler = mock.Mock(return_value=True)
    sentry_init(send_default_pii=True, traces_sampler=traces_sampler)
    app = SentryWsgiMiddleware(app)
    envelopes = capture_envelopes()

    client = Client(app)

    client.get("/dogs/are/great/")

    sentry_sdk.flush()

    sess = envelopes[1]
    assert len(sess.items) == 1
    sess_event = sess.items[0].payload.json

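    # Request-mode sessions are not sent individually; they are rolled up into
    # a single "aggregates" entry, so one successful request shows up as
    # {"exited": 1}.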
    aggregates = sess_event["aggregates"]
    assert len(aggregates) == 1
    assert aggregates[0]["exited"] == 1


def test_auto_session_tracking_with_aggregates(sentry_init, capture_envelopes):
    """
    Test for correct session aggregates in auto session tracking.
    """

    def sample_app(environ, start_response):
        if environ["REQUEST_URI"] != "/dogs/are/great/":
            1 / 0

        start_response("200 OK", [])
        return ["Go get the ball! Good dog!"]

    traces_sampler = mock.Mock(return_value=True)
    sentry_init(send_default_pii=True, traces_sampler=traces_sampler)
    app = SentryWsgiMiddleware(sample_app)
    envelopes = capture_envelopes()
    assert len(envelopes) == 0

    client = Client(app)
    client.get("/dogs/are/great/")
    client.get("/dogs/are/great/")
    try:
        client.get("/trigger/an/error/")
    except ZeroDivisionError:
        pass

    sentry_sdk.flush()

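    # Three requests yield three transaction envelopes, the ZeroDivisionError
    # yields one error event, and all three sessions are rolled up into a
    # single aggregates envelope: two exited, one crashed.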
    count_item_types = Counter()
    for envelope in envelopes:
        count_item_types[envelope.items[0].type] += 1

    assert count_item_types["transaction"] == 3
    assert count_item_types["event"] == 1
    assert count_item_types["sessions"] == 1
    assert len(envelopes) == 5

    session_aggregates = envelopes[-1].items[0].payload.json["aggregates"]
    assert session_aggregates[0]["exited"] == 2
    assert session_aggregates[0]["crashed"] == 1
    assert len(session_aggregates) == 1


@pytest.mark.skipif(
    sys.version_info < (3, 3), reason="Profiling is only supported in Python >= 3.3"
)
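# Drop the minimum-sample threshold so even this near-instant request produces
# a profile instead of being discarded as too short.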
@mock.patch("sentry_sdk.profiler.PROFILE_MINIMUM_SAMPLES", 0)
def test_profile_sent(
    sentry_init,
    capture_envelopes,
    teardown_profiling,
):
    def test_app(environ, start_response):
        start_response("200 OK", [])
        return ["Go get the ball! Good dog!"]

    sentry_init(
        traces_sample_rate=1.0,
        _experiments={"profiles_sample_rate": 1.0},
    )
    app = SentryWsgiMiddleware(test_app)
    envelopes = capture_envelopes()

    client = Client(app)
    client.get("/")

    envelopes = [envelope for envelope in envelopes]
    assert len(envelopes) == 1

    profiles = [item for item in envelopes[0].items if item.type == "profile"]
    assert len(profiles) == 1
sentry-python-1.39.2/tests/test_api.py000066400000000000000000000071411454744723200200140ustar00rootroot00000000000000from sentry_sdk import (
    configure_scope,
    continue_trace,
    get_baggage,
    get_current_span,
    get_traceparent,
    start_transaction,
)
from sentry_sdk.hub import Hub

try:
    from unittest import mock  # python 3.3 and above
except ImportError:
    import mock  # python < 3.3


def test_get_current_span():
    fake_hub = mock.MagicMock()
    fake_hub.scope = mock.MagicMock()

    fake_hub.scope.span = mock.MagicMock()
    assert get_current_span(fake_hub) == fake_hub.scope.span

    fake_hub.scope.span = None
    assert get_current_span(fake_hub) is None


def test_get_current_span_default_hub(sentry_init):
    sentry_init()

    assert get_current_span() is None

    with configure_scope() as scope:
        fake_span = mock.MagicMock()
        scope.span = fake_span

        assert get_current_span() == fake_span


def test_get_current_span_default_hub_with_transaction(sentry_init):
    sentry_init()

    assert get_current_span() is None

    with start_transaction() as new_transaction:
        assert get_current_span() == new_transaction


def test_traceparent_with_tracing_enabled(sentry_init):
    sentry_init(traces_sample_rate=1.0)

    with start_transaction() as transaction:
        expected_traceparent = "%s-%s-1" % (
            transaction.trace_id,
            transaction.span_id,
        )
        assert get_traceparent() == expected_traceparent


def test_traceparent_with_tracing_disabled(sentry_init):
    sentry_init()

    propagation_context = Hub.current.scope._propagation_context
    expected_traceparent = "%s-%s" % (
        propagation_context["trace_id"],
        propagation_context["span_id"],
    )
    assert get_traceparent() == expected_traceparent


def test_baggage_with_tracing_disabled(sentry_init):
    sentry_init(release="1.0.0", environment="dev")
    propagation_context = Hub.current.scope._propagation_context
    expected_baggage = (
        "sentry-trace_id={},sentry-environment=dev,sentry-release=1.0.0".format(
            propagation_context["trace_id"]
        )
    )
    # order not guaranteed in older python versions
    assert sorted(get_baggage().split(",")) == sorted(expected_baggage.split(","))


def test_baggage_with_tracing_enabled(sentry_init):
    sentry_init(traces_sample_rate=1.0, release="1.0.0", environment="dev")
    with start_transaction() as transaction:
        expected_baggage = "sentry-trace_id={},sentry-environment=dev,sentry-release=1.0.0,sentry-sample_rate=1.0,sentry-sampled={}".format(
            transaction.trace_id, "true" if transaction.sampled else "false"
        )
        # order not guaranteed in older python versions
        assert sorted(get_baggage().split(",")) == sorted(expected_baggage.split(","))


def test_continue_trace(sentry_init):
    sentry_init()

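    # A sentry-trace header is "<trace_id>-<parent_span_id>-<sampled>"; the
    # baggage header carries additional sentry- prefixed key/value pairs.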
    trace_id = "471a43a4192642f0b136d5159a501701"
    parent_span_id = "6e8f22c393e68f19"
    parent_sampled = 1
    transaction = continue_trace(
        {
            "sentry-trace": "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled),
            "baggage": "sentry-trace_id=566e3688a61d4bc888951642d6f14a19",
        },
        name="some name",
    )
    with start_transaction(transaction):
        assert transaction.name == "some name"

        propagation_context = Hub.current.scope._propagation_context
        assert propagation_context["trace_id"] == transaction.trace_id == trace_id
        assert propagation_context["parent_span_id"] == parent_span_id
        assert propagation_context["parent_sampled"] == parent_sampled
        assert propagation_context["dynamic_sampling_context"] == {
            "trace_id": "566e3688a61d4bc888951642d6f14a19"
        }
sentry-python-1.39.2/tests/test_basics.py000066400000000000000000000561331454744723200205140ustar00rootroot00000000000000import logging
import os
import sys
import time

import pytest

from tests.conftest import patch_start_tracing_child

from sentry_sdk import (
    Client,
    push_scope,
    configure_scope,
    capture_event,
    capture_exception,
    capture_message,
    start_transaction,
    add_breadcrumb,
    last_event_id,
    Hub,
)
from sentry_sdk._compat import reraise, PY2
from sentry_sdk.integrations import (
    _AUTO_ENABLING_INTEGRATIONS,
    Integration,
    setup_integrations,
)
from sentry_sdk.integrations.logging import LoggingIntegration
from sentry_sdk.integrations.redis import RedisIntegration
from sentry_sdk.scope import (  # noqa: F401
    add_global_event_processor,
    global_event_processors,
)
from sentry_sdk.utils import get_sdk_name
from sentry_sdk.tracing_utils import has_tracing_enabled


def _redis_installed():  # type: () -> bool
    """
    Determines whether Redis is installed.
    """
    try:
        import redis  # noqa: F401
    except ImportError:
        return False

    return True


class NoOpIntegration(Integration):
    """
    A simple no-op integration for testing purposes.
    """

    identifier = "noop"

    @staticmethod
    def setup_once():  # type: () -> None
        pass

    def __eq__(self, __value):  # type: (object) -> bool
        """
        All instances of NoOpIntegration should be considered equal to each other.
        """
        return type(__value) == type(self)


def test_processors(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    with configure_scope() as scope:

        def error_processor(event, exc_info):
            event["exception"]["values"][0]["value"] += " whatever"
            return event

        scope.add_error_processor(error_processor, ValueError)

    try:
        raise ValueError("aha!")
    except Exception:
        capture_exception()

    (event,) = events

    assert event["exception"]["values"][0]["value"] == "aha! whatever"


def test_auto_enabling_integrations_catches_import_error(sentry_init, caplog):
    caplog.set_level(logging.DEBUG)
    redis_index = _AUTO_ENABLING_INTEGRATIONS.index(
        "sentry_sdk.integrations.redis.RedisIntegration"
    )  # noqa: N806

    sentry_init(auto_enabling_integrations=True, debug=True)

    for import_string in _AUTO_ENABLING_INTEGRATIONS:
        # Ignore redis in the test case, because it does not raise a DidNotEnable
        # exception on import; rather, it raises the exception upon enabling.
        if _AUTO_ENABLING_INTEGRATIONS[redis_index] == import_string:
            continue

        assert any(
            record.message.startswith(
                "Did not import default integration {}:".format(import_string)
            )
            for record in caplog.records
        ), "Problem with checking auto enabling {}".format(import_string)


def test_event_id(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    try:
        raise ValueError("aha!")
    except Exception:
        event_id = capture_exception()
        int(event_id, 16)
        assert len(event_id) == 32

    (event,) = events
    assert event["event_id"] == event_id
    assert last_event_id() == event_id
    assert Hub.current.last_event_id() == event_id

    new_event_id = Hub.current.capture_event({"type": "transaction"})
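    # Capturing a transaction yields a new event id but must not update
    # last_event_id; only error events do.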
    assert new_event_id is not None
    assert new_event_id != event_id
    assert Hub.current.last_event_id() == event_id


def test_generic_mechanism(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    try:
        raise ValueError("aha!")
    except Exception:
        capture_exception()

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "generic"
    assert event["exception"]["values"][0]["mechanism"]["handled"]


def test_option_before_send(sentry_init, capture_events):
    def before_send(event, hint):
        event["extra"] = {"before_send_called": True}
        return event

    def do_this():
        try:
            raise ValueError("aha!")
        except Exception:
            capture_exception()

    sentry_init(before_send=before_send)
    events = capture_events()

    do_this()

    (event,) = events
    assert event["extra"] == {"before_send_called": True}


def test_option_before_send_discard(sentry_init, capture_events):
    def before_send_discard(event, hint):
        return None

    def do_this():
        try:
            raise ValueError("aha!")
        except Exception:
            capture_exception()

    sentry_init(before_send=before_send_discard)
    events = capture_events()

    do_this()

    assert len(events) == 0


def test_option_before_send_transaction(sentry_init, capture_events):
    def before_send_transaction(event, hint):
        assert event["type"] == "transaction"
        event["extra"] = {"before_send_transaction_called": True}
        return event

    sentry_init(
        before_send_transaction=before_send_transaction,
        traces_sample_rate=1.0,
    )
    events = capture_events()
    transaction = start_transaction(name="foo")
    transaction.finish()

    (event,) = events
    assert event["transaction"] == "foo"
    assert event["extra"] == {"before_send_transaction_called": True}


def test_option_before_send_transaction_discard(sentry_init, capture_events):
    def before_send_transaction_discard(event, hint):
        return None

    sentry_init(
        before_send_transaction=before_send_transaction_discard,
        traces_sample_rate=1.0,
    )
    events = capture_events()
    transaction = start_transaction(name="foo")
    transaction.finish()

    assert len(events) == 0


def test_option_before_breadcrumb(sentry_init, capture_events, monkeypatch):
    drop_events = False
    drop_breadcrumbs = False
    reports = []

    def record_lost_event(reason, data_category=None, item=None):
        reports.append((reason, data_category))

    def before_send(event, hint):
        assert isinstance(hint["exc_info"][1], ValueError)
        if not drop_events:
            event["extra"] = {"foo": "bar"}
            return event

    def before_breadcrumb(crumb, hint):
        assert hint == {"foo": 42}
        if not drop_breadcrumbs:
            crumb["data"] = {"foo": "bar"}
            return crumb

    sentry_init(before_send=before_send, before_breadcrumb=before_breadcrumb)
    events = capture_events()

    monkeypatch.setattr(
        Hub.current.client.transport, "record_lost_event", record_lost_event
    )

    def do_this():
        add_breadcrumb(message="Hello", hint={"foo": 42})
        try:
            raise ValueError("aha!")
        except Exception:
            capture_exception()

    do_this()
    drop_breadcrumbs = True
    do_this()
    assert not reports
    drop_events = True
    do_this()
    assert reports == [("before_send", "error")]
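    # Note: dropping a breadcrumb produces no client report; only the dropped
    # error event is recorded, with reason "before_send".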

    normal, no_crumbs = events

    assert normal["exception"]["values"][0]["type"] == "ValueError"
    (crumb,) = normal["breadcrumbs"]["values"]
    assert "timestamp" in crumb
    assert crumb["message"] == "Hello"
    assert crumb["data"] == {"foo": "bar"}
    assert crumb["type"] == "default"


@pytest.mark.parametrize(
    "enable_tracing, traces_sample_rate, tracing_enabled, updated_traces_sample_rate",
    [
        (None, None, False, None),
        (False, 0.0, False, 0.0),
        (False, 1.0, False, 1.0),
        (None, 1.0, True, 1.0),
        (True, 1.0, True, 1.0),
        (None, 0.0, True, 0.0),  # Tracing is configured but effectively turned off (rate 0.0)
        (True, 0.0, True, 0.0),  # Tracing is configured but effectively turned off (rate 0.0)
        (True, None, True, 1.0),
    ],
)
def test_option_enable_tracing(
    sentry_init,
    enable_tracing,
    traces_sample_rate,
    tracing_enabled,
    updated_traces_sample_rate,
):
    sentry_init(enable_tracing=enable_tracing, traces_sample_rate=traces_sample_rate)
    options = Hub.current.client.options
    assert has_tracing_enabled(options) is tracing_enabled
    assert options["traces_sample_rate"] == updated_traces_sample_rate


def test_breadcrumb_arguments(sentry_init, capture_events):
    assert_hint = {"bar": 42}

    def before_breadcrumb(crumb, hint):
        assert crumb["foo"] == 42
        assert hint == assert_hint

    sentry_init(before_breadcrumb=before_breadcrumb)

    add_breadcrumb(foo=42, hint=dict(bar=42))
    add_breadcrumb(dict(foo=42), dict(bar=42))
    add_breadcrumb(dict(foo=42), hint=dict(bar=42))
    add_breadcrumb(crumb=dict(foo=42), hint=dict(bar=42))

    assert_hint.clear()
    add_breadcrumb(foo=42)
    add_breadcrumb(crumb=dict(foo=42))


def test_push_scope(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    with push_scope() as scope:
        scope.level = "warning"
        try:
            1 / 0
        except Exception as e:
            capture_exception(e)

    (event,) = events

    assert event["level"] == "warning"
    assert "exception" in event


def test_push_scope_null_client(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    Hub.current.bind_client(None)

    with push_scope() as scope:
        scope.level = "warning"
        try:
            1 / 0
        except Exception as e:
            capture_exception(e)

    assert len(events) == 0


@pytest.mark.parametrize("null_client", (True, False))
def test_push_scope_callback(sentry_init, null_client, capture_events):
    sentry_init()

    if null_client:
        Hub.current.bind_client(None)

    outer_scope = Hub.current.scope

    calls = []

    @push_scope
    def _(scope):
        assert scope is Hub.current.scope
        assert scope is not outer_scope
        calls.append(1)

    # push_scope must always execute the callback regardless of client state,
    # because the callback runs user code, not just scope configuration code
    assert calls == [1]

    # Assert scope gets popped correctly
    assert Hub.current.scope is outer_scope


def test_breadcrumbs(sentry_init, capture_events):
    sentry_init(max_breadcrumbs=10)
    events = capture_events()

    for i in range(20):
        add_breadcrumb(
            category="auth", message="Authenticated user %s" % i, level="info"
        )
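    # max_breadcrumbs=10: only the 10 most recent breadcrumbs are kept.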

    capture_exception(ValueError())
    (event,) = events

    assert len(event["breadcrumbs"]["values"]) == 10
    assert "user 10" in event["breadcrumbs"]["values"][0]["message"]
    assert "user 19" in event["breadcrumbs"]["values"][-1]["message"]

    del events[:]

    for i in range(2):
        add_breadcrumb(
            category="auth", message="Authenticated user %s" % i, level="info"
        )

    with configure_scope() as scope:
        scope.clear()

    capture_exception(ValueError())
    (event,) = events
    assert len(event["breadcrumbs"]["values"]) == 0


def test_attachments(sentry_init, capture_envelopes):
    sentry_init()
    envelopes = capture_envelopes()

    this_file = os.path.abspath(__file__.rstrip("c"))
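    # (rstrip("c") maps a compiled ".pyc" path back to its ".py" source file)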

    with configure_scope() as scope:
        scope.add_attachment(bytes=b"Hello World!", filename="message.txt")
        scope.add_attachment(path=this_file)

    capture_exception(ValueError())

    (envelope,) = envelopes

    assert len(envelope.items) == 3
    assert envelope.get_event()["exception"] is not None

    attachments = [x for x in envelope.items if x.type == "attachment"]
    (message, pyfile) = attachments

    assert message.headers["filename"] == "message.txt"
    assert message.headers["type"] == "attachment"
    assert message.headers["content_type"] == "text/plain"
    assert message.payload.bytes == message.payload.get_bytes() == b"Hello World!"

    assert pyfile.headers["filename"] == os.path.basename(this_file)
    assert pyfile.headers["type"] == "attachment"
    assert pyfile.headers["content_type"].startswith("text/")
    assert pyfile.payload.bytes is None
    with open(this_file, "rb") as f:
        assert pyfile.payload.get_bytes() == f.read()


def test_integration_scoping(sentry_init, capture_events):
    logger = logging.getLogger("test_basics")

    # This client uses the logging integration
    logging_integration = LoggingIntegration(event_level=logging.WARNING)
    sentry_init(default_integrations=False, integrations=[logging_integration])
    events = capture_events()
    logger.warning("This is a warning")
    assert len(events) == 1

    # This client does not
    sentry_init(default_integrations=False)
    events = capture_events()
    logger.warning("This is not a warning")
    assert not events


def test_client_initialized_within_scope(sentry_init, caplog):
    caplog.set_level(logging.WARNING)

    sentry_init(debug=True)

    with push_scope():
        Hub.current.bind_client(Client())

    (record,) = (x for x in caplog.records if x.levelname == "WARNING")

    assert record.msg.startswith("init() called inside of pushed scope.")


def test_scope_leaks_cleaned_up(sentry_init, caplog):
    caplog.set_level(logging.WARNING)

    sentry_init(debug=True)

    old_stack = list(Hub.current._stack)

    with push_scope():
        push_scope()

    assert Hub.current._stack == old_stack

    (record,) = (x for x in caplog.records if x.levelname == "WARNING")

    assert record.message.startswith("Leaked 1 scopes:")


def test_scope_popped_too_soon(sentry_init, caplog):
    caplog.set_level(logging.ERROR)

    sentry_init(debug=True)

    old_stack = list(Hub.current._stack)

    with push_scope():
        Hub.current.pop_scope_unsafe()

    assert Hub.current._stack == old_stack

    (record,) = (x for x in caplog.records if x.levelname == "ERROR")

    assert record.message == ("Scope popped too soon. Popped 1 scopes too many.")


def test_scope_event_processor_order(sentry_init, capture_events):
    def before_send(event, hint):
        event["message"] += "baz"
        return event

    sentry_init(debug=True, before_send=before_send)
    events = capture_events()

    with push_scope() as scope:

        @scope.add_event_processor
        def foo(event, hint):
            event["message"] += "foo"
            return event

        with push_scope() as scope:

            @scope.add_event_processor
            def bar(event, hint):
                event["message"] += "bar"
                return event

            capture_message("hi")

    (event,) = events

    assert event["message"] == "hifoobarbaz"


def test_capture_event_with_scope_kwargs(sentry_init, capture_events):
    sentry_init(debug=True)
    events = capture_events()
    capture_event({}, level="info", extras={"foo": "bar"})
    (event,) = events
    assert event["level"] == "info"
    assert event["extra"]["foo"] == "bar"


def test_dedupe_event_processor_drop_records_client_report(
    sentry_init, capture_events, capture_client_reports
):
    """
    DedupeIntegration internally has an event_processor that filters duplicate exceptions.
    We want a duplicate exception to be captured only once, and the drop to be
    recorded as a client report.
    """
    sentry_init()
    events = capture_events()
    reports = capture_client_reports()

    try:
        raise ValueError("aha!")
    except Exception:
        try:
            capture_exception()
            reraise(*sys.exc_info())
        except Exception:
            capture_exception()

    (event,) = events
    (report,) = reports

    assert event["level"] == "error"
    assert "exception" in event
    assert report == ("event_processor", "error")


def test_event_processor_drop_records_client_report(
    sentry_init, capture_events, capture_client_reports
):
    sentry_init(traces_sample_rate=1.0)
    events = capture_events()
    reports = capture_client_reports()

    global global_event_processors

    @add_global_event_processor
    def foo(event, hint):
        return None

    capture_message("dropped")

    with start_transaction(name="dropped"):
        pass

    assert len(events) == 0
    assert reports == [("event_processor", "error"), ("event_processor", "transaction")]

    global_event_processors.pop()


@pytest.mark.parametrize(
    "installed_integrations, expected_name",
    [
        # integrations with own name
        (["django"], "sentry.python.django"),
        (["flask"], "sentry.python.flask"),
        (["fastapi"], "sentry.python.fastapi"),
        (["bottle"], "sentry.python.bottle"),
        (["falcon"], "sentry.python.falcon"),
        (["quart"], "sentry.python.quart"),
        (["sanic"], "sentry.python.sanic"),
        (["starlette"], "sentry.python.starlette"),
        (["chalice"], "sentry.python.chalice"),
        (["serverless"], "sentry.python.serverless"),
        (["pyramid"], "sentry.python.pyramid"),
        (["tornado"], "sentry.python.tornado"),
        (["aiohttp"], "sentry.python.aiohttp"),
        (["aws_lambda"], "sentry.python.aws_lambda"),
        (["gcp"], "sentry.python.gcp"),
        (["beam"], "sentry.python.beam"),
        (["asgi"], "sentry.python.asgi"),
        (["wsgi"], "sentry.python.wsgi"),
        # integrations without name
        (["argv"], "sentry.python"),
        (["atexit"], "sentry.python"),
        (["boto3"], "sentry.python"),
        (["celery"], "sentry.python"),
        (["dedupe"], "sentry.python"),
        (["excepthook"], "sentry.python"),
        (["executing"], "sentry.python"),
        (["modules"], "sentry.python"),
        (["pure_eval"], "sentry.python"),
        (["redis"], "sentry.python"),
        (["rq"], "sentry.python"),
        (["sqlalchemy"], "sentry.python"),
        (["stdlib"], "sentry.python"),
        (["threading"], "sentry.python"),
        (["trytond"], "sentry.python"),
        (["logging"], "sentry.python"),
        (["gnu_backtrace"], "sentry.python"),
        (["httpx"], "sentry.python"),
        # precedence of frameworks
        (["flask", "django", "celery"], "sentry.python.django"),
        (["fastapi", "flask", "redis"], "sentry.python.flask"),
        (["bottle", "fastapi", "httpx"], "sentry.python.fastapi"),
        (["falcon", "bottle", "logging"], "sentry.python.bottle"),
        (["quart", "falcon", "gnu_backtrace"], "sentry.python.falcon"),
        (["sanic", "quart", "sqlalchemy"], "sentry.python.quart"),
        (["starlette", "sanic", "rq"], "sentry.python.sanic"),
        (["chalice", "starlette", "modules"], "sentry.python.starlette"),
        (["serverless", "chalice", "pure_eval"], "sentry.python.chalice"),
        (["pyramid", "serverless", "modules"], "sentry.python.serverless"),
        (["tornado", "pyramid", "executing"], "sentry.python.pyramid"),
        (["aiohttp", "tornado", "dedupe"], "sentry.python.tornado"),
        (["aws_lambda", "aiohttp", "boto3"], "sentry.python.aiohttp"),
        (["gcp", "aws_lambda", "atexit"], "sentry.python.aws_lambda"),
        (["beam", "gcp", "argv"], "sentry.python.gcp"),
        (["asgi", "beam", "stdtlib"], "sentry.python.beam"),
        (["wsgi", "asgi", "boto3"], "sentry.python.asgi"),
        (["wsgi", "celery", "redis"], "sentry.python.wsgi"),
    ],
)
def test_get_sdk_name(installed_integrations, expected_name):
    assert get_sdk_name(installed_integrations) == expected_name


def _hello_world(word):
    return "Hello, {}".format(word)


def test_functions_to_trace(sentry_init, capture_events):
    functions_to_trace = [
        {"qualified_name": "tests.test_basics._hello_world"},
        {"qualified_name": "time.sleep"},
    ]
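    # Each function matched by its qualified name is wrapped so that every call
    # creates a child span of the active transaction.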

    sentry_init(
        traces_sample_rate=1.0,
        functions_to_trace=functions_to_trace,
    )

    events = capture_events()

    with start_transaction(name="something"):
        time.sleep(0)

        for word in ["World", "You"]:
            _hello_world(word)

    assert len(events) == 1

    (event,) = events

    assert len(event["spans"]) == 3
    assert event["spans"][0]["description"] == "time.sleep"
    assert event["spans"][1]["description"] == "tests.test_basics._hello_world"
    assert event["spans"][2]["description"] == "tests.test_basics._hello_world"


class WorldGreeter:
    def __init__(self, word):
        self.word = word

    def greet(self, new_word=None):
        return "Hello, {}".format(new_word if new_word else self.word)


def test_functions_to_trace_with_class(sentry_init, capture_events):
    functions_to_trace = [
        {"qualified_name": "tests.test_basics.WorldGreeter.greet"},
    ]

    sentry_init(
        traces_sample_rate=1.0,
        functions_to_trace=functions_to_trace,
    )

    events = capture_events()

    with start_transaction(name="something"):
        wg = WorldGreeter("World")
        wg.greet()
        wg.greet("You")

    assert len(events) == 1

    (event,) = events

    assert len(event["spans"]) == 2
    assert event["spans"][0]["description"] == "tests.test_basics.WorldGreeter.greet"
    assert event["spans"][1]["description"] == "tests.test_basics.WorldGreeter.greet"


@pytest.mark.skipif(_redis_installed(), reason="skipping because redis is installed")
def test_redis_disabled_when_not_installed(sentry_init):
    sentry_init()

    assert Hub.current.get_integration(RedisIntegration) is None


def test_multiple_setup_integrations_calls():
    first_call_return = setup_integrations([NoOpIntegration()], with_defaults=False)
    assert first_call_return == {NoOpIntegration.identifier: NoOpIntegration()}

    second_call_return = setup_integrations([NoOpIntegration()], with_defaults=False)
    assert second_call_return == {NoOpIntegration.identifier: NoOpIntegration()}
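    # Repeated setup_integrations() calls are idempotent: the integration is
    # reported as installed both times instead of erroring.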


class TracingTestClass:
    @staticmethod
    def static(arg):
        return arg

    @classmethod
    def class_(cls, arg):
        return cls, arg


def test_staticmethod_tracing(sentry_init):
    test_staticmethod_name = "tests.test_basics.TracingTestClass.static"
    if not PY2:
        # Skip this check on Python 2 since __qualname__ is available in Python 3 only. Skipping is okay,
        # since the assertion would be expected to fail in Python 3 if there is any problem.
        assert (
            ".".join(
                [
                    TracingTestClass.static.__module__,
                    TracingTestClass.static.__qualname__,
                ]
            )
            == test_staticmethod_name
        ), "The test static method was moved or renamed. Please update the name accordingly"

    sentry_init(functions_to_trace=[{"qualified_name": test_staticmethod_name}])

    for instance_or_class in (TracingTestClass, TracingTestClass()):
        with patch_start_tracing_child() as fake_start_child:
            assert instance_or_class.static(1) == 1
            assert fake_start_child.call_count == 1


def test_classmethod_tracing(sentry_init):
    test_classmethod_name = "tests.test_basics.TracingTestClass.class_"
    if not PY2:
        # Skip this check on Python 2 since __qualname__ is available in Python 3 only. Skipping is okay,
        # since the assertion would be expected to fail in Python 3 if there is any problem.
        assert (
            ".".join(
                [
                    TracingTestClass.class_.__module__,
                    TracingTestClass.class_.__qualname__,
                ]
            )
            == test_classmethod_name
        ), "The test class method was moved or renamed. Please update the name accordingly"

    sentry_init(functions_to_trace=[{"qualified_name": test_classmethod_name}])

    for instance_or_class in (TracingTestClass, TracingTestClass()):
        with patch_start_tracing_child() as fake_start_child:
            assert instance_or_class.class_(1) == (TracingTestClass, 1)
            assert fake_start_child.call_count == 1
sentry-python-1.39.2/tests/test_client.py000066400000000000000000001144351454744723200205260ustar00rootroot00000000000000# coding: utf-8
import os
import json
import pytest
import subprocess
import sys
import time

from textwrap import dedent
from sentry_sdk import (
    Hub,
    Client,
    add_breadcrumb,
    configure_scope,
    capture_message,
    capture_exception,
    capture_event,
    start_transaction,
    set_tag,
)
from sentry_sdk.integrations.executing import ExecutingIntegration
from sentry_sdk.transport import Transport
from sentry_sdk._compat import reraise, text_type, PY2
from sentry_sdk.utils import HAS_CHAINED_EXCEPTIONS
from sentry_sdk.utils import logger
from sentry_sdk.serializer import MAX_DATABAG_BREADTH
from sentry_sdk.consts import DEFAULT_MAX_BREADCRUMBS, DEFAULT_MAX_VALUE_LENGTH
from sentry_sdk._types import TYPE_CHECKING

if TYPE_CHECKING:
    from collections.abc import Callable
    from typing import Any, Optional, Union
    from sentry_sdk._types import Event

try:
    from unittest import mock  # python 3.3 and above
except ImportError:
    import mock  # python < 3.3

if PY2:
    # Importing ABCs from collections is deprecated, and will stop working in 3.8
    # https://github.com/python/cpython/blob/master/Lib/collections/__init__.py#L49
    from collections import Mapping
else:
    # New in 3.3
    # https://docs.python.org/3/library/collections.abc.html
    from collections.abc import Mapping


class EventCapturedError(Exception):
    pass


class _TestTransport(Transport):
    def capture_event(self, event):
        raise EventCapturedError(event)


def test_transport_option(monkeypatch):
    if "SENTRY_DSN" in os.environ:
        monkeypatch.delenv("SENTRY_DSN")

    dsn = "https://foo@sentry.io/123"
    dsn2 = "https://bar@sentry.io/124"
    assert str(Client(dsn=dsn).dsn) == dsn
    assert Client().dsn is None

    monkeypatch.setenv("SENTRY_DSN", dsn)
    transport = Transport({"dsn": dsn2})
    assert text_type(transport.parsed_dsn) == dsn2
    assert str(Client(transport=transport).dsn) == dsn


@pytest.mark.parametrize(
    "testcase",
    [
        {
            "dsn": "http://foo@sentry.io/123",
            "env_http_proxy": None,
            "env_https_proxy": None,
            "arg_http_proxy": "http://localhost/123",
            "arg_https_proxy": None,
            "expected_proxy_scheme": "http",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": None,
            "env_https_proxy": None,
            "arg_http_proxy": "https://localhost/123",
            "arg_https_proxy": None,
            "expected_proxy_scheme": "https",
        },
        {
            "dsn": "http://foo@sentry.io/123",
            "env_http_proxy": None,
            "env_https_proxy": None,
            "arg_http_proxy": "http://localhost/123",
            "arg_https_proxy": "https://localhost/123",
            "expected_proxy_scheme": "http",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": None,
            "env_https_proxy": None,
            "arg_http_proxy": "http://localhost/123",
            "arg_https_proxy": "https://localhost/123",
            "expected_proxy_scheme": "https",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": None,
            "env_https_proxy": None,
            "arg_http_proxy": "http://localhost/123",
            "arg_https_proxy": None,
            "expected_proxy_scheme": "http",
        },
        {
            "dsn": "http://foo@sentry.io/123",
            "env_http_proxy": None,
            "env_https_proxy": None,
            "arg_http_proxy": None,
            "arg_https_proxy": None,
            "expected_proxy_scheme": None,
        },
        {
            "dsn": "http://foo@sentry.io/123",
            "env_http_proxy": "http://localhost/123",
            "env_https_proxy": None,
            "arg_http_proxy": None,
            "arg_https_proxy": None,
            "expected_proxy_scheme": "http",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": None,
            "env_https_proxy": "https://localhost/123",
            "arg_http_proxy": None,
            "arg_https_proxy": None,
            "expected_proxy_scheme": "https",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": "http://localhost/123",
            "env_https_proxy": None,
            "arg_http_proxy": None,
            "arg_https_proxy": None,
            "expected_proxy_scheme": "http",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": "http://localhost/123",
            "env_https_proxy": "https://localhost/123",
            "arg_http_proxy": "",
            "arg_https_proxy": "",
            "expected_proxy_scheme": None,
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": "http://localhost/123",
            "env_https_proxy": "https://localhost/123",
            "arg_http_proxy": None,
            "arg_https_proxy": None,
            "expected_proxy_scheme": "https",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": "http://localhost/123",
            "env_https_proxy": None,
            "arg_http_proxy": None,
            "arg_https_proxy": None,
            "expected_proxy_scheme": "http",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": "http://localhost/123",
            "env_https_proxy": "https://localhost/123",
            "arg_http_proxy": None,
            "arg_https_proxy": "",
            "expected_proxy_scheme": "http",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": "http://localhost/123",
            "env_https_proxy": "https://localhost/123",
            "arg_http_proxy": "",
            "arg_https_proxy": None,
            "expected_proxy_scheme": "https",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": None,
            "env_https_proxy": "https://localhost/123",
            "arg_http_proxy": None,
            "arg_https_proxy": "",
            "expected_proxy_scheme": None,
        },
        {
            "dsn": "http://foo@sentry.io/123",
            "env_http_proxy": "http://localhost/123",
            "env_https_proxy": "https://localhost/123",
            "arg_http_proxy": None,
            "arg_https_proxy": None,
            "expected_proxy_scheme": "http",
        },
        # NO_PROXY testcases
        {
            "dsn": "http://foo@sentry.io/123",
            "env_http_proxy": "http://localhost/123",
            "env_https_proxy": None,
            "env_no_proxy": "sentry.io,example.com",
            "arg_http_proxy": None,
            "arg_https_proxy": None,
            "expected_proxy_scheme": None,
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": None,
            "env_https_proxy": "https://localhost/123",
            "env_no_proxy": "example.com,sentry.io",
            "arg_http_proxy": None,
            "arg_https_proxy": None,
            "expected_proxy_scheme": None,
        },
        {
            "dsn": "http://foo@sentry.io/123",
            "env_http_proxy": None,
            "env_https_proxy": None,
            "env_no_proxy": "sentry.io,example.com",
            "arg_http_proxy": "http://localhost/123",
            "arg_https_proxy": None,
            "expected_proxy_scheme": "http",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": None,
            "env_https_proxy": None,
            "env_no_proxy": "sentry.io,example.com",
            "arg_http_proxy": None,
            "arg_https_proxy": "https://localhost/123",
            "expected_proxy_scheme": "https",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": None,
            "env_https_proxy": None,
            "env_no_proxy": "sentry.io,example.com",
            "arg_http_proxy": None,
            "arg_https_proxy": "https://localhost/123",
            "expected_proxy_scheme": "https",
            "arg_proxy_headers": {"Test-Header": "foo-bar"},
        },
    ],
)
def test_proxy(monkeypatch, testcase):
    if testcase["env_http_proxy"] is not None:
        monkeypatch.setenv("HTTP_PROXY", testcase["env_http_proxy"])
    if testcase["env_https_proxy"] is not None:
        monkeypatch.setenv("HTTPS_PROXY", testcase["env_https_proxy"])
    if testcase.get("env_no_proxy") is not None:
        monkeypatch.setenv("NO_PROXY", testcase["env_no_proxy"])

    kwargs = {}

    if testcase["arg_http_proxy"] is not None:
        kwargs["http_proxy"] = testcase["arg_http_proxy"]
    if testcase["arg_https_proxy"] is not None:
        kwargs["https_proxy"] = testcase["arg_https_proxy"]
    if testcase.get("arg_proxy_headers") is not None:
        kwargs["proxy_headers"] = testcase["arg_proxy_headers"]

    client = Client(testcase["dsn"], **kwargs)

    if testcase["expected_proxy_scheme"] is None:
        assert client.transport._pool.proxy is None
    else:
        assert client.transport._pool.proxy.scheme == testcase["expected_proxy_scheme"]

        if testcase.get("arg_proxy_headers") is not None:
            assert client.transport._pool.proxy_headers == testcase["arg_proxy_headers"]


@pytest.mark.parametrize(
    "testcase",
    [
        {
            "dsn": "https://foo@sentry.io/123",
            "arg_http_proxy": "http://localhost/123",
            "arg_https_proxy": None,
            "expected_proxy_class": "",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "arg_http_proxy": "socks4a://localhost/123",
            "arg_https_proxy": None,
            "expected_proxy_class": "",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "arg_http_proxy": "socks4://localhost/123",
            "arg_https_proxy": None,
            "expected_proxy_class": "",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "arg_http_proxy": "socks5h://localhost/123",
            "arg_https_proxy": None,
            "expected_proxy_class": "",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "arg_http_proxy": "socks5://localhost/123",
            "arg_https_proxy": None,
            "expected_proxy_class": "",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "arg_http_proxy": None,
            "arg_https_proxy": "socks4a://localhost/123",
            "expected_proxy_class": "",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "arg_http_proxy": None,
            "arg_https_proxy": "socks4://localhost/123",
            "expected_proxy_class": "",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "arg_http_proxy": None,
            "arg_https_proxy": "socks5h://localhost/123",
            "expected_proxy_class": "",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "arg_http_proxy": None,
            "arg_https_proxy": "socks5://localhost/123",
            "expected_proxy_class": "",
        },
    ],
)
def test_socks_proxy(testcase):
    kwargs = {}

    if testcase["arg_http_proxy"] is not None:
        kwargs["http_proxy"] = testcase["arg_http_proxy"]
    if testcase["arg_https_proxy"] is not None:
        kwargs["https_proxy"] = testcase["arg_https_proxy"]

    client = Client(testcase["dsn"], **kwargs)
    assert str(type(client.transport._pool)) == testcase["expected_proxy_class"]


def test_simple_transport(sentry_init):
    events = []
    sentry_init(transport=events.append)
    capture_message("Hello World!")
    assert events[0]["message"] == "Hello World!"


def test_ignore_errors(sentry_init, capture_events):
    class MyDivisionError(ZeroDivisionError):
        pass

    def raise_it(exc_info):
        reraise(*exc_info)

    sentry_init(ignore_errors=[ZeroDivisionError], transport=_TestTransport())
    Hub.current._capture_internal_exception = raise_it

    def e(exc):
        try:
            raise exc
        except Exception:
            capture_exception()

    e(ZeroDivisionError())
    e(MyDivisionError())
    pytest.raises(EventCapturedError, lambda: e(ValueError()))


def test_with_locals_deprecation_enabled(sentry_init):
    with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
        sentry_init(with_locals=True)

        client = Hub.current.client
        assert "with_locals" not in client.options
        assert "include_local_variables" in client.options
        assert client.options["include_local_variables"]

        fake_warning.assert_called_once_with(
            "Deprecated: The option 'with_locals' was renamed to 'include_local_variables'. Please use 'include_local_variables'. The option 'with_locals' will be removed in the future."
        )


def test_with_locals_deprecation_disabled(sentry_init):
    with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
        sentry_init(with_locals=False)

        client = Hub.current.client
        assert "with_locals" not in client.options
        assert "include_local_variables" in client.options
        assert not client.options["include_local_variables"]

        fake_warning.assert_called_once_with(
            "Deprecated: The option 'with_locals' was renamed to 'include_local_variables'. Please use 'include_local_variables'. The option 'with_locals' will be removed in the future."
        )


def test_include_local_variables_deprecation(sentry_init):
    with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
        sentry_init(include_local_variables=False)

        client = Hub.current.client
        assert "with_locals" not in client.options
        assert "include_local_variables" in client.options
        assert not client.options["include_local_variables"]

        fake_warning.assert_not_called()


def test_request_bodies_deprecation(sentry_init):
    with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
        sentry_init(request_bodies="small")

        client = Hub.current.client
        assert "request_bodies" not in client.options
        assert "max_request_body_size" in client.options
        assert client.options["max_request_body_size"] == "small"

        fake_warning.assert_called_once_with(
            "Deprecated: The option 'request_bodies' was renamed to 'max_request_body_size'. Please use 'max_request_body_size'. The option 'request_bodies' will be removed in the future."
        )


def test_include_local_variables_enabled(sentry_init, capture_events):
    sentry_init(include_local_variables=True)
    events = capture_events()
    try:
        1 / 0
    except Exception:
        capture_exception()

    (event,) = events

    assert all(
        frame["vars"]
        for frame in event["exception"]["values"][0]["stacktrace"]["frames"]
    )


def test_include_local_variables_disabled(sentry_init, capture_events):
    sentry_init(include_local_variables=False)
    events = capture_events()
    try:
        1 / 0
    except Exception:
        capture_exception()

    (event,) = events

    assert all(
        "vars" not in frame
        for frame in event["exception"]["values"][0]["stacktrace"]["frames"]
    )


def test_include_source_context_enabled(sentry_init, capture_events):
    sentry_init(include_source_context=True)
    events = capture_events()
    try:
        1 / 0
    except Exception:
        capture_exception()

    (event,) = events

    frame = event["exception"]["values"][0]["stacktrace"]["frames"][0]
    assert "post_context" in frame
    assert "pre_context" in frame
    assert "context_line" in frame


def test_include_source_context_disabled(sentry_init, capture_events):
    sentry_init(include_source_context=False)
    events = capture_events()
    try:
        1 / 0
    except Exception:
        capture_exception()

    (event,) = events

    frame = event["exception"]["values"][0]["stacktrace"]["frames"][0]
    assert "post_context" not in frame
    assert "pre_context" not in frame
    assert "context_line" not in frame


@pytest.mark.parametrize("integrations", [[], [ExecutingIntegration()]])
def test_function_names(sentry_init, capture_events, integrations):
    sentry_init(integrations=integrations)
    events = capture_events()

    def foo():
        try:
            bar()
        except Exception:
            capture_exception()

    def bar():
        1 / 0

    foo()

    (event,) = events
    (thread,) = event["exception"]["values"]
    functions = [x["function"] for x in thread["stacktrace"]["frames"]]

    if integrations:
        assert functions == [
            "test_function_names..foo",
            "test_function_names..bar",
        ]
    else:
        assert functions == ["foo", "bar"]


def test_attach_stacktrace_enabled(sentry_init, capture_events):
    sentry_init(attach_stacktrace=True)
    events = capture_events()

    def foo():
        bar()

    def bar():
        capture_message("HI")

    foo()

    (event,) = events
    (thread,) = event["threads"]["values"]
    functions = [x["function"] for x in thread["stacktrace"]["frames"]]

    assert functions[-2:] == ["foo", "bar"]


def test_attach_stacktrace_enabled_no_locals(sentry_init, capture_events):
    sentry_init(attach_stacktrace=True, include_local_variables=False)
    events = capture_events()

    def foo():
        bar()

    def bar():
        capture_message("HI")

    foo()

    (event,) = events
    (thread,) = event["threads"]["values"]
    local_vars = [x.get("vars") for x in thread["stacktrace"]["frames"]]
    assert local_vars[-2:] == [None, None]


def test_attach_stacktrace_in_app(sentry_init, capture_events):
    sentry_init(attach_stacktrace=True, in_app_exclude=["_pytest"])
    events = capture_events()

    capture_message("hi")

    (event,) = events
    (thread,) = event["threads"]["values"]
    frames = thread["stacktrace"]["frames"]
    pytest_frames = [f for f in frames if f["module"].startswith("_pytest")]
    assert pytest_frames
    assert all(f["in_app"] is False for f in pytest_frames)


def test_attach_stacktrace_disabled(sentry_init, capture_events):
    sentry_init(attach_stacktrace=False)
    events = capture_events()
    capture_message("HI")

    (event,) = events
    assert "threads" not in event


def test_capture_event_works(sentry_init):
    sentry_init(transport=_TestTransport())
    pytest.raises(EventCapturedError, lambda: capture_event({}))
    pytest.raises(EventCapturedError, lambda: capture_event({}))


@pytest.mark.parametrize("num_messages", [10, 20])
def test_atexit(tmpdir, monkeypatch, num_messages):
    app = tmpdir.join("app.py")
    app.write(
        dedent(
            """
    import time
    from sentry_sdk import init, transport, capture_message

    def send_event(self, event):
        time.sleep(0.1)
        print(event["message"])

    transport.HttpTransport._send_event = send_event
    init("http://foobar@localhost/123", shutdown_timeout={num_messages})

    for _ in range({num_messages}):
        capture_message("HI")
    """.format(
                num_messages=num_messages
            )
        )
    )

    start = time.time()
    output = subprocess.check_output([sys.executable, str(app)])
    end = time.time()

    # Each message takes at least 0.1 seconds to process
    assert int(end - start) >= num_messages / 10

    assert output.count(b"HI") == num_messages


def test_configure_scope_available(sentry_init, request, monkeypatch):
    # Test that scope is configured if client is configured
    sentry_init()

    with configure_scope() as scope:
        assert scope is Hub.current.scope
        scope.set_tag("foo", "bar")

    calls = []

    def callback(scope):
        calls.append(scope)
        scope.set_tag("foo", "bar")

    assert configure_scope(callback) is None
    assert len(calls) == 1
    assert calls[0] is Hub.current.scope


@pytest.mark.tests_internal_exceptions
def test_client_debug_option_enabled(sentry_init, caplog):
    sentry_init(debug=True)

    Hub.current._capture_internal_exception((ValueError, ValueError("OK"), None))
    assert "OK" in caplog.text


@pytest.mark.tests_internal_exceptions
@pytest.mark.parametrize("with_client", (True, False))
def test_client_debug_option_disabled(with_client, sentry_init, caplog):
    if with_client:
        sentry_init()

    Hub.current._capture_internal_exception((ValueError, ValueError("OK"), None))
    assert "OK" not in caplog.text


def test_scope_initialized_before_client(sentry_init, capture_events):
    """
    This is a consequence of how configure_scope() works. We must
    make `configure_scope()` a noop if no client is configured. Even
    if the user later configures a client, we don't know that here.
    """
    with configure_scope() as scope:
        scope.set_tag("foo", 42)

    sentry_init()

    events = capture_events()
    capture_message("hi")
    (event,) = events

    assert "tags" not in event


def test_weird_chars(sentry_init, capture_events):
    sentry_init()
    events = capture_events()
    # fmt: off
    capture_message(u"föö".encode("latin1"))
    # fmt: on
    (event,) = events
    assert json.loads(json.dumps(event)) == event


def test_nan(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    try:
        # should_repr_strings=False
        set_tag("mynan", float("nan"))

        # should_repr_strings=True
        nan = float("nan")  # noqa
        1 / 0
    except Exception:
        capture_exception()

    (event,) = events
    frames = event["exception"]["values"][0]["stacktrace"]["frames"]
    (frame,) = frames
    assert frame["vars"]["nan"] == "nan"
    assert event["tags"]["mynan"] == "nan"


def test_cyclic_frame_vars(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    try:
        a = {}
        a["a"] = a
        1 / 0
    except Exception:
        capture_exception()

    (event,) = events
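    # The serializer breaks the reference cycle by substituting the "<cyclic>"
    # placeholder for the self-reference.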
    assert event["exception"]["values"][0]["stacktrace"]["frames"][0]["vars"]["a"] == {
        "a": ""
    }


def test_cyclic_data(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    with configure_scope() as scope:
        data = {}
        data["is_cyclic"] = data

        other_data = ""
        data["not_cyclic"] = other_data
        data["not_cyclic2"] = other_data
        scope.set_extra("foo", data)

    capture_message("hi")
    (event,) = events

    data = event["extra"]["foo"]
    assert data == {"not_cyclic2": "", "not_cyclic": "", "is_cyclic": ""}


def test_databag_depth_stripping(sentry_init, capture_events, benchmark):
    sentry_init()
    events = capture_events()

    value = ["a"]
    for _ in range(100000):
        value = [value]

    @benchmark
    def inner():
        del events[:]
        try:
            a = value  # noqa
            1 / 0
        except Exception:
            capture_exception()

        (event,) = events

        assert len(json.dumps(event)) < 10000


def test_databag_string_stripping(sentry_init, capture_events, benchmark):
    sentry_init()
    events = capture_events()

    @benchmark
    def inner():
        del events[:]
        try:
            a = "A" * 1000000  # noqa
            1 / 0
        except Exception:
            capture_exception()

        (event,) = events

        assert len(json.dumps(event)) < 10000


def test_databag_breadth_stripping(sentry_init, capture_events, benchmark):
    sentry_init()
    events = capture_events()

    @benchmark
    def inner():
        del events[:]
        try:
            a = ["a"] * 1000000  # noqa
            1 / 0
        except Exception:
            capture_exception()

        (event,) = events

        assert (
            len(event["exception"]["values"][0]["stacktrace"]["frames"][0]["vars"]["a"])
            == MAX_DATABAG_BREADTH
        )
        assert len(json.dumps(event)) < 10000


@pytest.mark.skipif(not HAS_CHAINED_EXCEPTIONS, reason="Only works on 3.3+")
def test_chained_exceptions(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    try:
        try:
            raise ValueError()
        except Exception:
            1 / 0
    except Exception:
        capture_exception()

    (event,) = events

    e1, e2 = event["exception"]["values"]

    # This is the order all other SDKs send chained exceptions in. Including
    # Raven-Python.

    assert e1["type"] == "ValueError"
    assert e2["type"] == "ZeroDivisionError"


@pytest.mark.tests_internal_exceptions
def test_broken_mapping(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    class C(Mapping):
        def broken(self, *args, **kwargs):
            raise Exception("broken")

        __getitem__ = broken
        __setitem__ = broken
        __delitem__ = broken
        __iter__ = broken
        __len__ = broken

        def __repr__(self):
            return "broken"

    try:
        a = C()  # noqa
        1 / 0
    except Exception:
        capture_exception()

    (event,) = events
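    # A raising __repr__ must not break capture; the serializer falls back to
    # the "<broken repr>" placeholder.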
    assert (
        event["exception"]["values"][0]["stacktrace"]["frames"][0]["vars"]["a"]
        == ""
    )


def test_mapping_sends_exception(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    class C(Mapping):
        def __iter__(self):
            try:
                1 / 0
            except ZeroDivisionError:
                capture_exception()
            yield "hi"

        def __len__(self):
            """List length"""
            return 1

        def __getitem__(self, ii):
            """Get a list item"""
            if ii == "hi":
                return "hi"

            raise KeyError()

    try:
        a = C()  # noqa
        1 / 0
    except Exception:
        capture_exception()

    (event,) = events

    assert event["exception"]["values"][0]["stacktrace"]["frames"][0]["vars"]["a"] == {
        "hi": "'hi'"
    }


def test_object_sends_exception(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    class C(object):
        def __repr__(self):
            try:
                1 / 0
            except ZeroDivisionError:
                capture_exception()
            return "hi, i am a repr"

    try:
        a = C()  # noqa
        1 / 0
    except Exception:
        capture_exception()

    (event,) = events

    assert (
        event["exception"]["values"][0]["stacktrace"]["frames"][0]["vars"]["a"]
        == "hi, i am a repr"
    )


def test_errno_errors(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    class FooError(Exception):
        errno = 69

    capture_exception(FooError())

    (event,) = events

    (exception,) = event["exception"]["values"]
    assert exception["mechanism"]["meta"]["errno"]["number"] == 69


def test_non_string_variables(sentry_init, capture_events):
    """There is some extremely terrible code in the wild that
    inserts non-strings as variable names into `locals()`."""

    sentry_init()
    events = capture_events()

    try:
        locals()[42] = True
        1 / 0
    except ZeroDivisionError:
        capture_exception()

    (event,) = events

    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    (frame,) = exception["stacktrace"]["frames"]
    assert frame["vars"]["42"] == "True"


def test_dict_changed_during_iteration(sentry_init, capture_events):
    """
    Some versions of Bottle modify the WSGI environment inside of this __repr__
    impl: https://github.com/bottlepy/bottle/blob/0.12.16/bottle.py#L1386

    See https://github.com/getsentry/sentry-python/pull/298 for discussion
    """
    sentry_init(send_default_pii=True)
    events = capture_events()

    class TooSmartClass(object):
        def __init__(self, environ):
            self.environ = environ

        def __repr__(self):
            if "my_representation" in self.environ:
                return self.environ["my_representation"]

            self.environ["my_representation"] = ""
            return self.environ["my_representation"]

    try:
        environ = {}
        environ["a"] = TooSmartClass(environ)
        1 / 0
    except ZeroDivisionError:
        capture_exception()

    (event,) = events
    (exception,) = event["exception"]["values"]
    (frame,) = exception["stacktrace"]["frames"]
    assert frame["vars"]["environ"] == {"a": ""}


@pytest.mark.parametrize(
    "dsn",
    [
        "http://894b7d594095440f8dfea9b300e6f572@localhost:8000/2",
        "http://894b7d594095440f8dfea9b300e6f572@localhost:8000/2",
    ],
)
def test_init_string_types(dsn, sentry_init):
    # Allow unicode strings on Python 3 and both on Python 2 (due to
    # unicode_literals)
    #
    # Supporting bytes on Python 3 is not really wrong but probably would be
    # extra code
    sentry_init(dsn)
    assert (
        Hub.current.client.dsn
        == "http://894b7d594095440f8dfea9b300e6f572@localhost:8000/2"
    )


def test_sending_events_with_tracing():
    """
    Tests for calling the right transport method (capture_event vs
    capture_envelope) from the SDK client for different data types.
    """

    envelopes = []
    events = []

    class CustomTransport(Transport):
        def capture_envelope(self, envelope):
            envelopes.append(envelope)

        def capture_event(self, event):
            events.append(event)

    with Hub(Client(enable_tracing=True, transport=CustomTransport())):
        try:
            1 / 0
        except Exception:
            event_id = capture_exception()

        # Assert error events get passed in via capture_envelope
        assert not events
        envelope = envelopes.pop()
        (item,) = envelope.items
        assert item.data_category == "error"
        assert item.headers.get("type") == "event"
        assert item.get_event()["event_id"] == event_id

        with start_transaction(name="foo"):
            pass

        # Assert transactions get passed in via capture_envelope
        assert not events
        envelope = envelopes.pop()

        (item,) = envelope.items
        assert item.data_category == "transaction"
        assert item.headers.get("type") == "transaction"

    assert not envelopes
    assert not events


def test_sending_events_with_no_tracing():
    """
    Tests for calling the right transport method (capture_event vs
    capture_envelope) from the SDK client for different data types.
    """

    envelopes = []
    events = []

    class CustomTransport(Transport):
        def capture_envelope(self, envelope):
            envelopes.append(envelope)

        def capture_event(self, event):
            events.append(event)

    with Hub(Client(enable_tracing=False, transport=CustomTransport())):
        try:
            1 / 0
        except Exception:
            event_id = capture_exception()

        # Assert error events get passed in via capture_event
        assert not envelopes
        event = events.pop()

        assert event["event_id"] == event_id
        assert "type" not in event

        with start_transaction(name="foo"):
            pass

        # Assert the transaction is dropped entirely when tracing is disabled
        assert not events
        assert not envelopes

    assert not envelopes
    assert not events


@pytest.mark.parametrize(
    "sdk_options, expected_breadcrumbs",
    [({}, DEFAULT_MAX_BREADCRUMBS), ({"max_breadcrumbs": 50}, 50)],
)
def test_max_breadcrumbs_option(
    sentry_init, capture_events, sdk_options, expected_breadcrumbs
):
    sentry_init(sdk_options)
    events = capture_events()

    for _ in range(1231):
        add_breadcrumb({"type": "sourdough"})

    capture_message("dogs are great")

    assert len(events[0]["breadcrumbs"]["values"]) == expected_breadcrumbs


def test_multiple_positional_args(sentry_init):
    with pytest.raises(TypeError) as exinfo:
        sentry_init(1, None)
    assert "Only single positional argument is expected" in str(exinfo.value)


@pytest.mark.parametrize(
    "sdk_options, expected_data_length",
    [
        ({}, DEFAULT_MAX_VALUE_LENGTH),
        ({"max_value_length": 1800}, 1800),
    ],
)
def test_max_value_length_option(
    sentry_init, capture_events, sdk_options, expected_data_length
):
    sentry_init(sdk_options)
    events = capture_events()

    capture_message("a" * 2000)

    assert len(events[0]["message"]) == expected_data_length


@pytest.mark.parametrize(
    "client_option,env_var_value,debug_output_expected",
    [
        (None, "", False),
        (None, "t", True),
        (None, "1", True),
        (None, "True", True),
        (None, "true", True),
        (None, "f", False),
        (None, "0", False),
        (None, "False", False),
        (None, "false", False),
        (None, "xxx", False),
        (True, "", True),
        (True, "t", True),
        (True, "1", True),
        (True, "True", True),
        (True, "true", True),
        (True, "f", True),
        (True, "0", True),
        (True, "False", True),
        (True, "false", True),
        (True, "xxx", True),
        (False, "", False),
        (False, "t", False),
        (False, "1", False),
        (False, "True", False),
        (False, "true", False),
        (False, "f", False),
        (False, "0", False),
        (False, "False", False),
        (False, "false", False),
        (False, "xxx", False),
    ],
)
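# Summarizing the matrix above: SENTRY_DEBUG is treated as truthy only for
# "t", "1", "True" and "true", and an explicit `debug=` client option always
# overrides the environment variable.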
@pytest.mark.tests_internal_exceptions
def test_debug_option(
    sentry_init,
    monkeypatch,
    caplog,
    client_option,
    env_var_value,
    debug_output_expected,
):
    monkeypatch.setenv("SENTRY_DEBUG", env_var_value)

    if client_option is None:
        sentry_init()
    else:
        sentry_init(debug=client_option)

    Hub.current._capture_internal_exception(
        (ValueError, ValueError("something is wrong"), None)
    )
    if debug_output_expected:
        assert "something is wrong" in caplog.text
    else:
        assert "something is wrong" not in caplog.text


class IssuesSamplerTestConfig:
    def __init__(
        self,
        expected_events,
        sampler_function=None,
        sample_rate=None,
        exception_to_raise=Exception,
    ):
        # type: (int, Optional[Callable[[Event], Union[float, bool]]], Optional[float], type[Exception]) -> None
        self.sampler_function_mock = (
            None
            if sampler_function is None
            else mock.MagicMock(side_effect=sampler_function)
        )
        self.expected_events = expected_events
        self.sample_rate = sample_rate
        self.exception_to_raise = exception_to_raise

    def init_sdk(self, sentry_init):
        # type: (Callable[[*Any], None]) -> None
        sentry_init(
            error_sampler=self.sampler_function_mock, sample_rate=self.sample_rate
        )

    def raise_exception(self):
        # type: () -> None
        raise self.exception_to_raise()


@mock.patch("sentry_sdk.client.random.random", return_value=0.618)
@pytest.mark.parametrize(
    "test_config",
    (
        # Baseline test with error_sampler only, both floats and bools
        IssuesSamplerTestConfig(sampler_function=lambda *_: 1.0, expected_events=1),
        IssuesSamplerTestConfig(sampler_function=lambda *_: 0.7, expected_events=1),
        IssuesSamplerTestConfig(sampler_function=lambda *_: 0.6, expected_events=0),
        IssuesSamplerTestConfig(sampler_function=lambda *_: 0.0, expected_events=0),
        IssuesSamplerTestConfig(sampler_function=lambda *_: True, expected_events=1),
        IssuesSamplerTestConfig(sampler_function=lambda *_: False, expected_events=0),
        # Baseline test with sample_rate only
        IssuesSamplerTestConfig(sample_rate=1.0, expected_events=1),
        IssuesSamplerTestConfig(sample_rate=0.7, expected_events=1),
        IssuesSamplerTestConfig(sample_rate=0.6, expected_events=0),
        IssuesSamplerTestConfig(sample_rate=0.0, expected_events=0),
        # error_sampler takes precedence over sample_rate
        IssuesSamplerTestConfig(
            sampler_function=lambda *_: 1.0, sample_rate=0.0, expected_events=1
        ),
        IssuesSamplerTestConfig(
            sampler_function=lambda *_: 0.0, sample_rate=1.0, expected_events=0
        ),
        # Different sample rates based on exception, retrieved both from event and hint
        IssuesSamplerTestConfig(
            sampler_function=lambda event, _: {
                "ZeroDivisionError": 1.0,
                "AttributeError": 0.0,
            }[event["exception"]["values"][0]["type"]],
            exception_to_raise=ZeroDivisionError,
            expected_events=1,
        ),
        IssuesSamplerTestConfig(
            sampler_function=lambda event, _: {
                "ZeroDivisionError": 1.0,
                "AttributeError": 0.0,
            }[event["exception"]["values"][0]["type"]],
            exception_to_raise=AttributeError,
            expected_events=0,
        ),
        IssuesSamplerTestConfig(
            sampler_function=lambda _, hint: {
                ZeroDivisionError: 1.0,
                AttributeError: 0.0,
            }[hint["exc_info"][0]],
            exception_to_raise=ZeroDivisionError,
            expected_events=1,
        ),
        IssuesSamplerTestConfig(
            sampler_function=lambda _, hint: {
                ZeroDivisionError: 1.0,
                AttributeError: 0.0,
            }[hint["exc_info"][0]],
            exception_to_raise=AttributeError,
            expected_events=0,
        ),
        # If sampler returns invalid value, we should still send the event
        IssuesSamplerTestConfig(
            sampler_function=lambda *_: "This is an invalid return value for the sampler",
            expected_events=1,
        ),
    ),
)
def test_error_sampler(_, sentry_init, capture_events, test_config):
    test_config.init_sdk(sentry_init)

    events = capture_events()

    try:
        test_config.raise_exception()
    except Exception:
        capture_exception()

    assert len(events) == test_config.expected_events

    if test_config.sampler_function_mock is not None:
        assert test_config.sampler_function_mock.call_count == 1

        # Ensure two arguments (the event and hint) were passed to the sampler function
        assert len(test_config.sampler_function_mock.call_args[0]) == 2
sentry-python-1.39.2/tests/test_conftest.py
import pytest


@pytest.mark.parametrize(
    "test_string, expected_result",
    [
        # type matches
        ("dogs are great!", True),  # full containment - beginning
        ("go, dogs, go!", True),  # full containment - middle
        ("I like dogs", True),  # full containment - end
        ("dogs", True),  # equality
        ("", False),  # reverse containment
        ("dog", False),  # reverse containment
        ("good dog!", False),  # partial overlap
        ("cats", False),  # no overlap
        # type mismatches
        (1231, False),
        (11.21, False),
        ([], False),
        ({}, False),
        (True, False),
    ],
)
def test_string_containing(
    test_string, expected_result, StringContaining  # noqa: N803
):
    assert (test_string == StringContaining("dogs")) is expected_result


@pytest.mark.parametrize(
    "test_dict, expected_result",
    [
        # type matches
        ({"dogs": "yes", "cats": "maybe", "spiders": "nope"}, True),  # full containment
        ({"dogs": "yes", "cats": "maybe"}, True),  # equality
        ({}, False),  # reverse containment
        ({"dogs": "yes"}, False),  # reverse containment
        ({"dogs": "yes", "birds": "only outside"}, False),  # partial overlap
        ({"coyotes": "from afar"}, False),  # no overlap
        # type mismatches
        ('{"dogs": "yes", "cats": "maybe"}', False),
        (1231, False),
        (11.21, False),
        ([], False),
        (True, False),
    ],
)
def test_dictionary_containing(
    test_dict, expected_result, DictionaryContaining  # noqa: N803
):
    assert (
        test_dict == DictionaryContaining({"dogs": "yes", "cats": "maybe"})
    ) is expected_result


class Animal(object):  # noqa: B903
    def __init__(self, name=None, age=None, description=None):
        self.name = name
        self.age = age
        self.description = description


class Dog(Animal):
    pass


class Cat(Animal):
    pass


@pytest.mark.parametrize(
    "test_obj, type_and_attrs_result, type_only_result, attrs_only_result",
    [
        # type matches
        (Dog("Maisey", 7, "silly"), True, True, True),  # full attr containment
        (Dog("Maisey", 7), True, True, True),  # type and attr equality
        (Dog(), False, True, False),  # reverse attr containment
        (Dog("Maisey"), False, True, False),  # reverse attr containment
        (Dog("Charlie", 7, "goofy"), False, True, False),  # partial attr overlap
        (Dog("Bodhi", 6, "floppy"), False, True, False),  # no attr overlap
        # type mismatches
        (Cat("Maisey", 7), False, False, True),  # attr equality
        (Cat("Piper", 1, "doglike"), False, False, False),
        ("Good girl, Maisey", False, False, False),
        ({"name": "Maisey", "age": 7}, False, False, False),
        (1231, False, False, False),
        (11.21, False, False, False),
        ([], False, False, False),
        (True, False, False, False),
    ],
)
def test_object_described_by(
    test_obj,
    type_and_attrs_result,
    type_only_result,
    attrs_only_result,
    ObjectDescribedBy,  # noqa: N803
):
    assert (
        test_obj == ObjectDescribedBy(type=Dog, attrs={"name": "Maisey", "age": 7})
    ) is type_and_attrs_result

    assert (test_obj == ObjectDescribedBy(type=Dog)) is type_only_result

    assert (
        test_obj == ObjectDescribedBy(attrs={"name": "Maisey", "age": 7})
    ) is attrs_only_result
sentry-python-1.39.2/tests/test_crons.py
import pytest
import uuid

import sentry_sdk
from sentry_sdk.crons import capture_checkin

from sentry_sdk import Hub, configure_scope, set_level

try:
    from unittest import mock  # python 3.3 and above
except ImportError:
    import mock  # python < 3.3


@sentry_sdk.monitor(monitor_slug="abc123")
def _hello_world(name):
    return "Hello, {}".format(name)


@sentry_sdk.monitor(monitor_slug="def456")
def _break_world(name):
    1 / 0
    return "Hello, {}".format(name)


def _hello_world_contextmanager(name):
    with sentry_sdk.monitor(monitor_slug="abc123"):
        return "Hello, {}".format(name)


def _break_world_contextmanager(name):
    with sentry_sdk.monitor(monitor_slug="def456"):
        1 / 0
        return "Hello, {}".format(name)


def test_decorator(sentry_init):
    sentry_init()

    with mock.patch(
        "sentry_sdk.crons.decorator.capture_checkin"
    ) as fake_capture_checkin:
        result = _hello_world("Grace")
        assert result == "Hello, Grace"

        # Check for initial checkin
        fake_capture_checkin.assert_has_calls(
            [
                mock.call(monitor_slug="abc123", status="in_progress"),
            ]
        )

        # Check for final checkin
        assert fake_capture_checkin.call_args[1]["monitor_slug"] == "abc123"
        assert fake_capture_checkin.call_args[1]["status"] == "ok"
        assert fake_capture_checkin.call_args[1]["duration"]
        assert fake_capture_checkin.call_args[1]["check_in_id"]


def test_decorator_error(sentry_init):
    sentry_init()

    with mock.patch(
        "sentry_sdk.crons.decorator.capture_checkin"
    ) as fake_capture_checkin:
        with pytest.raises(ZeroDivisionError):
            result = _break_world("Grace")

        assert "result" not in locals()

        # Check for initial checkin
        fake_capture_checkin.assert_has_calls(
            [
                mock.call(monitor_slug="def456", status="in_progress"),
            ]
        )

        # Check for final checkin
        assert fake_capture_checkin.call_args[1]["monitor_slug"] == "def456"
        assert fake_capture_checkin.call_args[1]["status"] == "error"
        assert fake_capture_checkin.call_args[1]["duration"]
        assert fake_capture_checkin.call_args[1]["check_in_id"]


def test_contextmanager(sentry_init):
    sentry_init()

    with mock.patch(
        "sentry_sdk.crons.decorator.capture_checkin"
    ) as fake_capture_checkin:
        result = _hello_world_contextmanager("Grace")
        assert result == "Hello, Grace"

        # Check for initial checkin
        fake_capture_checkin.assert_has_calls(
            [
                mock.call(monitor_slug="abc123", status="in_progress"),
            ]
        )

        # Check for final checkin
        assert fake_capture_checkin.call_args[1]["monitor_slug"] == "abc123"
        assert fake_capture_checkin.call_args[1]["status"] == "ok"
        assert fake_capture_checkin.call_args[1]["duration"]
        assert fake_capture_checkin.call_args[1]["check_in_id"]


def test_contextmanager_error(sentry_init):
    sentry_init()

    with mock.patch(
        "sentry_sdk.crons.decorator.capture_checkin"
    ) as fake_capture_checkin:
        with pytest.raises(ZeroDivisionError):
            result = _break_world_contextmanager("Grace")

        assert "result" not in locals()

        # Check for initial checkin
        fake_capture_checkin.assert_has_calls(
            [
                mock.call(monitor_slug="def456", status="in_progress"),
            ]
        )

        # Check for final checkin
        assert fake_capture_checkin.call_args[1]["monitor_slug"] == "def456"
        assert fake_capture_checkin.call_args[1]["status"] == "error"
        assert fake_capture_checkin.call_args[1]["duration"]
        assert fake_capture_checkin.call_args[1]["check_in_id"]


def test_capture_checkin_simple(sentry_init):
    sentry_init()

    check_in_id = capture_checkin(
        monitor_slug="abc123",
        check_in_id="112233",
        status=None,
        duration=None,
    )
    assert check_in_id == "112233"


def test_sample_rate_doesnt_affect_crons(sentry_init, capture_envelopes):
    sentry_init(sample_rate=0)
    envelopes = capture_envelopes()

    capture_checkin(check_in_id="112233")

    assert len(envelopes) == 1

    check_in = envelopes[0].items[0].payload.json
    assert check_in["check_in_id"] == "112233"


def test_capture_checkin_new_id(sentry_init):
    sentry_init()

    with mock.patch("uuid.uuid4") as mock_uuid:
        mock_uuid.return_value = uuid.UUID("a8098c1a-f86e-11da-bd1a-00112444be1e")
        check_in_id = capture_checkin(
            monitor_slug="abc123",
            check_in_id=None,
            status=None,
            duration=None,
        )

        assert check_in_id == "a8098c1af86e11dabd1a00112444be1e"


def test_end_to_end(sentry_init, capture_envelopes):
    sentry_init()
    envelopes = capture_envelopes()

    capture_checkin(
        monitor_slug="abc123",
        check_in_id="112233",
        duration=123,
        status="ok",
    )

    check_in = envelopes[0].items[0].payload.json

    # Check for final checkin
    assert check_in["check_in_id"] == "112233"
    assert check_in["monitor_slug"] == "abc123"
    assert check_in["status"] == "ok"
    assert check_in["duration"] == 123


def test_monitor_config(sentry_init, capture_envelopes):
    sentry_init()
    envelopes = capture_envelopes()

    monitor_config = {
        "schedule": {"type": "crontab", "value": "0 0 * * *"},
    }

    capture_checkin(monitor_slug="abc123", monitor_config=monitor_config)
    check_in = envelopes[0].items[0].payload.json

    # Check for final checkin
    assert check_in["monitor_slug"] == "abc123"
    assert check_in["monitor_config"] == monitor_config

    # Without passing a monitor_config the field is not in the checkin
    capture_checkin(monitor_slug="abc123")
    check_in = envelopes[1].items[0].payload.json

    assert check_in["monitor_slug"] == "abc123"
    assert "monitor_config" not in check_in


def test_capture_checkin_sdk_not_initialized():
    # Tests that capture_checkin does not raise an error when the Sentry SDK
    # has not been initialized; sentry_init() is intentionally omitted.
    check_in_id = capture_checkin(
        monitor_slug="abc123",
        check_in_id="112233",
        status=None,
        duration=None,
    )
    assert check_in_id == "112233"


def test_scope_data_in_checkin(sentry_init, capture_envelopes):
    sentry_init()
    envelopes = capture_envelopes()

    valid_keys = [
        # Mandatory event keys
        "type",
        "event_id",
        "timestamp",
        "platform",
        # Optional event keys
        "release",
        "environment",
        # Mandatory check-in specific keys
        "check_in_id",
        "monitor_slug",
        "status",
        # Optional check-in specific keys
        "duration",
        "monitor_config",
        "contexts",  # an event processor adds this
        # TODO: Check whether these fields are actually valid for check-ins:
        "_meta",
        "tags",
        "extra",  # an event processor adds this
        "modules",
        "server_name",
        "sdk",
    ]

    hub = Hub.current
    with configure_scope() as scope:
        # Add some data to the scope
        set_level("warning")
        hub.add_breadcrumb(message="test breadcrumb")
        scope.set_tag("test_tag", "test_value")
        scope.set_extra("test_extra", "test_value")
        scope.set_context("test_context", {"test_key": "test_value"})

        capture_checkin(
            monitor_slug="abc123",
            check_in_id="112233",
            status="ok",
            duration=123,
        )

        (envelope,) = envelopes
        check_in_event = envelope.items[0].payload.json

        invalid_keys = []
        for key in check_in_event.keys():
            if key not in valid_keys:
                invalid_keys.append(key)

        assert len(invalid_keys) == 0, "Unexpected keys found in checkin: {}".format(
            invalid_keys
        )
sentry-python-1.39.2/tests/test_envelope.py
from sentry_sdk.envelope import Envelope
from sentry_sdk.session import Session
from sentry_sdk import capture_event
import sentry_sdk.client


def generate_transaction_item():
    return {
        "event_id": "15210411201320122115110420122013",
        "type": "transaction",
        "transaction": "/interactions/other-dogs/new-dog",
        "start_timestamp": 1353568872.11122131,
        "timestamp": 1356942672.09040815,
        "contexts": {
            "trace": {
                "trace_id": "12312012123120121231201212312012",
                "span_id": "0415201309082013",
                "parent_span_id": None,
                "description": "",
                "op": "greeting.sniff",
                "dynamic_sampling_context": {
                    "trace_id": "12312012123120121231201212312012",
                    "sample_rate": "1.0",
                    "environment": "dogpark",
                    "release": "off.leash.park",
                    "public_key": "dogsarebadatkeepingsecrets",
                    "user_segment": "bigs",
                    "transaction": "/interactions/other-dogs/new-dog",
                },
            }
        },
        "spans": [
            {
                "description": "",
                "op": "greeting.sniff",
                "parent_span_id": None,
                "span_id": "0415201309082013",
                "start_timestamp": 1353568872.11122131,
                "timestamp": 1356942672.09040815,
                "trace_id": "12312012123120121231201212312012",
            }
        ],
    }


def test_add_and_get_basic_event():
    envelope = Envelope()

    expected = {"message": "Hello, World!"}
    envelope.add_event(expected)

    assert envelope.get_event() == {"message": "Hello, World!"}


def test_add_and_get_transaction_event():
    envelope = Envelope()

    transaction_item = generate_transaction_item()
    transaction_item.update({"event_id": "a" * 32})
    envelope.add_transaction(transaction_item)

    # typically it should not be possible to add a second transaction to an
    # envelope, but we do it anyway to pin down the behavior
    another_transaction_item = generate_transaction_item()
    envelope.add_transaction(another_transaction_item)

    # should only fetch the first inserted transaction event
    assert envelope.get_transaction_event() == transaction_item


def test_add_and_get_session():
    envelope = Envelope()

    expected = Session()
    envelope.add_session(expected)

    for item in envelope:
        if item.type == "session":
            assert item.payload.json == expected.to_json()


def test_envelope_headers(sentry_init, capture_envelopes, monkeypatch):
    monkeypatch.setattr(
        sentry_sdk.client,
        "format_timestamp",
        lambda x: "2012-11-21T12:31:12.415908Z",
    )

    sentry_init(
        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
        traces_sample_rate=1.0,
    )
    envelopes = capture_envelopes()

    capture_event(generate_transaction_item())

    assert len(envelopes) == 1
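
    # The envelope's `trace` header below is populated from the transaction's
    # dynamic_sampling_context (see generate_transaction_item above).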

    assert envelopes[0].headers == {
        "event_id": "15210411201320122115110420122013",
        "sent_at": "2012-11-21T12:31:12.415908Z",
        "trace": {
            "trace_id": "12312012123120121231201212312012",
            "sample_rate": "1.0",
            "environment": "dogpark",
            "release": "off.leash.park",
            "public_key": "dogsarebadatkeepingsecrets",
            "user_segment": "bigs",
            "transaction": "/interactions/other-dogs/new-dog",
        },
    }


def test_envelope_with_sized_items():
    """
    Tests that it successfully parses envelopes with
    the item size specified in the header
    """
    envelope_raw = (
        b'{"event_id":"9ec79c33ec9942ab8353589fcb2e04dc"}\n'
        b'{"type":"type1","length":4 }\n1234\n'
        b'{"type":"type2","length":4 }\nabcd\n'
        b'{"type":"type3","length":0}\n\n'
        b'{"type":"type4","length":4 }\nab12\n'
    )
    envelope_raw_eof_terminated = envelope_raw[:-1]

    for envelope in (envelope_raw, envelope_raw_eof_terminated):
        actual = Envelope.deserialize(envelope)

        items = [item for item in actual]

        assert len(items) == 4

        assert items[0].type == "type1"
        assert items[0].get_bytes() == b"1234"

        assert items[1].type == "type2"
        assert items[1].get_bytes() == b"abcd"

        assert items[2].type == "type3"
        assert items[2].get_bytes() == b""

        assert items[3].type == "type4"
        assert items[3].get_bytes() == b"ab12"

        assert actual.headers["event_id"] == "9ec79c33ec9942ab8353589fcb2e04dc"


def test_envelope_with_implicitly_sized_items():
    """
    Tests that it successfully parses envelopes with
    the item size not specified in the header
    """
    envelope_raw = (
        b'{"event_id":"9ec79c33ec9942ab8353589fcb2e04dc"}\n'
        b'{"type":"type1"}\n1234\n'
        b'{"type":"type2"}\nabcd\n'
        b'{"type":"type3"}\n\n'
        b'{"type":"type4"}\nab12\n'
    )
    envelope_raw_eof_terminated = envelope_raw[:-1]

    for envelope in (envelope_raw, envelope_raw_eof_terminated):
        actual = Envelope.deserialize(envelope)
        assert actual.headers["event_id"] == "9ec79c33ec9942ab8353589fcb2e04dc"

        items = [item for item in actual]

        assert len(items) == 4

        assert items[0].type == "type1"
        assert items[0].get_bytes() == b"1234"

        assert items[1].type == "type2"
        assert items[1].get_bytes() == b"abcd"

        assert items[2].type == "type3"
        assert items[2].get_bytes() == b""

        assert items[3].type == "type4"
        assert items[3].get_bytes() == b"ab12"


def test_envelope_with_two_attachments():
    """
    Test that items are correctly parsed in an envelope with two size-specified items
    """
    two_attachments = (
        b'{"event_id":"9ec79c33ec9942ab8353589fcb2e04dc","dsn":"https://e12d836b15bb49d7bbf99e64295d995b:@sentry.io/42"}\n'
        + b'{"type":"attachment","length":10,"content_type":"text/plain","filename":"hello.txt"}\n'
        + b"\xef\xbb\xbfHello\r\n\n"
        + b'{"type":"event","length":41,"content_type":"application/json","filename":"application.log"}\n'
        + b'{"message":"hello world","level":"error"}\n'
    )
    two_attachments_eof_terminated = two_attachments[
        :-1
    ]  # the last \n is optional; without it this should still be a valid envelope

    for envelope_raw in (two_attachments, two_attachments_eof_terminated):
        actual = Envelope.deserialize(envelope_raw)
        items = [item for item in actual]

        assert len(items) == 2
        assert items[0].get_bytes() == b"\xef\xbb\xbfHello\r\n"
        assert items[1].payload.json == {"message": "hello world", "level": "error"}


def test_envelope_with_empty_attachments():
    """
    Test that items are correctly parsed in an envelope with two zero-length items (with the size specified in the header)
    """
    two_empty_attachments = (
        b'{"event_id":"9ec79c33ec9942ab8353589fcb2e04dc"}\n'
        + b'{"type":"attachment","length":0}\n\n'
        + b'{"type":"attachment","length":0}\n\n'
    )

    two_empty_attachments_eof_terminated = two_empty_attachments[
        :-1
    ]  # the last \n is optional; without it this should still be a valid envelope

    for envelope_raw in (two_empty_attachments, two_empty_attachments_eof_terminated):
        actual = Envelope.deserialize(envelope_raw)
        items = [item for item in actual]

        assert len(items) == 2
        assert items[0].get_bytes() == b""
        assert items[1].get_bytes() == b""


def test_envelope_without_headers():
    """
    Test that an envelope without headers is parsed successfully
    """
    envelope_without_headers = (
        b"{}\n" + b'{"type":"session"}\n' + b'{"started": "2020-02-07T14:16:00Z"}'
    )
    actual = Envelope.deserialize(envelope_without_headers)
    items = [item for item in actual]

    assert len(items) == 1
    assert items[0].payload.get_bytes() == b'{"started": "2020-02-07T14:16:00Z"}'
sentry-python-1.39.2/tests/test_exceptiongroup.py
import sys
import pytest

from sentry_sdk.utils import event_from_exception


try:
    # Python 3.11
    from builtins import ExceptionGroup  # type: ignore
except ImportError:
    # Python 3.10 and below
    ExceptionGroup = None


minimum_python_311 = pytest.mark.skipif(
    sys.version_info < (3, 11), reason="ExceptionGroup tests need Python >= 3.11"
)


@minimum_python_311
def test_exceptiongroup():
    exception_group = None

    try:
        try:
            raise RuntimeError("something")
        except RuntimeError:
            raise ExceptionGroup(
                "nested",
                [
                    ValueError(654),
                    ExceptionGroup(
                        "imports",
                        [
                            ImportError("no_such_module"),
                            ModuleNotFoundError("another_module"),
                        ],
                    ),
                    TypeError("int"),
                ],
            )
    except ExceptionGroup as e:
        exception_group = e

    (event, _) = event_from_exception(
        exception_group,
        client_options={
            "include_local_variables": True,
            "include_source_context": True,
            "max_value_length": 1024,
        },
        mechanism={"type": "test_suite", "handled": False},
    )

    values = event["exception"]["values"]

    # For this test the stacktrace and the module is not important
    for x in values:
        if "stacktrace" in x:
            del x["stacktrace"]
        if "module" in x:
            del x["module"]

    expected_values = [
        {
            "mechanism": {
                "exception_id": 6,
                "handled": False,
                "parent_id": 0,
                "source": "exceptions[2]",
                "type": "chained",
            },
            "type": "TypeError",
            "value": "int",
        },
        {
            "mechanism": {
                "exception_id": 5,
                "handled": False,
                "parent_id": 3,
                "source": "exceptions[1]",
                "type": "chained",
            },
            "type": "ModuleNotFoundError",
            "value": "another_module",
        },
        {
            "mechanism": {
                "exception_id": 4,
                "handled": False,
                "parent_id": 3,
                "source": "exceptions[0]",
                "type": "chained",
            },
            "type": "ImportError",
            "value": "no_such_module",
        },
        {
            "mechanism": {
                "exception_id": 3,
                "handled": False,
                "is_exception_group": True,
                "parent_id": 0,
                "source": "exceptions[1]",
                "type": "chained",
            },
            "type": "ExceptionGroup",
            "value": "imports",
        },
        {
            "mechanism": {
                "exception_id": 2,
                "handled": False,
                "parent_id": 0,
                "source": "exceptions[0]",
                "type": "chained",
            },
            "type": "ValueError",
            "value": "654",
        },
        {
            "mechanism": {
                "exception_id": 1,
                "handled": False,
                "parent_id": 0,
                "source": "__context__",
                "type": "chained",
            },
            "type": "RuntimeError",
            "value": "something",
        },
        {
            "mechanism": {
                "exception_id": 0,
                "handled": False,
                "is_exception_group": True,
                "type": "test_suite",
            },
            "type": "ExceptionGroup",
            "value": "nested",
        },
    ]

    assert values == expected_values


@minimum_python_311
def test_exceptiongroup_simple():
    exception_group = None

    try:
        raise ExceptionGroup(
            "simple",
            [
                RuntimeError("something strange's going on"),
            ],
        )
    except ExceptionGroup as e:
        exception_group = e

    (event, _) = event_from_exception(
        exception_group,
        client_options={
            "include_local_variables": True,
            "include_source_context": True,
            "max_value_length": 1024,
        },
        mechanism={"type": "test_suite", "handled": False},
    )

    exception_values = event["exception"]["values"]

    assert len(exception_values) == 2

    assert exception_values[0]["type"] == "RuntimeError"
    assert exception_values[0]["value"] == "something strange's going on"
    assert exception_values[0]["mechanism"] == {
        "type": "chained",
        "handled": False,
        "exception_id": 1,
        "source": "exceptions[0]",
        "parent_id": 0,
    }

    assert exception_values[1]["type"] == "ExceptionGroup"
    assert exception_values[1]["value"] == "simple"
    assert exception_values[1]["mechanism"] == {
        "type": "test_suite",
        "handled": False,
        "exception_id": 0,
        "is_exception_group": True,
    }
    frame = exception_values[1]["stacktrace"]["frames"][0]
    assert frame["module"] == "tests.test_exceptiongroup"
    assert frame["context_line"] == "        raise ExceptionGroup("


@minimum_python_311
def test_exception_chain_cause():
    exception_chain_cause = ValueError("Exception with cause")
    exception_chain_cause.__context__ = TypeError("Exception in __context__")
    exception_chain_cause.__cause__ = TypeError(
        "Exception in __cause__"
    )  # this implicitly sets exception_chain_cause.__suppress_context__=True

    (event, _) = event_from_exception(
        exception_chain_cause,
        client_options={
            "include_local_variables": True,
            "include_source_context": True,
            "max_value_length": 1024,
        },
        mechanism={"type": "test_suite", "handled": False},
    )

    expected_exception_values = [
        {
            "mechanism": {
                "handled": False,
                "type": "test_suite",
            },
            "module": None,
            "type": "TypeError",
            "value": "Exception in __cause__",
        },
        {
            "mechanism": {
                "handled": False,
                "type": "test_suite",
            },
            "module": None,
            "type": "ValueError",
            "value": "Exception with cause",
        },
    ]

    exception_values = event["exception"]["values"]
    assert exception_values == expected_exception_values


@minimum_python_311
def test_exception_chain_context():
    exception_chain_context = ValueError("Exception with context")
    exception_chain_context.__context__ = TypeError("Exception in __context__")

    (event, _) = event_from_exception(
        exception_chain_context,
        client_options={
            "include_local_variables": True,
            "include_source_context": True,
            "max_value_length": 1024,
        },
        mechanism={"type": "test_suite", "handled": False},
    )

    expected_exception_values = [
        {
            "mechanism": {
                "handled": False,
                "type": "test_suite",
            },
            "module": None,
            "type": "TypeError",
            "value": "Exception in __context__",
        },
        {
            "mechanism": {
                "handled": False,
                "type": "test_suite",
            },
            "module": None,
            "type": "ValueError",
            "value": "Exception with context",
        },
    ]

    exception_values = event["exception"]["values"]
    assert exception_values == expected_exception_values


@minimum_python_311
def test_simple_exception():
    simple_exception = ValueError("A simple exception")

    (event, _) = event_from_exception(
        simple_exception,
        client_options={
            "include_local_variables": True,
            "include_source_context": True,
            "max_value_length": 1024,
        },
        mechanism={"type": "test_suite", "handled": False},
    )

    expected_exception_values = [
        {
            "mechanism": {
                "handled": False,
                "type": "test_suite",
            },
            "module": None,
            "type": "ValueError",
            "value": "A simple exception",
        },
    ]

    exception_values = event["exception"]["values"]
    assert exception_values == expected_exception_values
sentry-python-1.39.2/tests/test_lru_cache.py
import pytest

from sentry_sdk._lru_cache import LRUCache


@pytest.mark.parametrize("max_size", [-10, -1, 0])
def test_illegal_size(max_size):
    with pytest.raises(AssertionError):
        LRUCache(max_size=max_size)


def test_simple_set_get():
    cache = LRUCache(1)
    assert cache.get(1) is None
    cache.set(1, 1)
    assert cache.get(1) == 1


def test_overwrite():
    cache = LRUCache(1)
    assert cache.get(1) is None
    cache.set(1, 1)
    assert cache.get(1) == 1
    cache.set(1, 2)
    assert cache.get(1) == 2


def test_cache_eviction():
    cache = LRUCache(3)
    cache.set(1, 1)
    cache.set(2, 2)
    cache.set(3, 3)
    assert cache.get(1) == 1
    assert cache.get(2) == 2
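    # Keys 1 and 2 were just read, so 3 is now the least recently used entry
    # and should be evicted when 4 is inserted (assuming get() refreshes
    # recency, which is what this test checks).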
    cache.set(4, 4)
    assert cache.get(3) is None
    assert cache.get(4) == 4
sentry-python-1.39.2/tests/test_metrics.py
# coding: utf-8

import sys
import time
import linecache

from sentry_sdk import Hub, metrics, push_scope, start_transaction
from sentry_sdk.tracing import TRANSACTION_SOURCE_ROUTE
from sentry_sdk.envelope import parse_json

try:
    from unittest import mock  # python 3.3 and above
except ImportError:
    import mock  # python < 3.3


def parse_metrics(data):
    rv = []
    for line in data.splitlines():
        pieces = line.decode("utf-8").split("|")
        payload = pieces[0].split(":")
        name = payload[0]
        values = payload[1:]
        ty = pieces[1]
        ts = None
        tags = {}
        for piece in pieces[2:]:
            if piece[0] == "#":
                for pair in piece[1:].split(","):
                    k, v = pair.split(":", 1)
                    old = tags.get(k)
                    if old is not None:
                        if isinstance(old, list):
                            old.append(v)
                        else:
                            tags[k] = [old, v]
                    else:
                        tags[k] = v
            elif piece[0] == "T":
                ts = int(piece[1:])
            else:
                raise ValueError("unknown piece %r" % (piece,))
        rv.append((ts, name, ty, values, tags))
    rv.sort(key=lambda x: (x[0], x[1], tuple(sorted(x[4].items()))))
    return rv
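
# For illustration: a statsd line such as
#   b"foobar@none:1.0:2.0|c|#foo:bar|T1600000000"
# parses to (1600000000, "foobar@none", "c", ["1.0", "2.0"], {"foo": "bar"}).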


def test_incr(sentry_init, capture_envelopes):
    sentry_init(
        release="fun-release",
        environment="not-fun-env",
        _experiments={"enable_metrics": True, "metric_code_locations": True},
    )
    ts = time.time()
    envelopes = capture_envelopes()

    metrics.incr("foobar", 1.0, tags={"foo": "bar", "blub": "blah"}, timestamp=ts)
    metrics.incr("foobar", 2.0, tags={"foo": "bar", "blub": "blah"}, timestamp=ts)
    Hub.current.flush()

    (envelope,) = envelopes
    statsd_item, meta_item = envelope.items

    assert statsd_item.headers["type"] == "statsd"
    m = parse_metrics(statsd_item.payload.get_bytes())

    assert len(m) == 1
    assert m[0][1] == "foobar@none"
    assert m[0][2] == "c"
    assert m[0][3] == ["3.0"]
    assert m[0][4] == {
        "blub": "blah",
        "foo": "bar",
        "release": "fun-release",
        "environment": "not-fun-env",
    }

    assert meta_item.headers["type"] == "metric_meta"
    assert parse_json(meta_item.payload.get_bytes()) == {
        "timestamp": mock.ANY,
        "mapping": {
            "c:foobar@none": [
                {
                    "type": "location",
                    "filename": "tests/test_metrics.py",
                    "abs_path": __file__,
                    "function": sys._getframe().f_code.co_name,
                    "module": __name__,
                    "lineno": mock.ANY,
                    "pre_context": mock.ANY,
                    "context_line": mock.ANY,
                    "post_context": mock.ANY,
                }
            ]
        },
    }


def test_timing(sentry_init, capture_envelopes):
    sentry_init(
        release="fun-release@1.0.0",
        environment="not-fun-env",
        _experiments={"enable_metrics": True, "metric_code_locations": True},
    )
    ts = time.time()
    envelopes = capture_envelopes()

    with metrics.timing("whatever", tags={"blub": "blah"}, timestamp=ts):
        time.sleep(0.1)
    Hub.current.flush()

    (envelope,) = envelopes
    statsd_item, meta_item = envelope.items

    assert statsd_item.headers["type"] == "statsd"
    m = parse_metrics(statsd_item.payload.get_bytes())

    assert len(m) == 1
    assert m[0][1] == "whatever@second"
    assert m[0][2] == "d"
    assert len(m[0][3]) == 1
    assert float(m[0][3][0]) >= 0.1
    assert m[0][4] == {
        "blub": "blah",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }

    assert meta_item.headers["type"] == "metric_meta"
    json = parse_json(meta_item.payload.get_bytes())
    assert json == {
        "timestamp": mock.ANY,
        "mapping": {
            "d:whatever@second": [
                {
                    "type": "location",
                    "filename": "tests/test_metrics.py",
                    "abs_path": __file__,
                    "function": sys._getframe().f_code.co_name,
                    "module": __name__,
                    "lineno": mock.ANY,
                    "pre_context": mock.ANY,
                    "context_line": mock.ANY,
                    "post_context": mock.ANY,
                }
            ]
        },
    }

    loc = json["mapping"]["d:whatever@second"][0]
    line = linecache.getline(loc["abs_path"], loc["lineno"])
    assert (
        line.strip()
        == 'with metrics.timing("whatever", tags={"blub": "blah"}, timestamp=ts):'
    )


def test_timing_decorator(sentry_init, capture_envelopes):
    sentry_init(
        release="fun-release@1.0.0",
        environment="not-fun-env",
        _experiments={"enable_metrics": True, "metric_code_locations": True},
    )
    envelopes = capture_envelopes()

    @metrics.timing("whatever-1", tags={"x": "y"})
    def amazing():
        time.sleep(0.1)
        return 42

    @metrics.timing("whatever-2", tags={"x": "y"}, unit="nanosecond")
    def amazing_nano():
        time.sleep(0.01)
        return 23

    assert amazing() == 42
    assert amazing_nano() == 23
    Hub.current.flush()

    (envelope,) = envelopes
    statsd_item, meta_item = envelope.items

    assert statsd_item.headers["type"] == "statsd"
    m = parse_metrics(statsd_item.payload.get_bytes())

    assert len(m) == 2
    assert m[0][1] == "whatever-1@second"
    assert m[0][2] == "d"
    assert len(m[0][3]) == 1
    assert float(m[0][3][0]) >= 0.1
    assert m[0][4] == {
        "x": "y",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }

    assert m[1][1] == "whatever-2@nanosecond"
    assert m[1][2] == "d"
    assert len(m[1][3]) == 1
    assert float(m[1][3][0]) >= 10000000.0
    assert m[1][4] == {
        "x": "y",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }

    assert meta_item.headers["type"] == "metric_meta"
    json = parse_json(meta_item.payload.get_bytes())
    assert json == {
        "timestamp": mock.ANY,
        "mapping": {
            "d:whatever-1@second": [
                {
                    "type": "location",
                    "filename": "tests/test_metrics.py",
                    "abs_path": __file__,
                    "function": sys._getframe().f_code.co_name,
                    "module": __name__,
                    "lineno": mock.ANY,
                    "pre_context": mock.ANY,
                    "context_line": mock.ANY,
                    "post_context": mock.ANY,
                }
            ],
            "d:whatever-2@nanosecond": [
                {
                    "type": "location",
                    "filename": "tests/test_metrics.py",
                    "abs_path": __file__,
                    "function": sys._getframe().f_code.co_name,
                    "module": __name__,
                    "lineno": mock.ANY,
                    "pre_context": mock.ANY,
                    "context_line": mock.ANY,
                    "post_context": mock.ANY,
                }
            ],
        },
    }

    # XXX: this is not the best location.  It would probably be better to
    # report the location inside the function; however, that is quite a bit
    # trickier to do since we report from outside the function and really
    # only see the call site.
    loc = json["mapping"]["d:whatever-1@second"][0]
    line = linecache.getline(loc["abs_path"], loc["lineno"])
    assert line.strip() == "assert amazing() == 42"


def test_timing_basic(sentry_init, capture_envelopes):
    sentry_init(
        release="fun-release@1.0.0",
        environment="not-fun-env",
        _experiments={"enable_metrics": True, "metric_code_locations": True},
    )
    ts = time.time()
    envelopes = capture_envelopes()

    metrics.timing("timing", 1.0, tags={"a": "b"}, timestamp=ts)
    metrics.timing("timing", 2.0, tags={"a": "b"}, timestamp=ts)
    metrics.timing("timing", 2.0, tags={"a": "b"}, timestamp=ts)
    metrics.timing("timing", 3.0, tags={"a": "b"}, timestamp=ts)
    Hub.current.flush()

    (envelope,) = envelopes
    statsd_item, meta_item = envelope.items

    assert statsd_item.headers["type"] == "statsd"
    m = parse_metrics(statsd_item.payload.get_bytes())

    assert len(m) == 1
    assert m[0][1] == "timing@second"
    assert m[0][2] == "d"
    assert len(m[0][3]) == 4
    assert sorted(map(float, m[0][3])) == [1.0, 2.0, 2.0, 3.0]
    assert m[0][4] == {
        "a": "b",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }

    assert meta_item.headers["type"] == "metric_meta"
    assert parse_json(meta_item.payload.get_bytes()) == {
        "timestamp": mock.ANY,
        "mapping": {
            "d:timing@second": [
                {
                    "type": "location",
                    "filename": "tests/test_metrics.py",
                    "abs_path": __file__,
                    "function": sys._getframe().f_code.co_name,
                    "module": __name__,
                    "lineno": mock.ANY,
                    "pre_context": mock.ANY,
                    "context_line": mock.ANY,
                    "post_context": mock.ANY,
                }
            ]
        },
    }


def test_distribution(sentry_init, capture_envelopes):
    sentry_init(
        release="fun-release@1.0.0",
        environment="not-fun-env",
        _experiments={"enable_metrics": True, "metric_code_locations": True},
    )
    ts = time.time()
    envelopes = capture_envelopes()

    metrics.distribution("dist", 1.0, tags={"a": "b"}, timestamp=ts)
    metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts)
    metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts)
    metrics.distribution("dist", 3.0, tags={"a": "b"}, timestamp=ts)
    Hub.current.flush()

    (envelope,) = envelopes
    statsd_item, meta_item = envelope.items

    assert statsd_item.headers["type"] == "statsd"
    m = parse_metrics(statsd_item.payload.get_bytes())

    assert len(m) == 1
    assert m[0][1] == "dist@none"
    assert m[0][2] == "d"
    assert len(m[0][3]) == 4
    assert sorted(map(float, m[0][3])) == [1.0, 2.0, 2.0, 3.0]
    assert m[0][4] == {
        "a": "b",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }

    assert meta_item.headers["type"] == "metric_meta"
    json = parse_json(meta_item.payload.get_bytes())
    assert json == {
        "timestamp": mock.ANY,
        "mapping": {
            "d:dist@none": [
                {
                    "type": "location",
                    "filename": "tests/test_metrics.py",
                    "abs_path": __file__,
                    "function": sys._getframe().f_code.co_name,
                    "module": __name__,
                    "lineno": mock.ANY,
                    "pre_context": mock.ANY,
                    "context_line": mock.ANY,
                    "post_context": mock.ANY,
                }
            ]
        },
    }

    loc = json["mapping"]["d:dist@none"][0]
    line = linecache.getline(loc["abs_path"], loc["lineno"])
    assert (
        line.strip()
        == 'metrics.distribution("dist", 1.0, tags={"a": "b"}, timestamp=ts)'
    )


def test_set(sentry_init, capture_envelopes):
    sentry_init(
        release="fun-release@1.0.0",
        environment="not-fun-env",
        _experiments={"enable_metrics": True, "metric_code_locations": True},
    )
    ts = time.time()
    envelopes = capture_envelopes()

    metrics.set("my-set", "peter", tags={"magic": "puff"}, timestamp=ts)
    metrics.set("my-set", "paul", tags={"magic": "puff"}, timestamp=ts)
    metrics.set("my-set", "mary", tags={"magic": "puff"}, timestamp=ts)
    Hub.current.flush()

    (envelope,) = envelopes
    statsd_item, meta_item = envelope.items

    assert statsd_item.headers["type"] == "statsd"
    m = parse_metrics(statsd_item.payload.get_bytes())

    assert len(m) == 1
    assert m[0][1] == "my-set@none"
    assert m[0][2] == "s"
    assert len(m[0][3]) == 3
    assert sorted(map(int, m[0][3])) == [354582103, 2513273657, 3329318813]
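    # The three integers are hashes of the set members ("peter", "paul",
    # "mary"); set values are transmitted as integer hashes, not raw strings.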
    assert m[0][4] == {
        "magic": "puff",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }

    assert meta_item.headers["type"] == "metric_meta"
    assert parse_json(meta_item.payload.get_bytes()) == {
        "timestamp": mock.ANY,
        "mapping": {
            "s:my-set@none": [
                {
                    "type": "location",
                    "filename": "tests/test_metrics.py",
                    "abs_path": __file__,
                    "function": sys._getframe().f_code.co_name,
                    "module": __name__,
                    "lineno": mock.ANY,
                    "pre_context": mock.ANY,
                    "context_line": mock.ANY,
                    "post_context": mock.ANY,
                }
            ]
        },
    }


def test_gauge(sentry_init, capture_envelopes):
    sentry_init(
        release="fun-release@1.0.0",
        environment="not-fun-env",
        _experiments={"enable_metrics": True},
    )
    ts = time.time()
    envelopes = capture_envelopes()

    metrics.gauge("my-gauge", 10.0, tags={"x": "y"}, timestamp=ts)
    metrics.gauge("my-gauge", 20.0, tags={"x": "y"}, timestamp=ts)
    metrics.gauge("my-gauge", 30.0, tags={"x": "y"}, timestamp=ts)
    Hub.current.flush()

    (envelope,) = envelopes

    assert len(envelope.items) == 1
    assert envelope.items[0].headers["type"] == "statsd"
    m = parse_metrics(envelope.items[0].payload.get_bytes())

    assert len(m) == 1
    assert m[0][1] == "my-gauge@none"
    assert m[0][2] == "g"
    assert len(m[0][3]) == 5
    assert list(map(float, m[0][3])) == [30.0, 10.0, 30.0, 60.0, 3.0]
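    # With the inputs 10, 20 and 30, the gauge payload reads as
    # last=30, min=10, max=30, sum=60, count=3.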
    assert m[0][4] == {
        "x": "y",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }


def test_multiple(sentry_init, capture_envelopes):
    sentry_init(
        release="fun-release@1.0.0",
        environment="not-fun-env",
        _experiments={"enable_metrics": True},
    )
    ts = time.time()
    envelopes = capture_envelopes()

    metrics.gauge("my-gauge", 10.0, tags={"x": "y"}, timestamp=ts)
    metrics.gauge("my-gauge", 20.0, tags={"x": "y"}, timestamp=ts)
    metrics.gauge("my-gauge", 30.0, tags={"x": "y"}, timestamp=ts)
    for _ in range(10):
        metrics.incr("counter-1", 1.0, timestamp=ts)
    metrics.incr("counter-2", 1.0, timestamp=ts)

    Hub.current.flush()

    (envelope,) = envelopes

    assert len(envelope.items) == 1
    assert envelope.items[0].headers["type"] == "statsd"
    m = parse_metrics(envelope.items[0].payload.get_bytes())

    assert len(m) == 3

    assert m[0][1] == "counter-1@none"
    assert m[0][2] == "c"
    assert list(map(float, m[0][3])) == [10.0]
    assert m[0][4] == {
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }

    assert m[1][1] == "counter-2@none"
    assert m[1][2] == "c"
    assert list(map(float, m[1][3])) == [1.0]
    assert m[1][4] == {
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }

    assert m[2][1] == "my-gauge@none"
    assert m[2][2] == "g"
    assert len(m[2][3]) == 5
    assert list(map(float, m[2][3])) == [30.0, 10.0, 30.0, 60.0, 3.0]
    assert m[2][4] == {
        "x": "y",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }


def test_transaction_name(sentry_init, capture_envelopes):
    sentry_init(
        release="fun-release@1.0.0",
        environment="not-fun-env",
        _experiments={"enable_metrics": True},
    )
    ts = time.time()
    envelopes = capture_envelopes()

    with push_scope() as scope:
        scope.set_transaction_name("/user/{user_id}", source="route")
        metrics.distribution("dist", 1.0, tags={"a": "b"}, timestamp=ts)
        metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts)
        metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts)
        metrics.distribution("dist", 3.0, tags={"a": "b"}, timestamp=ts)

    Hub.current.flush()

    (envelope,) = envelopes

    assert len(envelope.items) == 1
    assert envelope.items[0].headers["type"] == "statsd"
    m = parse_metrics(envelope.items[0].payload.get_bytes())

    assert len(m) == 1
    assert m[0][1] == "dist@none"
    assert m[0][2] == "d"
    assert len(m[0][3]) == 4
    assert sorted(map(float, m[0][3])) == [1.0, 2.0, 2.0, 3.0]
    assert m[0][4] == {
        "a": "b",
        "transaction": "/user/{user_id}",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }


def test_metric_summaries(sentry_init, capture_envelopes):
    sentry_init(
        release="fun-release@1.0.0",
        environment="not-fun-env",
        enable_tracing=True,
        _experiments={"enable_metrics": True, "metrics_summary_sample_rate": 1.0},
    )
    ts = time.time()
    envelopes = capture_envelopes()

    with start_transaction(
        op="stuff", name="/foo", source=TRANSACTION_SOURCE_ROUTE
    ) as transaction:
        metrics.incr("root-counter", timestamp=ts)
        with metrics.timing("my-timer-metric", tags={"a": "b"}, timestamp=ts):
            for x in range(10):
                metrics.distribution("my-dist", float(x), timestamp=ts)

    Hub.current.flush()

    (transaction, envelope) = envelopes

    # Metrics Emission
    assert envelope.items[0].headers["type"] == "statsd"
    m = parse_metrics(envelope.items[0].payload.get_bytes())

    assert len(m) == 3

    assert m[0][1] == "my-dist@none"
    assert m[0][2] == "d"
    assert len(m[0][3]) == 10
    assert sorted(m[0][3]) == list(map(str, map(float, range(10))))
    assert m[0][4] == {
        "transaction": "/foo",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }

    assert m[1][1] == "my-timer-metric@second"
    assert m[1][2] == "d"
    assert len(m[1][3]) == 1
    assert m[1][4] == {
        "a": "b",
        "transaction": "/foo",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }

    assert m[2][1] == "root-counter@none"
    assert m[2][2] == "c"
    assert m[2][3] == ["1.0"]
    assert m[2][4] == {
        "transaction": "/foo",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }

    # Measurement Attachment
    t = transaction.items[0].get_transaction_event()

    assert t["_metrics_summary"] == {
        "c:root-counter@none": [
            {
                "count": 1,
                "min": 1.0,
                "max": 1.0,
                "sum": 1.0,
                "tags": {
                    "transaction": "/foo",
                    "release": "fun-release@1.0.0",
                    "environment": "not-fun-env",
                },
            }
        ]
    }

    assert t["spans"][0]["_metrics_summary"]["d:my-dist@none"] == [
        {
            "count": 10,
            "min": 0.0,
            "max": 9.0,
            "sum": 45.0,
            "tags": {
                "environment": "not-fun-env",
                "release": "fun-release@1.0.0",
                "transaction": "/foo",
            },
        }
    ]

    assert t["spans"][0]["tags"] == {"a": "b"}
    (timer,) = t["spans"][0]["_metrics_summary"]["d:my-timer-metric@second"]
    assert timer["count"] == 1
    assert timer["max"] == timer["min"] == timer["sum"]
    assert timer["sum"] > 0
    assert timer["tags"] == {
        "a": "b",
        "environment": "not-fun-env",
        "release": "fun-release@1.0.0",
        "transaction": "/foo",
    }


def test_metrics_summary_disabled(sentry_init, capture_envelopes):
    sentry_init(
        release="fun-release@1.0.0",
        environment="not-fun-env",
        enable_tracing=True,
        _experiments={"enable_metrics": True},
    )
    ts = time.time()
    envelopes = capture_envelopes()

    with start_transaction(
        op="stuff", name="/foo", source=TRANSACTION_SOURCE_ROUTE
    ) as transaction:
        with metrics.timing("my-timer-metric", tags={"a": "b"}, timestamp=ts):
            pass

    Hub.current.flush()

    (transaction, envelope) = envelopes

    # Metrics Emission
    assert envelope.items[0].headers["type"] == "statsd"
    m = parse_metrics(envelope.items[0].payload.get_bytes())

    assert len(m) == 1
    assert m[0][1] == "my-timer-metric@second"
    assert m[0][2] == "d"
    assert len(m[0][3]) == 1
    assert m[0][4] == {
        "a": "b",
        "transaction": "/foo",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }

    # Measurement Attachment
    t = transaction.items[0].get_transaction_event()
    assert "_metrics_summary" not in t
    assert "_metrics_summary" not in t["spans"][0]


def test_metrics_summary_filtered(sentry_init, capture_envelopes):
    def should_summarize_metric(key, tags):
        return key == "foo"

    sentry_init(
        release="fun-release@1.0.0",
        environment="not-fun-env",
        enable_tracing=True,
        _experiments={
            "enable_metrics": True,
            "metrics_summary_sample_rate": 1.0,
            "should_summarize_metric": should_summarize_metric,
        },
    )
    ts = time.time()
    envelopes = capture_envelopes()

    with start_transaction(
        op="stuff", name="/foo", source=TRANSACTION_SOURCE_ROUTE
    ) as transaction:
        metrics.timing("foo", value=3.0, tags={"a": "b"}, timestamp=ts)
        metrics.timing("foo", value=2.0, tags={"b": "c"}, timestamp=ts)
        metrics.timing("bar", value=1.0, tags={"a": "b"}, timestamp=ts)

    Hub.current.flush()

    (transaction, envelope) = envelopes

    # Metrics Emission
    assert envelope.items[0].headers["type"] == "statsd"
    m = parse_metrics(envelope.items[0].payload.get_bytes())

    assert len(m) == 3
    assert m[0][1] == "bar@second"
    assert m[1][1] == "foo@second"
    assert m[2][1] == "foo@second"

    # Measurement Attachment
    t = transaction.items[0].get_transaction_event()["_metrics_summary"]
    assert len(t["d:foo@second"]) == 2
    assert {
        "tags": {
            "a": "b",
            "environment": "not-fun-env",
            "release": "fun-release@1.0.0",
            "transaction": "/foo",
        },
        "min": 3.0,
        "max": 3.0,
        "count": 1,
        "sum": 3.0,
    } in t["d:foo@second"]
    assert {
        "tags": {
            "b": "c",
            "environment": "not-fun-env",
            "release": "fun-release@1.0.0",
            "transaction": "/foo",
        },
        "min": 2.0,
        "max": 2.0,
        "count": 1,
        "sum": 2.0,
    } in t["d:foo@second"]


def test_tag_normalization(sentry_init, capture_envelopes):
    sentry_init(
        release="fun-release@1.0.0",
        environment="not-fun-env",
        _experiments={"enable_metrics": True},
    )
    ts = time.time()
    envelopes = capture_envelopes()

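    # disallowed characters in tag keys are replaced with "_" (runs collapse to
    # a single one); tag values are more permissive and keep unicode intact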
    # fmt: off
    metrics.distribution("a", 1.0, tags={"foo-bar": "%$foo"}, timestamp=ts)
    metrics.distribution("b", 1.0, tags={"foo$$$bar": "blah{}"}, timestamp=ts)
    metrics.distribution("c", 1.0, tags={u"foö-bar": u"snöwmän"}, timestamp=ts)
    # fmt: on
    Hub.current.flush()

    (envelope,) = envelopes

    assert len(envelope.items) == 1
    assert envelope.items[0].headers["type"] == "statsd"
    m = parse_metrics(envelope.items[0].payload.get_bytes())

    assert len(m) == 3
    assert m[0][4] == {
        "foo-bar": "_$foo",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }

    assert m[1][4] == {
        "foo_bar": "blah{}",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }

    # fmt: off
    assert m[2][4] == {
        "fo_-bar": u"snöwmän",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }
    # fmt: on


def test_before_emit_metric(sentry_init, capture_envelopes):
    def before_emit(key, tags):
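        # returning False drops the metric entirely; tag mutations made here
        # are reflected in the emitted metric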
        if key == "removed-metric":
            return False
        tags["extra"] = "foo"
        del tags["release"]
        # this better be a noop!
        metrics.incr("shitty-recursion")
        return True

    sentry_init(
        release="fun-release@1.0.0",
        environment="not-fun-env",
        _experiments={
            "enable_metrics": True,
            "before_emit_metric": before_emit,
        },
    )
    envelopes = capture_envelopes()

    metrics.incr("removed-metric", 1.0)
    metrics.incr("actual-metric", 1.0)
    Hub.current.flush()

    (envelope,) = envelopes

    assert len(envelope.items) == 1
    assert envelope.items[0].headers["type"] == "statsd"
    m = parse_metrics(envelope.items[0].payload.get_bytes())

    assert len(m) == 1
    assert m[0][1] == "actual-metric@none"
    assert m[0][3] == ["1.0"]
    assert m[0][4] == {
        "extra": "foo",
        "environment": "not-fun-env",
    }


def test_aggregator_flush(sentry_init, capture_envelopes):
    sentry_init(
        release="fun-release@1.0.0",
        environment="not-fun-env",
        _experiments={
            "enable_metrics": True,
        },
    )
    envelopes = capture_envelopes()

    metrics.incr("a-metric", 1.0)
    Hub.current.flush()

    assert len(envelopes) == 1
    assert Hub.current.client.metrics_aggregator.buckets == {}


def test_tag_serialization(sentry_init, capture_envelopes):
    sentry_init(
        release="fun-release",
        environment="not-fun-env",
        _experiments={"enable_metrics": True},
    )
    envelopes = capture_envelopes()

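    # None tag values are dropped, everything else is stringified, and list
    # values emit one entry per element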
    metrics.incr(
        "counter",
        tags={
            "no-value": None,
            "an-int": 42,
            "a-float": 23.0,
            "a-string": "blah",
            "more-than-one": [1, "zwei", "3.0", None],
        },
    )
    Hub.current.flush()

    (envelope,) = envelopes

    assert len(envelope.items) == 1
    assert envelope.items[0].headers["type"] == "statsd"
    m = parse_metrics(envelope.items[0].payload.get_bytes())

    assert len(m) == 1
    assert m[0][4] == {
        "an-int": "42",
        "a-float": "23.0",
        "a-string": "blah",
        "more-than-one": ["1", "3.0", "zwei"],
        "release": "fun-release",
        "environment": "not-fun-env",
    }


def test_flush_recursion_protection(sentry_init, capture_envelopes, monkeypatch):
    sentry_init(
        release="fun-release",
        environment="not-fun-env",
        _experiments={"enable_metrics": True},
    )
    envelopes = capture_envelopes()
    test_client = Hub.current.client

    real_capture_envelope = test_client.transport.capture_envelope

    def bad_capture_envelope(*args, **kwargs):
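        # a metric emitted while the transport is flushing must not trigger
        # another flush, or this would recurse forever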
        metrics.incr("bad-metric")
        return real_capture_envelope(*args, **kwargs)

    monkeypatch.setattr(test_client.transport, "capture_envelope", bad_capture_envelope)

    metrics.incr("counter")

    # flush twice to see the inner metric
    Hub.current.flush()
    Hub.current.flush()

    (envelope,) = envelopes
    m = parse_metrics(envelope.items[0].payload.get_bytes())
    assert len(m) == 1
    assert m[0][1] == "counter@none"


def test_flush_recursion_protection_background_flush(
    sentry_init, capture_envelopes, monkeypatch
):
    monkeypatch.setattr(metrics.MetricsAggregator, "FLUSHER_SLEEP_TIME", 0.1)
    sentry_init(
        release="fun-release",
        environment="not-fun-env",
        _experiments={"enable_metrics": True},
    )
    envelopes = capture_envelopes()
    test_client = Hub.current.client

    real_capture_envelope = test_client.transport.capture_envelope

    def bad_capture_envelope(*args, **kwargs):
        metrics.incr("bad-metric")
        return real_capture_envelope(*args, **kwargs)

    monkeypatch.setattr(test_client.transport, "capture_envelope", bad_capture_envelope)

    metrics.incr("counter")

    # flush via sleep and flag
    Hub.current.client.metrics_aggregator._force_flush = True
    time.sleep(0.5)

    (envelope,) = envelopes
    m = parse_metrics(envelope.items[0].payload.get_bytes())
    assert len(m) == 1
    assert m[0][1] == "counter@none"
sentry-python-1.39.2/tests/test_monitor.py000066400000000000000000000052161454744723200207330ustar00rootroot00000000000000import random

from sentry_sdk import Hub, start_transaction
from sentry_sdk.transport import Transport

try:
    from unittest import mock  # python 3.3 and above
except ImportError:
    import mock  # python < 3.3


class HealthyTestTransport(Transport):
    def _send_event(self, event):
        pass

    def _send_envelope(self, envelope):
        pass

    def is_healthy(self):
        return True


class UnhealthyTestTransport(HealthyTestTransport):
    def is_healthy(self):
        return False


def test_no_monitor_if_disabled(sentry_init):
    sentry_init(
        transport=HealthyTestTransport(),
        enable_backpressure_handling=False,
    )

    assert Hub.current.client.monitor is None


def test_monitor_if_enabled(sentry_init):
    sentry_init(transport=HealthyTestTransport())

    monitor = Hub.current.client.monitor
    assert monitor is not None
    assert monitor._thread is None

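    # the first health check lazily starts the monitor thread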
    assert monitor.is_healthy() is True
    assert monitor.downsample_factor == 0
    assert monitor._thread is not None
    assert monitor._thread.name == "sentry.monitor"


def test_monitor_unhealthy(sentry_init):
    sentry_init(transport=UnhealthyTestTransport())

    monitor = Hub.current.client.monitor
    monitor.interval = 0.1

    assert monitor.is_healthy() is True

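    # each failed health check increments the downsample factor, capped at 10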
    for i in range(15):
        monitor.run()
        assert monitor.is_healthy() is False
        assert monitor.downsample_factor == (i + 1 if i < 10 else 10)


def test_transaction_uses_downsampled_rate(
    sentry_init, capture_client_reports, monkeypatch
):
    sentry_init(
        traces_sample_rate=1.0,
        transport=UnhealthyTestTransport(),
    )

    reports = capture_client_reports()

    monitor = Hub.current.client.monitor
    monitor.interval = 0.1

    # make sure rng doesn't sample
    monkeypatch.setattr(random, "random", lambda: 0.9)

    assert monitor.is_healthy() is True
    monitor.run()
    assert monitor.is_healthy() is False
    assert monitor.downsample_factor == 1

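    # a downsample factor of 1 halves the effective rate (1.0 -> 0.5), so the
    # patched rng value of 0.9 keeps the transaction from being sampled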
    with start_transaction(name="foobar") as transaction:
        assert transaction.sampled is False
        assert transaction.sample_rate == 0.5

    assert reports == [("backpressure", "transaction")]


def test_monitor_no_thread_on_shutdown_no_errors(sentry_init):
    sentry_init(transport=HealthyTestTransport())

    # make it seem like the interpreter is shutting down
    with mock.patch(
        "threading.Thread.start",
        side_effect=RuntimeError("can't create new thread at interpreter shutdown"),
    ):
        monitor = Hub.current.client.monitor
        assert monitor is not None
        assert monitor._thread is None
        monitor.run()
        assert monitor._thread is None
sentry-python-1.39.2/tests/test_profiler.py000066400000000000000000000625231454744723200210720ustar00rootroot00000000000000import inspect
import os
import sys
import threading
import time

import pytest

from collections import defaultdict
from sentry_sdk import start_transaction
from sentry_sdk.profiler import (
    GeventScheduler,
    Profile,
    Scheduler,
    ThreadScheduler,
    extract_frame,
    extract_stack,
    frame_id,
    get_current_thread_id,
    get_frame_name,
    setup_profiler,
)
from sentry_sdk.tracing import Transaction
from sentry_sdk._lru_cache import LRUCache
from sentry_sdk._queue import Queue

try:
    from unittest import mock  # python 3.3 and above
except ImportError:
    import mock  # python < 3.3

try:
    import gevent
except ImportError:
    gevent = None


def requires_python_version(major, minor, reason=None):
    if reason is None:
        reason = "Requires Python {}.{}".format(major, minor)
    return pytest.mark.skipif(sys.version_info < (major, minor), reason=reason)


requires_gevent = pytest.mark.skipif(gevent is None, reason="gevent not enabled")


def process_test_sample(sample):
    # insert a mock hashable for the stack
    return [(tid, (stack, stack)) for tid, stack in sample]


def non_experimental_options(mode=None, sample_rate=None):
    return {"profiler_mode": mode, "profiles_sample_rate": sample_rate}


def experimental_options(mode=None, sample_rate=None):
    return {
        "_experiments": {"profiler_mode": mode, "profiles_sample_rate": sample_rate}
    }


@requires_python_version(3, 3)
@pytest.mark.parametrize(
    "mode",
    [
        pytest.param("foo"),
        pytest.param(
            "gevent",
            marks=pytest.mark.skipif(gevent is not None, reason="gevent not enabled"),
        ),
    ],
)
@pytest.mark.parametrize(
    "make_options",
    [
        pytest.param(experimental_options, id="experiment"),
        pytest.param(non_experimental_options, id="non experimental"),
    ],
)
def test_profiler_invalid_mode(mode, make_options, teardown_profiling):
    with pytest.raises(ValueError):
        setup_profiler(make_options(mode))


@requires_python_version(3, 3)
@pytest.mark.parametrize(
    "mode",
    [
        pytest.param("thread"),
        pytest.param("sleep"),
        pytest.param("gevent", marks=requires_gevent),
    ],
)
@pytest.mark.parametrize(
    "make_options",
    [
        pytest.param(experimental_options, id="experiment"),
        pytest.param(non_experimental_options, id="non experimental"),
    ],
)
def test_profiler_valid_mode(mode, make_options, teardown_profiling):
    # should not raise any exceptions
    setup_profiler(make_options(mode))


@requires_python_version(3, 3)
@pytest.mark.parametrize(
    "make_options",
    [
        pytest.param(experimental_options, id="experiment"),
        pytest.param(non_experimental_options, id="non experimental"),
    ],
)
def test_profiler_setup_twice(make_options, teardown_profiling):
    # setting up the first time should return True to indicate success
    assert setup_profiler(make_options())
    # setting up the second time should return False to indicate no-op
    assert not setup_profiler(make_options())


@requires_python_version(3, 3)
@pytest.mark.parametrize(
    "mode",
    [
        pytest.param("thread"),
        pytest.param("gevent", marks=requires_gevent),
    ],
)
@pytest.mark.parametrize(
    ("profiles_sample_rate", "profile_count"),
    [
        pytest.param(1.00, 1, id="profiler sampled at 1.00"),
        pytest.param(0.75, 1, id="profiler sampled at 0.75"),
        pytest.param(0.25, 0, id="profiler sampled at 0.25"),
        pytest.param(0.00, 0, id="profiler sampled at 0.00"),
        pytest.param(None, 0, id="profiler not enabled"),
    ],
)
@pytest.mark.parametrize(
    "make_options",
    [
        pytest.param(experimental_options, id="experiment"),
        pytest.param(non_experimental_options, id="non experimental"),
    ],
)
@mock.patch("sentry_sdk.profiler.PROFILE_MINIMUM_SAMPLES", 0)
def test_profiles_sample_rate(
    sentry_init,
    capture_envelopes,
    capture_client_reports,
    teardown_profiling,
    profiles_sample_rate,
    profile_count,
    make_options,
    mode,
):
    options = make_options(mode=mode, sample_rate=profiles_sample_rate)
    sentry_init(
        traces_sample_rate=1.0,
        profiler_mode=options.get("profiler_mode"),
        profiles_sample_rate=options.get("profiles_sample_rate"),
        _experiments=options.get("_experiments", {}),
    )

    envelopes = capture_envelopes()
    reports = capture_client_reports()

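    # random() is patched to return 0.5, so only sample rates above 0.5
    # actually produce a profile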
    with mock.patch("sentry_sdk.profiler.random.random", return_value=0.5):
        with start_transaction(name="profiling"):
            pass

    items = defaultdict(list)
    for envelope in envelopes:
        for item in envelope.items:
            items[item.type].append(item)

    assert len(items["transaction"]) == 1
    assert len(items["profile"]) == profile_count
    if profiles_sample_rate is None or profiles_sample_rate == 0:
        assert reports == []
    elif profile_count:
        assert reports == []
    else:
        assert reports == [("sample_rate", "profile")]


@requires_python_version(3, 3)
@pytest.mark.parametrize(
    "mode",
    [
        pytest.param("thread"),
        pytest.param("gevent", marks=requires_gevent),
    ],
)
@pytest.mark.parametrize(
    ("profiles_sampler", "profile_count"),
    [
        pytest.param(lambda _: 1.00, 1, id="profiler sampled at 1.00"),
        pytest.param(lambda _: 0.75, 1, id="profiler sampled at 0.75"),
        pytest.param(lambda _: 0.25, 0, id="profiler sampled at 0.25"),
        pytest.param(lambda _: 0.00, 0, id="profiler sampled at 0.00"),
        pytest.param(lambda _: None, 0, id="profiler not enabled"),
        pytest.param(
            lambda ctx: 1 if ctx["transaction_context"]["name"] == "profiling" else 0,
            1,
            id="profiler sampled for transaction name",
        ),
        pytest.param(
            lambda ctx: 0 if ctx["transaction_context"]["name"] == "profiling" else 1,
            0,
            id="profiler not sampled for transaction name",
        ),
        pytest.param(
            lambda _: "1", 0, id="profiler not sampled because string sample rate"
        ),
        pytest.param(lambda _: True, 1, id="profiler sampled at True"),
        pytest.param(lambda _: False, 0, id="profiler sampled at False"),
    ],
)
@mock.patch("sentry_sdk.profiler.PROFILE_MINIMUM_SAMPLES", 0)
def test_profiles_sampler(
    sentry_init,
    capture_envelopes,
    capture_client_reports,
    teardown_profiling,
    profiles_sampler,
    profile_count,
    mode,
):
    sentry_init(
        traces_sample_rate=1.0,
        profiles_sampler=profiles_sampler,
    )

    envelopes = capture_envelopes()
    reports = capture_client_reports()

    with mock.patch("sentry_sdk.profiler.random.random", return_value=0.5):
        with start_transaction(name="profiling"):
            pass

    items = defaultdict(list)
    for envelope in envelopes:
        for item in envelope.items:
            items[item.type].append(item)

    assert len(items["transaction"]) == 1
    assert len(items["profile"]) == profile_count
    if profile_count:
        assert reports == []
    else:
        assert reports == [("sample_rate", "profile")]


@requires_python_version(3, 3)
def test_minimum_unique_samples_required(
    sentry_init,
    capture_envelopes,
    capture_client_reports,
    teardown_profiling,
):
    sentry_init(
        traces_sample_rate=1.0,
        _experiments={"profiles_sample_rate": 1.0},
    )

    envelopes = capture_envelopes()
    reports = capture_client_reports()

    with start_transaction(name="profiling"):
        pass

    items = defaultdict(list)
    for envelope in envelopes:
        for item in envelope.items:
            items[item.type].append(item)

    assert len(items["transaction"]) == 1
    # because we don't leave any time for the profiler to
    # take any samples, it should not be sent
    assert len(items["profile"]) == 0
    assert reports == [("insufficient_data", "profile")]


@requires_python_version(3, 3)
def test_profile_captured(
    sentry_init,
    capture_envelopes,
    teardown_profiling,
):
    sentry_init(
        traces_sample_rate=1.0,
        _experiments={"profiles_sample_rate": 1.0},
    )

    envelopes = capture_envelopes()

    with start_transaction(name="profiling"):
        time.sleep(0.05)

    items = defaultdict(list)
    for envelope in envelopes:
        for item in envelope.items:
            items[item.type].append(item)

    assert len(items["transaction"]) == 1
    assert len(items["profile"]) == 1


def get_frame(depth=1):
    """
    This function is not exactly true to its name. Depending on
    how it is called, the true depth of the stack can be deeper
    than the argument implies.
    """
    if depth <= 0:
        raise ValueError("only positive integers allowed")
    if depth > 1:
        return get_frame(depth=depth - 1)
    return inspect.currentframe()


class GetFrameBase:
    def inherited_instance_method(self):
        return inspect.currentframe()

    def inherited_instance_method_wrapped(self):
        def wrapped():
            return inspect.currentframe()

        return wrapped

    @classmethod
    def inherited_class_method(cls):
        return inspect.currentframe()

    @classmethod
    def inherited_class_method_wrapped(cls):
        def wrapped():
            return inspect.currentframe()

        return wrapped

    @staticmethod
    def inherited_static_method():
        return inspect.currentframe()


class GetFrame(GetFrameBase):
    def instance_method(self):
        return inspect.currentframe()

    def instance_method_wrapped(self):
        def wrapped():
            return inspect.currentframe()

        return wrapped

    @classmethod
    def class_method(cls):
        return inspect.currentframe()

    @classmethod
    def class_method_wrapped(cls):
        def wrapped():
            return inspect.currentframe()

        return wrapped

    @staticmethod
    def static_method():
        return inspect.currentframe()


@requires_python_version(3, 3)
@pytest.mark.parametrize(
    ("frame", "frame_name"),
    [
        pytest.param(
            get_frame(),
            "get_frame",
            id="function",
        ),
        pytest.param(
            (lambda: inspect.currentframe())(),
            "",
            id="lambda",
        ),
        pytest.param(
            GetFrame().instance_method(),
            "GetFrame.instance_method",
            id="instance_method",
        ),
        pytest.param(
            GetFrame().instance_method_wrapped()(),
            "wrapped"
            if sys.version_info < (3, 11)
            else "GetFrame.instance_method_wrapped..wrapped",
            id="instance_method_wrapped",
        ),
        pytest.param(
            GetFrame().class_method(),
            "GetFrame.class_method",
            id="class_method",
        ),
        pytest.param(
            GetFrame().class_method_wrapped()(),
            "wrapped"
            if sys.version_info < (3, 11)
            else "GetFrame.class_method_wrapped..wrapped",
            id="class_method_wrapped",
        ),
        pytest.param(
            GetFrame().static_method(),
            "static_method" if sys.version_info < (3, 11) else "GetFrame.static_method",
            id="static_method",
        ),
        pytest.param(
            GetFrame().inherited_instance_method(),
            "GetFrameBase.inherited_instance_method",
            id="inherited_instance_method",
        ),
        pytest.param(
            GetFrame().inherited_instance_method_wrapped()(),
            "wrapped"
            if sys.version_info < (3, 11)
            else "GetFrameBase.inherited_instance_method_wrapped..wrapped",
            id="instance_method_wrapped",
        ),
        pytest.param(
            GetFrame().inherited_class_method(),
            "GetFrameBase.inherited_class_method",
            id="inherited_class_method",
        ),
        pytest.param(
            GetFrame().inherited_class_method_wrapped()(),
            "wrapped"
            if sys.version_info < (3, 11)
            else "GetFrameBase.inherited_class_method_wrapped..wrapped",
            id="inherited_class_method_wrapped",
        ),
        pytest.param(
            GetFrame().inherited_static_method(),
            "inherited_static_method"
            if sys.version_info < (3, 11)
            else "GetFrameBase.inherited_static_method",
            id="inherited_static_method",
        ),
    ],
)
def test_get_frame_name(frame, frame_name):
    assert get_frame_name(frame) == frame_name


@requires_python_version(3, 3)
@pytest.mark.parametrize(
    ("get_frame", "function"),
    [
        pytest.param(lambda: get_frame(depth=1), "get_frame", id="simple"),
    ],
)
def test_extract_frame(get_frame, function):
    cwd = os.getcwd()
    frame = get_frame()
    extracted_frame = extract_frame(frame_id(frame), frame, cwd)

    # the abs_path should be equal to the normalized path of the co_filename
    assert extracted_frame["abs_path"] == os.path.normpath(frame.f_code.co_filename)

    # the module should be pulled from this test module
    assert extracted_frame["module"] == __name__

    # the filename should be the file starting after the cwd
    assert extracted_frame["filename"] == __file__[len(cwd) + 1 :]

    assert extracted_frame["function"] == function

    # the lineno will shift over time as this file is modified so just check
    # that it is an int
    assert isinstance(extracted_frame["lineno"], int)


@requires_python_version(3, 3)
@pytest.mark.parametrize(
    ("depth", "max_stack_depth", "actual_depth"),
    [
        pytest.param(1, 128, 1, id="less than"),
        pytest.param(256, 128, 128, id="greater than"),
        pytest.param(128, 128, 128, id="equals"),
    ],
)
def test_extract_stack_with_max_depth(depth, max_stack_depth, actual_depth):
    # introduce a lambda that we'll be looking for in the stack
    frame = (lambda: get_frame(depth=depth))()

    # plus 1 because we intentionally introduced a lambda that we'll
    # look for in the final stack to make sure it's in the right position
    base_stack_depth = len(inspect.stack()) + 1

    # increase the max_stack_depth by the `base_stack_depth` to account
    # for the extra frames pytest will add
    _, frame_ids, frames = extract_stack(
        frame,
        LRUCache(max_size=1),
        max_stack_depth=max_stack_depth + base_stack_depth,
        cwd=os.getcwd(),
    )
    assert len(frame_ids) == base_stack_depth + actual_depth
    assert len(frames) == base_stack_depth + actual_depth

    for i in range(actual_depth):
        assert frames[i]["function"] == "get_frame", i

    # index 0 contains the innermost frame on the stack, so the lambda
    # should be at index `actual_depth`
    if sys.version_info >= (3, 11):
        assert (
            frames[actual_depth]["function"]
            == "test_extract_stack_with_max_depth.."
        ), actual_depth
    else:
        assert frames[actual_depth]["function"] == "", actual_depth


@requires_python_version(3, 3)
@pytest.mark.parametrize(
    ("frame", "depth"),
    [(get_frame(depth=1), len(inspect.stack()))],
)
def test_extract_stack_with_cache(frame, depth):
    # make sure cache has enough room or this test will fail
    cache = LRUCache(max_size=depth)
    cwd = os.getcwd()
    _, _, frames1 = extract_stack(frame, cache, cwd=cwd)
    _, _, frames2 = extract_stack(frame, cache, cwd=cwd)

    assert len(frames1) > 0
    assert len(frames2) > 0
    assert len(frames1) == len(frames2)
    for i, (frame1, frame2) in enumerate(zip(frames1, frames2)):
        # DO NOT use `==` for the assertion here since we are
        # testing for identity, and using `==` would test for
        # equality, which would always pass since we're extracting
        # the same stack.
        assert frame1 is frame2, i


@requires_python_version(3, 3)
def test_get_current_thread_id_explicit_thread():
    results = Queue(maxsize=1)

    def target1():
        pass

    def target2():
        results.put(get_current_thread_id(thread1))

    thread1 = threading.Thread(target=target1)
    thread1.start()

    thread2 = threading.Thread(target=target2)
    thread2.start()

    thread2.join()
    thread1.join()

    assert thread1.ident == results.get(timeout=1)


@requires_python_version(3, 3)
@requires_gevent
def test_get_current_thread_id_gevent_in_thread():
    results = Queue(maxsize=1)

    def target():
        job = gevent.spawn(get_current_thread_id)
        job.join()
        results.put(job.value)

    thread = threading.Thread(target=target)
    thread.start()
    thread.join()
    assert thread.ident == results.get(timeout=1)


@requires_python_version(3, 3)
def test_get_current_thread_id_running_thread():
    results = Queue(maxsize=1)

    def target():
        results.put(get_current_thread_id())

    thread = threading.Thread(target=target)
    thread.start()
    thread.join()
    assert thread.ident == results.get(timeout=1)


@requires_python_version(3, 3)
def test_get_current_thread_id_main_thread():
    results = Queue(maxsize=1)

    def target():
        # mock that somehow the current thread doesn't exist
        with mock.patch("threading.current_thread", side_effect=[None]):
            results.put(get_current_thread_id())

    thread_id = threading.main_thread().ident if sys.version_info >= (3, 4) else None

    thread = threading.Thread(target=target)
    thread.start()
    thread.join()
    assert thread_id == results.get(timeout=1)


def get_scheduler_threads(scheduler):
    return [thread for thread in threading.enumerate() if thread.name == scheduler.name]


@requires_python_version(3, 3)
@pytest.mark.parametrize(
    ("scheduler_class",),
    [
        pytest.param(ThreadScheduler, id="thread scheduler"),
        pytest.param(
            GeventScheduler,
            marks=[
                requires_gevent,
                pytest.mark.skip(
                    reason="cannot find this thread via threading.enumerate()"
                ),
            ],
            id="gevent scheduler",
        ),
    ],
)
def test_thread_scheduler_single_background_thread(scheduler_class):
    scheduler = scheduler_class(frequency=1000)

    # not yet setup, no scheduler threads yet
    assert len(get_scheduler_threads(scheduler)) == 0

    scheduler.setup()

    # setup but no profiles started so still no threads
    assert len(get_scheduler_threads(scheduler)) == 0

    scheduler.ensure_running()

    # the scheduler always starts exactly 1 thread
    assert len(get_scheduler_threads(scheduler)) == 1

    scheduler.ensure_running()

    # the scheduler still only has 1 thread
    assert len(get_scheduler_threads(scheduler)) == 1

    scheduler.teardown()

    # once finished, the thread should stop
    assert len(get_scheduler_threads(scheduler)) == 0


@requires_python_version(3, 3)
@pytest.mark.parametrize(
    ("scheduler_class",),
    [
        pytest.param(ThreadScheduler, id="thread scheduler"),
        pytest.param(
            GeventScheduler,
            marks=[
                requires_gevent,
                pytest.mark.skip(
                    reason="cannot find this thread via threading.enumerate()"
                ),
            ],
            id="gevent scheduler",
        ),
    ],
)
def test_thread_scheduler_no_thread_on_shutdown(scheduler_class):
    scheduler = scheduler_class(frequency=1000)

    # not yet setup, no scheduler threads yet
    assert len(get_scheduler_threads(scheduler)) == 0

    scheduler.setup()

    # setup but no profiles started so still no threads
    assert len(get_scheduler_threads(scheduler)) == 0

    # mock RuntimeError as if the 3.12 interpreter was shutting down
    with mock.patch(
        "threading.Thread.start",
        side_effect=RuntimeError("can't create new thread at interpreter shutdown"),
    ):
        scheduler.ensure_running()

    assert scheduler.running is False

    # still no thread
    assert len(get_scheduler_threads(scheduler)) == 0

    scheduler.teardown()

    assert len(get_scheduler_threads(scheduler)) == 0


@requires_python_version(3, 3)
@pytest.mark.parametrize(
    ("scheduler_class",),
    [
        pytest.param(ThreadScheduler, id="thread scheduler"),
        pytest.param(GeventScheduler, marks=requires_gevent, id="gevent scheduler"),
    ],
)
@mock.patch("sentry_sdk.profiler.MAX_PROFILE_DURATION_NS", 1)
def test_max_profile_duration_reached(scheduler_class):
    sample = [
        (
            "1",
            extract_stack(
                get_frame(),
                LRUCache(max_size=1),
                cwd=os.getcwd(),
            ),
        ),
    ]

    with scheduler_class(frequency=1000) as scheduler:
        transaction = Transaction(sampled=True)
        with Profile(transaction, scheduler=scheduler) as profile:
            # profile just started, it's active
            assert profile.active

            # write a sample at the start time, so still active
            profile.write(profile.start_ns + 0, sample)
            assert profile.active

            # write a sample at max time, so still active
            profile.write(profile.start_ns + 1, sample)
            assert profile.active

            # write a sample PAST the max time, so now inactive
            profile.write(profile.start_ns + 2, sample)
            assert not profile.active


class NoopScheduler(Scheduler):
    def setup(self):
        # type: () -> None
        pass

    def teardown(self):
        # type: () -> None
        pass

    def ensure_running(self):
        # type: () -> None
        pass


current_thread = threading.current_thread()
thread_metadata = {
    str(current_thread.ident): {
        "name": str(current_thread.name),
    },
}


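# two stacks captured at the same spot, truncated to depths 1 and 2; each
# extract_stack result is a 3-tuple whose last element is the frame list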
sample_stacks = [
    extract_stack(
        get_frame(),
        LRUCache(max_size=1),
        max_stack_depth=1,
        cwd=os.getcwd(),
    ),
    extract_stack(
        get_frame(),
        LRUCache(max_size=1),
        max_stack_depth=2,
        cwd=os.getcwd(),
    ),
]


@requires_python_version(3, 3)
@pytest.mark.parametrize(
    ("samples", "expected"),
    [
        pytest.param(
            [],
            {
                "frames": [],
                "samples": [],
                "stacks": [],
                "thread_metadata": thread_metadata,
            },
            id="empty",
        ),
        pytest.param(
            [(6, [("1", sample_stacks[0])])],
            {
                "frames": [],
                "samples": [],
                "stacks": [],
                "thread_metadata": thread_metadata,
            },
            id="single sample out of range",
        ),
        pytest.param(
            [(0, [("1", sample_stacks[0])])],
            {
                "frames": [sample_stacks[0][2][0]],
                "samples": [
                    {
                        "elapsed_since_start_ns": "0",
                        "thread_id": "1",
                        "stack_id": 0,
                    },
                ],
                "stacks": [[0]],
                "thread_metadata": thread_metadata,
            },
            id="single sample in range",
        ),
        pytest.param(
            [
                (0, [("1", sample_stacks[0])]),
                (1, [("1", sample_stacks[0])]),
            ],
            {
                "frames": [sample_stacks[0][2][0]],
                "samples": [
                    {
                        "elapsed_since_start_ns": "0",
                        "thread_id": "1",
                        "stack_id": 0,
                    },
                    {
                        "elapsed_since_start_ns": "1",
                        "thread_id": "1",
                        "stack_id": 0,
                    },
                ],
                "stacks": [[0]],
                "thread_metadata": thread_metadata,
            },
            id="two identical stacks",
        ),
        pytest.param(
            [
                (0, [("1", sample_stacks[0])]),
                (1, [("1", sample_stacks[1])]),
            ],
            {
                "frames": [
                    sample_stacks[0][2][0],
                    sample_stacks[1][2][0],
                ],
                "samples": [
                    {
                        "elapsed_since_start_ns": "0",
                        "thread_id": "1",
                        "stack_id": 0,
                    },
                    {
                        "elapsed_since_start_ns": "1",
                        "thread_id": "1",
                        "stack_id": 1,
                    },
                ],
                "stacks": [[0], [1, 0]],
                "thread_metadata": thread_metadata,
            },
            id="two different stacks",
        ),
    ],
)
@mock.patch("sentry_sdk.profiler.MAX_PROFILE_DURATION_NS", 5)
def test_profile_processing(
    DictionaryContaining,  # noqa: N803
    samples,
    expected,
):
    with NoopScheduler(frequency=1000) as scheduler:
        transaction = Transaction(sampled=True)
        with Profile(transaction, scheduler=scheduler) as profile:
            for ts, sample in samples:
                # force the sample to be written at a time relative to the
                # start of the profile
                now = profile.start_ns + ts
                profile.write(now, sample)

            processed = profile.process()

            assert processed["thread_metadata"] == DictionaryContaining(
                expected["thread_metadata"]
            )
            assert processed["frames"] == expected["frames"]
            assert processed["stacks"] == expected["stacks"]
            assert processed["samples"] == expected["samples"]
sentry-python-1.39.2/tests/test_scope.py000066400000000000000000000077321454744723200203620ustar00rootroot00000000000000import copy
import os
import pytest
from sentry_sdk import capture_exception
from sentry_sdk.scope import Scope

try:
    from unittest import mock  # python 3.3 and above
except ImportError:
    import mock  # python < 3.3


def test_copying():
    s1 = Scope()
    s1.fingerprint = {}
    s1.set_tag("foo", "bar")

    s2 = copy.copy(s1)
    assert "foo" in s2._tags

    s1.set_tag("bam", "baz")
    assert "bam" in s1._tags
    assert "bam" not in s2._tags

    assert s1._fingerprint is s2._fingerprint


def test_merging(sentry_init, capture_events):
    sentry_init()

    s = Scope()
    s.set_user({"id": "42"})

    events = capture_events()

    capture_exception(NameError(), scope=s)

    (event,) = events
    assert event["user"] == {"id": "42"}


def test_common_args():
    s = Scope()
    s.update_from_kwargs(
        user={"id": 23},
        level="warning",
        extras={"k": "v"},
        contexts={"os": {"name": "Blafasel"}},
        tags={"x": "y"},
        fingerprint=["foo"],
    )

    s2 = Scope()
    s2.set_extra("foo", "bar")
    s2.set_tag("a", "b")
    s2.set_context("device", {"a": "b"})
    s2.update_from_scope(s)

    assert s._user == {"id": 23}
    assert s._level == "warning"
    assert s._extras == {"k": "v"}
    assert s._contexts == {"os": {"name": "Blafasel"}}
    assert s._tags == {"x": "y"}
    assert s._fingerprint == ["foo"]

    assert s._user == s2._user
    assert s._level == s2._level
    assert s._fingerprint == s2._fingerprint
    assert s2._extras == {"k": "v", "foo": "bar"}
    assert s2._tags == {"a": "b", "x": "y"}
    assert s2._contexts == {"os": {"name": "Blafasel"}, "device": {"a": "b"}}


BAGGAGE_VALUE = (
    "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, "
    "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, "
    "sentry-user_id=Am%C3%A9lie, other-vendor-value-2=foo;bar;"
)

SENTRY_TRACE_VALUE = "771a43a4192642f0b136d5159a501700-1234567890abcdef-1"


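# reading trace data from the environment is enabled by default and is only
# turned off by an explicit negative value such as "no"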
@pytest.mark.parametrize(
    "env,excepted_value",
    [
        (
            {
                "SENTRY_TRACE": SENTRY_TRACE_VALUE,
            },
            {
                "sentry-trace": SENTRY_TRACE_VALUE,
            },
        ),
        (
            {
                "SENTRY_BAGGAGE": BAGGAGE_VALUE,
            },
            {
                "baggage": BAGGAGE_VALUE,
            },
        ),
        (
            {
                "SENTRY_TRACE": SENTRY_TRACE_VALUE,
                "SENTRY_BAGGAGE": BAGGAGE_VALUE,
            },
            {
                "sentry-trace": SENTRY_TRACE_VALUE,
                "baggage": BAGGAGE_VALUE,
            },
        ),
        (
            {
                "SENTRY_USE_ENVIRONMENT": "",
                "SENTRY_TRACE": SENTRY_TRACE_VALUE,
                "SENTRY_BAGGAGE": BAGGAGE_VALUE,
            },
            {
                "sentry-trace": SENTRY_TRACE_VALUE,
                "baggage": BAGGAGE_VALUE,
            },
        ),
        (
            {
                "SENTRY_USE_ENVIRONMENT": "True",
                "SENTRY_TRACE": SENTRY_TRACE_VALUE,
                "SENTRY_BAGGAGE": BAGGAGE_VALUE,
            },
            {
                "sentry-trace": SENTRY_TRACE_VALUE,
                "baggage": BAGGAGE_VALUE,
            },
        ),
        (
            {
                "SENTRY_USE_ENVIRONMENT": "no",
                "SENTRY_TRACE": SENTRY_TRACE_VALUE,
                "SENTRY_BAGGAGE": BAGGAGE_VALUE,
            },
            None,
        ),
        (
            {
                "SENTRY_USE_ENVIRONMENT": "True",
                "MY_OTHER_VALUE": "asdf",
                "SENTRY_RELEASE": "1.0.0",
            },
            None,
        ),
    ],
)
def test_load_trace_data_from_env(env, expected_value):
    new_env = os.environ.copy()
    new_env.update(env)

    with mock.patch.dict(os.environ, new_env):
        s = Scope()
        incoming_trace_data = s._load_trace_data_from_env()
        assert incoming_trace_data == expected_value
sentry-python-1.39.2/tests/test_scrubber.py000066400000000000000000000113521454744723200210510ustar00rootroot00000000000000import sys
import logging

from sentry_sdk import capture_exception, capture_event, start_transaction, start_span
from sentry_sdk.utils import event_from_exception
from sentry_sdk.scrubber import EventScrubber


logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)


def test_request_scrubbing(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    try:
        1 / 0
    except ZeroDivisionError:
        ev, _hint = event_from_exception(sys.exc_info())

        ev["request"] = {
            "headers": {
                "COOKIE": "secret",
                "authorization": "Bearer bla",
                "ORIGIN": "google.com",
            },
            "cookies": {
                "sessionid": "secret",
                "foo": "bar",
            },
            "data": {
                "token": "secret",
                "foo": "bar",
            },
        }

        capture_event(ev)

    (event,) = events

    assert event["request"] == {
        "headers": {
            "COOKIE": "[Filtered]",
            "authorization": "[Filtered]",
            "ORIGIN": "google.com",
        },
        "cookies": {"sessionid": "[Filtered]", "foo": "bar"},
        "data": {"token": "[Filtered]", "foo": "bar"},
    }

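    # _meta records one remark per scrubbed field: rule "!config", type "s"
    # (substituted)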
    assert event["_meta"]["request"] == {
        "headers": {
            "COOKIE": {"": {"rem": [["!config", "s"]]}},
            "authorization": {"": {"rem": [["!config", "s"]]}},
        },
        "cookies": {"sessionid": {"": {"rem": [["!config", "s"]]}}},
        "data": {"token": {"": {"rem": [["!config", "s"]]}}},
    }


def test_stack_var_scrubbing(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    try:
        password = "supersecret"  # noqa
        api_key = "1231231231"  # noqa
        safe = "keepthis"  # noqa
        1 / 0
    except ZeroDivisionError:
        capture_exception()

    (event,) = events

    frames = event["exception"]["values"][0]["stacktrace"]["frames"]
    (frame,) = frames
    assert frame["vars"]["password"] == "[Filtered]"
    assert frame["vars"]["api_key"] == "[Filtered]"
    assert frame["vars"]["safe"] == "'keepthis'"

    meta = event["_meta"]["exception"]["values"]["0"]["stacktrace"]["frames"]["0"][
        "vars"
    ]
    assert meta == {
        "password": {"": {"rem": [["!config", "s"]]}},
        "api_key": {"": {"rem": [["!config", "s"]]}},
    }


def test_breadcrumb_extra_scrubbing(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    logger.info("bread", extra=dict(foo=42, password="secret"))
    logger.critical("whoops", extra=dict(bar=69, auth="secret"))

    (event,) = events

    assert event["extra"]["bar"] == 69
    assert event["extra"]["auth"] == "[Filtered]"

    assert event["breadcrumbs"]["values"][0]["data"] == {
        "foo": 42,
        "password": "[Filtered]",
    }

    assert event["_meta"]["extra"]["auth"] == {"": {"rem": [["!config", "s"]]}}
    assert event["_meta"]["breadcrumbs"] == {
        "values": {"0": {"data": {"password": {"": {"rem": [["!config", "s"]]}}}}}
    }


def test_span_data_scrubbing(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0)
    events = capture_events()

    with start_transaction(name="hi"):
        with start_span(op="foo", description="bar") as span:
            span.set_data("password", "secret")
            span.set_data("datafoo", "databar")

    (event,) = events
    assert event["spans"][0]["data"] == {"password": "[Filtered]", "datafoo": "databar"}
    assert event["_meta"]["spans"] == {
        "0": {"data": {"password": {"": {"rem": [["!config", "s"]]}}}}
    }


def test_custom_denylist(sentry_init, capture_events):
    sentry_init(event_scrubber=EventScrubber(denylist=["my_sensitive_var"]))
    events = capture_events()

    try:
        my_sensitive_var = "secret"  # noqa
        safe = "keepthis"  # noqa
        1 / 0
    except ZeroDivisionError:
        capture_exception()

    (event,) = events

    frames = event["exception"]["values"][0]["stacktrace"]["frames"]
    (frame,) = frames
    assert frame["vars"]["my_sensitive_var"] == "[Filtered]"
    assert frame["vars"]["safe"] == "'keepthis'"

    meta = event["_meta"]["exception"]["values"]["0"]["stacktrace"]["frames"]["0"][
        "vars"
    ]
    assert meta == {
        "my_sensitive_var": {"": {"rem": [["!config", "s"]]}},
    }


def test_scrubbing_doesnt_affect_local_vars(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    try:
        password = "cat123"
        1 / 0
    except ZeroDivisionError:
        capture_exception()

    (event,) = events

    frames = event["exception"]["values"][0]["stacktrace"]["frames"]
    (frame,) = frames
    assert frame["vars"]["password"] == "[Filtered]"
    assert password == "cat123"
sentry-python-1.39.2/tests/test_serializer.py000066400000000000000000000113201454744723200214060ustar00rootroot00000000000000import re
import sys
import pytest

from sentry_sdk.serializer import MAX_DATABAG_BREADTH, MAX_DATABAG_DEPTH, serialize

try:
    from hypothesis import given
    import hypothesis.strategies as st
except ImportError:
    pass
else:

    def test_bytes_serialization_decode_many(message_normalizer):
        @given(binary=st.binary(min_size=1))
        def inner(binary):
            result = message_normalizer(binary, should_repr_strings=False)
            assert result == binary.decode("utf-8", "replace")

        inner()

    def test_bytes_serialization_repr_many(message_normalizer):
        @given(binary=st.binary(min_size=1))
        def inner(binary):
            result = message_normalizer(binary, should_repr_strings=True)
            assert result == repr(binary)

        inner()


@pytest.fixture
def message_normalizer(validate_event_schema):
    def inner(message, **kwargs):
        event = serialize({"logentry": {"message": message}}, **kwargs)
        validate_event_schema(event)
        return event["logentry"]["message"]

    return inner


@pytest.fixture
def extra_normalizer(validate_event_schema):
    def inner(extra, **kwargs):
        event = serialize({"extra": {"foo": extra}}, **kwargs)
        validate_event_schema(event)
        return event["extra"]["foo"]

    return inner


@pytest.fixture
def body_normalizer(validate_event_schema):
    def inner(body, **kwargs):
        event = serialize({"request": {"data": body}}, **kwargs)
        validate_event_schema(event)
        return event["request"]["data"]

    return inner


def test_bytes_serialization_decode(message_normalizer):
    binary = b"abc123\x80\xf0\x9f\x8d\x95"
    result = message_normalizer(binary, should_repr_strings=False)
    # fmt: off
    assert result == u"abc123\ufffd\U0001f355"
    # fmt: on


@pytest.mark.xfail(sys.version_info < (3,), reason="Known safe_repr bugs in Py2.7")
def test_bytes_serialization_repr(message_normalizer):
    binary = b"abc123\x80\xf0\x9f\x8d\x95"
    result = message_normalizer(binary, should_repr_strings=True)
    assert result == r"b'abc123\x80\xf0\x9f\x8d\x95'"


def test_bytearray_serialization_decode(message_normalizer):
    binary = bytearray(b"abc123\x80\xf0\x9f\x8d\x95")
    result = message_normalizer(binary, should_repr_strings=False)
    # fmt: off
    assert result == u"abc123\ufffd\U0001f355"
    # fmt: on


@pytest.mark.xfail(sys.version_info < (3,), reason="Known safe_repr bugs in Py2.7")
def test_bytearray_serialization_repr(message_normalizer):
    binary = bytearray(b"abc123\x80\xf0\x9f\x8d\x95")
    result = message_normalizer(binary, should_repr_strings=True)
    assert result == r"bytearray(b'abc123\x80\xf0\x9f\x8d\x95')"


def test_memoryview_serialization_repr(message_normalizer):
    binary = memoryview(b"abc123\x80\xf0\x9f\x8d\x95")
    result = message_normalizer(binary, should_repr_strings=False)
    assert re.match(r"^$", result)


def test_serialize_sets(extra_normalizer):
    result = extra_normalizer({1, 2, 3})
    assert result == [1, 2, 3]


def test_serialize_custom_mapping(extra_normalizer):
    class CustomReprDict(dict):
        def __sentry_repr__(self):
            return "custom!"

    result = extra_normalizer(CustomReprDict(one=1, two=2))
    assert result == "custom!"


def test_custom_mapping_doesnt_mess_with_mock(extra_normalizer):
    """
    Adding the __sentry_repr__ magic method check in the serializer
    shouldn't mess with how mock works. This broke some stuff when we added
    sentry_repr without the dunders.
    """
    mock = pytest.importorskip("unittest.mock")
    m = mock.Mock()
    extra_normalizer(m)
    assert len(m.mock_calls) == 0


def test_trim_databag_breadth(body_normalizer):
    data = {
        "key{}".format(i): "value{}".format(i) for i in range(MAX_DATABAG_BREADTH + 10)
    }

    result = body_normalizer(data)

    assert len(result) == MAX_DATABAG_BREADTH
    for key, value in result.items():
        assert data.get(key) == value


def test_no_trimming_if_max_request_body_size_is_always(body_normalizer):
    data = {
        "key{}".format(i): "value{}".format(i) for i in range(MAX_DATABAG_BREADTH + 10)
    }
    curr = data
    for _ in range(MAX_DATABAG_DEPTH + 5):
        curr["nested"] = {}
        curr = curr["nested"]

    result = body_normalizer(data, max_request_body_size="always")

    assert result == data


def test_max_value_length_default(body_normalizer):
    data = {"key": "a" * 2000}

    result = body_normalizer(data)

    assert len(result["key"]) == 1024  # fallback max length


def test_max_value_length(body_normalizer):
    data = {"key": "a" * 2000}

    max_value_length = 1800
    result = body_normalizer(data, max_value_length=max_value_length)

    assert len(result["key"]) == max_value_length
sentry-python-1.39.2/tests/test_sessions.py000066400000000000000000000101301454744723200211010ustar00rootroot00000000000000import sentry_sdk

from sentry_sdk import Hub
from sentry_sdk.sessions import auto_session_tracking

try:
    from unittest import mock  # python 3.3 and above
except ImportError:
    import mock  # python < 3.3


def sorted_aggregates(item):
    aggregates = item["aggregates"]
    aggregates.sort(key=lambda item: (item["started"], item.get("did", "")))
    return aggregates


def test_basic(sentry_init, capture_envelopes):
    sentry_init(release="fun-release", environment="not-fun-env")
    envelopes = capture_envelopes()

    hub = Hub.current
    hub.start_session()

    try:
        with hub.configure_scope() as scope:
            scope.set_user({"id": "42"})
            raise Exception("all is wrong")
    except Exception:
        hub.capture_exception()
    hub.end_session()
    hub.flush()

    assert len(envelopes) == 2
    assert envelopes[0].get_event() is not None

    sess = envelopes[1]
    assert len(sess.items) == 1
    sess_event = sess.items[0].payload.json

    assert sess_event["attrs"] == {
        "release": "fun-release",
        "environment": "not-fun-env",
    }
    assert sess_event["did"] == "42"
    assert sess_event["init"]
    assert sess_event["status"] == "exited"
    assert sess_event["errors"] == 1


def test_aggregates(sentry_init, capture_envelopes):
    sentry_init(
        release="fun-release",
        environment="not-fun-env",
    )
    envelopes = capture_envelopes()

    hub = Hub.current

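    # request-mode sessions are rolled up into aggregate counts instead of
    # being sent individually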
    with auto_session_tracking(session_mode="request"):
        with sentry_sdk.push_scope():
            try:
                with sentry_sdk.configure_scope() as scope:
                    scope.set_user({"id": "42"})
                    raise Exception("all is wrong")
            except Exception:
                sentry_sdk.capture_exception()

    with auto_session_tracking(session_mode="request"):
        pass

    hub.start_session(session_mode="request")
    hub.end_session()

    sentry_sdk.flush()

    assert len(envelopes) == 2
    assert envelopes[0].get_event() is not None

    sess = envelopes[1]
    assert len(sess.items) == 1
    sess_event = sess.items[0].payload.json
    assert sess_event["attrs"] == {
        "release": "fun-release",
        "environment": "not-fun-env",
    }

    aggregates = sorted_aggregates(sess_event)
    assert len(aggregates) == 1
    assert aggregates[0]["exited"] == 2
    assert aggregates[0]["errored"] == 1


def test_aggregates_explicitly_disabled_session_tracking_request_mode(
    sentry_init, capture_envelopes
):
    sentry_init(
        release="fun-release", environment="not-fun-env", auto_session_tracking=False
    )
    envelopes = capture_envelopes()

    hub = Hub.current

    with auto_session_tracking(session_mode="request"):
        with sentry_sdk.push_scope():
            try:
                raise Exception("all is wrong")
            except Exception:
                sentry_sdk.capture_exception()

    with auto_session_tracking(session_mode="request"):
        pass

    hub.start_session(session_mode="request")
    hub.end_session()

    sentry_sdk.flush()

    sess = envelopes[1]
    assert len(sess.items) == 1
    sess_event = sess.items[0].payload.json

    aggregates = sorted_aggregates(sess_event)
    assert len(aggregates) == 1
    assert aggregates[0]["exited"] == 1
    assert "errored" not in aggregates[0]


def test_no_thread_on_shutdown_no_errors(sentry_init):
    sentry_init(
        release="fun-release",
        environment="not-fun-env",
    )

    hub = Hub.current

    # make it seem like the interpreter is shutting down
    with mock.patch(
        "threading.Thread.start",
        side_effect=RuntimeError("can't create new thread at interpreter shutdown"),
    ):
        with auto_session_tracking(session_mode="request"):
            with sentry_sdk.push_scope():
                try:
                    raise Exception("all is wrong")
                except Exception:
                    sentry_sdk.capture_exception()

        with auto_session_tracking(session_mode="request"):
            pass

        hub.start_session(session_mode="request")
        hub.end_session()

        sentry_sdk.flush()
sentry-python-1.39.2/tests/test_spotlight.py000066400000000000000000000027221454744723200212600ustar00rootroot00000000000000import pytest

from sentry_sdk import Hub, capture_exception


@pytest.fixture
def capture_spotlight_envelopes(monkeypatch):
    def inner():
        envelopes = []
        test_spotlight = Hub.current.client.spotlight
        old_capture_envelope = test_spotlight.capture_envelope

        def append_envelope(envelope):
            envelopes.append(envelope)
            return old_capture_envelope(envelope)

        monkeypatch.setattr(test_spotlight, "capture_envelope", append_envelope)
        return envelopes

    return inner


def test_spotlight_off_by_default(sentry_init):
    sentry_init()
    assert Hub.current.client.spotlight is None


def test_spotlight_default_url(sentry_init):
    sentry_init(spotlight=True)

    spotlight = Hub.current.client.spotlight
    assert spotlight is not None
    assert spotlight.url == "http://localhost:8969/stream"


def test_spotlight_custom_url(sentry_init):
    sentry_init(spotlight="http://foobar@test.com/132")

    spotlight = Hub.current.client.spotlight
    assert spotlight is not None
    assert spotlight.url == "http://foobar@test.com/132"


def test_spotlight_envelope(sentry_init, capture_spotlight_envelopes):
    sentry_init(spotlight=True)
    envelopes = capture_spotlight_envelopes()

    try:
        raise ValueError("aha!")
    except Exception:
        capture_exception()

    (envelope,) = envelopes
    payload = envelope.items[0].payload.json

    assert payload["exception"]["values"][0]["value"] == "aha!"
sentry-python-1.39.2/tests/test_transport.py000066400000000000000000000277011454744723200213030ustar00rootroot00000000000000# coding: utf-8
import logging
import pickle
import gzip
import io

from datetime import datetime, timedelta

import pytest
from collections import namedtuple
from werkzeug.wrappers import Request, Response

from pytest_localserver.http import WSGIServer

from sentry_sdk import Hub, Client, add_breadcrumb, capture_message, Scope
from sentry_sdk._compat import datetime_utcnow
from sentry_sdk.transport import _parse_rate_limits
from sentry_sdk.envelope import Envelope, parse_json
from sentry_sdk.integrations.logging import LoggingIntegration

try:
    from unittest import mock  # python 3.3 and above
except ImportError:
    import mock  # python < 3.3

CapturedData = namedtuple("CapturedData", ["path", "event", "envelope", "compressed"])


class CapturingServer(WSGIServer):
    def __init__(self, host="127.0.0.1", port=0, ssl_context=None):
        WSGIServer.__init__(self, host, port, self, ssl_context=ssl_context)
        self.code = 204
        self.headers = {}
        self.captured = []

    def respond_with(self, code=200, headers=None):
        self.code = code
        if headers:
            self.headers = headers

    def clear_captured(self):
        del self.captured[:]

    def __call__(self, environ, start_response):
        """
        This is the WSGI application.
        """
        request = Request(environ)
        event = envelope = None
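        # Transparently decode gzip-compressed request bodies and remember
        # whether the payload was compressed, so tests can assert on it.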
        if request.headers.get("content-encoding") == "gzip":
            rdr = gzip.GzipFile(fileobj=io.BytesIO(request.data))
            compressed = True
        else:
            rdr = io.BytesIO(request.data)
            compressed = False

        if request.mimetype == "application/json":
            event = parse_json(rdr.read())
        else:
            envelope = Envelope.deserialize_from(rdr)

        self.captured.append(
            CapturedData(
                path=request.path,
                event=event,
                envelope=envelope,
                compressed=compressed,
            )
        )

        response = Response(status=self.code)
        response.headers.extend(self.headers)
        return response(environ, start_response)


@pytest.fixture
def capturing_server(request):
    server = CapturingServer()
    server.start()
    request.addfinalizer(server.stop)
    return server


@pytest.fixture
def make_client(request, capturing_server):
    def inner(**kwargs):
        return Client(
            "http://foobar@{}/132".format(capturing_server.url[len("http://") :]),
            **kwargs
        )

    return inner


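# Note: the DSN above points the client at the local capturing server with a
# project id of 132, hence the "/api/132/envelope/" and "/api/132/store/"
# request paths asserted throughout this file.

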
@pytest.mark.forked
@pytest.mark.parametrize("debug", (True, False))
@pytest.mark.parametrize("client_flush_method", ["close", "flush"])
@pytest.mark.parametrize("use_pickle", (True, False))
@pytest.mark.parametrize("compressionlevel", (0, 9))
def test_transport_works(
    capturing_server,
    request,
    capsys,
    caplog,
    debug,
    make_client,
    client_flush_method,
    use_pickle,
    compressionlevel,
    maybe_monkeypatched_threading,
):
    caplog.set_level(logging.DEBUG)
    client = make_client(
        debug=debug,
        _experiments={
            "transport_zlib_compression_level": compressionlevel,
        },
    )

    if use_pickle:
        client = pickle.loads(pickle.dumps(client))

    Hub.current.bind_client(client)
    request.addfinalizer(lambda: Hub.current.bind_client(None))

    add_breadcrumb(level="info", message="i like bread", timestamp=datetime_utcnow())
    capture_message("löl")

    getattr(client, client_flush_method)()

    out, err = capsys.readouterr()
    assert not err and not out
    assert capturing_server.captured
    assert capturing_server.captured[0].compressed == (compressionlevel > 0)

    assert any("Sending event" in record.msg for record in caplog.records) == debug


@pytest.mark.parametrize(
    "num_pools,expected_num_pools",
    (
        (None, 2),
        (2, 2),
        (10, 10),
    ),
)
def test_transport_num_pools(make_client, num_pools, expected_num_pools):
    _experiments = {}
    if num_pools is not None:
        _experiments["transport_num_pools"] = num_pools

    client = make_client(_experiments=_experiments)

    options = client.transport._get_pool_options([])
    assert options["num_pools"] == expected_num_pools


def test_transport_infinite_loop(capturing_server, request, make_client):
    client = make_client(
        debug=True,
        # Make sure we cannot create events from our own logging
        integrations=[LoggingIntegration(event_level=logging.DEBUG)],
    )

    with Hub(client):
        capture_message("hi")
        client.flush()

    assert len(capturing_server.captured) == 1


def test_transport_no_thread_on_shutdown_no_errors(capturing_server, make_client):
    client = make_client()

    # make it seem like the interpreter is shutting down
    with mock.patch(
        "threading.Thread.start",
        side_effect=RuntimeError("can't create new thread at interpreter shutdown"),
    ):
        with Hub(client):
            capture_message("hi")

    # nothing exploded but also no events can be sent anymore
    assert len(capturing_server.captured) == 0


NOW = datetime(2014, 6, 2)


@pytest.mark.parametrize(
    "input,expected",
    [
        # Invalid rate limits
        ("", {}),
        ("invalid", {}),
        (",,,", {}),
        (
            "42::organization, invalid, 4711:foobar;transaction;security:project",
            {
                None: NOW + timedelta(seconds=42),
                "transaction": NOW + timedelta(seconds=4711),
                "security": NOW + timedelta(seconds=4711),
                # Unknown data categories
                "foobar": NOW + timedelta(seconds=4711),
            },
        ),
        (
            "4711:foobar;;transaction:organization",
            {
                "transaction": NOW + timedelta(seconds=4711),
                # Unknown data categories
                "foobar": NOW + timedelta(seconds=4711),
                "": NOW + timedelta(seconds=4711),
            },
        ),
    ],
)
def test_parse_rate_limits(input, expected):
    assert dict(_parse_rate_limits(input, now=NOW)) == expected


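# A sketch of the "X-Sentry-Rate-Limits" grammar exercised above (inferred
# from the test data rather than a formal spec): the header is a
# comma-separated list of "<retry_seconds>:<category;category;...>:<scope>"
# entries, and an empty category list (e.g. "42::organization") disables all
# categories, represented by the `None` key.

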
def test_simple_rate_limits(capturing_server, capsys, caplog, make_client):
    client = make_client()
    capturing_server.respond_with(code=429, headers={"Retry-After": "4"})

    client.capture_event({"type": "transaction"})
    client.flush()

    assert len(capturing_server.captured) == 1
    assert capturing_server.captured[0].path == "/api/132/envelope/"
    capturing_server.clear_captured()

    assert set(client.transport._disabled_until) == set([None])

    client.capture_event({"type": "transaction"})
    client.capture_event({"type": "event"})
    client.flush()

    assert not capturing_server.captured


@pytest.mark.parametrize("response_code", [200, 429])
def test_data_category_limits(
    capturing_server, capsys, caplog, response_code, make_client, monkeypatch
):
    client = make_client(send_client_reports=False)

    captured_outcomes = []

    def record_lost_event(reason, data_category=None, item=None):
        if data_category is None:
            data_category = item.data_category
        return captured_outcomes.append((reason, data_category))

    monkeypatch.setattr(client.transport, "record_lost_event", record_lost_event)

    capturing_server.respond_with(
        code=response_code,
        headers={"X-Sentry-Rate-Limits": "4711:transaction:organization"},
    )

    client.capture_event({"type": "transaction"})
    client.flush()

    assert len(capturing_server.captured) == 1
    assert capturing_server.captured[0].path == "/api/132/envelope/"
    capturing_server.clear_captured()

    assert set(client.transport._disabled_until) == set(["transaction"])

    client.capture_event({"type": "transaction"})
    client.capture_event({"type": "transaction"})
    client.flush()

    assert not capturing_server.captured

    client.capture_event({"type": "event"})
    client.flush()

    assert len(capturing_server.captured) == 1
    assert capturing_server.captured[0].path == "/api/132/store/"

    assert captured_outcomes == [
        ("ratelimit_backoff", "transaction"),
        ("ratelimit_backoff", "transaction"),
    ]


@pytest.mark.parametrize("response_code", [200, 429])
def test_data_category_limits_reporting(
    capturing_server, capsys, caplog, response_code, make_client, monkeypatch
):
    client = make_client(send_client_reports=True)

    capturing_server.respond_with(
        code=response_code,
        headers={
            "X-Sentry-Rate-Limits": "4711:transaction:organization, 4711:attachment:organization"
        },
    )

    outcomes_enabled = False
    real_fetch = client.transport._fetch_pending_client_report

    def intercepting_fetch(*args, **kwargs):
        if outcomes_enabled:
            return real_fetch(*args, **kwargs)

    monkeypatch.setattr(
        client.transport, "_fetch_pending_client_report", intercepting_fetch
    )
    # get rid of threading making things hard to track
    monkeypatch.setattr(client.transport._worker, "submit", lambda x: x() or True)

    client.capture_event({"type": "transaction"})
    client.flush()

    assert len(capturing_server.captured) == 1
    assert capturing_server.captured[0].path == "/api/132/envelope/"
    capturing_server.clear_captured()

    assert set(client.transport._disabled_until) == set(["attachment", "transaction"])

    client.capture_event({"type": "transaction"})
    client.capture_event({"type": "transaction"})
    capturing_server.clear_captured()

    # flush out the events but don't flush the client reports
    client.flush()
    client.transport._last_client_report_sent = 0
    outcomes_enabled = True

    scope = Scope()
    scope.add_attachment(bytes=b"Hello World", filename="hello.txt")
    client.capture_event({"type": "error"}, scope=scope)
    client.flush()

    # this goes out with an extra envelope because it's flushed after the last item
    # that is normally in the queue.  This is quite funny in a way because it means
    # that the envelope that caused its own over quota report (an error with an
    # attachment) will include its outcome since it's pending.
    assert len(capturing_server.captured) == 1
    envelope = capturing_server.captured[0].envelope
    assert envelope.items[0].type == "event"
    assert envelope.items[1].type == "client_report"
    report = parse_json(envelope.items[1].get_bytes())
    assert sorted(report["discarded_events"], key=lambda x: x["quantity"]) == [
        {"category": "transaction", "reason": "ratelimit_backoff", "quantity": 2},
        {"category": "attachment", "reason": "ratelimit_backoff", "quantity": 11},
    ]
    capturing_server.clear_captured()

    # here we sent a normal event
    client.capture_event({"type": "transaction"})
    client.capture_event({"type": "error", "release": "foo"})
    client.flush()

    assert len(capturing_server.captured) == 2

    event = capturing_server.captured[0].event
    assert event["type"] == "error"
    assert event["release"] == "foo"

    envelope = capturing_server.captured[1].envelope
    assert envelope.items[0].type == "client_report"
    report = parse_json(envelope.items[0].get_bytes())
    assert report["discarded_events"] == [
        {"category": "transaction", "reason": "ratelimit_backoff", "quantity": 1},
    ]


@pytest.mark.parametrize("response_code", [200, 429])
def test_complex_limits_without_data_category(
    capturing_server, capsys, caplog, response_code, make_client
):
    client = make_client()
    capturing_server.respond_with(
        code=response_code,
        headers={"X-Sentry-Rate-Limits": "4711::organization"},
    )

    client.capture_event({"type": "transaction"})
    client.flush()

    assert len(capturing_server.captured) == 1
    assert capturing_server.captured[0].path == "/api/132/envelope/"
    capturing_server.clear_captured()

    assert set(client.transport._disabled_until) == set([None])

    client.capture_event({"type": "transaction"})
    client.capture_event({"type": "transaction"})
    client.capture_event({"type": "event"})
    client.flush()

    assert len(capturing_server.captured) == 0
sentry-python-1.39.2/tests/test_utils.py
import pytest
import re
import sys

from sentry_sdk.utils import (
    Components,
    Dsn,
    get_default_release,
    get_error_message,
    get_git_revision,
    is_valid_sample_rate,
    logger,
    match_regex_list,
    parse_url,
    parse_version,
    safe_str,
    sanitize_url,
    serialize_frame,
    is_sentry_url,
    _get_installed_modules,
)

import sentry_sdk

try:
    from unittest import mock  # python 3.3 and above
except ImportError:
    import mock  # python < 3.3

try:
    # Python 3
    FileNotFoundError
except NameError:
    # Python 2
    FileNotFoundError = IOError


def _normalize_distribution_name(name):
    # type: (str) -> str
    """Normalize distribution name according to PEP-0503.

    See:
    https://peps.python.org/pep-0503/#normalized-names
    for more details.
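
    For example, "Foo__Bar.baz" normalizes to "foo-bar-baz".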
    """
    return re.sub(r"[-_.]+", "-", name).lower()


@pytest.mark.parametrize(
    ("url", "expected_result"),
    [
        ("http://localhost:8000", "http://localhost:8000"),
        ("http://example.com", "http://example.com"),
        ("https://example.com", "https://example.com"),
        (
            "example.com?token=abc&sessionid=123&save=true",
            "example.com?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
        ),
        (
            "http://example.com?token=abc&sessionid=123&save=true",
            "http://example.com?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
        ),
        (
            "https://example.com?token=abc&sessionid=123&save=true",
            "https://example.com?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
        ),
        (
            "http://localhost:8000/?token=abc&sessionid=123&save=true",
            "http://localhost:8000/?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
        ),
        (
            "ftp://username:password@ftp.example.com:9876/bla/blub#foo",
            "ftp://[Filtered]:[Filtered]@ftp.example.com:9876/bla/blub#foo",
        ),
        (
            "https://username:password@example.com/bla/blub?token=abc&sessionid=123&save=true#fragment",
            "https://[Filtered]:[Filtered]@example.com/bla/blub?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]#fragment",
        ),
        ("bla/blub/foo", "bla/blub/foo"),
        ("/bla/blub/foo/", "/bla/blub/foo/"),
        (
            "bla/blub/foo?token=abc&sessionid=123&save=true",
            "bla/blub/foo?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
        ),
        (
            "/bla/blub/foo/?token=abc&sessionid=123&save=true",
            "/bla/blub/foo/?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
        ),
    ],
)
def test_sanitize_url(url, expected_result):
    # sort parts because old Python versions (<3.6) don't preserve order
    sanitized_url = sanitize_url(url)
    parts = sorted(re.split(r"\&|\?|\#", sanitized_url))
    expected_parts = sorted(re.split(r"\&|\?|\#", expected_result))

    assert parts == expected_parts


@pytest.mark.parametrize(
    ("url", "expected_result"),
    [
        (
            "http://localhost:8000",
            Components(
                scheme="http", netloc="localhost:8000", path="", query="", fragment=""
            ),
        ),
        (
            "http://example.com",
            Components(
                scheme="http", netloc="example.com", path="", query="", fragment=""
            ),
        ),
        (
            "https://example.com",
            Components(
                scheme="https", netloc="example.com", path="", query="", fragment=""
            ),
        ),
        (
            "example.com?token=abc&sessionid=123&save=true",
            Components(
                scheme="",
                netloc="",
                path="example.com",
                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
                fragment="",
            ),
        ),
        (
            "http://example.com?token=abc&sessionid=123&save=true",
            Components(
                scheme="http",
                netloc="example.com",
                path="",
                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
                fragment="",
            ),
        ),
        (
            "https://example.com?token=abc&sessionid=123&save=true",
            Components(
                scheme="https",
                netloc="example.com",
                path="",
                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
                fragment="",
            ),
        ),
        (
            "http://localhost:8000/?token=abc&sessionid=123&save=true",
            Components(
                scheme="http",
                netloc="localhost:8000",
                path="/",
                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
                fragment="",
            ),
        ),
        (
            "ftp://username:password@ftp.example.com:9876/bla/blub#foo",
            Components(
                scheme="ftp",
                netloc="[Filtered]:[Filtered]@ftp.example.com:9876",
                path="/bla/blub",
                query="",
                fragment="foo",
            ),
        ),
        (
            "https://username:password@example.com/bla/blub?token=abc&sessionid=123&save=true#fragment",
            Components(
                scheme="https",
                netloc="[Filtered]:[Filtered]@example.com",
                path="/bla/blub",
                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
                fragment="fragment",
            ),
        ),
        (
            "bla/blub/foo",
            Components(
                scheme="", netloc="", path="bla/blub/foo", query="", fragment=""
            ),
        ),
        (
            "bla/blub/foo?token=abc&sessionid=123&save=true",
            Components(
                scheme="",
                netloc="",
                path="bla/blub/foo",
                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
                fragment="",
            ),
        ),
        (
            "/bla/blub/foo/?token=abc&sessionid=123&save=true",
            Components(
                scheme="",
                netloc="",
                path="/bla/blub/foo/",
                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
                fragment="",
            ),
        ),
    ],
)
def test_sanitize_url_and_split(url, expected_result):
    sanitized_url = sanitize_url(url, split=True)
    # sort query because old Python versions (<3.6) don't preserve order
    query = sorted(sanitized_url.query.split("&"))
    expected_query = sorted(expected_result.query.split("&"))

    assert sanitized_url.scheme == expected_result.scheme
    assert sanitized_url.netloc == expected_result.netloc
    assert query == expected_query
    assert sanitized_url.path == expected_result.path
    assert sanitized_url.fragment == expected_result.fragment


@pytest.mark.parametrize(
    ("url", "sanitize", "expected_url", "expected_query", "expected_fragment"),
    [
        # Test with sanitize=True
        (
            "https://example.com",
            True,
            "https://example.com",
            "",
            "",
        ),
        (
            "example.com?token=abc&sessionid=123&save=true",
            True,
            "example.com",
            "token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
            "",
        ),
        (
            "https://example.com?token=abc&sessionid=123&save=true",
            True,
            "https://example.com",
            "token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
            "",
        ),
        (
            "https://username:password@example.com/bla/blub?token=abc&sessionid=123&save=true#fragment",
            True,
            "https://[Filtered]:[Filtered]@example.com/bla/blub",
            "token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
            "fragment",
        ),
        (
            "bla/blub/foo",
            True,
            "bla/blub/foo",
            "",
            "",
        ),
        (
            "/bla/blub/foo/#baz",
            True,
            "/bla/blub/foo/",
            "",
            "baz",
        ),
        (
            "bla/blub/foo?token=abc&sessionid=123&save=true",
            True,
            "bla/blub/foo",
            "token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
            "",
        ),
        (
            "/bla/blub/foo/?token=abc&sessionid=123&save=true",
            True,
            "/bla/blub/foo/",
            "token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
            "",
        ),
        # Test with sanitize=False
        (
            "https://example.com",
            False,
            "https://example.com",
            "",
            "",
        ),
        (
            "example.com?token=abc&sessionid=123&save=true",
            False,
            "example.com",
            "token=abc&sessionid=123&save=true",
            "",
        ),
        (
            "https://example.com?token=abc&sessionid=123&save=true",
            False,
            "https://example.com",
            "token=abc&sessionid=123&save=true",
            "",
        ),
        (
            "https://username:password@example.com/bla/blub?token=abc&sessionid=123&save=true#fragment",
            False,
            "https://[Filtered]:[Filtered]@example.com/bla/blub",
            "token=abc&sessionid=123&save=true",
            "fragment",
        ),
        (
            "bla/blub/foo",
            False,
            "bla/blub/foo",
            "",
            "",
        ),
        (
            "/bla/blub/foo/#baz",
            False,
            "/bla/blub/foo/",
            "",
            "baz",
        ),
        (
            "bla/blub/foo?token=abc&sessionid=123&save=true",
            False,
            "bla/blub/foo",
            "token=abc&sessionid=123&save=true",
            "",
        ),
        (
            "/bla/blub/foo/?token=abc&sessionid=123&save=true",
            False,
            "/bla/blub/foo/",
            "token=abc&sessionid=123&save=true",
            "",
        ),
    ],
)
def test_parse_url(url, sanitize, expected_url, expected_query, expected_fragment):
    assert parse_url(url, sanitize=sanitize).url == expected_url
    assert parse_url(url, sanitize=sanitize).fragment == expected_fragment

    # sort parts because old Python versions (<3.6) don't preserve order
    sanitized_query = parse_url(url, sanitize=sanitize).query
    query_parts = sorted(re.split(r"\&|\?|\#", sanitized_query))
    expected_query_parts = sorted(re.split(r"\&|\?|\#", expected_query))

    assert query_parts == expected_query_parts


@pytest.mark.parametrize(
    "rate",
    [0.0, 0.1231, 1.0, True, False],
)
def test_accepts_valid_sample_rate(rate):
    with mock.patch.object(logger, "warning", mock.Mock()):
        result = is_valid_sample_rate(rate, source="Testing")
        assert logger.warning.called is False
        assert result is True


@pytest.mark.parametrize(
    "rate",
    [
        "dogs are great",  # wrong type
        (0, 1),  # wrong type
        {"Maisey": "Charllie"},  # wrong type
        [True, True],  # wrong type
        {0.2012},  # wrong type
        float("NaN"),  # wrong type
        None,  # wrong type
        -1.121,  # wrong value
        1.231,  # wrong value
    ],
)
def test_warns_on_invalid_sample_rate(rate, StringContaining):  # noqa: N803
    with mock.patch.object(logger, "warning", mock.Mock()):
        result = is_valid_sample_rate(rate, source="Testing")
        logger.warning.assert_any_call(StringContaining("Given sample rate is invalid"))
        assert result is False


@pytest.mark.parametrize(
    "include_source_context",
    [True, False],
)
def test_include_source_context_when_serializing_frame(include_source_context):
    frame = sys._getframe()
    result = serialize_frame(frame, include_source_context=include_source_context)

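    # `a ^ b ^ True` is True exactly when `a == b`, so each assertion checks
    # that the context key is present if and only if it was requested.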
    assert include_source_context ^ ("pre_context" in result) ^ True
    assert include_source_context ^ ("context_line" in result) ^ True
    assert include_source_context ^ ("post_context" in result) ^ True


@pytest.mark.parametrize(
    "item,regex_list,expected_result",
    [
        ["", [], False],
        [None, [], False],
        ["", None, False],
        [None, None, False],
        ["some-string", [], False],
        ["some-string", None, False],
        ["some-string", ["some-string"], True],
        ["some-string", ["some"], False],
        ["some-string", ["some$"], False],  # same as above
        ["some-string", ["some.*"], True],
        ["some-string", ["Some"], False],  # we do case sensitive matching
        ["some-string", [".*string$"], True],
    ],
)
def test_match_regex_list(item, regex_list, expected_result):
    assert match_regex_list(item, regex_list) == expected_result


@pytest.mark.parametrize(
    "version,expected_result",
    [
        ["3.5.15", (3, 5, 15)],
        ["2.0.9", (2, 0, 9)],
        ["2.0.0", (2, 0, 0)],
        ["0.6.0", (0, 6, 0)],
        ["2.0.0.post1", (2, 0, 0)],
        ["2.0.0rc3", (2, 0, 0)],
        ["2.0.0rc2", (2, 0, 0)],
        ["2.0.0rc1", (2, 0, 0)],
        ["2.0.0b4", (2, 0, 0)],
        ["2.0.0b3", (2, 0, 0)],
        ["2.0.0b2", (2, 0, 0)],
        ["2.0.0b1", (2, 0, 0)],
        ["0.6beta3", (0, 6)],
        ["0.6beta2", (0, 6)],
        ["0.6beta1", (0, 6)],
        ["0.4.2b", (0, 4, 2)],
        ["0.4.2a", (0, 4, 2)],
        ["0.0.1", (0, 0, 1)],
        ["0.0.0", (0, 0, 0)],
        ["1", (1,)],
        ["1.0", (1, 0)],
        ["1.0.0", (1, 0, 0)],
        [" 1.0.0 ", (1, 0, 0)],
        ["  1.0.0   ", (1, 0, 0)],
        ["x1.0.0", None],
        ["1.0.0x", None],
        ["x1.0.0x", None],
    ],
)
def test_parse_version(version, expected_result):
    assert parse_version(version) == expected_result


@pytest.fixture
def mock_hub_with_dsn_netloc():
    """
    Returns a mocked hub with a DSN netloc of "abcd1234.ingest.sentry.io".
    """

    mock_hub = mock.Mock(spec=sentry_sdk.Hub)
    mock_hub.client = mock.Mock(spec=sentry_sdk.Client)
    mock_hub.client.transport = mock.Mock(spec=sentry_sdk.Transport)
    mock_hub.client.transport.parsed_dsn = mock.Mock(spec=Dsn)

    mock_hub.client.transport.parsed_dsn.netloc = "abcd1234.ingest.sentry.io"

    return mock_hub


@pytest.mark.parametrize(
    ["test_url", "is_sentry_url_expected"],
    [
        ["https://asdf@abcd1234.ingest.sentry.io/123456789", True],
        ["https://asdf@abcd1234.ingest.notsentry.io/123456789", False],
    ],
)
def test_is_sentry_url(test_url, is_sentry_url_expected, mock_hub_with_dsn_netloc):
    ret_val = is_sentry_url(mock_hub_with_dsn_netloc, test_url)

    assert ret_val == is_sentry_url_expected


def test_is_sentry_url_no_client():
    hub = mock.Mock()
    hub.client = None

    test_url = "https://asdf@abcd1234.ingest.sentry.io/123456789"

    ret_val = is_sentry_url(hub, test_url)

    assert not ret_val


@pytest.mark.parametrize(
    "error,expected_result",
    [
        ["", lambda x: safe_str(x)],
        ["some-string", lambda _: "some-string"],
    ],
)
def test_get_error_message(error, expected_result):
    with pytest.raises(BaseException) as exc_value:
        exc_value.message = error
        raise Exception
    assert get_error_message(exc_value) == expected_result(exc_value)

    with pytest.raises(BaseException) as exc_value:
        exc_value.detail = error
        raise Exception
    assert get_error_message(exc_value) == expected_result(exc_value)


def test_installed_modules():
    try:
        from importlib.metadata import distributions, version

        importlib_available = True
    except ImportError:
        importlib_available = False

    try:
        import pkg_resources

        pkg_resources_available = True
    except ImportError:
        pkg_resources_available = False

    installed_distributions = {
        _normalize_distribution_name(dist): version
        for dist, version in _get_installed_modules().items()
    }

    if importlib_available:
        importlib_distributions = {
            _normalize_distribution_name(dist.metadata["Name"]): version(
                dist.metadata["Name"]
            )
            for dist in distributions()
            if dist.metadata["Name"] is not None
            and version(dist.metadata["Name"]) is not None
        }
        assert installed_distributions == importlib_distributions

    elif pkg_resources_available:
        pkg_resources_distributions = {
            _normalize_distribution_name(dist.key): dist.version
            for dist in pkg_resources.working_set
        }
        assert installed_distributions == pkg_resources_distributions
    else:
        pytest.fail("Neither importlib nor pkg_resources is available")


def test_installed_modules_caching():
    mock_generate_installed_modules = mock.Mock()
    mock_generate_installed_modules.return_value = {"package": "1.0.0"}
    with mock.patch("sentry_sdk.utils._installed_modules", None):
        with mock.patch(
            "sentry_sdk.utils._generate_installed_modules",
            mock_generate_installed_modules,
        ):
            _get_installed_modules()
            assert mock_generate_installed_modules.called
            mock_generate_installed_modules.reset_mock()

            _get_installed_modules()
            mock_generate_installed_modules.assert_not_called()


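# The caching contract exercised above, as a hedged sketch of the assumed
# implementation pattern (the real helper lives in sentry_sdk.utils):
#
#     _installed_modules = None
#
#     def _get_installed_modules():
#         global _installed_modules
#         if _installed_modules is None:
#             _installed_modules = _generate_installed_modules()
#         return _installed_modules

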
def test_devnull_inaccessible():
    with mock.patch("sentry_sdk.utils.open", side_effect=OSError("oh no")):
        revision = get_git_revision()

    assert revision is None


def test_devnull_not_found():
    with mock.patch("sentry_sdk.utils.open", side_effect=FileNotFoundError("oh no")):
        revision = get_git_revision()

    assert revision is None


def test_default_release():
    release = get_default_release()
    assert release is not None


def test_default_release_empty_string():
    with mock.patch("sentry_sdk.utils.get_git_revision", return_value=""):
        release = get_default_release()

    assert release is None
sentry-python-1.39.2/tests/tracing/
sentry-python-1.39.2/tests/tracing/test_baggage.py
# coding: utf-8
from sentry_sdk.tracing_utils import Baggage


def test_third_party_baggage():
    header = "other-vendor-value-1=foo;bar;baz, other-vendor-value-2=foo;bar;"
    baggage = Baggage.from_incoming_header(header)

    assert baggage.mutable
    assert baggage.sentry_items == {}
    assert sorted(baggage.third_party_items.split(",")) == sorted(
        "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;".split(",")
    )

    assert baggage.dynamic_sampling_context() == {}
    assert baggage.serialize() == ""
    assert sorted(baggage.serialize(include_third_party=True).split(",")) == sorted(
        "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;".split(",")
    )


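# As the assertions above and below suggest: keys prefixed with "sentry-" are
# percent-decoded into `sentry_items`, everything else is preserved verbatim
# in `third_party_items`, and a header that carries any sentry item is treated
# as immutable (`mutable` is False).

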
def test_mixed_baggage():
    header = (
        "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, "
        "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, "
        "sentry-user_id=Am%C3%A9lie, sentry-foo=bar, other-vendor-value-2=foo;bar;"
    )

    baggage = Baggage.from_incoming_header(header)

    assert not baggage.mutable

    assert baggage.sentry_items == {
        "public_key": "49d0f7386ad645858ae85020e393bef3",
        "trace_id": "771a43a4192642f0b136d5159a501700",
        "user_id": "Amélie",
        "sample_rate": "0.01337",
        "foo": "bar",
    }

    assert (
        baggage.third_party_items
        == "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;"
    )

    assert baggage.dynamic_sampling_context() == {
        "public_key": "49d0f7386ad645858ae85020e393bef3",
        "trace_id": "771a43a4192642f0b136d5159a501700",
        "user_id": "Amélie",
        "sample_rate": "0.01337",
        "foo": "bar",
    }

    assert sorted(baggage.serialize().split(",")) == sorted(
        (
            "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
            "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
            "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie,"
            "sentry-foo=bar"
        ).split(",")
    )

    assert sorted(baggage.serialize(include_third_party=True).split(",")) == sorted(
        (
            "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
            "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
            "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie,sentry-foo=bar,"
            "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;"
        ).split(",")
    )


def test_malformed_baggage():
    header = ","

    baggage = Baggage.from_incoming_header(header)

    assert baggage.sentry_items == {}
    assert baggage.third_party_items == ""
    assert baggage.mutable
sentry-python-1.39.2/tests/tracing/test_decorator_async_py3.py
from unittest import mock
import pytest
import sys

from tests.conftest import patch_start_tracing_child

from sentry_sdk.tracing_utils_py3 import (
    start_child_span_decorator as start_child_span_decorator_py3,
)
from sentry_sdk.utils import logger

if sys.version_info < (3, 6):
    pytest.skip("Async decorator only works on Python 3.6+", allow_module_level=True)


async def my_async_example_function():
    return "return_of_async_function"


@pytest.mark.asyncio
async def test_trace_decorator_async_py3():
    with patch_start_tracing_child() as fake_start_child:
        result = await my_async_example_function()
        fake_start_child.assert_not_called()
        assert result == "return_of_async_function"

        result2 = await start_child_span_decorator_py3(my_async_example_function)()
        fake_start_child.assert_called_once_with(
            op="function",
            description="test_decorator_async_py3.my_async_example_function",
        )
        assert result2 == "return_of_async_function"


@pytest.mark.asyncio
async def test_trace_decorator_async_py3_no_trx():
    with patch_start_tracing_child(fake_transaction_is_none=True):
        with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
            result = await my_async_example_function()
            fake_warning.assert_not_called()
            assert result == "return_of_async_function"

            result2 = await start_child_span_decorator_py3(my_async_example_function)()
            fake_warning.assert_called_once_with(
                "Can not create a child span for %s. "
                "Please start a Sentry transaction before calling this function.",
                "test_decorator_async_py3.my_async_example_function",
            )
            assert result2 == "return_of_async_function"
sentry-python-1.39.2/tests/tracing/test_decorator_sync.py
from sentry_sdk._compat import PY2

if PY2:
    from sentry_sdk.tracing_utils_py2 import start_child_span_decorator
else:
    from sentry_sdk.tracing_utils_py3 import start_child_span_decorator

from sentry_sdk.utils import logger

from tests.conftest import patch_start_tracing_child

try:
    from unittest import mock  # python 3.3 and above
except ImportError:
    import mock  # python < 3.3


def my_example_function():
    return "return_of_sync_function"


def test_trace_decorator():
    with patch_start_tracing_child() as fake_start_child:
        result = my_example_function()
        fake_start_child.assert_not_called()
        assert result == "return_of_sync_function"

        result2 = start_child_span_decorator(my_example_function)()
        fake_start_child.assert_called_once_with(
            op="function", description="test_decorator_sync.my_example_function"
        )
        assert result2 == "return_of_sync_function"


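def _example_decorator_usage():
    # Hedged sketch, not exercised by the tests: in application code the
    # decorator would typically be applied with @ syntax rather than wrapped
    # manually as in test_trace_decorator above.
    @start_child_span_decorator
    def fetch_user():
        return "return_of_sync_function"

    return fetch_user()

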
def test_trace_decorator_no_trx():
    with patch_start_tracing_child(fake_transaction_is_none=True):
        with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
            result = my_example_function()
            fake_warning.assert_not_called()
            assert result == "return_of_sync_function"

            result2 = start_child_span_decorator(my_example_function)()
            fake_warning.assert_called_once_with(
                "Can not create a child span for %s. "
                "Please start a Sentry transaction before calling this function.",
                "test_decorator_sync.my_example_function",
            )
            assert result2 == "return_of_sync_function"
sentry-python-1.39.2/tests/tracing/test_deprecated.py
from sentry_sdk import start_span

from sentry_sdk.tracing import Span


def test_start_span_to_start_transaction(sentry_init, capture_events):
    # XXX: this only exists for backwards compatibility with code before
    # Transaction / start_transaction were introduced.
    sentry_init(traces_sample_rate=1.0)
    events = capture_events()

    with start_span(transaction="/1/"):
        pass

    with start_span(Span(transaction="/2/")):
        pass

    assert len(events) == 2
    assert events[0]["transaction"] == "/1/"
    assert events[1]["transaction"] == "/2/"
sentry-python-1.39.2/tests/tracing/test_http_headers.py
import pytest

from sentry_sdk.tracing import Transaction
from sentry_sdk.tracing_utils import extract_sentrytrace_data


try:
    from unittest import mock  # python 3.3 and above
except ImportError:
    import mock  # python < 3.3


@pytest.mark.parametrize("sampled", [True, False, None])
def test_to_traceparent(sampled):
    transaction = Transaction(
        name="/interactions/other-dogs/new-dog",
        op="greeting.sniff",
        trace_id="12312012123120121231201212312012",
        sampled=sampled,
    )

    traceparent = transaction.to_traceparent()

    parts = traceparent.split("-")
    assert parts[0] == "12312012123120121231201212312012"  # trace_id
    assert parts[1] == transaction.span_id  # parent_span_id
    if sampled is None:
        assert len(parts) == 2
    else:
        assert parts[2] == ("1" if sampled is True else "0")  # sampled


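# The traceparent format checked above: "<32-hex trace_id>-<16-hex span_id>",
# with an optional trailing "-1"/"-0" sampling flag that is omitted while the
# sampling decision has not been made yet.

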
@pytest.mark.parametrize("sampling_decision", [True, False])
def test_sentrytrace_extraction(sampling_decision):
    sentrytrace_header = "12312012123120121231201212312012-0415201309082013-{}".format(
        1 if sampling_decision is True else 0
    )
    assert extract_sentrytrace_data(sentrytrace_header) == {
        "trace_id": "12312012123120121231201212312012",
        "parent_span_id": "0415201309082013",
        "parent_sampled": sampling_decision,
    }


def test_iter_headers(monkeypatch):
    monkeypatch.setattr(
        Transaction,
        "to_traceparent",
        mock.Mock(return_value="12312012123120121231201212312012-0415201309082013-0"),
    )

    transaction = Transaction(
        name="/interactions/other-dogs/new-dog",
        op="greeting.sniff",
    )

    headers = dict(transaction.iter_headers())
    assert (
        headers["sentry-trace"] == "12312012123120121231201212312012-0415201309082013-0"
    )
sentry-python-1.39.2/tests/tracing/test_integration_tests.py
# coding: utf-8
import weakref
import gc
import re
import pytest
import random

from sentry_sdk import (
    capture_message,
    configure_scope,
    Hub,
    start_span,
    start_transaction,
)
from sentry_sdk.transport import Transport
from sentry_sdk.tracing import Transaction


@pytest.mark.parametrize("sample_rate", [0.0, 1.0])
def test_basic(sentry_init, capture_events, sample_rate):
    sentry_init(traces_sample_rate=sample_rate)
    events = capture_events()

    with start_transaction(name="hi") as transaction:
        transaction.set_status("ok")
        with pytest.raises(ZeroDivisionError):
            with start_span(op="foo", description="foodesc"):
                1 / 0

        with start_span(op="bar", description="bardesc"):
            pass

    if sample_rate:
        assert len(events) == 1
        event = events[0]

        assert event["transaction"] == "hi"
        assert event["transaction_info"]["source"] == "custom"

        span1, span2 = event["spans"]
        parent_span = event
        assert span1["tags"]["status"] == "internal_error"
        assert span1["op"] == "foo"
        assert span1["description"] == "foodesc"
        assert "status" not in span2.get("tags", {})
        assert span2["op"] == "bar"
        assert span2["description"] == "bardesc"
        assert parent_span["transaction"] == "hi"
        assert "status" not in event["tags"]
        assert event["contexts"]["trace"]["status"] == "ok"
    else:
        assert not events


@pytest.mark.parametrize("sampled", [True, False, None])
@pytest.mark.parametrize("sample_rate", [0.0, 1.0])
def test_continue_from_headers(sentry_init, capture_envelopes, sampled, sample_rate):
    """
    Ensure data is actually passed along via headers, and that they are read
    correctly.
    """
    sentry_init(traces_sample_rate=sample_rate)
    envelopes = capture_envelopes()

    # make a parent transaction (normally this would be in a different service)
    with start_transaction(name="hi", sampled=True if sample_rate == 0 else None):
        with start_span() as old_span:
            old_span.sampled = sampled
            headers = dict(Hub.current.iter_trace_propagation_headers(old_span))
            headers["baggage"] = (
                "other-vendor-value-1=foo;bar;baz, "
                "sentry-trace_id=771a43a4192642f0b136d5159a501700, "
                "sentry-public_key=49d0f7386ad645858ae85020e393bef3, "
                "sentry-sample_rate=0.01337, sentry-user_id=Amelie, "
                "other-vendor-value-2=foo;bar;"
            )

    # child transaction, to prove that we can read 'sentry-trace' header data correctly
    child_transaction = Transaction.continue_from_headers(headers, name="WRONG")
    assert child_transaction is not None
    assert child_transaction.parent_sampled == sampled
    assert child_transaction.trace_id == old_span.trace_id
    assert child_transaction.same_process_as_parent is False
    assert child_transaction.parent_span_id == old_span.span_id
    assert child_transaction.span_id != old_span.span_id

    baggage = child_transaction._baggage
    assert baggage
    assert not baggage.mutable
    assert baggage.sentry_items == {
        "public_key": "49d0f7386ad645858ae85020e393bef3",
        "trace_id": "771a43a4192642f0b136d5159a501700",
        "user_id": "Amelie",
        "sample_rate": "0.01337",
    }

    # add child transaction to the scope, to show that the captured message will
    # be tagged with the trace id (since it happens while the transaction is
    # open)
    with start_transaction(child_transaction):
        with configure_scope() as scope:
            # change the transaction name from "WRONG" to make sure the change
            # is reflected in the final data
            scope.transaction = "ho"
        capture_message("hello")

    # in this case the child transaction won't be captured
    if sampled is False or (sample_rate == 0 and sampled is None):
        trace1, message = envelopes
        message_payload = message.get_event()
        trace1_payload = trace1.get_transaction_event()

        assert trace1_payload["transaction"] == "hi"
    else:
        trace1, message, trace2 = envelopes
        trace1_payload = trace1.get_transaction_event()
        message_payload = message.get_event()
        trace2_payload = trace2.get_transaction_event()

        assert trace1_payload["transaction"] == "hi"
        assert trace2_payload["transaction"] == "ho"

        assert (
            trace1_payload["contexts"]["trace"]["trace_id"]
            == trace2_payload["contexts"]["trace"]["trace_id"]
            == child_transaction.trace_id
            == message_payload["contexts"]["trace"]["trace_id"]
        )

        assert trace2.headers["trace"] == baggage.dynamic_sampling_context()
        assert trace2.headers["trace"] == {
            "public_key": "49d0f7386ad645858ae85020e393bef3",
            "trace_id": "771a43a4192642f0b136d5159a501700",
            "user_id": "Amelie",
            "sample_rate": "0.01337",
        }

    assert message_payload["message"] == "hello"


@pytest.mark.parametrize("sample_rate", [0.5, 1.0])
def test_dynamic_sampling_head_sdk_creates_dsc(
    sentry_init, capture_envelopes, sample_rate, monkeypatch
):
    sentry_init(traces_sample_rate=sample_rate, release="foo")
    envelopes = capture_envelopes()

    # make sure transaction is sampled for both cases
    monkeypatch.setattr(random, "random", lambda: 0.1)

    transaction = Transaction.continue_from_headers({}, name="Head SDK tx")

    # will create empty mutable baggage
    baggage = transaction._baggage
    assert baggage
    assert baggage.mutable
    assert baggage.sentry_items == {}
    assert baggage.third_party_items == ""

    with start_transaction(transaction):
        with start_span(op="foo", description="foodesc"):
            pass

    # finish will create a new baggage entry
    baggage = transaction._baggage
    trace_id = transaction.trace_id

    assert baggage
    assert not baggage.mutable
    assert baggage.third_party_items == ""
    assert baggage.sentry_items == {
        "environment": "production",
        "release": "foo",
        "sample_rate": str(sample_rate),
        "sampled": "true" if transaction.sampled else "false",
        "transaction": "Head SDK tx",
        "trace_id": trace_id,
    }

    expected_baggage = (
        "sentry-environment=production,sentry-release=foo,sentry-sample_rate=%s,sentry-transaction=Head%%20SDK%%20tx,sentry-trace_id=%s,sentry-sampled=%s"
        % (sample_rate, trace_id, "true" if transaction.sampled else "false")
    )
    assert sorted(baggage.serialize().split(",")) == sorted(expected_baggage.split(","))

    (envelope,) = envelopes
    assert envelope.headers["trace"] == baggage.dynamic_sampling_context()
    assert envelope.headers["trace"] == {
        "environment": "production",
        "release": "foo",
        "sample_rate": str(sample_rate),
        "sampled": "true" if transaction.sampled else "false",
        "transaction": "Head SDK tx",
        "trace_id": trace_id,
    }


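# In other words: when this SDK is the head of the trace, finishing the
# transaction freezes the (initially empty, mutable) baggage into a dynamic
# sampling context, which is then sent as the envelope's "trace" header.

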
@pytest.mark.parametrize(
    "args,expected_refcount",
    [({"traces_sample_rate": 1.0}, 100), ({"traces_sample_rate": 0.0}, 0)],
)
def test_memory_usage(sentry_init, capture_events, args, expected_refcount):
    sentry_init(**args)

    references = weakref.WeakSet()

    with start_transaction(name="hi"):
        for i in range(100):
            with start_span(op="helloworld", description="hi {}".format(i)) as span:

                def foo():
                    pass

                references.add(foo)
                span.set_tag("foo", foo)
                pass

        del foo
        del span

        # required only for pypy (cpython frees immediately)
        gc.collect()

        assert len(references) == expected_refcount


def test_transactions_do_not_go_through_before_send(sentry_init, capture_events):
    def before_send(event, hint):
        raise RuntimeError("should not be called")

    sentry_init(traces_sample_rate=1.0, before_send=before_send)
    events = capture_events()

    with start_transaction(name="/"):
        pass

    assert len(events) == 1


def test_start_span_after_finish(sentry_init, capture_events):
    class CustomTransport(Transport):
        def capture_envelope(self, envelope):
            pass

        def capture_event(self, event):
            start_span(op="toolate", description="justdont")
            pass

    sentry_init(traces_sample_rate=1, transport=CustomTransport())
    events = capture_events()

    with start_transaction(name="hi"):
        with start_span(op="bar", description="bardesc"):
            pass

    assert len(events) == 1


def test_trace_propagation_meta_head_sdk(sentry_init):
    sentry_init(traces_sample_rate=1.0, release="foo")

    transaction = Transaction.continue_from_headers({}, name="Head SDK tx")
    meta = None
    span = None

    with start_transaction(transaction):
        with start_span(op="foo", description="foodesc") as current_span:
            span = current_span
            meta = Hub.current.trace_propagation_meta()

    ind = meta.find(">") + 1
    sentry_trace, baggage = meta[:ind], meta[ind:]

    assert 'meta name="sentry-trace"' in sentry_trace
    sentry_trace_content = re.findall('content="([^"]*)"', sentry_trace)[0]
    assert sentry_trace_content == span.to_traceparent()

    assert 'meta name="baggage"' in baggage
    baggage_content = re.findall('content="([^"]*)"', baggage)[0]
    assert baggage_content == transaction.get_baggage().serialize()
sentry-python-1.39.2/tests/tracing/test_misc.py
import pytest
import gc
import uuid
import os

import sentry_sdk
from sentry_sdk import Hub, start_span, start_transaction, set_measurement, push_scope
from sentry_sdk.consts import MATCH_ALL
from sentry_sdk.tracing import Span, Transaction
from sentry_sdk.tracing_utils import should_propagate_trace
from sentry_sdk.utils import Dsn

try:
    from unittest import mock  # python 3.3 and above
    from unittest.mock import MagicMock
except ImportError:
    import mock  # python < 3.3
    from mock import MagicMock


def test_span_trimming(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0, _experiments={"max_spans": 3})
    events = capture_events()

    with start_transaction(name="hi"):
        for i in range(10):
            with start_span(op="foo{}".format(i)):
                pass

    (event,) = events

    assert len(event["spans"]) == 3

    span1, span2, span3 = event["spans"]
    assert span1["op"] == "foo0"
    assert span2["op"] == "foo1"
    assert span3["op"] == "foo2"


def test_transaction_naming(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0)
    events = capture_events()

    # only transactions have names - spans don't
    with pytest.raises(TypeError):
        start_span(name="foo")
    assert len(events) == 0

    # default name in event if no name is passed
    with start_transaction() as transaction:
        pass
    assert len(events) == 1
    assert events[0]["transaction"] == ""

    # the name can be set once the transaction's already started
    with start_transaction() as transaction:
        transaction.name = "name-known-after-transaction-started"
    assert len(events) == 2
    assert events[1]["transaction"] == "name-known-after-transaction-started"

    # passing in a name works, too
    with start_transaction(name="a"):
        pass
    assert len(events) == 3
    assert events[2]["transaction"] == "a"


def test_start_transaction(sentry_init):
    sentry_init(traces_sample_rate=1.0)

    # you can have it start a transaction for you
    result1 = start_transaction(
        name="/interactions/other-dogs/new-dog", op="greeting.sniff"
    )
    assert isinstance(result1, Transaction)
    assert result1.name == "/interactions/other-dogs/new-dog"
    assert result1.op == "greeting.sniff"

    # or you can pass it an already-created transaction
    preexisting_transaction = Transaction(
        name="/interactions/other-dogs/new-dog", op="greeting.sniff"
    )
    result2 = start_transaction(preexisting_transaction)
    assert result2 is preexisting_transaction


def test_finds_transaction_on_scope(sentry_init):
    sentry_init(traces_sample_rate=1.0)

    transaction = start_transaction(name="dogpark")

    scope = Hub.current.scope

    # See note in Scope class re: getters and setters of the `transaction`
    # property. For the moment, assigning to scope.transaction merely sets the
    # transaction name, rather than putting the transaction on the scope, so we
    # have to assign to _span directly.
    scope._span = transaction

    # Reading scope.property, however, does what you'd expect, and returns the
    # transaction on the scope.
    assert scope.transaction is not None
    assert isinstance(scope.transaction, Transaction)
    assert scope.transaction.name == "dogpark"

    # If the transaction is also set as the span on the scope, it can be found
    # by accessing _span, too.
    assert scope._span is not None
    assert isinstance(scope._span, Transaction)
    assert scope._span.name == "dogpark"


def test_finds_transaction_when_descendent_span_is_on_scope(
    sentry_init,
):
    sentry_init(traces_sample_rate=1.0)

    transaction = start_transaction(name="dogpark")
    child_span = transaction.start_child(op="sniffing")

    scope = Hub.current.scope
    scope._span = child_span

    # this is the same whether it's the transaction itself or one of its
    # decedents directly attached to the scope
    assert scope.transaction is not None
    assert isinstance(scope.transaction, Transaction)
    assert scope.transaction.name == "dogpark"

    # here we see that it is in fact the span on the scope, rather than the
    # transaction itself
    assert scope._span is not None
    assert isinstance(scope._span, Span)
    assert scope._span.op == "sniffing"


def test_finds_orphan_span_on_scope(sentry_init):
    # this is deprecated behavior which may be removed at some point (along with
    # the start_span function)
    sentry_init(traces_sample_rate=1.0)

    span = start_span(op="sniffing")

    scope = Hub.current.scope
    scope._span = span

    assert scope._span is not None
    assert isinstance(scope._span, Span)
    assert scope._span.op == "sniffing"


def test_finds_non_orphan_span_on_scope(sentry_init):
    sentry_init(traces_sample_rate=1.0)

    transaction = start_transaction(name="dogpark")
    child_span = transaction.start_child(op="sniffing")

    scope = Hub.current.scope
    scope._span = child_span

    assert scope._span is not None
    assert isinstance(scope._span, Span)
    assert scope._span.op == "sniffing"


def test_circular_references(monkeypatch, sentry_init, request):
    # TODO: We discovered while writing this test about transaction/span
    # reference cycles that there's actually also a circular reference in
    # `serializer.py`, between the functions `_serialize_node` and
    # `_serialize_node_impl`, both of which are defined inside of the main
    # `serialize` function, and each of which calls the other one. For now, in
    # order to avoid having those ref cycles give us a false positive here, we
    # can mock out `serialize`. In the long run, though, we should probably fix
    # that. (Whenever we do work on fixing it, it may be useful to add
    #
    #     gc.set_debug(gc.DEBUG_LEAK)
    #     request.addfinalizer(lambda: gc.set_debug(~gc.DEBUG_LEAK))
    #
    # immediately after the initial collection below, so we can see what new
    # objects the garbage collector has to clean up once `transaction.finish` is
    # called and the serializer runs.)
    monkeypatch.setattr(
        sentry_sdk.client,
        "serialize",
        mock.Mock(
            return_value=None,
        ),
    )

    # In certain versions of python, in some environments (specifically, python
    # 3.4 when run in GH Actions), we run into a `ctypes` bug which creates
    # circular references when `uuid4()` is called, as happens when we're
    # generating event ids. Mocking it with an implementation which doesn't use
    # the `ctypes` function lets us avoid having false positives when garbage
    # collecting. See https://bugs.python.org/issue20519.
    monkeypatch.setattr(
        uuid,
        "uuid4",
        mock.Mock(
            return_value=uuid.UUID(bytes=os.urandom(16)),
        ),
    )

    gc.disable()
    request.addfinalizer(gc.enable)

    sentry_init(traces_sample_rate=1.0)

    # Make sure that we're starting with a clean slate before we start creating
    # transaction/span reference cycles
    gc.collect()

    dogpark_transaction = start_transaction(name="dogpark")
    sniffing_span = dogpark_transaction.start_child(op="sniffing")
    wagging_span = dogpark_transaction.start_child(op="wagging")

    # At some point, you have to stop sniffing - there are balls to chase! - so finish
    # this span while the dogpark transaction is still open
    sniffing_span.finish()

    # The wagging, however, continues long past the dogpark, so that span will
    # NOT finish before the transaction ends. (Doing it in this order proves
    # that both finished and unfinished spans get their cycles broken.)
    dogpark_transaction.finish()

    # Eventually you gotta sleep...
    wagging_span.finish()

    # assuming there are no cycles by this point, these should all be able to go
    # out of scope and get their memory deallocated without the garbage
    # collector having anything to do
    del sniffing_span
    del wagging_span
    del dogpark_transaction

    assert gc.collect() == 0


def test_set_measurement(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0)

    events = capture_events()

    transaction = start_transaction(name="measuring stuff")

    with pytest.raises(TypeError):
        transaction.set_measurement()

    with pytest.raises(TypeError):
        transaction.set_measurement("metric.foo")

    transaction.set_measurement("metric.foo", 123)
    transaction.set_measurement("metric.bar", 456, unit="second")
    transaction.set_measurement("metric.baz", 420.69, unit="custom")
    transaction.set_measurement("metric.foobar", 12, unit="percent")
    transaction.set_measurement("metric.foobar", 17.99, unit="percent")

    transaction.finish()

    (event,) = events
    assert event["measurements"]["metric.foo"] == {"value": 123, "unit": ""}
    assert event["measurements"]["metric.bar"] == {"value": 456, "unit": "second"}
    assert event["measurements"]["metric.baz"] == {"value": 420.69, "unit": "custom"}
    assert event["measurements"]["metric.foobar"] == {"value": 17.99, "unit": "percent"}


def test_set_measurement_public_api(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0)

    events = capture_events()

    with start_transaction(name="measuring stuff"):
        set_measurement("metric.foo", 123)
        set_measurement("metric.bar", 456, unit="second")

    (event,) = events
    assert event["measurements"]["metric.foo"] == {"value": 123, "unit": ""}
    assert event["measurements"]["metric.bar"] == {"value": 456, "unit": "second"}


@pytest.mark.parametrize(
    "trace_propagation_targets,url,expected_propagation_decision",
    [
        (None, "http://example.com", False),
        ([], "http://example.com", False),
        ([MATCH_ALL], "http://example.com", True),
        (["localhost"], "localhost:8443/api/users", True),
        (["localhost"], "http://localhost:8443/api/users", True),
        (["localhost"], "mylocalhost:8080/api/users", True),
        ([r"^/api"], "/api/envelopes", True),
        ([r"^/api"], "/backend/api/envelopes", False),
        ([r"myApi.com/v[2-4]"], "myApi.com/v2/projects", True),
        ([r"myApi.com/v[2-4]"], "myApi.com/v1/projects", False),
        ([r"https:\/\/.*"], "https://example.com", True),
        (
            [r"https://.*"],
            "https://example.com",
            True,
        ),  # to show escaping is not needed
        ([r"https://.*"], "http://example.com/insecure/", False),
    ],
)
def test_should_propagate_trace(
    trace_propagation_targets, url, expected_propagation_decision
):
    hub = MagicMock()
    hub.client = MagicMock()

    # This test assumes the urls are not Sentry URLs. Use test_should_propagate_trace_to_sentry for sentry URLs.
    hub.is_sentry_url = lambda _: False

    hub.client.options = {"trace_propagation_targets": trace_propagation_targets}
    hub.client.transport = MagicMock()
    hub.client.transport.parsed_dsn = Dsn("https://bla@xxx.sentry.io/12312012")

    assert should_propagate_trace(hub, url) == expected_propagation_decision


@pytest.mark.parametrize(
    "dsn,url,expected_propagation_decision",
    [
        (
            "https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
            "http://example.com",
            True,
        ),
        (
            "https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
            "https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
            False,
        ),
        (
            "https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
            "http://squirrelchasers.ingest.sentry.io/12312012",
            False,
        ),
        (
            "https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
            "http://ingest.sentry.io/12312012",
            True,
        ),
        (
            "https://abc@localsentry.example.com/12312012",
            "http://localsentry.example.com",
            False,
        ),
    ],
)
def test_should_propagate_trace_to_sentry(
    sentry_init, dsn, url, expected_propagation_decision
):
    sentry_init(
        dsn=dsn,
        traces_sample_rate=1.0,
    )

    Hub.current.client.transport.parsed_dsn = Dsn(dsn)

    assert should_propagate_trace(Hub.current, url) == expected_propagation_decision


def test_start_transaction_updates_scope_name_source(sentry_init):
    sentry_init(traces_sample_rate=1.0)

    with push_scope() as scope:
        with start_transaction(name="foobar", source="route"):
            assert scope._transaction == "foobar"
            assert scope._transaction_info == {"source": "route"}
sentry-python-1.39.2/tests/tracing/test_noop_span.py000066400000000000000000000034001454744723200226600ustar00rootroot00000000000000import sentry_sdk
from sentry_sdk.tracing import NoOpSpan

# These tests make sure that the examples from the documentation [1]
# are working when OTel (OpenTelemetry) instrumentation is turned on,
# and therefore the Sentry tracing should not do anything.
#
# 1: https://docs.sentry.io/platforms/python/performance/instrumentation/custom-instrumentation/


def test_noop_start_transaction(sentry_init):
    sentry_init(instrumenter="otel", debug=True)

    with sentry_sdk.start_transaction(
        op="task", name="test_transaction_name"
    ) as transaction:
        assert isinstance(transaction, NoOpSpan)
        assert sentry_sdk.Hub.current.scope.span is transaction

        transaction.name = "new name"


def test_noop_start_span(sentry_init):
    sentry_init(instrumenter="otel", debug=True)

    with sentry_sdk.start_span(op="http", description="GET /") as span:
        assert isinstance(span, NoOpSpan)
        assert sentry_sdk.Hub.current.scope.span is span

        span.set_tag("http.response.status_code", 418)
        span.set_data("http.entity_type", "teapot")


def test_noop_transaction_start_child(sentry_init):
    sentry_init(instrumenter="otel", debug=True)

    transaction = sentry_sdk.start_transaction(name="task")
    assert isinstance(transaction, NoOpSpan)

    with transaction.start_child(op="child_task") as child:
        assert isinstance(child, NoOpSpan)
        assert sentry_sdk.Hub.current.scope.span is child


def test_noop_span_start_child(sentry_init):
    sentry_init(instrumenter="otel", debug=True)
    span = sentry_sdk.start_span(name="task")
    assert isinstance(span, NoOpSpan)

    with span.start_child(op="child_task") as child:
        assert isinstance(child, NoOpSpan)
        assert sentry_sdk.Hub.current.scope.span is child
sentry-python-1.39.2/tests/tracing/test_sampling.py000066400000000000000000000246231454744723200225100ustar00rootroot00000000000000import random

import pytest

from sentry_sdk import Hub, start_span, start_transaction, capture_exception
from sentry_sdk.tracing import Transaction
from sentry_sdk.utils import logger

try:
    from unittest import mock  # python 3.3 and above
except ImportError:
    import mock  # python < 3.3


def test_sampling_decided_only_for_transactions(sentry_init, capture_events):
    sentry_init(traces_sample_rate=0.5)

    with start_transaction(name="hi") as transaction:
        assert transaction.sampled is not None

        with start_span() as span:
            assert span.sampled == transaction.sampled

    with start_span() as span:
        assert span.sampled is None


@pytest.mark.parametrize("sampled", [True, False])
def test_nested_transaction_sampling_override(sentry_init, sampled):
    sentry_init(traces_sample_rate=1.0)

    with start_transaction(name="outer", sampled=sampled) as outer_transaction:
        assert outer_transaction.sampled is sampled
        with start_transaction(
            name="inner", sampled=(not sampled)
        ) as inner_transaction:
            assert inner_transaction.sampled is not sampled
        assert outer_transaction.sampled is sampled


def test_no_double_sampling(sentry_init, capture_events):
    # Transactions should not be subject to the global/error sample rate.
    # Only the traces_sample_rate should apply.
    sentry_init(traces_sample_rate=1.0, sample_rate=0.0)
    events = capture_events()

    with start_transaction(name="/"):
        pass

    assert len(events) == 1


@pytest.mark.parametrize("sampling_decision", [True, False])
def test_get_transaction_and_span_from_scope_regardless_of_sampling_decision(
    sentry_init, sampling_decision
):
    sentry_init(traces_sample_rate=1.0)

    with start_transaction(name="/", sampled=sampling_decision):
        with start_span(op="child-span"):
            with start_span(op="child-child-span"):
                scope = Hub.current.scope
                assert scope.span.op == "child-child-span"
                assert scope.transaction.name == "/"


@pytest.mark.parametrize(
    "traces_sample_rate,expected_decision",
    [(0.0, False), (0.25, False), (0.75, True), (1.00, True)],
)
def test_uses_traces_sample_rate_correctly(
    sentry_init,
    traces_sample_rate,
    expected_decision,
):
    sentry_init(traces_sample_rate=traces_sample_rate)

    with mock.patch.object(random, "random", return_value=0.5):
        transaction = start_transaction(name="dogpark")
        assert transaction.sampled is expected_decision


@pytest.mark.parametrize(
    "traces_sampler_return_value,expected_decision",
    [(0.0, False), (0.25, False), (0.75, True), (1.00, True)],
)
def test_uses_traces_sampler_return_value_correctly(
    sentry_init,
    traces_sampler_return_value,
    expected_decision,
):
    sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value))

    with mock.patch.object(random, "random", return_value=0.5):
        transaction = start_transaction(name="dogpark")
        assert transaction.sampled is expected_decision


@pytest.mark.parametrize("traces_sampler_return_value", [True, False])
def test_tolerates_traces_sampler_returning_a_boolean(
    sentry_init, traces_sampler_return_value
):
    sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value))

    transaction = start_transaction(name="dogpark")
    assert transaction.sampled is traces_sampler_return_value


@pytest.mark.parametrize("sampling_decision", [True, False])
def test_only_captures_transaction_when_sampled_is_true(
    sentry_init, sampling_decision, capture_events
):
    sentry_init(traces_sampler=mock.Mock(return_value=sampling_decision))
    events = capture_events()

    transaction = start_transaction(name="dogpark")
    transaction.finish()

    assert len(events) == (1 if sampling_decision else 0)


@pytest.mark.parametrize(
    "traces_sample_rate,traces_sampler_return_value", [(0, True), (1, False)]
)
def test_prefers_traces_sampler_to_traces_sample_rate(
    sentry_init,
    traces_sample_rate,
    traces_sampler_return_value,
):
    # make traces_sample_rate imply the opposite of traces_sampler, to prove
    # that traces_sampler takes precedence
    traces_sampler = mock.Mock(return_value=traces_sampler_return_value)
    sentry_init(
        traces_sample_rate=traces_sample_rate,
        traces_sampler=traces_sampler,
    )

    transaction = start_transaction(name="dogpark")
    assert traces_sampler.called is True
    assert transaction.sampled is traces_sampler_return_value


@pytest.mark.parametrize("parent_sampling_decision", [True, False])
def test_ignores_inherited_sample_decision_when_traces_sampler_defined(
    sentry_init, parent_sampling_decision
):
    # make traces_sampler pick the opposite of the inherited decision, to prove
    # that traces_sampler takes precedence
    traces_sampler = mock.Mock(return_value=not parent_sampling_decision)
    sentry_init(traces_sampler=traces_sampler)

    transaction = start_transaction(
        name="dogpark", parent_sampled=parent_sampling_decision
    )
    assert transaction.sampled is not parent_sampling_decision


@pytest.mark.parametrize("explicit_decision", [True, False])
def test_traces_sampler_doesnt_overwrite_explicitly_passed_sampling_decision(
    sentry_init, explicit_decision
):
    # make traces_sampler pick the opposite of the explicit decision, to prove
    # that the explicit decision takes precedence
    traces_sampler = mock.Mock(return_value=not explicit_decision)
    sentry_init(traces_sampler=traces_sampler)

    transaction = start_transaction(name="dogpark", sampled=explicit_decision)
    assert transaction.sampled is explicit_decision


@pytest.mark.parametrize("parent_sampling_decision", [True, False])
def test_inherits_parent_sampling_decision_when_traces_sampler_undefined(
    sentry_init, parent_sampling_decision
):
    # make sure the parent sampling decision is the opposite of what
    # traces_sample_rate would produce, to prove the inheritance takes
    # precedence
    sentry_init(traces_sample_rate=0.5)
    mock_random_value = 0.25 if parent_sampling_decision is False else 0.75
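    # (with traces_sample_rate=0.5: random()=0.25 < 0.5 would sample, the
    # opposite of parent_sampled=False; random()=0.75 >= 0.5 would drop, the
    # opposite of parent_sampled=True)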

    with mock.patch.object(random, "random", return_value=mock_random_value):
        transaction = start_transaction(
            name="dogpark", parent_sampled=parent_sampling_decision
        )
        assert transaction.sampled is parent_sampling_decision


@pytest.mark.parametrize("parent_sampling_decision", [True, False])
def test_passes_parent_sampling_decision_in_sampling_context(
    sentry_init, parent_sampling_decision
):
    sentry_init(traces_sample_rate=1.0)

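    # sentry-trace headers have the form "<trace_id>-<parent_span_id>-<sampled>",
    # so the last field carries the parent's sampling decision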
    sentry_trace_header = (
        "12312012123120121231201212312012-1121201211212012-{sampled}".format(
            sampled=int(parent_sampling_decision)
        )
    )

    transaction = Transaction.continue_from_headers(
        headers={"sentry-trace": sentry_trace_header}, name="dogpark"
    )
    spy = mock.Mock(wraps=transaction)
    start_transaction(transaction=spy)

    # there's only one call (so index at 0) and kwargs are always last in a call
    # tuple (so index at -1)
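    # (each mock call tuple looks like `(name, args, kwargs)`, e.g. roughly
    # `("_set_initial_sampling_decision", (), {"sampling_context": {...}})`)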
    sampling_context = spy._set_initial_sampling_decision.mock_calls[0][-1][
        "sampling_context"
    ]
    assert "parent_sampled" in sampling_context
    # because we passed in a spy, attribute access requires unwrapping
    assert sampling_context["parent_sampled"]._mock_wraps is parent_sampling_decision


def test_passes_custom_sampling_context_from_start_transaction_to_traces_sampler(
    sentry_init, DictionaryContaining  # noqa: N803
):
    traces_sampler = mock.Mock()
    sentry_init(traces_sampler=traces_sampler)

    start_transaction(custom_sampling_context={"dogs": "yes", "cats": "maybe"})

    traces_sampler.assert_any_call(
        DictionaryContaining({"dogs": "yes", "cats": "maybe"})
    )


def test_sample_rate_affects_errors(sentry_init, capture_events):
    sentry_init(sample_rate=0)
    events = capture_events()

    try:
        1 / 0
    except Exception:
        capture_exception()

    assert len(events) == 0


@pytest.mark.parametrize(
    "traces_sampler_return_value",
    [
        "dogs are great",  # wrong type
        (0, 1),  # wrong type
        {"Maisey": "Charlie"},  # wrong type
        [True, True],  # wrong type
        {0.2012},  # wrong type
        float("NaN"),  # wrong type
        None,  # wrong type
        -1.121,  # wrong value
        1.231,  # wrong value
    ],
)
def test_warns_and_sets_sampled_to_false_on_invalid_traces_sampler_return_value(
    sentry_init, traces_sampler_return_value, StringContaining  # noqa: N803
):
    sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value))

    with mock.patch.object(logger, "warning", mock.Mock()):
        transaction = start_transaction(name="dogpark")
        logger.warning.assert_any_call(StringContaining("Given sample rate is invalid"))
        assert transaction.sampled is False


@pytest.mark.parametrize(
    "traces_sample_rate,sampled_output,reports_output",
    [
        (None, False, []),
        (0.0, False, [("sample_rate", "transaction")]),
        (1.0, True, []),
    ],
)
def test_records_lost_event_only_if_traces_sample_rate_enabled(
    sentry_init, traces_sample_rate, sampled_output, reports_output, monkeypatch
):
    reports = []

    def record_lost_event(reason, data_category=None, item=None):
        reports.append((reason, data_category))

    sentry_init(traces_sample_rate=traces_sample_rate)

    monkeypatch.setattr(
        Hub.current.client.transport, "record_lost_event", record_lost_event
    )

    transaction = start_transaction(name="dogpark")
    assert transaction.sampled is sampled_output
    transaction.finish()

    assert reports == reports_output


@pytest.mark.parametrize(
    "traces_sampler,sampled_output,reports_output",
    [
        (None, False, []),
        (lambda _x: 0.0, False, [("sample_rate", "transaction")]),
        (lambda _x: 1.0, True, []),
    ],
)
def test_records_lost_event_only_if_traces_sampler_enabled(
    sentry_init, traces_sampler, sampled_output, reports_output, monkeypatch
):
    reports = []

    def record_lost_event(reason, data_category=None, item=None):
        reports.append((reason, data_category))

    sentry_init(traces_sampler=traces_sampler)

    monkeypatch.setattr(
        Hub.current.client.transport, "record_lost_event", record_lost_event
    )

    transaction = start_transaction(name="dogpark")
    assert transaction.sampled is sampled_output
    transaction.finish()

    assert reports == reports_output
sentry-python-1.39.2/tests/utils/000077500000000000000000000000001454744723200167675ustar00rootroot00000000000000sentry-python-1.39.2/tests/utils/__init__.py000066400000000000000000000000501454744723200210730ustar00rootroot00000000000000# Make this a module for test_abs_path.
sentry-python-1.39.2/tests/utils/test_contextvars.py000066400000000000000000000014321454744723200227600ustar00rootroot00000000000000import pytest
import random
import time


@pytest.mark.forked
def test_leaks(maybe_monkeypatched_threading):
    import threading

    # Need to explicitly call _get_contextvars because the SDK has already
    # decided upon gevent on import.

    from sentry_sdk import utils

    _, ContextVar = utils._get_contextvars()  # noqa: N806

    ts = []

    var = ContextVar("test_contextvar_leaks")

    success = []

    def run():
        value = int(random.random() * 1000)
        var.set(value)

        for _ in range(100):
            time.sleep(0)
            assert var.get(None) == value

        success.append(1)

    for _ in range(20):
        t = threading.Thread(target=run)
        t.start()
        ts.append(t)

    for t in ts:
        t.join()

    assert len(success) == 20
sentry-python-1.39.2/tests/utils/test_general.py000066400000000000000000000413131454744723200220170ustar00rootroot00000000000000# coding: utf-8
import sys
import os

import pytest


from sentry_sdk.utils import (
    BadDsn,
    Dsn,
    safe_repr,
    exceptions_from_error_tuple,
    filename_for_module,
    iter_event_stacktraces,
    to_base64,
    from_base64,
    set_in_app_in_frames,
    strip_string,
    AnnotatedValue,
)
from sentry_sdk._compat import text_type, string_types


try:
    from hypothesis import given
    import hypothesis.strategies as st
except ImportError:
    pass
else:
    any_string = st.one_of(st.binary(), st.text())

    @given(x=any_string)
    def test_safe_repr_never_broken_for_strings(x):
        r = safe_repr(x)
        assert isinstance(r, text_type)
        assert "broken repr" not in r


def test_safe_repr_regressions():
    # fmt: off
    assert u"лошадь" in safe_repr(u"лошадь")
    # fmt: on


@pytest.mark.xfail(
    sys.version_info < (3,),
    reason="Fixing this in Python 2 would break other behaviors",
)
# fmt: off
@pytest.mark.parametrize("prefix", ("", "abcd", u"лошадь"))
@pytest.mark.parametrize("character", u"\x00\x07\x1b\n")
# fmt: on
def test_safe_repr_non_printable(prefix, character):
    """Check that non-printable characters are escaped"""
    string = prefix + character
    assert character not in safe_repr(string)
    assert character not in safe_repr(string.encode("utf-8"))


def test_abs_path():
    """Check if abs_path is actually an absolute path. This can happen either
    with eval/exec like here, or when the file in the frame is relative to
    __main__"""

    code = compile("1/0", "test.py", "exec")
    try:
        exec(code, {})
    except Exception:
        exceptions = exceptions_from_error_tuple(sys.exc_info())

    (exception,) = exceptions
    frame1, frame2 = frames = exception["stacktrace"]["frames"]

    for frame in frames:
        assert os.path.abspath(frame["abs_path"]) == frame["abs_path"]

    assert frame1["filename"] == "tests/utils/test_general.py"
    assert frame2["filename"] == "test.py"


def test_filename():
    x = filename_for_module

    assert x("bogus", "bogus") == "bogus"

    assert x("os", os.__file__) == "os.py"

    import sentry_sdk.utils

    assert x("sentry_sdk.utils", sentry_sdk.utils.__file__) == "sentry_sdk/utils.py"


@pytest.mark.parametrize(
    "given,expected_store,expected_envelope",
    [
        (
            "https://foobar@sentry.io/123",
            "https://sentry.io/api/123/store/",
            "https://sentry.io/api/123/envelope/",
        ),
        (
            "https://foobar@sentry.io/bam/123",
            "https://sentry.io/bam/api/123/store/",
            "https://sentry.io/bam/api/123/envelope/",
        ),
        (
            "https://foobar@sentry.io/bam/baz/123",
            "https://sentry.io/bam/baz/api/123/store/",
            "https://sentry.io/bam/baz/api/123/envelope/",
        ),
    ],
)
def test_parse_dsn_paths(given, expected_store, expected_envelope):
    dsn = Dsn(given)
    auth = dsn.to_auth()
    assert auth.store_api_url == expected_store
    assert auth.get_api_url("store") == expected_store
    assert auth.get_api_url("envelope") == expected_envelope


@pytest.mark.parametrize(
    "dsn",
    [
        "https://foobar@sentry.io",
        "https://foobar@sentry.io/",
        "https://foobar@sentry.io/asdf",
        "https://foobar@sentry.io/asdf/",
        "https://foobar@sentry.io/asdf/123/",
    ],
)
def test_parse_invalid_dsn(dsn):
    with pytest.raises(BadDsn):
        dsn = Dsn(dsn)


@pytest.mark.parametrize(
    "frame,in_app_include,in_app_exclude,project_root,resulting_frame",
    [
        [
            {
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
            },
            None,
            None,
            None,
            {
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
                "in_app": False,
            },
        ],
        [
            {
                "module": "fastapi.routing",
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
            },
            None,
            None,
            None,
            {
                "module": "fastapi.routing",
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
                "in_app": False,
            },
        ],
        [
            {
                "module": "fastapi.routing",
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
                "in_app": True,
            },
            None,
            None,
            None,
            {
                "module": "fastapi.routing",
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
                "in_app": True,
            },
        ],
        [
            {
                "abs_path": "C:\\Users\\winuser\\AppData\\Roaming\\Python\\Python35\\site-packages\\fastapi\\routing.py",
            },
            None,
            None,
            None,
            {
                "abs_path": "C:\\Users\\winuser\\AppData\\Roaming\\Python\\Python35\\site-packages\\fastapi\\routing.py",
                "in_app": False,
            },
        ],
        [
            {
                "module": "fastapi.routing",
                "abs_path": "/usr/lib/python2.7/dist-packages/fastapi/routing.py",
            },
            None,
            None,
            None,
            {
                "module": "fastapi.routing",
                "abs_path": "/usr/lib/python2.7/dist-packages/fastapi/routing.py",
                "in_app": False,
            },
        ],
        [
            {
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
            None,
            None,
            None,
            {
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
        ],
        [
            {
                "module": "main",
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
            None,
            None,
            None,
            {
                "module": "main",
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
        ],
        # include
        [
            {
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
            },
            ["fastapi"],
            None,
            None,
            {
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
                "in_app": False,  # because there is no module set
            },
        ],
        [
            {
                "module": "fastapi.routing",
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
            },
            ["fastapi"],
            None,
            None,
            {
                "module": "fastapi.routing",
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
                "in_app": True,
            },
        ],
        [
            {
                "module": "fastapi.routing",
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
                "in_app": False,
            },
            ["fastapi"],
            None,
            None,
            {
                "module": "fastapi.routing",
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
                "in_app": False,
            },
        ],
        [
            {
                "abs_path": "C:\\Users\\winuser\\AppData\\Roaming\\Python\\Python35\\site-packages\\fastapi\\routing.py",
            },
            ["fastapi"],
            None,
            None,
            {
                "abs_path": "C:\\Users\\winuser\\AppData\\Roaming\\Python\\Python35\\site-packages\\fastapi\\routing.py",
                "in_app": False,  # because there is no module set
            },
        ],
        [
            {
                "module": "fastapi.routing",
                "abs_path": "/usr/lib/python2.7/dist-packages/fastapi/routing.py",
            },
            ["fastapi"],
            None,
            None,
            {
                "module": "fastapi.routing",
                "abs_path": "/usr/lib/python2.7/dist-packages/fastapi/routing.py",
                "in_app": True,
            },
        ],
        [
            {
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
            ["fastapi"],
            None,
            None,
            {
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
        ],
        [
            {
                "module": "main",
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
            ["fastapi"],
            None,
            None,
            {
                "module": "main",
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
        ],
        # exclude
        [
            {
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
            },
            None,
            ["main"],
            None,
            {
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
                "in_app": False,
            },
        ],
        [
            {
                "module": "fastapi.routing",
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
            },
            None,
            ["main"],
            None,
            {
                "module": "fastapi.routing",
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
                "in_app": False,
            },
        ],
        [
            {
                "module": "fastapi.routing",
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
                "in_app": True,
            },
            None,
            ["main"],
            None,
            {
                "module": "fastapi.routing",
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
                "in_app": True,
            },
        ],
        [
            {
                "abs_path": "C:\\Users\\winuser\\AppData\\Roaming\\Python\\Python35\\site-packages\\fastapi\\routing.py",
            },
            None,
            ["main"],
            None,
            {
                "abs_path": "C:\\Users\\winuser\\AppData\\Roaming\\Python\\Python35\\site-packages\\fastapi\\routing.py",
                "in_app": False,
            },
        ],
        [
            {
                "module": "fastapi.routing",
                "abs_path": "/usr/lib/python2.7/dist-packages/fastapi/routing.py",
            },
            None,
            ["main"],
            None,
            {
                "module": "fastapi.routing",
                "abs_path": "/usr/lib/python2.7/dist-packages/fastapi/routing.py",
                "in_app": False,
            },
        ],
        [
            {
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
            None,
            ["main"],
            None,
            {
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
        ],
        [
            {
                "module": "main",
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
            None,
            ["main"],
            None,
            {
                "module": "main",
                "abs_path": "/home/ubuntu/fastapi/main.py",
                "in_app": False,
            },
        ],
        [
            {
                "module": "fastapi.routing",
            },
            None,
            None,
            None,
            {
                "module": "fastapi.routing",
            },
        ],
        [
            {
                "module": "fastapi.routing",
            },
            ["fastapi"],
            None,
            None,
            {
                "module": "fastapi.routing",
                "in_app": True,
            },
        ],
        [
            {
                "module": "fastapi.routing",
            },
            None,
            ["fastapi"],
            None,
            {
                "module": "fastapi.routing",
                "in_app": False,
            },
        ],
        # with project_root set
        [
            {
                "module": "main",
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
            None,
            None,
            "/home/ubuntu/fastapi",
            {
                "module": "main",
                "abs_path": "/home/ubuntu/fastapi/main.py",
                "in_app": True,
            },
        ],
        [
            {
                "module": "main",
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
            ["main"],
            None,
            "/home/ubuntu/fastapi",
            {
                "module": "main",
                "abs_path": "/home/ubuntu/fastapi/main.py",
                "in_app": True,
            },
        ],
        [
            {
                "module": "main",
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
            None,
            ["main"],
            "/home/ubuntu/fastapi",
            {
                "module": "main",
                "abs_path": "/home/ubuntu/fastapi/main.py",
                "in_app": False,
            },
        ],
    ],
)
def test_set_in_app_in_frames(
    frame, in_app_include, in_app_exclude, project_root, resulting_frame
):
    new_frames = set_in_app_in_frames(
        [frame],
        in_app_include=in_app_include,
        in_app_exclude=in_app_exclude,
        project_root=project_root,
    )

    assert new_frames[0] == resulting_frame


def test_iter_stacktraces():
    assert set(
        iter_event_stacktraces(
            {
                "threads": {"values": [{"stacktrace": 1}]},
                "stacktrace": 2,
                "exception": {"values": [{"stacktrace": 3}]},
            }
        )
    ) == {1, 2, 3}


# fmt: off
@pytest.mark.parametrize(
    ("original", "base64_encoded"),
    [
        # ascii only
        ("Dogs are great!", "RG9ncyBhcmUgZ3JlYXQh"),
        # emoji
        (u"🐶", "8J+Qtg=="),
        # non-ascii
        (
            u"Καλό κορίτσι, Μάιζεϊ!",
            "zprOsc67z4wgzrrOv8+Bzq/PhM+DzrksIM6czqzOuc62zrXPiiE=",
        ),
        # mix of ascii and non-ascii
        (
            u"Of margir hundar! Ég geri ráð fyrir að ég þurfi stærra rúm.",
            "T2YgbWFyZ2lyIGh1bmRhciEgw4lnIGdlcmkgcsOhw7AgZnlyaXIgYcOwIMOpZyDDvnVyZmkgc3TDpnJyYSByw7ptLg==",
        ),
    ],
)
# fmt: on
def test_successful_base64_conversion(original, base64_encoded):
    # all unicode characters should be handled correctly
    assert to_base64(original) == base64_encoded
    assert from_base64(base64_encoded) == original

    # "to" and "from" should be inverses
    assert from_base64(to_base64(original)) == original
    assert to_base64(from_base64(base64_encoded)) == base64_encoded


@pytest.mark.parametrize(
    "input",
    [
        1231,  # incorrect type
        True,  # incorrect type
        [],  # incorrect type
        {},  # incorrect type
        None,  # incorrect type
        "yayfordogs",  # wrong length
        "#dog",  # invalid ascii character
        "🐶",  # non-ascii character
    ],
)
def test_failed_base64_conversion(input):
    # conversion from base64 should fail if given input of the wrong type or
    # input which isn't a valid base64 string
    assert from_base64(input) is None

    # any string can be converted to base64, so only type errors will cause
    # failures
    if type(input) not in string_types:
        assert to_base64(input) is None


def test_strip_string():
    # If value is None, returns None.
    assert strip_string(None) is None

    # If max_length is not passed, returns the full text (up to 1024 bytes).
    text_1024_long = "a" * 1024
    assert strip_string(text_1024_long).count("a") == 1024

    # If value exceeds the max_length, returns an AnnotatedValue.
    text_1025_long = "a" * 1025
    stripped_text = strip_string(text_1025_long)
    assert isinstance(stripped_text, AnnotatedValue)
    assert stripped_text.value.count("a") == 1021  # + '...' is 1024

    # If the text has unicode characters, the limit counts bytes, not characters.
    # fmt: off
    text_with_unicode_character = u"éê"
    assert strip_string(text_with_unicode_character, max_length=2).value == u"é..."
    # fmt: on
sentry-python-1.39.2/tests/utils/test_transaction.py000066400000000000000000000030031454744723200227210ustar00rootroot00000000000000import sys
from functools import partial

import pytest

from sentry_sdk.utils import transaction_from_function

try:
    from functools import partialmethod
except ImportError:
    pass


class MyClass:
    def myfunc(self):
        pass


def myfunc():
    pass


@partial
def my_partial():
    pass


my_lambda = lambda: None

my_partial_lambda = partial(lambda: None)


def test_transaction_from_function():
    x = transaction_from_function
    assert x(MyClass) == "tests.utils.test_transaction.MyClass"
    assert x(MyClass.myfunc) == "tests.utils.test_transaction.MyClass.myfunc"
    assert x(myfunc) == "tests.utils.test_transaction.myfunc"
    assert x(None) is None
    assert x(42) is None
    assert x(lambda: None).endswith("<lambda>")
    assert x(my_lambda) == "tests.utils.test_transaction.<lambda>"
    assert (
        x(my_partial) == "partial(<function tests.utils.test_transaction.my_partial>)"
    )
    assert (
        x(my_partial_lambda)
        == "partial(<function tests.utils.test_transaction.<lambda>>)"
    )


@pytest.mark.skipif(sys.version_info < (3, 4), reason="Require python 3.4 or higher")
def test_transaction_from_function_partialmethod():
    x = transaction_from_function

    class MyPartialClass:
        @partialmethod
        def my_partial_method(self):
            pass

    assert (
        x(MyPartialClass.my_partial_method)
        == "partialmethod(<function tests.utils.test_transaction.test_transaction_from_function_partialmethod.<locals>.MyPartialClass.my_partial_method>)"
    )
sentry-python-1.39.2/tox.ini000066400000000000000000000472471454744723200160160ustar00rootroot00000000000000# Tox (http://codespeak.net/~hpk/tox/) is a tool for running tests
# in multiple virtualenvs. This configuration file will run the
# test suite on all supported python versions. To use it, "pip install tox"
# and then run "tox" from this directory.

[tox]
envlist =
    # === Common ===
    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12}-common

    # === Integrations ===
    # General format is {pythonversion}-{integrationname}-v{frameworkversion}
    # 1 blank line between different integrations
    # Each framework version should only be mentioned once. I.e:
    #   {py3.7,py3.10}-django-v{3.2}
    #   {py3.10}-django-v{4.0}
    # instead of:
    #   {py3.7}-django-v{3.2}
    #   {py3.7,py3.10}-django-v{3.2,4.0}
    #
    # At a minimum, we should test against at least the lowest
    # and the latest supported version of a framework.

    # AIOHTTP
    {py3.7}-aiohttp-v{3.4}
    {py3.7,py3.9,py3.11}-aiohttp-v{3.8}
    {py3.8,py3.11}-aiohttp-latest

    # Ariadne
    {py3.8,py3.11}-ariadne-v{0.20}
    {py3.8,py3.11,py3.12}-ariadne-latest

    # Arq
    {py3.7,py3.11}-arq-v{0.23}
    {py3.7,py3.11,py3.12}-arq-latest

    # Asgi
    {py3.7,py3.11,py3.12}-asgi

    # asyncpg
    {py3.7,py3.10}-asyncpg-v{0.23}
    {py3.8,py3.11,py3.12}-asyncpg-latest

    # AWS Lambda
    # The aws_lambda tests deploy to the real AWS and have their own
    # matrix of Python versions to run the test lambda function in.
    # see `lambda_runtime` fixture in tests/integrations/aws_lambda.py
    {py3.9}-aws_lambda

    # Beam
    {py3.7}-beam-v{2.12}
    {py3.8,py3.11}-beam-latest

    # Boto3
    {py2.7,py3.6,py3.7}-boto3-v{1.12}
    {py3.7,py3.11,py3.12}-boto3-v{1.21}
    {py3.7,py3.11,py3.12}-boto3-v{1.29}
    {py3.7,py3.11,py3.12}-boto3-latest

    # Bottle
    {py2.7,py3.5,py3.9}-bottle-v{0.12}
    {py3.5,py3.11,py3.12}-bottle-latest

    # Celery
    {py2.7}-celery-v{3}
    {py2.7,py3.5,py3.8}-celery-v{4}
    {py3.6,py3.8}-celery-v{5.0}
    {py3.7,py3.10}-celery-v{5.1,5.2}
    {py3.8,py3.11}-celery-v{5.3}
    {py3.8,py3.11}-celery-latest

    # Chalice
    {py3.6,py3.9}-chalice-v{1.16}
    {py3.7,py3.10}-chalice-latest

    # Clickhouse Driver
    {py3.8,py3.11}-clickhouse_driver-v{0.2.0}
    {py3.8,py3.11,py3.12}-clickhouse_driver-latest

    # Cloud Resource Context
    {py3.6,py3.11,py3.12}-cloud_resource_context

    # Django
    # - Django 1.x
    {py2.7,py3.5}-django-v{1.8}
    {py2.7,py3.5,py3.7}-django-v{1.11}
    # - Django 2.x
    {py3.5,py3.7}-django-v{2.0}
    {py3.5,py3.9}-django-v{2.2}
    # - Django 3.x
    {py3.6,py3.9}-django-v{3.0}
    {py3.6,py3.11}-django-v{3.2}
    # - Django 4.x
    {py3.8,py3.11,py3.12}-django-v{4.0,4.1,4.2}
    # - Django 5.x
    {py3.10,py3.11,py3.12}-django-v{5.0}
    {py3.10,py3.11,py3.12}-django-latest

    # Falcon
    {py2.7,py3.5,py3.7}-falcon-v{1,1.4,2}
    {py3.5,py3.6,py3.11,py3.12}-falcon-v{3}
    {py3.7,py3.11,py3.12}-falcon-latest

    # FastAPI
    {py3.7,py3.10}-fastapi-v{0.79}
    {py3.8,py3.11,py3.12}-fastapi-latest

    # Flask
    {py2.7,py3.5}-flask-v{0,0.11}
    {py2.7,py3.5,py3.8}-flask-v{1}
    {py3.8,py3.11,py3.12}-flask-v{2}
    {py3.10,py3.11,py3.12}-flask-v{3}
    {py3.10,py3.11,py3.12}-flask-latest

    # Gevent
    {py2.7,py3.6,py3.8,py3.10,py3.11}-gevent

    # GCP
    {py3.7}-gcp

    # GQL
    {py3.7,py3.11}-gql-v{3.4}
    {py3.7,py3.11}-gql-latest

    # Graphene
    {py3.7,py3.11}-graphene-v{3.3}
    {py3.7,py3.11,py3.12}-graphene-latest

    # gRPC
    {py3.7,py3.10}-grpc-v{1.21,1.30,1.40}
    {py3.7,py3.11}-grpc-v{1.50}
    {py3.8,py3.11,py3.12}-grpc-latest

    # HTTPX
    {py3.6,py3.9}-httpx-v{0.16,0.18}
    {py3.6,py3.10}-httpx-v{0.20,0.22}
    {py3.7,py3.11,py3.12}-httpx-v{0.23,0.24}
    {py3.9,py3.11,py3.12}-httpx-v{0.25}
    {py3.9,py3.11,py3.12}-httpx-latest

    # Huey
    {py2.7,py3.5,py3.11,py3.12}-huey-v{2.0}
    {py3.5,py3.11,py3.12}-huey-latest

    # Loguru
    {py3.5,py3.11,py3.12}-loguru-v{0.5}
    {py3.5,py3.11,py3.12}-loguru-latest

    # OpenTelemetry (OTel)
    {py3.7,py3.9,py3.11,py3.12}-opentelemetry

    # pure_eval
    {py3.5,py3.11,py3.12}-pure_eval

    # PyMongo (Mongo DB)
    {py2.7,py3.6}-pymongo-v{3.1}
    {py2.7,py3.6,py3.9}-pymongo-v{3.12}
    {py3.6,py3.11}-pymongo-v{4.0}
    {py3.7,py3.11,py3.12}-pymongo-v{4.3,4.6}
    {py3.7,py3.11,py3.12}-pymongo-latest

    # Pyramid
    {py2.7,py3.5,py3.11}-pyramid-v{1.6}
    {py2.7,py3.5,py3.11,py3.12}-pyramid-v{1.10}
    {py3.6,py3.11,py3.12}-pyramid-v{2.0}
    {py3.6,py3.11,py3.12}-pyramid-latest

    # Quart
    {py3.7,py3.11}-quart-v{0.16}
    {py3.8,py3.11,py3.12}-quart-v{0.19}
    {py3.8,py3.11,py3.12}-quart-latest

    # Redis
    {py2.7,py3.7,py3.8}-redis-v{3}
    {py3.7,py3.8,py3.11}-redis-v{4}
    {py3.7,py3.11,py3.12}-redis-v{5}
    {py3.7,py3.11,py3.12}-redis-latest

    # Redis Cluster
    {py2.7,py3.7,py3.8}-rediscluster-v{1,2}
    # no -latest, not developed anymore

    # Requests
    {py2.7,py3.8,py3.11,py3.12}-requests

    # RQ (Redis Queue)
    {py2.7,py3.5,py3.6}-rq-v{0.6}
    {py2.7,py3.5,py3.9}-rq-v{0.13,1.0}
    {py3.5,py3.11}-rq-v{1.5,1.10}
    {py3.7,py3.11,py3.12}-rq-v{1.15}
    {py3.7,py3.11,py3.12}-rq-latest

    # Sanic
    {py3.5,py3.7}-sanic-v{0.8}
    {py3.6,py3.8}-sanic-v{20}
    {py3.7,py3.11}-sanic-v{22}
    {py3.7,py3.11}-sanic-v{23}
    {py3.8,py3.11}-sanic-latest

    # Starlette
    {py3.7,py3.10}-starlette-v{0.19}
    {py3.7,py3.11}-starlette-v{0.20,0.24,0.28}
    {py3.8,py3.11,py3.12}-starlette-v{0.32}
    {py3.8,py3.11,py3.12}-starlette-latest

    # Starlite
    {py3.8,py3.11}-starlite-v{1.48,1.51}
    # 1.51.14 is the last starlite version; the project continues as litestar

    # SQL Alchemy
    {py2.7,py3.7,py3.9}-sqlalchemy-v{1.2,1.4}
    {py3.7,py3.11}-sqlalchemy-v{2.0}
    {py3.7,py3.11,py3.12}-sqlalchemy-latest

    # Strawberry
    {py3.8,py3.11}-strawberry-v{0.209}
    {py3.8,py3.11,py3.12}-strawberry-latest

    # Tornado
    {py3.7,py3.9}-tornado-v{5}
    {py3.8,py3.11,py3.12}-tornado-v{6}
    {py3.8,py3.11,py3.12}-tornado-latest

    # Trytond
    {py3.5,py3.6}-trytond-v{4}
    {py3.6,py3.8}-trytond-v{5}
    {py3.6,py3.11}-trytond-v{6}
    {py3.8,py3.11,py3.12}-trytond-v{7}
    {py3.8,py3.11,py3.12}-trytond-latest

[testenv]
deps =
    # if you change test-requirements.txt and your change is not being reflected
    # in what's installed by tox (when running tox locally), try running tox
    # with the -r flag
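    # e.g.: tox -r -e py3.11-common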
    -r test-requirements.txt

    py3.8-common: hypothesis

    linters: -r linter-requirements.txt
    linters: werkzeug<2.3.0

    # Common
    {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12}-common: pytest-asyncio<=0.21.1
    # See https://github.com/pytest-dev/pytest/issues/9621
    # and https://github.com/pytest-dev/pytest-forked/issues/67
    # for justification of the upper bound on pytest
    {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12}-common: pytest<7.0.0

    # AIOHTTP
    aiohttp-v3.4: aiohttp~=3.4.0
    aiohttp-v3.8: aiohttp~=3.8.0
    aiohttp-latest: aiohttp
    aiohttp: pytest-aiohttp
    aiohttp-v3.8: pytest-asyncio<=0.21.1
    aiohttp-latest: pytest-asyncio<=0.21.1

    # Ariadne
    ariadne-v0.20: ariadne~=0.20.0
    ariadne-latest: ariadne
    ariadne: fastapi
    ariadne: flask
    ariadne: httpx

    # Arq
    arq-v0.23: arq~=0.23.0
    arq-v0.23: pydantic<2
    arq-latest: arq
    arq: fakeredis>=2.2.0,<2.8
    arq: pytest-asyncio<=0.21.1
    arq: async-timeout

    # Asgi
    asgi: pytest-asyncio<=0.21.1
    asgi: async-asgi-testclient

    # Asyncpg
    asyncpg-v0.23: asyncpg~=0.23.0
    asyncpg-latest: asyncpg
    asyncpg: pytest-asyncio<=0.21.1

    # AWS Lambda
    aws_lambda: boto3

    # Beam
    beam-v2.12: apache-beam~=2.12.0
    beam-latest: apache-beam

    # Boto3
    boto3-v1.12: boto3~=1.12.0
    boto3-v1.21: boto3~=1.21.0
    boto3-v1.29: boto3~=1.29.0
    boto3-latest: boto3

    # Bottle
    bottle: Werkzeug<2.1.0
    bottle-v0.12: bottle~=0.12.0
    bottle-latest: bottle

    # Celery
    celery: redis
    celery-v3: Celery~=3.0
    celery-v4: Celery~=4.0
    celery-v5.0: Celery~=5.0.0
    celery-v5.1: Celery~=5.1.0
    celery-v5.2: Celery~=5.2.0
    celery-v5.3: Celery~=5.3.0
    celery-latest: Celery

    {py3.5}-celery: newrelic<6.0.0
    {py3.7}-celery: importlib-metadata<5.0
    {py2.7,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-celery: newrelic

    # Chalice
    chalice-v1.16: chalice~=1.16.0
    chalice-latest: chalice
    chalice: pytest-chalice==0.0.5

    {py3.7}-chalice: botocore~=1.31
    {py3.8}-chalice: botocore~=1.31

    # Clickhouse Driver
    clickhouse_driver-v0.2.0: clickhouse_driver~=0.2.0
    clickhouse_driver-latest: clickhouse_driver

    # Django
    django: psycopg2-binary
    django-v{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: djangorestframework>=3.0.0,<4.0.0
    django-v{2.0,2.2,3.0,3.2,4.0,4.1,4.2,5.0}: channels[daphne]
    django-v{1.8,1.11,2.0,2.2,3.0,3.2}: Werkzeug<2.1.0
    django-v{1.8,1.11,2.0}: pytest-django<4.0
    django-v{2.2,3.0,3.2,4.0,4.1,4.2,5.0}: pytest-django
    django-v{4.0,4.1,4.2,5.0}: djangorestframework
    django-v{4.0,4.1,4.2,5.0}: pytest-asyncio<=0.21.1
    django-v{4.0,4.1,4.2,5.0}: Werkzeug
    django-latest: djangorestframework
    django-latest: pytest-asyncio<=0.21.1
    django-latest: pytest-django
    django-latest: Werkzeug
    django-latest: channels[daphne]

    django-v1.8: Django~=1.8.0
    django-v1.11: Django~=1.11.0
    django-v2.0: Django~=2.0.0
    django-v2.2: Django~=2.2.0
    django-v3.0: Django~=3.0.0
    django-v3.2: Django~=3.2.0
    django-v4.0: Django~=4.0.0
    django-v4.1: Django~=4.1.0
    django-v4.2: Django~=4.2.0
    django-v5.0: Django~=5.0.0
    django-latest: Django

    # Falcon
    falcon-v1.4: falcon~=1.4.0
    falcon-v1: falcon~=1.0
    falcon-v2: falcon~=2.0
    falcon-v3: falcon~=3.0
    falcon-latest: falcon

    # FastAPI
    fastapi: httpx
    fastapi: anyio<4.0.0 # that's a dep of httpx
    fastapi: pytest-asyncio<=0.21.1
    fastapi: python-multipart
    fastapi: requests
    fastapi-v{0.79}: fastapi~=0.79.0
    fastapi-latest: fastapi

    # Flask
    flask: flask-login
    flask-v{0.11,0,1,2.0}: Werkzeug<2.1.0
    flask-v{0.11,0,1,2.0}: markupsafe<2.1.0
    flask-v{3}: Werkzeug
    flask-v0.11: Flask~=0.11.0
    flask-v0: Flask~=0.11
    flask-v1: Flask~=1.0
    flask-v2: Flask~=2.0
    flask-v3: Flask~=3.0
    flask-latest: Flask

    # Gevent
    # See http://www.gevent.org/install.html#older-versions-of-python
    # for justification of the versions pinned below
    py3.5-gevent: gevent==20.9.0
    # See https://stackoverflow.com/questions/51496550/runtime-warning-greenlet-greenlet-size-changed
    # for justification why greenlet is pinned here
    py3.5-gevent: greenlet==0.4.17
    {py2.7,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12}-gevent: gevent>=22.10.0, <22.11.0
    # See https://github.com/pytest-dev/pytest/issues/9621
    # and https://github.com/pytest-dev/pytest-forked/issues/67
    # for justification of the upper bound on pytest
    {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12}-gevent: pytest<7.0.0

    # GQL
    gql-v{3.4}: gql[all]~=3.4.0
    gql-latest: gql[all]

    # Graphene
    graphene: blinker
    graphene: fastapi
    graphene: flask
    graphene: httpx
    graphene-v{3.3}: graphene~=3.3.0
    graphene-latest: graphene

    # gRPC
    grpc: protobuf
    grpc: mypy-protobuf
    grpc: types-protobuf
    grpc: pytest-asyncio<=0.21.1
    grpc-v1.21: grpcio-tools~=1.21.0
    grpc-v1.30: grpcio-tools~=1.30.0
    grpc-v1.40: grpcio-tools~=1.40.0
    grpc-v1.50: grpcio-tools~=1.50.0
    grpc-latest: grpcio-tools

    # HTTPX
    httpx-v0.16: pytest-httpx==0.10.0
    httpx-v0.18: pytest-httpx==0.12.0
    httpx-v0.20: pytest-httpx==0.14.0
    httpx-v0.22: pytest-httpx==0.19.0
    httpx-v0.23: pytest-httpx==0.21.0
    httpx-v0.24: pytest-httpx==0.22.0
    httpx-v0.25: pytest-httpx==0.25.0
    httpx-latest: pytest-httpx
    # anyio is a dep of httpx
    httpx: anyio<4.0.0
    httpx-v0.16: httpx~=0.16.0
    httpx-v0.18: httpx~=0.18.0
    httpx-v0.20: httpx~=0.20.0
    httpx-v0.22: httpx~=0.22.0
    httpx-v0.23: httpx~=0.23.0
    httpx-v0.24: httpx~=0.24.0
    httpx-v0.25: httpx~=0.25.0
    httpx-latest: httpx

    # Huey
    huey-v2.0: huey~=2.0.0
    huey-latest: huey

    # Loguru
    loguru-v0.5: loguru~=0.5.0
    loguru-latest: loguru

    # OpenTelemetry (OTel)
    opentelemetry: opentelemetry-distro

    # pure_eval
    pure_eval: pure_eval

    # PyMongo (MongoDB)
    pymongo: mockupdb
    pymongo-v3.1: pymongo~=3.1.0
    pymongo-v3.12: pymongo~=3.12.0
    pymongo-v4.0: pymongo~=4.0.0
    pymongo-v4.3: pymongo~=4.3.0
    pymongo-v4.6: pymongo~=4.6.0
    pymongo-latest: pymongo

    # Pyramid
    pyramid: Werkzeug<2.1.0
    pyramid-v1.6: pyramid~=1.6.0
    pyramid-v1.10: pyramid~=1.10.0
    pyramid-v2.0: pyramid~=2.0.0
    pyramid-latest: pyramid

    # Quart
    quart: quart-auth
    quart: pytest-asyncio<=0.21.1
    quart-v0.16: blinker<1.6
    quart-v0.16: jinja2<3.1.0
    quart-v0.16: Werkzeug<2.1.0
    quart-v0.16: hypercorn<0.15.0
    quart-v0.16: quart~=0.16.0
    quart-v0.19: Werkzeug>=3.0.0
    quart-v0.19: quart~=0.19.0
    quart-latest: quart

    # Redis
    redis: fakeredis!=1.7.4
    {py3.7,py3.8,py3.9,py3.10,py3.11}-redis: pytest-asyncio<=0.21.1
    redis-v3: redis~=3.0
    redis-v4: redis~=4.0
    redis-v5: redis~=5.0
    redis-latest: redis

    # Redis Cluster
    rediscluster-v1: redis-py-cluster~=1.0
    rediscluster-v2: redis-py-cluster~=2.0

    # Requests
    requests: requests>=2.0

    # RQ (Redis Queue)
    # https://github.com/jamesls/fakeredis/issues/245
    rq-v{0.6}: fakeredis<1.0
    rq-v{0.6}: redis<3.2.2
    rq-v{0.13,1.0,1.5,1.10}: fakeredis>=1.0,<1.7.4
    rq-v{1.15}: fakeredis
    rq-latest: fakeredis
    rq-v0.6: rq~=0.6.0
    rq-v0.13: rq~=0.13.0
    rq-v1.0: rq~=1.0.0
    rq-v1.5: rq~=1.5.0
    rq-v1.10: rq~=1.10.0
    rq-v1.15: rq~=1.15.0
    rq-latest: rq

    # Sanic
    sanic: websockets<11.0
    sanic: aiohttp
    sanic-v{22,23}: sanic_testing
    sanic-latest: sanic_testing
    {py3.5,py3.6}-sanic: aiocontextvars==0.2.1
    {py3.5}-sanic: ujson<4
    sanic-v0.8: sanic~=0.8.0
    sanic-v20: sanic~=20.0
    sanic-v22: sanic~=22.0
    sanic-v23: sanic~=23.0
    sanic-latest: sanic

    # Starlette
    starlette: pytest-asyncio<=0.21.1
    starlette: python-multipart
    starlette: requests
    starlette: httpx
    starlette: anyio<4.0.0 # that's a dep of httpx
    starlette: jinja2
    starlette-v0.19: starlette~=0.19.0
    starlette-v0.20: starlette~=0.20.0
    starlette-v0.24: starlette~=0.24.0
    starlette-v0.28: starlette~=0.28.0
    starlette-v0.32: starlette~=0.32.0
    starlette-latest: starlette

    # Starlite
    starlite: pytest-asyncio<=0.21.1
    starlite: python-multipart
    starlite: requests
    starlite: cryptography
    starlite: pydantic<2.0.0
    {py3.8,py3.9}-starlite: typing-extensions==4.5.0  # this is used by pydantic, which is used by starlite. When the problem is fixed in here or pydantic, this can be removed
    starlite-v{1.48}: starlite~=1.48.0
    starlite-v{1.51}: starlite~=1.51.0

    # SQLAlchemy
    sqlalchemy-v1.2: sqlalchemy~=1.2.0
    sqlalchemy-v1.4: sqlalchemy~=1.4.0
    sqlalchemy-v2.0: sqlalchemy~=2.0.0
    sqlalchemy-latest: sqlalchemy

    # Strawberry
    strawberry: fastapi
    strawberry: flask
    strawberry: httpx
    strawberry-v0.209: strawberry-graphql[fastapi,flask]~=0.209.0
    strawberry-latest: strawberry-graphql[fastapi,flask]

    # Tornado
    tornado-v5: tornado~=5.0
    tornado-v6: tornado~=6.0
    tornado-latest: tornado

    # Trytond
    trytond-v4: trytond~=4.0
    trytond-v5: trytond~=5.0
    trytond-v6: trytond~=6.0
    trytond-v7: trytond~=7.0
    trytond-latest: trytond

    trytond-v{4}: werkzeug<1.0
    trytond-v{5,6,7}: werkzeug<2.0
    trytond-latest: werkzeug<2.0

setenv =
    PYTHONDONTWRITEBYTECODE=1
    common: TESTPATH=tests
    aiohttp: TESTPATH=tests/integrations/aiohttp
    ariadne: TESTPATH=tests/integrations/ariadne
    arq: TESTPATH=tests/integrations/arq
    asgi: TESTPATH=tests/integrations/asgi
    asyncpg: TESTPATH=tests/integrations/asyncpg
    aws_lambda: TESTPATH=tests/integrations/aws_lambda
    beam: TESTPATH=tests/integrations/beam
    boto3: TESTPATH=tests/integrations/boto3
    bottle: TESTPATH=tests/integrations/bottle
    celery: TESTPATH=tests/integrations/celery
    chalice: TESTPATH=tests/integrations/chalice
    clickhouse_driver: TESTPATH=tests/integrations/clickhouse_driver
    cloud_resource_context: TESTPATH=tests/integrations/cloud_resource_context
    django: TESTPATH=tests/integrations/django
    falcon: TESTPATH=tests/integrations/falcon
    fastapi: TESTPATH=tests/integrations/fastapi
    flask: TESTPATH=tests/integrations/flask
    # run all tests with gevent
    gevent: TESTPATH=tests
    gcp: TESTPATH=tests/integrations/gcp
    gql: TESTPATH=tests/integrations/gql
    graphene: TESTPATH=tests/integrations/graphene
    httpx: TESTPATH=tests/integrations/httpx
    huey: TESTPATH=tests/integrations/huey
    loguru: TESTPATH=tests/integrations/loguru
    opentelemetry: TESTPATH=tests/integrations/opentelemetry
    pure_eval: TESTPATH=tests/integrations/pure_eval
    pymongo: TESTPATH=tests/integrations/pymongo
    pyramid: TESTPATH=tests/integrations/pyramid
    quart: TESTPATH=tests/integrations/quart
    redis: TESTPATH=tests/integrations/redis
    rediscluster: TESTPATH=tests/integrations/rediscluster
    requests: TESTPATH=tests/integrations/requests
    rq: TESTPATH=tests/integrations/rq
    sanic: TESTPATH=tests/integrations/sanic
    starlette: TESTPATH=tests/integrations/starlette
    starlite: TESTPATH=tests/integrations/starlite
    sqlalchemy: TESTPATH=tests/integrations/sqlalchemy
    strawberry: TESTPATH=tests/integrations/strawberry
    tornado: TESTPATH=tests/integrations/tornado
    trytond: TESTPATH=tests/integrations/trytond
    socket: TESTPATH=tests/integrations/socket
    grpc: TESTPATH=tests/integrations/grpc

    COVERAGE_FILE=.coverage-{envname}
passenv =
    SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID
    SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY
    SENTRY_PYTHON_TEST_POSTGRES_USER
    SENTRY_PYTHON_TEST_POSTGRES_PASSWORD
    SENTRY_PYTHON_TEST_POSTGRES_NAME
    SENTRY_PYTHON_TEST_POSTGRES_HOST
usedevelop = True
extras =
    bottle: bottle
    falcon: falcon
    flask: flask
    pymongo: pymongo

basepython =
    py2.7: python2.7
    py3.5: python3.5
    py3.6: python3.6
    py3.7: python3.7
    py3.8: python3.8
    py3.9: python3.9
    py3.10: python3.10
    py3.11: python3.11
    py3.12: python3.12

    # Python version is pinned here because flake8 actually behaves differently
    # depending on which version is used. You can patch this out to point to
    # some random Python 3 binary, but then you get guaranteed mismatches with
    # CI. Other tools such as mypy and black have options that pin the Python
    # version.
    linters: python3.12

commands =
    {py3.7,py3.8}-boto3: pip install urllib3<2.0.0

    ; https://github.com/pytest-dev/pytest/issues/5532
    {py3.5,py3.6,py3.7,py3.8,py3.9}-flask-v{0.11,0.12}: pip install pytest<5
    {py3.6,py3.7,py3.8,py3.9}-flask-v{0.11}: pip install Werkzeug<2
    ; https://github.com/pallets/flask/issues/4455
    {py3.7,py3.8,py3.9,py3.10,py3.11}-flask-v{0.11,0.12,1.0,1.1}: pip install "itsdangerous>=0.24,<2.0" "markupsafe<2.0.0" "jinja2<3.1.1"
    ; https://github.com/more-itertools/more-itertools/issues/578
    py3.5-flask-v{0.11,0.12}: pip install more-itertools<8.11.0

    ; use old pytest for old Python versions:
    {py2.7,py3.5}: pip install pytest-forked==1.1.3

    ; Running `py.test` as an executable suffers from an import error
    ; when loading tests in scenarios. In particular, django fails to
    ; load the settings from the test module.
    {py2.7}: python -m pytest --ignore-glob='*py3.py' -rsx -s --durations=5 -vvv {env:TESTPATH} {posargs}
    {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12}: python -m pytest -rsx -s --durations=5 -vvv {env:TESTPATH} {posargs}

[testenv:linters]
commands =
    flake8 tests sentry_sdk
    black --check tests sentry_sdk
    mypy sentry_sdk