pax_global_header 0000666 0000000 0000000 00000000064 14125057761 0014522 g ustar 00root root 0000000 0000000 52 comment=ddeff802436123865082462e203d604aabac0380
sentry-python-1.4.3/ 0000775 0000000 0000000 00000000000 14125057761 0014372 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/.craft.yml 0000664 0000000 0000000 00000001541 14125057761 0016273 0 ustar 00root root 0000000 0000000 minVersion: 0.23.1
targets:
- name: pypi
includeNames: /^sentry[_\-]sdk.*$/
- name: github
- name: gh-pages
- name: registry
sdks:
pypi:sentry-sdk:
- name: aws-lambda-layer
includeNames: /^sentry-python-serverless-\d+(\.\d+)*\.zip$/
layerName: SentryPythonServerlessSDK
compatibleRuntimes:
- name: python
versions:
# The number of versions must be, at most, the maximum number of
# runtimes AWS Lambda permits for a layer.
# On the other hand, AWS Lambda does not support every Python runtime.
# The supported runtimes are available in the following link:
# https://docs.aws.amazon.com/lambda/latest/dg/lambda-python.html
- python2.7
- python3.6
- python3.7
- python3.8
license: MIT
changelog: CHANGELOG.md
changelogPolicy: simple
sentry-python-1.4.3/.flake8 0000664 0000000 0000000 00000001632 14125057761 0015547 0 ustar 00root root 0000000 0000000 [flake8]
ignore =
E203, // Handled by black (Whitespace before ':' -- handled by black)
E266, // Handled by black (Too many leading '#' for block comment)
E501, // Handled by black (Line too long)
W503, // Handled by black (Line break occurred before a binary operator)
E402, // Sometimes not possible due to execution order (Module level import is not at top of file)
E731, // I don't care (Do not assign a lambda expression, use a def)
B950, // Handled by black (Line too long by flake8-bugbear)
B011, // I don't care (Do not call assert False)
B014, // does not apply to Python 2 (redundant exception types by flake8-bugbear)
N812, // I don't care (Lowercase imported as non-lowercase by pep8-naming)
N804 // is a worse version of and conflicts with B902 (first argument of a classmethod should be named cls)
max-line-length = 80
select = N,B,C,E,F,W,T4,B9
exclude=checkouts,lol*,.tox
sentry-python-1.4.3/.github/ 0000775 0000000 0000000 00000000000 14125057761 0015732 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/.github/dependabot.yml 0000664 0000000 0000000 00000001542 14125057761 0020564 0 ustar 00root root 0000000 0000000 version: 2
updates:
- package-ecosystem: pip
directory: "/"
schedule:
interval: weekly
open-pull-requests-limit: 10
allow:
- dependency-type: direct
- dependency-type: indirect
ignore:
- dependency-name: pytest
versions:
- "> 3.7.3"
- dependency-name: pytest-cov
versions:
- "> 2.8.1"
- dependency-name: pytest-forked
versions:
- "> 1.1.3"
- dependency-name: sphinx
versions:
- ">= 2.4.a, < 2.5"
- dependency-name: tox
versions:
- "> 3.7.0"
- dependency-name: werkzeug
versions:
- "> 0.15.5, < 1"
- dependency-name: werkzeug
versions:
- ">= 1.0.a, < 1.1"
- dependency-name: mypy
versions:
- "0.800"
- dependency-name: sphinx
versions:
- 3.4.3
- package-ecosystem: gitsubmodule
directory: "/"
schedule:
interval: weekly
open-pull-requests-limit: 10
sentry-python-1.4.3/.github/workflows/ 0000775 0000000 0000000 00000000000 14125057761 0017767 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/.github/workflows/black.yml 0000664 0000000 0000000 00000001246 14125057761 0021571 0 ustar 00root root 0000000 0000000 name: black
on: push
jobs:
format:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install Black
run: pip install -r linter-requirements.txt
- name: Run Black
run: black tests examples sentry_sdk
- name: Commit changes
run: |
if git diff-files --quiet; then
echo "No changes"
exit 0
fi
git config --global user.name 'sentry-bot'
git config --global user.email 'markus+ghbot@sentry.io'
git commit -am "fix: Formatting"
git push
sentry-python-1.4.3/.github/workflows/ci.yml 0000664 0000000 0000000 00000007320 14125057761 0021107 0 ustar 00root root 0000000 0000000 name: ci
on:
push:
branches:
- master
- release/**
pull_request:
jobs:
dist:
name: distribution packages
timeout-minutes: 10
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v1
- uses: actions/setup-python@v2
with:
python-version: 3.9
- run: |
pip install virtualenv
make aws-lambda-layer-build
- uses: actions/upload-artifact@v2
with:
name: ${{ github.sha }}
path: |
dist/*
dist-serverless/*
docs:
timeout-minutes: 10
name: build documentation
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v1
- uses: actions/setup-python@v2
with:
python-version: 3.9
- run: |
pip install virtualenv
make apidocs
cd docs/_build && zip -r gh-pages ./
- uses: actions/upload-artifact@v2
with:
name: ${{ github.sha }}
path: docs/_build/gh-pages.zip
lint:
timeout-minutes: 10
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.9
- run: |
pip install tox
tox -e linters
test:
continue-on-error: true
timeout-minutes: 45
runs-on: ${{ matrix.linux-version }}
strategy:
matrix:
linux-version: [ubuntu-latest]
python-version: ["2.7", "3.5", "3.6", "3.7", "3.8", "3.9"]
include:
# GHA doesn't host the combo of python 3.4 and ubuntu-latest (which is
# currently 20.04), so run just that one under 18.04. (See
# https://raw.githubusercontent.com/actions/python-versions/main/versions-manifest.json
# for a listing of supported python/os combos.)
- linux-version: ubuntu-18.04
python-version: "3.4"
services:
# Label used to access the service container
redis:
# Docker Hub image
image: redis
# Set health checks to wait until redis has started
options: >-
--health-cmd "redis-cli ping"
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
# Maps port 6379 on service container to the host
- 6379:6379
postgres:
image: postgres
env:
POSTGRES_PASSWORD: sentry
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
# Maps tcp port 5432 on service container to the host
ports:
- 5432:5432
env:
SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v1
- uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: setup
env:
PGHOST: localhost
PGPASSWORD: sentry
run: |
psql -c 'create database travis_ci_test;' -U postgres
psql -c 'create database test_travis_ci_test;' -U postgres
pip install codecov tox
- name: run tests
env:
CI_PYTHON_VERSION: ${{ matrix.python-version }}
timeout-minutes: 45
run: |
coverage erase
./scripts/runtox.sh '' --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
coverage combine .coverage*
coverage xml -i
codecov --file coverage.xml
sentry-python-1.4.3/.github/workflows/codeql-analysis.yml 0000664 0000000 0000000 00000004471 14125057761 0023610 0 ustar 00root root 0000000 0000000 # For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"
on:
push:
branches: [ master ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ master ]
schedule:
- cron: '18 18 * * 3'
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
language: [ 'python' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
# Learn more:
# https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed
steps:
- name: Checkout repository
uses: actions/checkout@v2
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v1
# ℹ️ Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language
#- run: |
# make bootstrap
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1
sentry-python-1.4.3/.github/workflows/release.yml 0000664 0000000 0000000 00000001327 14125057761 0022135 0 ustar 00root root 0000000 0000000 name: Release
on:
workflow_dispatch:
inputs:
version:
description: Version to release
required: true
force:
description: Force a release even when there are release-blockers (optional)
required: false
jobs:
release:
runs-on: ubuntu-latest
name: "Release a new version"
steps:
- uses: actions/checkout@v2
with:
token: ${{ secrets.GH_RELEASE_PAT }}
fetch-depth: 0
- name: Prepare release
uses: getsentry/action-prepare-release@v1
env:
GITHUB_TOKEN: ${{ secrets.GH_RELEASE_PAT }}
with:
version: ${{ github.event.inputs.version }}
force: ${{ github.event.inputs.force }}
sentry-python-1.4.3/.gitignore 0000664 0000000 0000000 00000000346 14125057761 0016365 0 ustar 00root root 0000000 0000000 *.pyc
*.log
*.egg
*.db
*.pid
.python-version
.coverage*
.DS_Store
.tox
pip-log.txt
*.egg-info
/build
/dist
/dist-serverless
.cache
.idea
.eggs
venv
.venv
.vscode/tags
.pytest_cache
.hypothesis
relay
pip-wheel-metadata
.mypy_cache
sentry-python-1.4.3/.gitmodules 0000664 0000000 0000000 00000000174 14125057761 0016551 0 ustar 00root root 0000000 0000000 [submodule "checkouts/data-schemas"]
path = checkouts/data-schemas
url = https://github.com/getsentry/sentry-data-schemas
sentry-python-1.4.3/.vscode/ 0000775 0000000 0000000 00000000000 14125057761 0015733 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/.vscode/settings.json 0000664 0000000 0000000 00000000132 14125057761 0020462 0 ustar 00root root 0000000 0000000 {
"python.pythonPath": ".venv/bin/python",
"python.formatting.provider": "black"
} sentry-python-1.4.3/CHANGELOG.md 0000664 0000000 0000000 00000065773 14125057761 0016225 0 ustar 00root root 0000000 0000000 # Changelog and versioning
## Versioning Policy
This project follows [semver](https://semver.org/), with three additions:
- Semver says that major version `0` can include breaking changes at any time. Still, it is common practice to assume that only `0.x` releases (minor versions) can contain breaking changes while `0.x.y` releases (patch versions) are used for backwards-compatible changes (bugfixes and features). This project also follows that practice.
- All undocumented APIs are considered internal. They are not part of this contract.
- Certain features (e.g. integrations) may be explicitly called out as "experimental" or "unstable" in the documentation. They come with their own versioning policy described in the documentation.
We recommend to pin your version requirements against `1.x.*` or `1.x.y`.
Either one of the following is fine:
```
sentry-sdk>=0.10.0,<0.11.0
sentry-sdk==0.10.1
```
A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker.
## 1.4.3
- Turned client reports on by default.
## 1.4.2
- Made envelope modifications in the HTTP transport non observable #1206
## 1.4.1
- Fix race condition between `finish` and `start_child` in tracing #1203
## 1.4.0
- No longer set the last event id for transactions #1186
- Added support for client reports (disabled by default for now) #1181
- Added `tracestate` header handling #1179
- Added real ip detection to asgi integration #1199
## 1.3.1
- Fix detection of contextvars compatibility with Gevent versions >=20.9.0 #1157
## 1.3.0
- Add support for Sanic versions 20 and 21 #1146
## 1.2.0
- Fix for `AWSLambda` Integration to handle other path formats for function initial handler #1139
- Fix for worker to set daemon attribute instead of deprecated setDaemon method #1093
- Fix for `bottle` Integration that discards `-dev` for version extraction #1085
- Fix for transport that adds a unified hook for capturing metrics about dropped events #1100
- Add `Httpx` Integration #1119
- Add support for china domains in `AWSLambda` Integration #1051
## 1.1.0
- Fix for `AWSLambda` integration returns value of original handler #1106
- Fix for `RQ` integration that only captures exception if RQ job has failed and ignore retries #1076
- Feature that supports Tracing for the `Tornado` integration #1060
- Feature that supports wild cards in `ignore_logger` in the `Logging` Integration #1053
- Fix for django that deals with template span description names that are either lists or tuples #1054
## 1.0.0
This release contains a breaking change
- **BREAKING CHANGE**: Feat: Moved `auto_session_tracking` experimental flag to a proper option and removed explicitly setting experimental `session_mode` in favor of auto detecting its value, hence enabling release health by default #994
- Fixed Django transaction name by setting the name to `request.path_info` rather than `request.path`
- Fix for tracing by getting HTTP headers from span rather than transaction when possible #1035
- Fix for Flask transactions missing request body in non errored transactions #1034
- Fix for honoring the `X-Forwarded-For` header #1037
- Fix for worker that logs data dropping of events with level error #1032
## 0.20.3
- Added scripts to support auto instrumentation of no code AWS lambda Python functions
## 0.20.2
- Fix incorrect regex in craft to include wheel file in pypi release
## 0.20.1
- Fix for error that occurs with Async Middlewares when the middleware is a function rather than a class
## 0.20.0
- Fix for header extraction for AWS lambda/API extraction
- Fix multiple **kwargs type hints # 967
- Fix that corrects AWS lambda integration failure to detect the aws-lambda-ric 1.0 bootstrap #976
- Fix AWSLambda integration: variable "timeout_thread" referenced before assignment #977
- Use full git sha as release name #960
- **BREAKING CHANGE**: The default environment is now production, not based on release
- Django integration now creates transaction spans for template rendering
- Fix headers not parsed correctly in ASGI middleware, Decode headers before creating transaction #984
- Restored ability to have tracing disabled #991
- Fix Django async views not behaving asynchronously
- Performance improvement: supported pre-aggregated sessions
## 0.19.5
- Fix two regressions added in 0.19.2 with regard to sampling behavior when reading the sampling decision from headers.
- Increase internal transport queue size and make it configurable.
## 0.19.4
- Fix a bug that would make applications crash if an old version of `boto3` was installed.
## 0.19.3
- Automatically pass integration-relevant data to `traces_sampler` for AWS, AIOHTTP, ASGI, Bottle, Celery, Django, Falcon, Flask, GCP, Pyramid, Tryton, RQ, and WSGI integrations
- Fix a bug where the AWS integration would crash if event was anything besides a dictionary
- Fix the Django integration's ASGI handler for Channels 3.0. Thanks Luke Pomfrey!
## 0.19.2
- Add `traces_sampler` option.
- The SDK now attempts to infer a default release from various environment variables and the current git repo.
- Fix a crash with async views in Django 3.1.
- Fix a bug where complex URL patterns in Django would create malformed transaction names.
- Add options for transaction styling in AIOHTTP.
- Add basic attachment support (documentation tbd).
- Fix a crash in the `pure_eval` integration.
- Integration for creating spans from `boto3`.
## 0.19.1
- Fix dependency check for `blinker` fixes #858
- Fix incorrect timeout warnings in AWS Lambda and GCP integrations #854
## 0.19.0
- Removed `_experiments.auto_enabling_integrations` in favor of just `auto_enabling_integrations` which is now enabled by default.
## 0.18.0
- **Breaking change**: The `no_proxy` environment variable is now honored when inferring proxy settings from the system. Thanks Xavier Fernandez!
- Added Performance/Tracing support for AWS and GCP functions.
- Fix an issue with Django instrumentation where the SDK modified `resolver_match.callback` and broke user code.
## 0.17.8
- Fix yet another bug with disjoint traces in Celery.
- Added support for Chalice 1.20. Thanks again to the folks at Cuenca MX!
## 0.17.7
- Internal: Change data category for transaction envelopes.
- Fix a bug under Celery 4.2+ that may have caused disjoint traces or missing transactions.
## 0.17.6
- Support for Flask 0.10 (only relaxing version check)
## 0.17.5
- Work around an issue in the Python stdlib that makes the entire process deadlock during garbage collection if events are sent from a `__del__` implementation.
- Add possibility to wrap ASGI application twice in middleware to enable split up of request scope data and exception catching.
## 0.17.4
- New integration for the Chalice web framework for AWS Lambda. Thanks to the folks at Cuenca MX!
## 0.17.3
- Fix an issue with the `pure_eval` integration in interaction with trimming where `pure_eval` would create a lot of useless local variables that then drown out the useful ones in trimming.
## 0.17.2
- Fix timezone bugs in GCP integration.
## 0.17.1
- Fix timezone bugs in AWS Lambda integration.
- Fix crash on GCP integration because of missing parameter `timeout_warning`.
## 0.17.0
- Fix a bug where class-based callables used as Django views (without using Django's regular class-based views) would not have `csrf_exempt` applied.
- New integration for Google Cloud Functions.
- Fix a bug where a recently released version of `urllib3` would cause the SDK to enter an infinite loop on networking and SSL errors.
- **Breaking change**: Remove the `traceparent_v2` option. The option has been ignored since 0.16.3, just remove it from your code.
## 0.16.5
- Fix a bug that caused Django apps to crash if the view didn't have a `__name__` attribute.
## 0.16.4
- Add experiment to avoid truncating span descriptions. Initialize with `init(_experiments={"smart_transaction_trimming": True})`.
- Add a span around the Django view in transactions to distinguish its operations from middleware operations.
## 0.16.3
- Fix AWS Lambda support for Python 3.8.
- The AWS Lambda integration now captures initialization/import errors for Python 3.
- The AWS Lambda integration now supports an option to warn about functions likely to time out.
- Testing for RQ 1.5
- Flip default of `traceparent_v2`. This change should have zero impact. The flag will be removed in 0.17.
- Fix compatibility bug with Django 3.1.
## 0.16.2
- New (optional) integrations for richer stacktraces: `pure_eval` for additional variables, `executing` for better function names.
## 0.16.1
- Flask integration: Fix a bug that prevented custom tags from being attached to transactions.
## 0.16.0
- Redis integration: add tags for more commands
- Redis integration: Patch rediscluster package if installed.
- Session tracking: A session is no longer considered crashed if there has been a fatal log message (only unhandled exceptions count).
- **Breaking change**: Revamping of the tracing API.
- **Breaking change**: `before_send` is no longer called for transactions.
## 0.15.1
- Fix fatal crash in Pyramid integration on 404.
## 0.15.0
- **Breaking change:** The ASGI middleware will now raise an exception if contextvars are not available, like it is already the case for other asyncio integrations.
- Contextvars are now used in more circumstances following a bugfix release of `gevent`. This will fix a few instances of wrong request data being attached to events while using an asyncio-based web framework.
- APM: Fix a bug in the SQLAlchemy integration where a span was left open if the database transaction had to be rolled back. This could have led to deeply nested span trees under that db query span.
- Fix a bug in the Pyramid integration where the transaction name could not be overridden at all.
- Fix a broken type annotation on `capture_exception`.
- Basic support for Django 3.1. More work is required for async middlewares to be instrumented properly for APM.
## 0.14.4
- Fix bugs in transport rate limit enforcement for specific data categories. The bug should not have affected anybody because we do not yet emit rate limits for specific event types/data categories.
- Fix a bug in `capture_event` where it would crash if given additional kwargs. Thanks to Tatiana Vasilevskaya!
- Fix a bug where contextvars from the request handler were inaccessible in AIOHTTP error handlers.
- Fix a bug where the Celery integration would crash if newrelic instrumented Celery as well.
## 0.14.3
- Attempt to use a monotonic clock to measure span durations in Performance/APM.
- Avoid overwriting explicitly set user data in web framework integrations.
- Allow to pass keyword arguments to `capture_event` instead of configuring the scope.
- Feature development for session tracking.
## 0.14.2
- Fix a crash in Django Channels instrumentation when SDK is reinitialized.
- More contextual data for AWS Lambda (cloudwatch logs link).
## 0.14.1
- Fix a crash in the Django integration when used in combination with Django Rest Framework's test utilities for request.
- Fix high memory consumption when sending a lot of errors in the same process. Particularly noticeable in async environments.
## 0.14.0
- Show ASGI request data in Django 3.0
- New integration for the Trytond ERP framework. Thanks n1ngu!
## 0.13.5
- Fix trace continuation bugs in APM.
- No longer report `asyncio.CancelledError` as part of AIOHTTP integration.
## 0.13.4
- Fix package classifiers to mark this package as supporting Python 3.8. The SDK supported 3.8 before though.
- Update schema sent for transaction events (transaction status).
- Fix a bug where `None` inside request data was skipped/omitted.
## 0.13.3
- Fix an issue with the ASGI middleware that would cause Uvicorn to infer the wrong ASGI versions and call the wrapped application with the wrong argument count.
- Do not ignore the `tornado.application` logger.
- The Redis integration now instruments Redis blaster for breadcrumbs and transaction spans.
## 0.13.2
- Fix a bug in APM that would cause wrong durations to be displayed on non-UTC servers.
## 0.13.1
- Add new global functions for setting scope/context data.
- Fix a bug that would make Django 1.11+ apps crash when using function-based middleware.
## 0.13.0
- Remove an old deprecation warning (behavior itself already changed since a long time).
- The AIOHTTP integration now attaches the request body to crash reports. Thanks to Vitali Rebkavets!
- Add an experimental PySpark integration.
- First release to be tested under Python 3.8. No code changes were necessary though, so previous releases also might have worked.
## 0.12.3
- Various performance improvements to event sending.
- Avoid crashes when scope or hub is racy.
- Revert a change that broke applications using gevent and channels (in the same virtualenv, but different processes).
- Fix a bug that made the SDK crash on unicode in SQL.
## 0.12.2
- Fix a crash with ASGI (Django Channels) when the ASGI request type is neither HTTP nor Websockets.
## 0.12.1
- Temporarily remove sending of SQL parameters (as part of breadcrumbs or spans for APM) to Sentry to avoid memory consumption issues.
## 0.12.0
- Sentry now has a [Discord server](https://discord.gg/cWnMQeA)! Join the server to get involved into SDK development and ask questions.
- Fix a bug where the response object for httplib (or requests) was held onto for an unnecessarily long amount of time.
- APM: Add spans for more methods on `subprocess.Popen` objects.
- APM: Add spans for Django middlewares.
- APM: Add spans for ASGI requests.
- Automatically inject the ASGI middleware for Django Channels 2.0. This will **break your Channels 2.0 application if it is running on Python 3.5 or 3.6** (while previously it would "only" leak a lot of memory for each ASGI request). **Install `aiocontextvars` from PyPI to make it work again.**
## 0.11.2
- Fix a bug where the SDK would throw an exception on shutdown when running under eventlet.
- Add missing data to Redis breadcrumbs.
## 0.11.1
- Remove a faulty assertion (observed in environment with Django Channels and ASGI).
## 0.11.0
- Fix type hints for the logging integration. Thanks Steven Dignam!
- Fix an issue where scope/context data would leak in applications that use `gevent` with its threading monkeypatch. The fix is to avoid usage of contextvars in such environments. Thanks Ran Benita!
- Fix a reference cycle in the `ThreadingIntegration` that led to exceptions on interpreter shutdown. Thanks Guang Tian Li!
- Fix a series of bugs in the stdlib integration that broke usage of `subprocess`.
- More instrumentation for APM.
- New integration for SQLAlchemy (creates breadcrumbs from queries).
- New (experimental) integration for Apache Beam.
- Fix a bug in the `LoggingIntegration` that would send breadcrumbs timestamps in the wrong timezone.
- The `AiohttpIntegration` now sets the event's transaction name.
- Fix a bug that caused infinite recursion when serializing local variables that logged errors or otherwise created Sentry events.
## 0.10.2
- Fix a bug where a log record with non-strings as `extra` keys would make the SDK crash.
- Added ASGI integration for better hub propagation, request data for your events and capturing uncaught exceptions. Using this middleware explicitly in your code will also fix a few issues with Django Channels.
- Fix a bug where `celery-once` was deadlocking when used in combination with the celery integration.
- Fix a memory leak in the new tracing feature when it is not enabled.
## 0.10.1
- Fix bug where the SDK would yield a deprecation warning about `collections.abc` vs `collections`.
- Fix bug in stdlib integration that would cause spawned subprocesses to not inherit the environment variables from the parent process.
## 0.10.0
- Massive refactor in preparation to tracing. There are no intentional breaking changes, but there is a risk of breakage (hence the minor version bump). Two new client options `traces_sample_rate` and `traceparent_v2` have been added. Do not change the defaults in production, they will bring your application down or at least fill your Sentry project up with nonsense events.
## 0.9.5
- Do not use `getargspec` on Python 3 to evade deprecation warning.
## 0.9.4
- Revert a change in 0.9.3 that prevented passing a `unicode` string as DSN to `init()`.
## 0.9.3
- Add type hints for `init()`.
- Include user agent header when sending events.
## 0.9.2
- Fix a bug in the Django integration that would prevent the user from initializing the SDK at the top of `settings.py`.
This bug was introduced in 0.9.1 for all Django versions, but has been there for much longer for Django 1.6 in particular.
## 0.9.1
- Fix a bug on Python 3.7 where gunicorn with gevent would cause the SDK to leak event data between requests.
- Fix a bug where the GNU backtrace integration would not parse certain frames.
- Fix a bug where the SDK would not pick up request bodies for Django Rest Framework based apps.
- Remove a few more headers containing sensitive data per default.
- Various improvements to type hints. Thanks Ran Benita!
- Add a event hint to access the log record from `before_send`.
- Fix a bug that would ignore `__tracebackhide__`. Thanks Matt Millican!
- Fix distribution information for mypy support (add `py.typed` file). Thanks Ran Benita!
## 0.9.0
- The SDK now captures `SystemExit` and other `BaseException`s when coming from within a WSGI app (Flask, Django, ...)
- Pyramid: No longer report an exception if there exists an exception view for it.
## 0.8.1
- Fix infinite recursion bug in Celery integration.
## 0.8.0
- Add the always_run option in excepthook integration.
- Fix performance issues when attaching large data to events. This is not really intended to be a breaking change, but this release does include a rewrite of a larger chunk of code, therefore the minor version bump.
## 0.7.14
- Fix crash when using Celery integration (`TypeError` when using `apply_async`).
## 0.7.13
- Fix a bug where `Ignore` raised in a Celery task would be reported to Sentry.
- Add experimental support for tracing PoC.
## 0.7.12
- Read from `X-Real-IP` for user IP address.
- Fix a bug that would not apply in-app rules for attached callstacks.
- It's now possible to disable automatic proxy support by passing `http_proxy=""`. Thanks Marco Neumann!
## 0.7.11
- Fix a bug that would send `errno` in an invalid format to the server.
- Fix import-time crash when running Python with `-O` flag.
- Fix a bug that would prevent the logging integration from attaching `extra` keys called `data`.
- Fix order in which exception chains are reported to match Raven behavior.
- New integration for the Falcon web framework. Thanks to Jacob Magnusson!
## 0.7.10
- Add more event trimming.
- Log Sentry's response body in debug mode.
- Fix a few bad typehints causing issues in IDEs.
- Fix a bug in the Bottle integration that would report HTTP exceptions (e.g. redirects) as errors.
- Fix a bug that would prevent use of `in_app_exclude` without setting `in_app_include`.
- Fix a bug where request bodies of Django Rest Framework apps were not captured.
- Suppress errors during SQL breadcrumb capturing in Django integration. Also change order in which formatting strategies are tried.
## 0.7.9
- New integration for the Bottle web framework. Thanks to Stepan Henek!
- Self-protect against broken mapping implementations and other broken reprs instead of dropping all local vars from a stacktrace. Thanks to Marco Neumann!
## 0.7.8
- Add support for Sanic versions 18 and 19.
- Fix a bug that causes an SDK crash when using composed SQL from psycopg2.
## 0.7.7
- Fix a bug that would not capture request bodies if they were empty JSON arrays, objects or strings.
- New GNU backtrace integration parses stacktraces from exception messages and appends them to existing stacktrace.
- Capture Tornado formdata.
- Support Python 3.6 in Sanic and AIOHTTP integration.
- Clear breadcrumbs before starting a new request.
- Fix a bug in the Celery integration that would drop pending events during worker shutdown (particularly an issue when running with `max_tasks_per_child = 1`)
- Fix a bug with `repr`ing locals whose `__repr__` simultaneously changes the WSGI environment or other data that we're also trying to serialize at the same time.
## 0.7.6
- Fix a bug where artificial frames for Django templates would not be marked as in-app and would always appear as the innermost frame. Implement a heuristic to show template frame closer to `render` or `parse` invocation.
## 0.7.5
- Fix bug in the Tornado integration that would send broken cookies to the server.
- Fix a bug in the logging integration that would ignore the client option `with_locals`.
## 0.7.4
- Read release and environment from process environment like the Raven SDK does. The keys are called `SENTRY_RELEASE` and `SENTRY_ENVIRONMENT`.
- Fix a bug in the `serverless` integration where it would not push a new scope for each function call (leaking tags and other things across calls).
- Experimental support for type hints.
## 0.7.3
- Fix crash in AIOHTTP integration when integration was set up but disabled.
- Flask integration now adds usernames, email addresses based on the protocol Flask-User defines on top of Flask-Login.
- New threading integration catches exceptions from crashing threads.
- New method `flush` on hubs and clients. New global `flush` function.
- Add decorator for serverless functions to fix common problems in those environments.
- Fix a bug in the logging integration where using explicit handlers required enabling the integration.
## 0.7.2
- Fix `celery.exceptions.Retry` spamming in Celery integration.
## 0.7.1
- Fix `UnboundLocalError` crash in Celery integration.
## 0.7.0
- Properly display chained exceptions (PEP-3134).
- Rewrite celery integration to monkeypatch instead of using signals due to bugs in Celery 3's signal handling. The Celery scope is also now available in prerun and postrun signals.
- Fix Tornado integration to work with Tornado 6.
- Do not evaluate Django `QuerySet` when trying to capture local variables. Also an internal hook was added to overwrite `repr` for local vars.
## 0.6.9
- Second attempt at fixing the bug that was supposed to be fixed in 0.6.8.
> No longer access arbitrary sequences in local vars due to possible side effects.
## 0.6.8
- No longer access arbitrary sequences in local vars due to possible side effects.
## 0.6.7
- Sourcecode Django templates is now displayed in stackframes like Jinja templates in Flask already were.
- Updates to AWS Lambda integration for changes Amazon did to their Python 3.7 runtime.
- Fix a bug in the AIOHTTP integration that would report 300s and other HTTP status codes as errors.
- Fix a bug where a crashing `before_send` would crash the SDK and app.
- Fix a bug where cyclic references in e.g. local variables or `extra` data would crash the SDK.
## 0.6.6
- Un-break API of internal `Auth` object that we use in Sentry itself.
## 0.6.5
- Capture WSGI request data eagerly to save memory and avoid issues with uWSGI.
- Ability to use subpaths in DSN.
- Ignore `django.request` logger.
## 0.6.4
- Fix bug that would lead to an `AssertionError: stack must have at least one layer`, at least in testsuites for Flask apps.
## 0.6.3
- New integration for Tornado
- Fix request data in Django, Flask and other WSGI frameworks leaking between events.
- Fix infinite recursion when sending more events in `before_send`.
## 0.6.2
- Fix crash in AWS Lambda integration when using Zappa. This only silences the error, the underlying bug is still in Zappa.
## 0.6.1
- New integration for aiohttp-server.
- Fix crash when reading hostname in broken WSGI environments.
## 0.6.0
- Fix bug where a 429 without Retry-After would not be honored.
- Fix bug where proxy setting would not fall back to `http_proxy` for HTTPs traffic.
- A WSGI middleware is now available for catching errors and adding context about the current request to them.
- Using `logging.debug("test", exc_info=True)` will now attach the current stacktrace if no `sys.exc_info` is available.
- The Python 3.7 runtime for AWS Lambda is now supported.
- Fix a bug that would drop an event or parts of it when it contained bytes that were not UTF-8 encoded.
- Logging an exception will no longer add the exception as breadcrumb to the exception's own event.
## 0.5.5
- New client option `ca_certs`.
- Fix crash with Django and psycopg2.
## 0.5.4
- Fix deprecation warning in relation to the `collections` stdlib module.
- Fix bug that would crash Django and Flask when streaming responses are failing halfway through.
## 0.5.3
- Fix bug where using `push_scope` with a callback would not pop the scope.
- Fix crash when initializing the SDK in `push_scope`.
- Fix bug where IP addresses were sent when `send_default_pii=False`.
## 0.5.2
- Fix bug where events sent through the RQ integration were sometimes lost.
- Remove a deprecation warning about usage of `logger.warn`.
- Fix bug where large frame local variables would lead to the event being rejected by Sentry.
## 0.5.1
- Integration for Redis Queue (RQ)
## 0.5.0
- Fix a bug that would omit several debug logs during SDK initialization.
- Fix issue that sent a event key `""` Sentry wouldn't understand.
- **Breaking change:** The `level` and `event_level` options in the logging integration now work separately from each other.
- Fix a bug in the Sanic integration that would report the exception behind any HTTP error code.
- Fix a bug that would spam breadcrumbs in the Celery integration. Ignore logger `celery.worker.job`.
- Additional attributes on log records are now put into `extra`.
- Integration for Pyramid.
- `sys.argv` is put into extra automatically.
## 0.4.3
- Fix a bug that would leak WSGI responses.
## 0.4.2
- Fix a bug in the Sanic integration that would leak data between requests.
- Fix a bug that would hide all debug logging happening inside of the built-in transport.
- Fix a bug that would report errors for typos in Django's shell.
## 0.4.1
- Fix bug that would only show filenames in stacktraces but not the parent directories.
## 0.4.0
- Changed how integrations are initialized. Integrations are now configured and enabled per-client.
## 0.3.11
- Fix issue with certain deployment tools and the AWS Lambda integration.
## 0.3.10
- Set transactions for Django like in Raven. Which transaction behavior is used can be configured.
- Fix a bug which would omit frame local variables from stacktraces in Celery.
- New option: `attach_stacktrace`
## 0.3.9
- Bugfixes for AWS Lambda integration: Using Zappa did not catch any exceptions.
## 0.3.8
- Nicer log level for internal errors.
## 0.3.7
- Remove `repos` configuration option. There was never a way to make use of this feature.
- Fix a bug in `last_event_id`.
- Add Django SQL queries to breadcrumbs.
- Django integration won't set user attributes if they were already set.
- Report correct SDK version to Sentry.
## 0.3.6
- Integration for Sanic
## 0.3.5
- Integration for AWS Lambda
- Fix mojibake when encoding local variable values
## 0.3.4
- Performance improvement when storing breadcrumbs
## 0.3.3
- Fix crash when breadcrumbs had to be trunchated
## 0.3.2
- Fixed an issue where some paths where not properly sent as absolute paths
sentry-python-1.4.3/CONTRIBUTING.md 0000664 0000000 0000000 00000010203 14125057761 0016617 0 ustar 00root root 0000000 0000000 # How to contribute to the Sentry Python SDK
`sentry-sdk` is an ordinary Python package. You can install it with `pip
install -e .` into some virtualenv, edit the sourcecode and test out your
changes manually.
## Community
The public-facing channels for support and development of Sentry SDKs can be found on [Discord](https://discord.gg/Ww9hbqr).
## Running tests and linters
Make sure you have `virtualenv` installed, and the Python versions you care
about. You should have Python 2.7 and the latest Python 3 installed.
We have a `Makefile` that is supposed to help people get started with hacking
on the SDK without having to know or understand the Python ecosystem. You don't
need to `workon` or `bin/activate` anything, the `Makefile` will do everything
for you. Run `make` or `make help` to list commands.
Of course you can always run the underlying commands yourself, which is
particularly useful when wanting to provide arguments to `pytest` to run
specific tests. If you want to do that, we expect you to know your way around
Python development. To get started, clone the SDK repository, cd into it, set
up a virtualenv and run:
# This is "advanced mode". Use `make help` if you have no clue what's
# happening here!
pip install -e .
pip install -r test-requirements.txt
pytest tests/
## Releasing a new version
We use [craft](https://github.com/getsentry/craft#python-package-index-pypi) to
release new versions. You need credentials for the `getsentry` PyPI user, and
must have `twine` installed globally.
The usual release process goes like this:
1. Go through git log and write new entry into `CHANGELOG.md`, commit to master
2. `craft p a.b.c`
3. `craft pp a.b.c`
## Adding a new integration (checklist)
1. Write the integration.
* Instrument all application instances by default. Prefer global signals/patches instead of configuring a specific instance. Don't make the user pass anything to your integration for anything to work. Aim for zero configuration.
* Everybody monkeypatches. That means:
* Make sure to think about conflicts with other monkeypatches when monkeypatching.
* You don't need to feel bad about it.
* Avoid modifying the hub, registering a new client or the like. The user drives the client, and the client owns integrations.
* Allow the user to disable the integration by changing the client. Check `Hub.current.get_integration(MyIntegration)` from within your signal handlers to see if your integration is still active before you do anything impactful (such as sending an event).
2. Write tests.
* Think about the minimum versions supported, and test each version in a separate env in `tox.ini`.
* Create a new folder in `tests/integrations/`, with an `__init__` file that skips the entire suite if the package is not installed.
3. Update package metadata.
* We use `extras_require` in `setup.py` to communicate minimum version requirements for integrations. People can use this in combination with tools like Poetry or Pipenv to detect conflicts between our supported versions and their used versions programmatically.
Do not set upper-bounds on version requirements as people are often faster in adopting new versions of a web framework than we are in adding them to the test matrix or our package metadata.
4. Write the [docs](https://github.com/getsentry/sentry-docs). Answer the following questions:
* What does your integration do? Split in two sections: Executive summary at top and exact behavior further down.
* Which version of the SDK supports which versions of the modules it hooks into?
* One code example with basic setup.
* Make sure to add integration page to `python/index.md` (people forget to do that all the time).
Tip: Put most relevant parts wrapped in `..` tags for usage from within the Sentry UI.
5. Merge docs after new version has been released (auto-deploys on merge).
6. (optional) Update data in [`sdk_updates.py`](https://github.com/getsentry/sentry/blob/master/src/sentry/sdk_updates.py) to give users in-app suggestions to use your integration. May not be applicable or doable for all kinds of integrations.
sentry-python-1.4.3/LICENSE 0000664 0000000 0000000 00000002462 14125057761 0015403 0 ustar 00root root 0000000 0000000 Copyright (c) 2018 Sentry (https://sentry.io) and individual contributors.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
sentry-python-1.4.3/MANIFEST.in 0000664 0000000 0000000 00000000054 14125057761 0016127 0 ustar 00root root 0000000 0000000 include LICENSE
include sentry_sdk/py.typed
sentry-python-1.4.3/Makefile 0000664 0000000 0000000 00000003210 14125057761 0016026 0 ustar 00root root 0000000 0000000 SHELL = /bin/bash
VENV_PATH = .venv
help:
@echo "Thanks for your interest in the Sentry Python SDK!"
@echo
@echo "make lint: Run linters"
@echo "make test: Run basic tests (not testing most integrations)"
@echo "make test-all: Run ALL tests (slow, closest to CI)"
@echo "make format: Run code formatters (destructive)"
@echo "make aws-lambda-layer-build: Build serverless ZIP dist package"
@echo
@echo "Also make sure to read ./CONTRIBUTING.md"
@false
.venv:
virtualenv -ppython3 $(VENV_PATH)
$(VENV_PATH)/bin/pip install tox
dist: .venv
rm -rf dist build
$(VENV_PATH)/bin/python setup.py sdist bdist_wheel
.PHONY: dist
format: .venv
$(VENV_PATH)/bin/tox -e linters --notest
.tox/linters/bin/black .
.PHONY: format
test: .venv
@$(VENV_PATH)/bin/tox -e py2.7,py3.7
.PHONY: test
test-all: .venv
@TOXPATH=$(VENV_PATH)/bin/tox sh ./scripts/runtox.sh
.PHONY: test-all
check: lint test
.PHONY: check
lint: .venv
@set -e && $(VENV_PATH)/bin/tox -e linters || ( \
echo "================================"; \
echo "Bad formatting? Run: make format"; \
echo "================================"; \
false)
.PHONY: lint
apidocs: .venv
@$(VENV_PATH)/bin/pip install --editable .
@$(VENV_PATH)/bin/pip install -U -r ./docs-requirements.txt
@$(VENV_PATH)/bin/sphinx-build -W -b html docs/ docs/_build
.PHONY: apidocs
apidocs-hotfix: apidocs
@$(VENV_PATH)/bin/pip install ghp-import
@$(VENV_PATH)/bin/ghp-import -pf docs/_build
.PHONY: apidocs-hotfix
aws-lambda-layer-build: dist
$(VENV_PATH)/bin/pip install urllib3
$(VENV_PATH)/bin/pip install certifi
$(VENV_PATH)/bin/python -m scripts.build_awslambda_layer
.PHONY: aws-lambda-layer-build
sentry-python-1.4.3/README.md 0000664 0000000 0000000 00000003312 14125057761 0015650 0 ustar 00root root 0000000 0000000
_Bad software is everywhere, and we're tired of it. Sentry is on a mission to help developers write better software faster, so we can get back to enjoying technology. If you want to join us [**Check out our open positions**](https://sentry.io/careers/)_
# sentry-python - Sentry SDK for Python
[](https://travis-ci.com/getsentry/sentry-python)
[](https://pypi.python.org/pypi/sentry-sdk)
[](https://discord.gg/cWnMQeA)
This is the next line of the Python SDK for [Sentry](http://sentry.io/), intended to replace the `raven` package on PyPI.
```python
from sentry_sdk import init, capture_message
init("https://mydsn@sentry.io/123")
capture_message("Hello World") # Will create an event.
raise ValueError() # Will also create an event.
```
- To learn more about how to use the SDK [refer to our docs](https://docs.sentry.io/platforms/python/)
- Are you coming from raven-python? [Use this cheatsheet](https://docs.sentry.io/platforms/python/migration/)
- To learn about internals use the [API Reference](https://getsentry.github.io/sentry-python/)
# Contributing to the SDK
Please refer to [CONTRIBUTING.md](https://github.com/getsentry/sentry-python/blob/master/CONTRIBUTING.md).
# License
Licensed under the BSD license, see [`LICENSE`](https://github.com/getsentry/sentry-python/blob/master/LICENSE)
sentry-python-1.4.3/checkouts/ 0000775 0000000 0000000 00000000000 14125057761 0016362 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/checkouts/data-schemas/ 0000775 0000000 0000000 00000000000 14125057761 0020714 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/codecov.yml 0000664 0000000 0000000 00000000207 14125057761 0016536 0 ustar 00root root 0000000 0000000 coverage:
status:
project:
default: false
patch:
default: false
python:
target: 90%
comment: false
sentry-python-1.4.3/docs-requirements.txt 0000664 0000000 0000000 00000000140 14125057761 0020577 0 ustar 00root root 0000000 0000000 sphinx==4.1.1
sphinx-rtd-theme
sphinx-autodoc-typehints[type_comments]>=1.8.0
typing-extensions
sentry-python-1.4.3/docs/ 0000775 0000000 0000000 00000000000 14125057761 0015322 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/docs/.gitignore 0000664 0000000 0000000 00000000007 14125057761 0017307 0 ustar 00root root 0000000 0000000 _build
sentry-python-1.4.3/docs/_static/ 0000775 0000000 0000000 00000000000 14125057761 0016750 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/docs/_static/.gitkeep 0000664 0000000 0000000 00000000000 14125057761 0020367 0 ustar 00root root 0000000 0000000 sentry-python-1.4.3/docs/api.rst 0000664 0000000 0000000 00000000251 14125057761 0016623 0 ustar 00root root 0000000 0000000 ========
Main API
========
.. inherited-members necessary because of hack for Client and init methods
.. automodule:: sentry_sdk
:members:
:inherited-members:
sentry-python-1.4.3/docs/conf.py 0000664 0000000 0000000 00000012535 14125057761 0016627 0 ustar 00root root 0000000 0000000 # -*- coding: utf-8 -*-
import os
import sys
import typing
# prevent circular imports
import sphinx.builders.html
import sphinx.builders.latex
import sphinx.builders.texinfo
import sphinx.builders.text
import sphinx.ext.autodoc
typing.TYPE_CHECKING = True
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
sys.path.insert(0, os.path.abspath(".."))
# -- Project information -----------------------------------------------------
project = u"sentry-python"
copyright = u"2019, Sentry Team and Contributors"
author = u"Sentry Team and Contributors"
release = "1.4.3"
version = ".".join(release.split(".")[:2]) # The short X.Y version.
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx_autodoc_typehints",
"sphinx.ext.viewcode",
"sphinx.ext.githubpages",
"sphinx.ext.intersphinx",
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"
# The master toctree document.
master_doc = "index"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = [u"_build", "Thumbs.db", ".DS_Store"]
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = None
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
on_rtd = os.environ.get("READTHEDOCS", None) == "True"
html_theme = "alabaster"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = "sentry-pythondoc"
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(
master_doc,
"sentry-python.tex",
u"sentry-python Documentation",
u"Sentry Team and Contributors",
"manual",
)
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, "sentry-python", u"sentry-python Documentation", [author], 1)]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
"sentry-python",
u"sentry-python Documentation",
author,
"sentry-python",
"One line description of project.",
"Miscellaneous",
)
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ["search.html"]
intersphinx_mapping = {"python": ("https://docs.python.org/3", None)}
sentry-python-1.4.3/docs/index.rst 0000664 0000000 0000000 00000000554 14125057761 0017167 0 ustar 00root root 0000000 0000000 =====================================
sentry-python - Sentry SDK for Python
=====================================
This is the API documentation for `Sentry's Python SDK
`_. For full documentation and other resources
visit the `GitHub repository `_.
.. toctree::
api
integrations
sentry-python-1.4.3/docs/integrations.rst 0000664 0000000 0000000 00000000305 14125057761 0020560 0 ustar 00root root 0000000 0000000 ============
Integrations
============
Logging
=======
.. module:: sentry_sdk.integrations.logging
.. autofunction:: ignore_logger
.. autoclass:: EventHandler
.. autoclass:: BreadcrumbHandler
sentry-python-1.4.3/examples/ 0000775 0000000 0000000 00000000000 14125057761 0016210 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/examples/basic.py 0000664 0000000 0000000 00000002057 14125057761 0017647 0 ustar 00root root 0000000 0000000 import sentry_sdk
from sentry_sdk.integrations.excepthook import ExcepthookIntegration
from sentry_sdk.integrations.atexit import AtexitIntegration
from sentry_sdk.integrations.dedupe import DedupeIntegration
from sentry_sdk.integrations.stdlib import StdlibIntegration
sentry_sdk.init(
dsn="https://@sentry.io/",
default_integrations=False,
integrations=[
ExcepthookIntegration(),
AtexitIntegration(),
DedupeIntegration(),
StdlibIntegration(),
],
environment="Production",
release="1.0.0",
send_default_pii=False,
max_breadcrumbs=5,
)
with sentry_sdk.push_scope() as scope:
scope.user = {"email": "john.doe@example.com"}
scope.set_tag("page_locale", "de-at")
scope.set_extra("request", {"id": "d5cf8a0fd85c494b9c6453c4fba8ab17"})
scope.level = "warning"
sentry_sdk.capture_message("Something went wrong!")
sentry_sdk.add_breadcrumb(category="auth", message="Authenticated user", level="info")
try:
1 / 0
except Exception as e:
sentry_sdk.capture_exception(e)
sentry-python-1.4.3/examples/tracing/ 0000775 0000000 0000000 00000000000 14125057761 0017637 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/examples/tracing/README.md 0000664 0000000 0000000 00000001124 14125057761 0021114 0 ustar 00root root 0000000 0000000 To run this app:
1. Have a Redis on the Redis default port (if you have Sentry running locally,
you probably already have this)
2. `pip install sentry-sdk flask rq`
3. `FLASK_APP=tracing flask run`
4. `FLASK_APP=tracing flask worker`
5. Go to `http://localhost:5000/` and enter a base64-encoded string (one is prefilled)
6. Hit submit, wait for heavy computation to end
7. `cat events | python traceviewer.py | dot -T svg > events.svg`
8. `open events.svg`
The last two steps are for viewing the traces. Nothing gets sent to Sentry
right now because Sentry does not deal with this data yet.
sentry-python-1.4.3/examples/tracing/events 0000664 0000000 0000000 00000066233 14125057761 0021100 0 ustar 00root root 0000000 0000000 {"start_timestamp": "2019-06-14T14:01:38Z", "transaction": "index", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "968cff94913ebb07"}}, "timestamp": "2019-06-14T14:01:38Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/", "query_string": "", "method": "GET", "env": {"SERVER_NAME": 
"127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", "Upgrade-Insecure-Requests": "1", "Connection": "keep-alive", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "f9f4b21dd9da4c389426c1ffd2b62410", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}}
{"start_timestamp": "2019-06-14T14:01:38Z", "transaction": "static", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"trace_id": "8eb30d5ae5f3403ba3a036e696111ec3", "span_id": "97e894108ff7a8cd"}}, "timestamp": "2019-06-14T14:01:38Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/static/tracing.js", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": 
"127.0.0.1:5000", "Accept": "*/*", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache"}}, "event_id": "1c71c7cb32934550bb49f05b6c2d4052", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}}
{"start_timestamp": "2019-06-14T14:01:38Z", "transaction": "index", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"trace_id": "b7627895a90b41718be82d3ad21ab2f4", "span_id": "9fa95b4ffdcbe177"}}, "timestamp": "2019-06-14T14:01:38Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", 
"Accept": "*/*", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0", "Connection": "keep-alive", "Pragma": "no-cache", "Cache-Control": "no-cache"}}, "event_id": "1430ad5b0a0d45dca3f02c10271628f9", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}}
{"start_timestamp": "2019-06-14T14:01:38Z", "transaction": "static", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"trace_id": "1636fdb33db84e7c9a4e606c1b176971", "span_id": "b682a29ead55075f"}}, "timestamp": "2019-06-14T14:01:38Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/static/tracing.js.map", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", 
"Host": "127.0.0.1:5000", "Accept": "*/*", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0", "Connection": "keep-alive"}}, "event_id": "72b1224307294e0fb6d6b1958076c4cc", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}}
{"start_timestamp": "2019-06-14T14:01:40Z", "transaction": "compute", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "946edde6ee421874"}}, "timestamp": "2019-06-14T14:01:40Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/compute/aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": 
"en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "c72fd945c1174140a00bdbf6f6ed8fc5", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}}
{"start_timestamp": "2019-06-14T14:01:40Z", "transaction": "wait", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "bf5be759039ede9a"}}, "timestamp": "2019-06-14T14:01:40Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/wait/sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": 
{"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "e8c17b0cbe2045758aaffc2f11672fab", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}}
{"start_timestamp": "2019-06-14T14:01:40Z", "transaction": "wait", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "b2d56249f7fdf327"}}, "timestamp": "2019-06-14T14:01:40Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/wait/sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": 
{"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "6577f8056383427d85df5b33bf9ccc2c", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}}
{"start_timestamp": "2019-06-14T14:01:41Z", "transaction": "wait", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "ac62ff8ae1b2eda6"}}, "timestamp": "2019-06-14T14:01:41Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/wait/sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": 
{"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "c03dfbab8a8145eeaa0d1a1adfcfcaa5", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}}
{"start_timestamp": "2019-06-14T14:01:40Z", "transaction": "tracing.decode_base64", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "worker"], "rq-job": {"kwargs": {"redis_key": "sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "encoded": "aGVsbG8gd29ybGQK"}, "args": [], "description": "tracing.decode_base64(encoded=u'aGVsbG8gd29ybGQK', redis_key='sentry-python-tracing-example-result:aGVsbG8gd29ybGQK')", "func": "tracing.decode_base64", "job_id": "fabff810-3dbb-45d3-987e-86395790dfa9"}}, "contexts": {"trace": {"parent_span_id": "946edde6ee421874", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "9c2a6db8c79068a2"}}, "timestamp": "2019-06-14T14:01:41Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": 
"0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "event_id": "2975518984734ef49d2f75db4e928ddc", "platform": "python", "spans": [{"start_timestamp": "2019-06-14T14:01:41Z", "same_process_as_parent": true, "description": "http://httpbin.org/base64/aGVsbG8gd29ybGQK GET", "tags": {"http.status_code": 200, "error": false}, "timestamp": "2019-06-14T14:01:41Z", "parent_span_id": "9c2a6db8c79068a2", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "op": "http", "data": {"url": "http://httpbin.org/base64/aGVsbG8gd29ybGQK", "status_code": 200, "reason": "OK", "method": "GET"}, "span_id": "8c931f4740435fb8"}], "breadcrumbs": [{"category": "httplib", "data": {"url": "http://httpbin.org/base64/aGVsbG8gd29ybGQK", "status_code": 200, "reason": "OK", "method": "GET"}, "type": "http", "timestamp": "2019-06-14T12:01:41Z"}, {"category": "rq.worker", "type": "log", "timestamp": "2019-06-14T14:01:41Z", "level": "info", "data": {"asctime": "14:01:41"}, "message": "\u001b[32mdefault\u001b[39;49;00m: \u001b[34mJob OK\u001b[39;49;00m (fabff810-3dbb-45d3-987e-86395790dfa9)", "type": "default"}, {"category": "rq.worker", "type": "log", "timestamp": "2019-06-14T14:01:41Z", "level": "info", "data": {"asctime": "14:01:41"}, "message": "Result is kept for 500 seconds", "type": "default"}], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}}
{"start_timestamp": "2019-06-14T14:01:41Z", "transaction": "wait", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "9d91c6558b2e4c06"}}, "timestamp": "2019-06-14T14:01:41Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/wait/sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": 
{"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "339cfc84adf0405986514c808afb0f68", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}}
sentry-python-1.4.3/examples/tracing/events.svg 0000664 0000000 0000000 00000104202 14125057761 0021663 0 ustar 00root root 0000000 0000000
sentry-python-1.4.3/examples/tracing/static/ 0000775 0000000 0000000 00000000000 14125057761 0021126 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/examples/tracing/static/tracing.js 0000664 0000000 0000000 00000041134 14125057761 0023116 0 ustar 00root root 0000000 0000000 (function (__window) {
var exports = {};
Object.defineProperty(exports, '__esModule', { value: true });
/*! *****************************************************************************
Copyright (c) Microsoft Corporation. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
this file except in compliance with the License. You may obtain a copy of the
License at http://www.apache.org/licenses/LICENSE-2.0
THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
MERCHANTABLITY OR NON-INFRINGEMENT.
See the Apache Version 2.0 License for specific language governing permissions
and limitations under the License.
***************************************************************************** */
/* global Reflect, Promise */
var extendStatics = function(d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
function __extends(d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
}
var __assign = function() {
__assign = Object.assign || function __assign(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];
}
return t;
};
return __assign.apply(this, arguments);
};
function __read(o, n) {
var m = typeof Symbol === "function" && o[Symbol.iterator];
if (!m) return o;
var i = m.call(o), r, ar = [], e;
try {
while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);
}
catch (error) { e = { error: error }; }
finally {
try {
if (r && !r.done && (m = i["return"])) m.call(i);
}
finally { if (e) throw e.error; }
}
return ar;
}
function __spread() {
for (var ar = [], i = 0; i < arguments.length; i++)
ar = ar.concat(__read(arguments[i]));
return ar;
}
/** An error emitted by Sentry SDKs and related utilities. */
var SentryError = /** @class */ (function (_super) {
__extends(SentryError, _super);
function SentryError(message) {
var _newTarget = this.constructor;
var _this = _super.call(this, message) || this;
_this.message = message;
// tslint:disable:no-unsafe-any
_this.name = _newTarget.prototype.constructor.name;
Object.setPrototypeOf(_this, _newTarget.prototype);
return _this;
}
return SentryError;
}(Error));
/**
* Checks whether given value's type is one of a few Error or Error-like
* {@link isError}.
*
* @param wat A value to be checked.
* @returns A boolean representing the result.
*/
/**
* Checks whether given value's type is an regexp
* {@link isRegExp}.
*
* @param wat A value to be checked.
* @returns A boolean representing the result.
*/
function isRegExp(wat) {
return Object.prototype.toString.call(wat) === '[object RegExp]';
}
/**
* Requires a module which is protected _against bundler minification.
*
* @param request The module path to resolve
*/
/**
* Checks whether we're in the Node.js or Browser environment
*
* @returns Answer to given question
*/
function isNodeEnv() {
// tslint:disable:strict-type-predicates
return Object.prototype.toString.call(typeof process !== 'undefined' ? process : 0) === '[object process]';
}
var fallbackGlobalObject = {};
/**
* Safely get global scope object
*
* @returns Global scope object
*/
function getGlobalObject() {
return (isNodeEnv()
? global
: typeof window !== 'undefined'
? window
: typeof self !== 'undefined'
? self
: fallbackGlobalObject);
}
/** JSDoc */
function consoleSandbox(callback) {
var global = getGlobalObject();
var levels = ['debug', 'info', 'warn', 'error', 'log', 'assert'];
if (!('console' in global)) {
return callback();
}
var originalConsole = global.console;
var wrappedLevels = {};
// Restore all wrapped console methods
levels.forEach(function (level) {
if (level in global.console && originalConsole[level].__sentry__) {
wrappedLevels[level] = originalConsole[level].__sentry_wrapped__;
originalConsole[level] = originalConsole[level].__sentry_original__;
}
});
// Perform callback manipulations
var result = callback();
// Revert restoration to wrapped state
Object.keys(wrappedLevels).forEach(function (level) {
originalConsole[level] = wrappedLevels[level];
});
return result;
}
// TODO: Implement different loggers for different environments
var global$1 = getGlobalObject();
/** Prefix for logging strings */
var PREFIX = 'Sentry Logger ';
/** JSDoc */
var Logger = /** @class */ (function () {
/** JSDoc */
function Logger() {
this._enabled = false;
}
/** JSDoc */
Logger.prototype.disable = function () {
this._enabled = false;
};
/** JSDoc */
Logger.prototype.enable = function () {
this._enabled = true;
};
/** JSDoc */
Logger.prototype.log = function () {
var args = [];
for (var _i = 0; _i < arguments.length; _i++) {
args[_i] = arguments[_i];
}
if (!this._enabled) {
return;
}
consoleSandbox(function () {
global$1.console.log(PREFIX + "[Log]: " + args.join(' ')); // tslint:disable-line:no-console
});
};
/** JSDoc */
Logger.prototype.warn = function () {
var args = [];
for (var _i = 0; _i < arguments.length; _i++) {
args[_i] = arguments[_i];
}
if (!this._enabled) {
return;
}
consoleSandbox(function () {
global$1.console.warn(PREFIX + "[Warn]: " + args.join(' ')); // tslint:disable-line:no-console
});
};
/** JSDoc */
Logger.prototype.error = function () {
var args = [];
for (var _i = 0; _i < arguments.length; _i++) {
args[_i] = arguments[_i];
}
if (!this._enabled) {
return;
}
consoleSandbox(function () {
global$1.console.error(PREFIX + "[Error]: " + args.join(' ')); // tslint:disable-line:no-console
});
};
return Logger;
}());
// Ensure we only have a single logger instance, even if multiple versions of @sentry/utils are being used
global$1.__SENTRY__ = global$1.__SENTRY__ || {};
var logger = global$1.__SENTRY__.logger || (global$1.__SENTRY__.logger = new Logger());
// tslint:disable:no-unsafe-any
/**
* Wrap a given object method with a higher-order function
*
* @param source An object that contains a method to be wrapped.
* @param name A name of method to be wrapped.
* @param replacement A function that should be used to wrap a given method.
* @returns void
*/
function fill(source, name, replacement) {
if (!(name in source)) {
return;
}
var original = source[name];
var wrapped = replacement(original);
// Make sure it's a function first, as we need to attach an empty prototype for `defineProperties` to work
// otherwise it'll throw "TypeError: Object.defineProperties called on non-object"
// tslint:disable-next-line:strict-type-predicates
if (typeof wrapped === 'function') {
try {
wrapped.prototype = wrapped.prototype || {};
Object.defineProperties(wrapped, {
__sentry__: {
enumerable: false,
value: true,
},
__sentry_original__: {
enumerable: false,
value: original,
},
__sentry_wrapped__: {
enumerable: false,
value: wrapped,
},
});
}
catch (_Oo) {
// This can throw if multiple fill happens on a global object like XMLHttpRequest
// Fixes https://github.com/getsentry/sentry-javascript/issues/2043
}
}
source[name] = wrapped;
}
// Slightly modified (no IE8 support, ES6) and transcribed to TypeScript
/**
* Checks if the value matches a regex or includes the string
* @param value The string value to be checked against
* @param pattern Either a regex or a string that must be contained in value
*/
function isMatchingPattern(value, pattern) {
if (isRegExp(pattern)) {
return pattern.test(value);
}
if (typeof pattern === 'string') {
return value.includes(pattern);
}
return false;
}
/**
* Tells whether current environment supports Fetch API
* {@link supportsFetch}.
*
* @returns Answer to the given question.
*/
function supportsFetch() {
if (!('fetch' in getGlobalObject())) {
return false;
}
try {
// tslint:disable-next-line:no-unused-expression
new Headers();
// tslint:disable-next-line:no-unused-expression
new Request('');
// tslint:disable-next-line:no-unused-expression
new Response();
return true;
}
catch (e) {
return false;
}
}
/**
* Tells whether current environment supports Fetch API natively
* {@link supportsNativeFetch}.
*
* @returns Answer to the given question.
*/
function supportsNativeFetch() {
if (!supportsFetch()) {
return false;
}
var global = getGlobalObject();
return global.fetch.toString().indexOf('native') !== -1;
}
/** SyncPromise internal states */
var States;
(function (States) {
/** Pending */
States["PENDING"] = "PENDING";
/** Resolved / OK */
States["RESOLVED"] = "RESOLVED";
/** Rejected / Error */
States["REJECTED"] = "REJECTED";
})(States || (States = {}));
/**
* Tracing Integration
*/
var Tracing = /** @class */ (function () {
/**
* Constructor for Tracing
*
* @param _options TracingOptions
*/
function Tracing(_options) {
if (_options === void 0) { _options = {}; }
this._options = _options;
/**
* @inheritDoc
*/
this.name = Tracing.id;
if (!Array.isArray(_options.tracingOrigins) || _options.tracingOrigins.length === 0) {
consoleSandbox(function () {
var defaultTracingOrigins = ['localhost', /^\//];
// @ts-ignore
console.warn('Sentry: You need to define `tracingOrigins` in the options. Set an array of urls or patterns to trace.');
// @ts-ignore
console.warn("Sentry: We added a reasonable default for you: " + defaultTracingOrigins);
_options.tracingOrigins = defaultTracingOrigins;
});
}
}
/**
* @inheritDoc
*/
Tracing.prototype.setupOnce = function (_, getCurrentHub) {
if (this._options.traceXHR !== false) {
this._traceXHR(getCurrentHub);
}
if (this._options.traceFetch !== false) {
this._traceFetch(getCurrentHub);
}
if (this._options.autoStartOnDomReady !== false) {
getGlobalObject().addEventListener('DOMContentLoaded', function () {
Tracing.startTrace(getCurrentHub(), getGlobalObject().location.href);
});
getGlobalObject().document.onreadystatechange = function () {
if (document.readyState === 'complete') {
Tracing.startTrace(getCurrentHub(), getGlobalObject().location.href);
}
};
}
};
/**
* Starts a new trace
* @param hub The hub to start the trace on
* @param transaction Optional transaction
*/
Tracing.startTrace = function (hub, transaction) {
hub.configureScope(function (scope) {
scope.startSpan();
scope.setTransaction(transaction);
});
};
/**
 * Patches `XMLHttpRequest.prototype.open`/`send` so that XHRs whose URL
 * matches one of `tracingOrigins` carry Sentry trace headers.
 */
Tracing.prototype._traceXHR = function (getCurrentHub) {
    if (!('XMLHttpRequest' in getGlobalObject())) {
        return;
    }
    var xhrproto = XMLHttpRequest.prototype;
    fill(xhrproto, 'open', function (originalOpen) {
        return function () {
            var args = [];
            for (var _i = 0; _i < arguments.length; _i++) {
                args[_i] = arguments[_i];
            }
            // @ts-ignore
            var integration = getCurrentHub().getIntegration(Tracing);
            if (integration) {
                // Remember the URL so the patched `send` can decide whether
                // to attach trace headers.
                integration._xhrUrl = args[1];
            }
            // tslint:disable-next-line: no-unsafe-any
            return originalOpen.apply(this, args);
        };
    });
    fill(xhrproto, 'send', function (originalSend) {
        return function () {
            var _this = this;
            var args = [];
            for (var _i = 0; _i < arguments.length; _i++) {
                args[_i] = arguments[_i];
            }
            // @ts-ignore
            var integration = getCurrentHub().getIntegration(Tracing);
            if (integration && integration._xhrUrl && integration._options.tracingOrigins) {
                var url_1 = integration._xhrUrl;
                var isWhitelisted = integration._options.tracingOrigins.some(function (origin) {
                    return isMatchingPattern(url_1, origin);
                });
                if (isWhitelisted && this.setRequestHeader) {
                    // Only compute trace headers for requests we actually
                    // annotate (previously they were built for every send).
                    var headers_1 = getCurrentHub().traceHeaders();
                    Object.keys(headers_1).forEach(function (key) {
                        _this.setRequestHeader(key, headers_1[key]);
                    });
                }
            }
            // tslint:disable-next-line: no-unsafe-any
            return originalSend.apply(this, args);
        };
    });
};
/**
 * Patches the global `fetch` so that requests whose URL matches one of
 * `tracingOrigins` carry Sentry trace headers.
 */
Tracing.prototype._traceFetch = function (getCurrentHub) {
    if (!supportsNativeFetch()) {
        return;
    }
    // tslint:disable: only-arrow-functions
    fill(getGlobalObject(), 'fetch', function (originalFetch) {
        return function () {
            var args = [];
            for (var _i = 0; _i < arguments.length; _i++) {
                args[_i] = arguments[_i];
            }
            // @ts-ignore
            var integration = getCurrentHub().getIntegration(Tracing);
            if (integration && integration._options.tracingOrigins) {
                var url_2 = args[0];
                var options = (args[1] = args[1] || {});
                var whiteListed_1 = false;
                integration._options.tracingOrigins.forEach(function (whiteListUrl) {
                    if (!whiteListed_1) {
                        whiteListed_1 = isMatchingPattern(url_2, whiteListUrl);
                    }
                });
                if (whiteListed_1) {
                    if (options.headers) {
                        if (Array.isArray(options.headers)) {
                            options.headers = __spread(options.headers, Object.entries(getCurrentHub().traceHeaders()));
                        }
                        else {
                            options.headers = __assign({}, options.headers, getCurrentHub().traceHeaders());
                        }
                    }
                    else {
                        options.headers = getCurrentHub().traceHeaders();
                    }
                }
            }
            // NOTE: `args[1]` must not be reassigned outside the branch above;
            // the previous unconditional `args[1] = options;` clobbered
            // caller-provided fetch options with `undefined` whenever the
            // integration was not installed. Debug console.log calls removed.
            // tslint:disable-next-line: no-unsafe-any
            return originalFetch.apply(getGlobalObject(), args);
        };
    });
    // tslint:enable: only-arrow-functions
};
/**
* @inheritDoc
*/
Tracing.id = 'Tracing';
return Tracing;
}());
exports.Tracing = Tracing;
__window.Sentry = __window.Sentry || {};
__window.Sentry.Integrations = __window.Sentry.Integrations || {};
Object.assign(__window.Sentry.Integrations, exports);
}(window));
//# sourceMappingURL=tracing.js.map
sentry-python-1.4.3/examples/tracing/templates/ 0000775 0000000 0000000 00000000000 14125057761 0021635 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/examples/tracing/templates/index.html 0000664 0000000 0000000 00000002645 14125057761 0023641 0 ustar 00root root 0000000 0000000
Decode your base64 string as a service (that calls another service)
A base64 string
Output:
sentry-python-1.4.3/examples/tracing/traceviewer.py 0000664 0000000 0000000 00000003136 14125057761 0022534 0 ustar 00root root 0000000 0000000 import json
import sys
# Reads Sentry events (one JSON object per line) from stdin and emits a
# Graphviz digraph visualizing the trace/span hierarchy of all transactions.
print("digraph mytrace {")
print("rankdir=LR")

all_spans = []

for line in sys.stdin:
    event = json.loads(line)
    # Only transaction events carry spans; skip everything else.
    if event.get("type") != "transaction":
        continue
    trace_ctx = event["contexts"]["trace"]
    trace_span = dict(trace_ctx)  # fake a span entry from transaction event
    trace_span["description"] = event["transaction"]
    trace_span["start_timestamp"] = event["start_timestamp"]
    trace_span["timestamp"] = event["timestamp"]

    if "parent_span_id" not in trace_ctx:
        # No parent span -> this transaction is a trace root; emit a trace node.
        # Node ids are the hex ids parsed as integers.
        print(
            '{} [label="trace:{} ({})"];'.format(
                int(trace_ctx["trace_id"], 16),
                event["transaction"],
                trace_ctx["trace_id"],
            )
        )

    for span in event["spans"] + [trace_span]:
        # One node per span.
        print(
            '{} [label="span:{} ({})"];'.format(
                int(span["span_id"], 16), span["description"], span["span_id"]
            )
        )
        if "parent_span_id" in span:
            # Solid edge: parent span -> child span.
            print(
                "{} -> {};".format(
                    int(span["parent_span_id"], 16), int(span["span_id"], 16)
                )
            )
        # Dotted edge: trace -> span (membership).
        print(
            "{} -> {} [style=dotted];".format(
                int(span["trace_id"], 16), int(span["span_id"], 16)
            )
        )
        all_spans.append(span)

# Light gray edges between spans where s1 started after s2 ended.
for s1 in all_spans:
    for s2 in all_spans:
        if s1["start_timestamp"] > s2["timestamp"]:
            print(
                '{} -> {} [color="#efefef"];'.format(
                    int(s1["span_id"], 16), int(s2["span_id"], 16)
                )
            )

print("}")
sentry-python-1.4.3/examples/tracing/tracing.py 0000664 0000000 0000000 00000003133 14125057761 0021640 0 ustar 00root root 0000000 0000000 import json
import flask
import os
import redis
import rq
import sentry_sdk
import time
import urllib3
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.rq import RqIntegration
app = flask.Flask(__name__)
redis_conn = redis.Redis()
http = urllib3.PoolManager()
queue = rq.Queue(connection=redis_conn)
def write_event(event):
    """Transport stub: append the event as one JSON line to the `events` file."""
    with open("events", "a") as out:
        out.write(json.dumps(event) + "\n")
sentry_sdk.init(
integrations=[FlaskIntegration(), RqIntegration()],
traces_sample_rate=1.0,
debug=True,
transport=write_event,
)
def decode_base64(encoded, redis_key):
    """RQ job: decode `encoded` via httpbin and cache the result under `redis_key`."""
    time.sleep(1)  # simulate a slow task so the span is visible in the trace
    response = http.request("GET", "http://httpbin.org/base64/{}".format(encoded))
    redis_conn.set(redis_key, response.data)
@app.route("/")
def index():
    """Render the landing page with the DSN and current trace-propagation headers."""
    trace_headers = dict(sentry_sdk.Hub.current.iter_trace_propagation_headers())
    return flask.render_template(
        "index.html",
        sentry_dsn=os.environ["SENTRY_DSN"],
        traceparent=trace_headers,
    )
# The route needs the `<input>` variable rule: `compute` takes a required
# argument, so a bare "/compute/" route would raise at request time.
@app.route("/compute/<input>")
def compute(input):
    """Enqueue a background decode job and return the Redis key to poll."""
    redis_key = "sentry-python-tracing-example-result:{}".format(input)
    # Clear any stale result before enqueueing a fresh job.
    redis_conn.delete(redis_key)
    queue.enqueue(decode_base64, encoded=input, redis_key=redis_key)
    return redis_key
# The route needs the `<redis_key>` variable rule: `wait` takes a required
# argument, so a bare "/wait/" route would raise at request time.
@app.route("/wait/<redis_key>")
def wait(redis_key):
    """Poll for the background job's result; consume (delete) it once read."""
    result = redis_conn.get(redis_key)
    if result is None:
        return "NONE"
    else:
        redis_conn.delete(redis_key)
        return "RESULT: {}".format(result)
@app.cli.command("worker")
def run_worker():
    """Flask CLI entry point: start an RQ worker on the example queue."""
    print("WORKING")
    rq.Worker([queue], connection=queue.connection).work()
sentry-python-1.4.3/linter-requirements.txt 0000664 0000000 0000000 00000000157 14125057761 0021154 0 ustar 00root root 0000000 0000000 black==21.7b0
flake8==3.9.2
flake8-import-order==0.18.1
mypy==0.782
flake8-bugbear==21.4.3
pep8-naming==0.11.1
sentry-python-1.4.3/mypy.ini 0000664 0000000 0000000 00000003026 14125057761 0016072 0 ustar 00root root 0000000 0000000 [mypy]
python_version = 3.7
allow_redefinition = True
check_untyped_defs = True
; disallow_any_decorated = True
; disallow_any_explicit = True
; disallow_any_expr = True
disallow_any_generics = True
; disallow_any_unimported = True
disallow_incomplete_defs = True
disallow_subclassing_any = True
; disallow_untyped_calls = True
disallow_untyped_decorators = True
disallow_untyped_defs = True
no_implicit_optional = True
strict_equality = True
strict_optional = True
warn_redundant_casts = True
; warn_return_any = True
warn_unused_configs = True
warn_unused_ignores = True
; Relaxations for code written before mypy was introduced
;
; Do not use wildcards in module paths, otherwise added modules will
; automatically have the same set of relaxed rules as the rest
[mypy-django.*]
ignore_missing_imports = True
[mypy-pyramid.*]
ignore_missing_imports = True
[mypy-psycopg2.*]
ignore_missing_imports = True
[mypy-pytest.*]
ignore_missing_imports = True
[mypy-aiohttp.*]
ignore_missing_imports = True
[mypy-sanic.*]
ignore_missing_imports = True
[mypy-tornado.*]
ignore_missing_imports = True
[mypy-fakeredis.*]
ignore_missing_imports = True
[mypy-rq.*]
ignore_missing_imports = True
[mypy-pyspark.*]
ignore_missing_imports = True
[mypy-asgiref.*]
ignore_missing_imports = True
[mypy-executing.*]
ignore_missing_imports = True
[mypy-asttokens.*]
ignore_missing_imports = True
[mypy-pure_eval.*]
ignore_missing_imports = True
[mypy-blinker.*]
ignore_missing_imports = True
[mypy-sentry_sdk._queue]
ignore_missing_imports = True
disallow_untyped_defs = False
sentry-python-1.4.3/pytest.ini 0000664 0000000 0000000 00000000616 14125057761 0016426 0 ustar 00root root 0000000 0000000 [pytest]
DJANGO_SETTINGS_MODULE = tests.integrations.django.myapp.settings
addopts = --tb=short
markers =
tests_internal_exceptions: Handle internal exceptions just as the SDK does, to test it. (Otherwise internal exceptions are recorded and reraised.)
only: A temporary marker, to make pytest only run the tests with the mark, similar to jest's `it.only`. To use, run `pytest -v -m only`.
sentry-python-1.4.3/scripts/ 0000775 0000000 0000000 00000000000 14125057761 0016061 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/scripts/aws-cleanup.sh 0000664 0000000 0000000 00000000622 14125057761 0020634 0 ustar 00root root 0000000 0000000 #!/bin/sh
# Delete all AWS Lambda functions
export AWS_ACCESS_KEY_ID="$SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID"
export AWS_SECRET_ACCESS_KEY="$SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY"
export AWS_IAM_ROLE="$SENTRY_PYTHON_TEST_AWS_IAM_ROLE"
# Iterate over every Lambda function name and delete it.
for func in $(aws lambda list-functions | jq -r .Functions[].FunctionName); do
  echo "Deleting $func"
  # Quote the expansion so names with shell-special characters are passed intact.
  aws lambda delete-function --function-name "$func"
done
sentry-python-1.4.3/scripts/build_awslambda_layer.py 0000664 0000000 0000000 00000007371 14125057761 0022751 0 ustar 00root root 0000000 0000000 import os
import subprocess
import tempfile
import shutil
from sentry_sdk.consts import VERSION as SDK_VERSION
from sentry_sdk._types import MYPY
if MYPY:
from typing import Union
class PackageBuilder:
    """Builds the AWS Lambda layer: installs the locally built sentry-sdk
    wheel into a staging directory, adds the serverless init shim, and zips
    the result.
    """

    def __init__(
        self,
        base_dir,  # type: str
        pkg_parent_dir,  # type: str
        dist_rel_path,  # type: str
    ):
        # type: (...) -> None
        # base_dir: working directory (a temp dir in practice) the zip is
        # created in; pkg_parent_dir: top-level directory inside the zip;
        # dist_rel_path: where the built wheel lives.
        self.base_dir = base_dir
        self.pkg_parent_dir = pkg_parent_dir
        self.dist_rel_path = dist_rel_path
        self.packages_dir = self.get_relative_path_of(pkg_parent_dir)

    def make_directories(self):
        # type: (...) -> None
        # Create the staging directory the wheel will be installed into.
        os.makedirs(self.packages_dir)

    def install_python_binaries(self):
        # type: (...) -> None
        # pip-install the pre-built wheel from `dist_rel_path` into the
        # staging directory.
        wheels_filepath = os.path.join(
            self.dist_rel_path, f"sentry_sdk-{SDK_VERSION}-py2.py3-none-any.whl"
        )
        subprocess.run(
            [
                "pip",
                "install",
                "--no-cache-dir",  # Disables the cache -> always accesses PyPI
                "-q",  # Quiet
                wheels_filepath,  # Copied to the target directory before installation
                "-t",  # Target directory flag
                self.packages_dir,
            ],
            check=True,
        )

    def create_init_serverless_sdk_package(self):
        # type: (...) -> None
        """
        Method that creates the init_serverless_sdk pkg in the
        sentry-python-serverless zip
        """
        serverless_sdk_path = (
            f"{self.packages_dir}/sentry_sdk/" f"integrations/init_serverless_sdk"
        )
        if not os.path.exists(serverless_sdk_path):
            os.makedirs(serverless_sdk_path)
        # The shim script becomes the package's __init__ so it can be
        # addressed as a module path from the Lambda handler setting.
        shutil.copy(
            "scripts/init_serverless_sdk.py", f"{serverless_sdk_path}/__init__.py"
        )

    def zip(
        self, filename  # type: str
    ):
        # type: (...) -> None
        # Zip the staged package tree (relative to base_dir) into `filename`,
        # dropping __pycache__ contents. NOTE: shadows the builtin `zip`.
        subprocess.run(
            [
                "zip",
                "-q",  # Quiet
                "-x",  # Exclude files
                "**/__pycache__/*",  # Files to be excluded
                "-r",  # Recurse paths
                filename,  # Output filename
                self.pkg_parent_dir,  # Files to be zipped
            ],
            cwd=self.base_dir,
            check=True,  # Raises CalledProcessError if exit status is non-zero
        )

    def get_relative_path_of(
        self, subfile  # type: str
    ):
        # type: (...) -> str
        # Resolve `subfile` against the builder's working directory.
        return os.path.join(self.base_dir, subfile)
# Ref to `pkg_parent_dir` Top directory in the ZIP file.
# Placing the Sentry package in `/python` avoids
# creating a directory for a specific version. For more information, see
# https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html#configuration-layers-path
def build_packaged_zip(
    dist_rel_path="dist",  # type: str
    dest_zip_filename=f"sentry-python-serverless-{SDK_VERSION}.zip",  # type: str
    pkg_parent_dir="python",  # type: str
    dest_abs_path=None,  # type: Union[str, None]
):
    # type: (...) -> None
    """Build the serverless layer zip in a temporary directory and copy it
    to `dest_abs_path` (defaults to the repository's `dist` directory).
    """
    if dest_abs_path is None:
        dest_abs_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), "..", dist_rel_path)
        )
    with tempfile.TemporaryDirectory() as tmp_dir:
        package_builder = PackageBuilder(tmp_dir, pkg_parent_dir, dist_rel_path)
        package_builder.make_directories()
        package_builder.install_python_binaries()
        package_builder.create_init_serverless_sdk_package()
        package_builder.zip(dest_zip_filename)
        # exist_ok avoids the race-prone exists()+makedirs() two-step.
        os.makedirs(dist_rel_path, exist_ok=True)
        shutil.copy(
            package_builder.get_relative_path_of(dest_zip_filename), dest_abs_path
        )
if __name__ == "__main__":
build_packaged_zip()
sentry-python-1.4.3/scripts/bump-version.sh 0000775 0000000 0000000 00000001201 14125057761 0021040 0 ustar 00root root 0000000 0000000 #!/bin/bash
set -eux
if [ "$(uname -s)" != "Linux" ]; then
echo "Please use the GitHub Action."
exit 1
fi
SCRIPT_DIR="$( dirname "$0" )"
cd $SCRIPT_DIR/..
OLD_VERSION="${1}"
NEW_VERSION="${2}"
echo "Current version: $OLD_VERSION"
echo "Bumping version: $NEW_VERSION"
function replace() {
! grep "$2" $3
perl -i -pe "s/$1/$2/g" $3
grep "$2" $3 # verify that replacement was successful
}
replace "version=\"[0-9.]+\"" "version=\"$NEW_VERSION\"" ./setup.py
replace "VERSION = \"[0-9.]+\"" "VERSION = \"$NEW_VERSION\"" ./sentry_sdk/consts.py
replace "release = \"[0-9.]+\"" "release = \"$NEW_VERSION\"" ./docs/conf.py
sentry-python-1.4.3/scripts/init_serverless_sdk.py 0000664 0000000 0000000 00000005614 14125057761 0022522 0 ustar 00root root 0000000 0000000 """
For manual instrumentation,
The Handler function string of an aws lambda function should be added as an
environment variable with a key of 'SENTRY_INITIAL_HANDLER' along with the 'DSN'
Then the Handler function string should be replaced with
'sentry_sdk.integrations.init_serverless_sdk.sentry_lambda_handler'
"""
import os
import sys
import re
import sentry_sdk
from sentry_sdk._types import MYPY
from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration
if MYPY:
from typing import Any
# Configure Sentry SDK
sentry_sdk.init(
dsn=os.environ["SENTRY_DSN"],
integrations=[AwsLambdaIntegration(timeout_warning=True)],
traces_sample_rate=float(os.environ["SENTRY_TRACES_SAMPLE_RATE"]),
)
class AWSLambdaModuleLoader:
    """Resolves an AWS Lambda handler string (e.g. "dir/module.handler") and
    loads the module that contains the real handler function."""

    # Matches handler module paths that contain at least one directory component.
    DIR_PATH_REGEX = r"^(.+)\/([^\/]+)$"

    def __init__(self, sentry_initial_handler):
        # The handler string is "module_path.function_name"; split on the
        # last dot to separate module path from handler name.
        try:
            module_path, self.handler_name = sentry_initial_handler.rsplit(".", 1)
        except ValueError:
            raise ValueError("Incorrect AWS Handler path (Not a path)")
        self.extract_and_load_lambda_function_module(module_path)

    def extract_and_load_lambda_function_module(self, module_path):
        """
        Method that extracts and loads lambda function module from module_path
        """
        py_version = sys.version_info
        if re.match(self.DIR_PATH_REGEX, module_path):
            # With a path like -> `scheduler/scheduler/event`
            # `module_name` is `event`, and `module_file_path` is `scheduler/scheduler/event.py`
            module_name = module_path.split(os.path.sep)[-1]
            module_file_path = module_path + ".py"

            # Supported python versions are 2.7, 3.6, 3.7, 3.8
            if py_version >= (3, 5):
                import importlib.util

                # Python 3: load the module directly from the file path.
                spec = importlib.util.spec_from_file_location(
                    module_name, module_file_path
                )
                self.lambda_function_module = importlib.util.module_from_spec(spec)
                spec.loader.exec_module(self.lambda_function_module)
            elif py_version[0] < 3:
                import imp

                # Python 2: `imp` is the legacy equivalent of importlib.
                self.lambda_function_module = imp.load_source(
                    module_name, module_file_path
                )
            else:
                raise ValueError("Python version %s is not supported." % py_version)
        else:
            import importlib

            # Plain dotted module path: import it from sys.path as usual.
            self.lambda_function_module = importlib.import_module(module_path)

    def get_lambda_handler(self):
        # Return the actual handler callable from the loaded module.
        return getattr(self.lambda_function_module, self.handler_name)
def sentry_lambda_handler(event, context):
    # type: (Any, Any) -> Any
    """
    Handler function that invokes a lambda handler whose path is defined in
    environment variables as "SENTRY_INITIAL_HANDLER".

    Returns whatever the wrapped handler returns (the previous `-> None`
    annotation was incorrect).
    """
    module_loader = AWSLambdaModuleLoader(os.environ["SENTRY_INITIAL_HANDLER"])
    return module_loader.get_lambda_handler()(event, context)
sentry-python-1.4.3/scripts/runtox.sh 0000775 0000000 0000000 00000001465 14125057761 0017765 0 ustar 00root root 0000000 0000000 #!/bin/bash
set -ex
if [ -n "$TOXPATH" ]; then
true
elif which tox &> /dev/null; then
TOXPATH=tox
else
TOXPATH=./.venv/bin/tox
fi
# Usage: sh scripts/runtox.sh py3.7
# Runs all environments with substring py3.7 and the given arguments for pytest
if [ -n "$1" ]; then
searchstring="$1"
elif [ -n "$CI_PYTHON_VERSION" ]; then
searchstring="$(echo py$CI_PYTHON_VERSION | sed -e 's/pypypy/pypy/g' -e 's/-dev//g')"
if [ "$searchstring" = "pypy-2.7" ]; then
searchstring=pypy
fi
elif [ -n "$AZURE_PYTHON_VERSION" ]; then
searchstring="$(echo py$AZURE_PYTHON_VERSION | sed -e 's/pypypy/pypy/g' -e 's/-dev//g')"
if [ "$searchstring" = pypy2 ]; then
searchstring=pypy
fi
fi
exec $TOXPATH -e $($TOXPATH -l | grep "$searchstring" | tr $'\n' ',') -- "${@:2}"
sentry-python-1.4.3/sentry_sdk/ 0000775 0000000 0000000 00000000000 14125057761 0016557 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/sentry_sdk/__init__.py 0000664 0000000 0000000 00000001526 14125057761 0020674 0 ustar 00root root 0000000 0000000 from sentry_sdk.hub import Hub, init
from sentry_sdk.scope import Scope
from sentry_sdk.transport import Transport, HttpTransport
from sentry_sdk.client import Client
from sentry_sdk.api import * # noqa
from sentry_sdk.consts import VERSION # noqa
__all__ = [ # noqa
"Hub",
"Scope",
"Client",
"Transport",
"HttpTransport",
"init",
"integrations",
# From sentry_sdk.api
"capture_event",
"capture_message",
"capture_exception",
"add_breadcrumb",
"configure_scope",
"push_scope",
"flush",
"last_event_id",
"start_span",
"start_transaction",
"set_tag",
"set_context",
"set_extra",
"set_user",
"set_level",
]
# Initialize the debug support after everything is loaded
from sentry_sdk.debug import init_debug_support
init_debug_support()
del init_debug_support
sentry-python-1.4.3/sentry_sdk/_compat.py 0000664 0000000 0000000 00000004467 14125057761 0020566 0 ustar 00root root 0000000 0000000 import sys
from sentry_sdk._types import MYPY
if MYPY:
from typing import Optional
from typing import Tuple
from typing import Any
from typing import Type
from typing import TypeVar
T = TypeVar("T")
PY2 = sys.version_info[0] == 2
if PY2:
import urlparse # noqa
text_type = unicode # noqa
string_types = (str, text_type)
number_types = (int, long, float) # noqa
int_types = (int, long) # noqa
iteritems = lambda x: x.iteritems() # noqa: B301
def implements_str(cls):
# type: (T) -> T
cls.__unicode__ = cls.__str__
cls.__str__ = lambda x: unicode(x).encode("utf-8") # noqa
return cls
exec("def reraise(tp, value, tb=None):\n raise tp, value, tb")
else:
import urllib.parse as urlparse # noqa
text_type = str
string_types = (text_type,) # type: Tuple[type]
number_types = (int, float) # type: Tuple[type, type]
int_types = (int,) # noqa
iteritems = lambda x: x.items()
def implements_str(x):
# type: (T) -> T
return x
def reraise(tp, value, tb=None):
    # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[Any]) -> None
    """Re-raise `value`, attaching traceback `tb` when it is not already set."""
    assert value is not None
    if value.__traceback__ is tb:
        raise value
    raise value.with_traceback(tb)
def with_metaclass(meta, *bases):
    # type: (Any, *Any) -> Any
    """Create a base class with metaclass `meta`, portable across Py2/Py3.

    Returns a temporary class whose metaclass builds the real class via
    `meta(name, bases, d)` at the moment it is subclassed.
    """
    class MetaClass(type):
        def __new__(metacls, name, this_bases, d):
            # type: (Any, Any, Any, Any) -> Any
            # `this_bases` is the throwaway temporary class; substitute the
            # real `bases` captured from the enclosing call.
            return meta(name, bases, d)

    return type.__new__(MetaClass, "temporary_class", (), {})
def check_thread_support():
    # type: () -> None
    """Warn when running under uwsgi with threads disabled, which would break
    the SDK's background worker thread. No-op outside uwsgi."""
    try:
        from uwsgi import opt  # type: ignore
    except ImportError:
        # Not running under uwsgi; nothing to check.
        return

    # When `threads` is passed in as a uwsgi option,
    # `enable-threads` is implied on.
    if "threads" in opt:
        return

    # uwsgi option values may be bytes/str/bool depending on how they were
    # set; normalize through str() before comparing against falsy spellings.
    if str(opt.get("enable-threads", "0")).lower() in ("false", "off", "no", "0"):
        from warnings import warn

        warn(
            Warning(
                "We detected the use of uwsgi with disabled threads. "
                "This will cause issues with the transport you are "
                "trying to use. Please enable threading for uwsgi. "
                '(Add the "enable-threads" flag).'
            )
        )
sentry-python-1.4.3/sentry_sdk/_functools.py 0000664 0000000 0000000 00000004344 14125057761 0021311 0 ustar 00root root 0000000 0000000 """
A backport of Python 3 functools to Python 2/3. The only important change
we rely upon is that `update_wrapper` handles AttributeError gracefully.
"""
from functools import partial
from sentry_sdk._types import MYPY
if MYPY:
from typing import Any
from typing import Callable
WRAPPER_ASSIGNMENTS = (
"__module__",
"__name__",
"__qualname__",
"__doc__",
"__annotations__",
)
WRAPPER_UPDATES = ("__dict__",)
def update_wrapper(
    wrapper, wrapped, assigned=WRAPPER_ASSIGNMENTS, updated=WRAPPER_UPDATES
):
    # type: (Any, Any, Any, Any) -> Any
    """Update a wrapper function to look like the wrapped function.

    Copies each attribute named in `assigned` (silently skipping attributes
    the wrapped function lacks) and merges the dict attributes named in
    `updated`. Sets `__wrapped__` and returns the wrapper so this can be
    used as a decorator via partial().
    """
    for name in assigned:
        if hasattr(wrapped, name):
            setattr(wrapper, name, getattr(wrapped, name))
    for name in updated:
        getattr(wrapper, name).update(getattr(wrapped, name, {}))
    # Issue #17482: set __wrapped__ last so we don't inadvertently copy it
    # from the wrapped function when updating __dict__
    wrapper.__wrapped__ = wrapped
    return wrapper
def wraps(wrapped, assigned=WRAPPER_ASSIGNMENTS, updated=WRAPPER_UPDATES):
    # type: (Callable[..., Any], Any, Any) -> Callable[[Callable[..., Any]], Callable[..., Any]]
    """Decorator factory to apply update_wrapper() to a wrapper function

    Returns a decorator that invokes update_wrapper() with the decorated
    function as the wrapper argument and the arguments to wraps() as the
    remaining arguments. Default arguments are as for update_wrapper().
    This is a convenience function to simplify applying partial() to
    update_wrapper().
    """
    # The returned object is a functools.partial, which works as a decorator
    # because calling it with the wrapper supplies update_wrapper's first arg.
    return partial(update_wrapper, wrapped=wrapped, assigned=assigned, updated=updated)
sentry-python-1.4.3/sentry_sdk/_queue.py 0000664 0000000 0000000 00000020323 14125057761 0020414 0 ustar 00root root 0000000 0000000 """
A fork of Python 3.6's stdlib queue with Lock swapped out for RLock to avoid a
deadlock while garbage collecting.
See
https://codewithoutrules.com/2017/08/16/concurrency-python/
https://bugs.python.org/issue14976
https://github.com/sqlalchemy/sqlalchemy/blob/4eb747b61f0c1b1c25bdee3856d7195d10a0c227/lib/sqlalchemy/queue.py#L1
We also vendor the code to evade eventlet's broken monkeypatching, see
https://github.com/getsentry/sentry-python/pull/484
"""
import threading
from collections import deque
from time import time
from sentry_sdk._types import MYPY
if MYPY:
from typing import Any
__all__ = ["Empty", "Full", "Queue"]
class Empty(Exception):
"Exception raised by Queue.get(block=0)/get_nowait()."
pass
class Full(Exception):
"Exception raised by Queue.put(block=0)/put_nowait()."
pass
class Queue(object):
"""Create a queue object with a given maximum size.
If maxsize is <= 0, the queue size is infinite.
"""
def __init__(self, maxsize=0):
self.maxsize = maxsize
self._init(maxsize)
# mutex must be held whenever the queue is mutating. All methods
# that acquire mutex must release it before returning. mutex
# is shared between the three conditions, so acquiring and
# releasing the conditions also acquires and releases mutex.
self.mutex = threading.RLock()
# Notify not_empty whenever an item is added to the queue; a
# thread waiting to get is notified then.
self.not_empty = threading.Condition(self.mutex)
# Notify not_full whenever an item is removed from the queue;
# a thread waiting to put is notified then.
self.not_full = threading.Condition(self.mutex)
# Notify all_tasks_done whenever the number of unfinished tasks
# drops to zero; thread waiting to join() is notified to resume
self.all_tasks_done = threading.Condition(self.mutex)
self.unfinished_tasks = 0
def task_done(self):
"""Indicate that a formerly enqueued task is complete.
Used by Queue consumer threads. For each get() used to fetch a task,
a subsequent call to task_done() tells the queue that the processing
on the task is complete.
If a join() is currently blocking, it will resume when all items
have been processed (meaning that a task_done() call was received
for every item that had been put() into the queue).
Raises a ValueError if called more times than there were items
placed in the queue.
"""
with self.all_tasks_done:
unfinished = self.unfinished_tasks - 1
if unfinished <= 0:
if unfinished < 0:
raise ValueError("task_done() called too many times")
self.all_tasks_done.notify_all()
self.unfinished_tasks = unfinished
def join(self):
"""Blocks until all items in the Queue have been gotten and processed.
The count of unfinished tasks goes up whenever an item is added to the
queue. The count goes down whenever a consumer thread calls task_done()
to indicate the item was retrieved and all work on it is complete.
When the count of unfinished tasks drops to zero, join() unblocks.
"""
with self.all_tasks_done:
while self.unfinished_tasks:
self.all_tasks_done.wait()
def qsize(self):
"""Return the approximate size of the queue (not reliable!)."""
with self.mutex:
return self._qsize()
def empty(self):
"""Return True if the queue is empty, False otherwise (not reliable!).
This method is likely to be removed at some point. Use qsize() == 0
as a direct substitute, but be aware that either approach risks a race
condition where a queue can grow before the result of empty() or
qsize() can be used.
To create code that needs to wait for all queued tasks to be
completed, the preferred technique is to use the join() method.
"""
with self.mutex:
return not self._qsize()
def full(self):
"""Return True if the queue is full, False otherwise (not reliable!).
This method is likely to be removed at some point. Use qsize() >= n
as a direct substitute, but be aware that either approach risks a race
condition where a queue can shrink before the result of full() or
qsize() can be used.
"""
with self.mutex:
return 0 < self.maxsize <= self._qsize()
def put(self, item, block=True, timeout=None):
"""Put an item into the queue.
If optional args 'block' is true and 'timeout' is None (the default),
block if necessary until a free slot is available. If 'timeout' is
a non-negative number, it blocks at most 'timeout' seconds and raises
the Full exception if no free slot was available within that time.
Otherwise ('block' is false), put an item on the queue if a free slot
is immediately available, else raise the Full exception ('timeout'
is ignored in that case).
"""
with self.not_full:
if self.maxsize > 0:
if not block:
if self._qsize() >= self.maxsize:
raise Full()
elif timeout is None:
while self._qsize() >= self.maxsize:
self.not_full.wait()
elif timeout < 0:
raise ValueError("'timeout' must be a non-negative number")
else:
endtime = time() + timeout
while self._qsize() >= self.maxsize:
remaining = endtime - time()
if remaining <= 0.0:
raise Full
self.not_full.wait(remaining)
self._put(item)
self.unfinished_tasks += 1
self.not_empty.notify()
def get(self, block=True, timeout=None):
"""Remove and return an item from the queue.
If optional args 'block' is true and 'timeout' is None (the default),
block if necessary until an item is available. If 'timeout' is
a non-negative number, it blocks at most 'timeout' seconds and raises
the Empty exception if no item was available within that time.
Otherwise ('block' is false), return an item if one is immediately
available, else raise the Empty exception ('timeout' is ignored
in that case).
"""
with self.not_empty:
if not block:
if not self._qsize():
raise Empty()
elif timeout is None:
while not self._qsize():
self.not_empty.wait()
elif timeout < 0:
raise ValueError("'timeout' must be a non-negative number")
else:
endtime = time() + timeout
while not self._qsize():
remaining = endtime - time()
if remaining <= 0.0:
raise Empty()
self.not_empty.wait(remaining)
item = self._get()
self.not_full.notify()
return item
def put_nowait(self, item):
"""Put an item into the queue without blocking.
Only enqueue the item if a free slot is immediately available.
Otherwise raise the Full exception.
"""
return self.put(item, block=False)
def get_nowait(self):
"""Remove and return an item from the queue without blocking.
Only get an item if one is immediately available. Otherwise
raise the Empty exception.
"""
return self.get(block=False)
# Override these methods to implement other queue organizations
# (e.g. stack or priority queue).
# These will only be called with appropriate locks held
# Initialize the queue representation
def _init(self, maxsize):
self.queue = deque() # type: Any
def _qsize(self):
return len(self.queue)
# Put a new item in the queue
def _put(self, item):
self.queue.append(item)
# Get an item from the queue
def _get(self):
return self.queue.popleft()
sentry-python-1.4.3/sentry_sdk/_types.py 0000664 0000000 0000000 00000002472 14125057761 0020441 0 ustar 00root root 0000000 0000000 try:
from typing import TYPE_CHECKING as MYPY
except ImportError:
MYPY = False
if MYPY:
from types import TracebackType
from typing import Any
from typing import Callable
from typing import Dict
from typing import Optional
from typing import Tuple
from typing import Type
from typing import Union
from typing_extensions import Literal
ExcInfo = Tuple[
Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]
]
Event = Dict[str, Any]
Hint = Dict[str, Any]
Breadcrumb = Dict[str, Any]
BreadcrumbHint = Dict[str, Any]
SamplingContext = Dict[str, Any]
EventProcessor = Callable[[Event, Hint], Optional[Event]]
ErrorProcessor = Callable[[Event, ExcInfo], Optional[Event]]
BreadcrumbProcessor = Callable[[Breadcrumb, BreadcrumbHint], Optional[Breadcrumb]]
TracesSampler = Callable[[SamplingContext], Union[float, int, bool]]
# https://github.com/python/mypy/issues/5710
NotImplementedType = Any
EventDataCategory = Literal[
"default",
"error",
"crash",
"transaction",
"security",
"attachment",
"session",
"internal",
]
SessionStatus = Literal["ok", "exited", "crashed", "abnormal"]
EndpointType = Literal["store", "envelope"]
sentry-python-1.4.3/sentry_sdk/api.py 0000664 0000000 0000000 00000011260 14125057761 0017702 0 ustar 00root root 0000000 0000000 import inspect
from sentry_sdk.hub import Hub
from sentry_sdk.scope import Scope
from sentry_sdk._types import MYPY
if MYPY:
from typing import Any
from typing import Dict
from typing import Optional
from typing import overload
from typing import Callable
from typing import TypeVar
from typing import ContextManager
from typing import Union
from sentry_sdk._types import Event, Hint, Breadcrumb, BreadcrumbHint, ExcInfo
from sentry_sdk.tracing import Span, Transaction
T = TypeVar("T")
F = TypeVar("F", bound=Callable[..., Any])
else:
def overload(x):
# type: (T) -> T
return x
# When changing this, update __all__ in __init__.py too
__all__ = [
"capture_event",
"capture_message",
"capture_exception",
"add_breadcrumb",
"configure_scope",
"push_scope",
"flush",
"last_event_id",
"start_span",
"start_transaction",
"set_tag",
"set_context",
"set_extra",
"set_user",
"set_level",
]
def hubmethod(f):
# type: (F) -> F
f.__doc__ = "%s\n\n%s" % (
"Alias for :py:meth:`sentry_sdk.Hub.%s`" % f.__name__,
inspect.getdoc(getattr(Hub, f.__name__)),
)
return f
def scopemethod(f):
# type: (F) -> F
f.__doc__ = "%s\n\n%s" % (
"Alias for :py:meth:`sentry_sdk.Scope.%s`" % f.__name__,
inspect.getdoc(getattr(Scope, f.__name__)),
)
return f
@hubmethod
def capture_event(
event, # type: Event
hint=None, # type: Optional[Hint]
scope=None, # type: Optional[Any]
**scope_args # type: Any
):
# type: (...) -> Optional[str]
return Hub.current.capture_event(event, hint, scope=scope, **scope_args)
@hubmethod
def capture_message(
message, # type: str
level=None, # type: Optional[str]
scope=None, # type: Optional[Any]
**scope_args # type: Any
):
# type: (...) -> Optional[str]
return Hub.current.capture_message(message, level, scope=scope, **scope_args)
@hubmethod
def capture_exception(
error=None, # type: Optional[Union[BaseException, ExcInfo]]
scope=None, # type: Optional[Any]
**scope_args # type: Any
):
# type: (...) -> Optional[str]
return Hub.current.capture_exception(error, scope=scope, **scope_args)
@hubmethod
def add_breadcrumb(
crumb=None, # type: Optional[Breadcrumb]
hint=None, # type: Optional[BreadcrumbHint]
**kwargs # type: Any
):
# type: (...) -> None
return Hub.current.add_breadcrumb(crumb, hint, **kwargs)
# `configure_scope` and `push_scope` support two call styles: with no
# argument they return a context manager yielding the scope, with a
# callback they invoke it immediately and return None.  The `@overload`
# stubs document both signatures for type checkers; only the final
# definition exists at runtime (hence the F811 suppressions).
@overload
def configure_scope(): # noqa: F811
    # type: () -> ContextManager[Scope]
    pass


@overload
def configure_scope( # noqa: F811
    callback, # type: Callable[[Scope], None]
):
    # type: (...) -> None
    pass


@hubmethod
def configure_scope( # noqa: F811
    callback=None, # type: Optional[Callable[[Scope], None]]
):
    # type: (...) -> Optional[ContextManager[Scope]]
    return Hub.current.configure_scope(callback)


@overload
def push_scope(): # noqa: F811
    # type: () -> ContextManager[Scope]
    pass


@overload
def push_scope( # noqa: F811
    callback, # type: Callable[[Scope], None]
):
    # type: (...) -> None
    pass


@hubmethod
def push_scope( # noqa: F811
    callback=None, # type: Optional[Callable[[Scope], None]]
):
    # type: (...) -> Optional[ContextManager[Scope]]
    return Hub.current.push_scope(callback)
# Aliases for the setters on the current hub's scope; docstrings are
# attached by `@scopemethod` at import time.
@scopemethod # noqa
def set_tag(key, value):
    # type: (str, Any) -> None
    return Hub.current.scope.set_tag(key, value)


@scopemethod # noqa
def set_context(key, value):
    # type: (str, Dict[str, Any]) -> None
    return Hub.current.scope.set_context(key, value)


@scopemethod # noqa
def set_extra(key, value):
    # type: (str, Any) -> None
    return Hub.current.scope.set_extra(key, value)


@scopemethod # noqa
def set_user(value):
    # type: (Optional[Dict[str, Any]]) -> None
    return Hub.current.scope.set_user(value)


@scopemethod # noqa
def set_level(value):
    # type: (str) -> None
    return Hub.current.scope.set_level(value)
# Further `Hub` aliases; docstrings are attached by `@hubmethod`.
@hubmethod
def flush(
    timeout=None, # type: Optional[float]
    callback=None, # type: Optional[Callable[[int, float], None]]
):
    # type: (...) -> None
    return Hub.current.flush(timeout=timeout, callback=callback)


@hubmethod
def last_event_id():
    # type: () -> Optional[str]
    return Hub.current.last_event_id()


@hubmethod
def start_span(
    span=None, # type: Optional[Span]
    **kwargs # type: Any
):
    # type: (...) -> Span
    return Hub.current.start_span(span=span, **kwargs)


@hubmethod
def start_transaction(
    transaction=None, # type: Optional[Transaction]
    **kwargs # type: Any
):
    # type: (...) -> Transaction
    return Hub.current.start_transaction(transaction, **kwargs)
sentry-python-1.4.3/sentry_sdk/attachments.py 0000664 0000000 0000000 00000003401 14125057761 0021442 0 ustar 00root root 0000000 0000000 import os
import mimetypes
from sentry_sdk._types import MYPY
from sentry_sdk.envelope import Item, PayloadRef
if MYPY:
from typing import Optional, Union, Callable
class Attachment(object):
    """A file attachment to be sent along with an event.

    Either raw ``bytes`` (or a zero-argument callable producing them) or a
    filesystem ``path`` must be provided.  The filename defaults to the
    basename of ``path`` and the content type is guessed from the filename
    when not given explicitly.
    """

    def __init__(
        self,
        bytes=None, # type: Union[None, bytes, Callable[[], bytes]]
        filename=None, # type: Optional[str]
        path=None, # type: Optional[str]
        content_type=None, # type: Optional[str]
        add_to_transactions=False, # type: bool
    ):
        # type: (...) -> None
        if bytes is None and path is None:
            raise TypeError("path or raw bytes required for attachment")
        if filename is None and path is not None:
            filename = os.path.basename(path)
        if filename is None:
            raise TypeError("filename is required for attachment")
        if content_type is None:
            # May remain None if the extension is unknown to `mimetypes`.
            content_type = mimetypes.guess_type(filename)[0]
        self.bytes = bytes
        self.filename = filename
        self.path = path
        self.content_type = content_type
        self.add_to_transactions = add_to_transactions

    def to_envelope_item(self):
        # type: () -> Item
        """Returns an envelope item for this attachment."""
        payload = None # type: Union[None, PayloadRef, bytes]
        if self.bytes is not None:
            if callable(self.bytes):
                payload = self.bytes()
            else:
                payload = self.bytes
        else:
            # Defer reading the file until the envelope is serialized.
            payload = PayloadRef(path=self.path)
        return Item(
            payload=payload,
            type="attachment",
            content_type=self.content_type,
            filename=self.filename,
        )

    def __repr__(self):
        # type: () -> str
        # Fix: the format string was empty, so repr() raised
        # "not all arguments converted during string formatting".
        return "<Attachment %r>" % (self.filename,)
sentry-python-1.4.3/sentry_sdk/client.py 0000664 0000000 0000000 00000034641 14125057761 0020417 0 ustar 00root root 0000000 0000000 import os
import uuid
import random
from datetime import datetime
import socket
from sentry_sdk._compat import string_types, text_type, iteritems
from sentry_sdk.utils import (
capture_internal_exceptions,
current_stacktrace,
disable_capture_event,
format_timestamp,
get_type_name,
get_default_release,
handle_in_app,
logger,
)
from sentry_sdk.serializer import serialize
from sentry_sdk.transport import make_transport
from sentry_sdk.consts import DEFAULT_OPTIONS, SDK_INFO, ClientConstructor
from sentry_sdk.integrations import setup_integrations
from sentry_sdk.utils import ContextVar
from sentry_sdk.sessions import SessionFlusher
from sentry_sdk.envelope import Envelope
from sentry_sdk.tracing_utils import has_tracestate_enabled, reinflate_tracestate
from sentry_sdk._types import MYPY
if MYPY:
from typing import Any
from typing import Callable
from typing import Dict
from typing import Optional
from sentry_sdk.scope import Scope
from sentry_sdk._types import Event, Hint
from sentry_sdk.session import Session
_client_init_debug = ContextVar("client_init_debug")
def _get_options(*args, **kwargs):
    # type: (*Optional[str], **Any) -> Dict[str, Any]
    """Normalize client constructor arguments into a full options dict.

    An optional first positional argument is treated as the DSN.  All
    options are validated against DEFAULT_OPTIONS (unknown keys raise
    TypeError) and missing dsn/release/environment/server_name values are
    filled in from the environment.
    """
    dsn = None  # type: Optional[str]
    if args and (args[0] is None or isinstance(args[0], (text_type, bytes, str))):
        dsn = args[0]
        args = args[1:]

    options = dict(*args, **kwargs)
    if dsn is not None and options.get("dsn") is None:
        options["dsn"] = dsn

    rv = dict(DEFAULT_OPTIONS)
    for key, value in iteritems(options):
        if key not in rv:
            raise TypeError("Unknown option %r" % (key,))
        rv[key] = value

    if rv["dsn"] is None:
        rv["dsn"] = os.environ.get("SENTRY_DSN")
    if rv["release"] is None:
        rv["release"] = get_default_release()
    if rv["environment"] is None:
        rv["environment"] = os.environ.get("SENTRY_ENVIRONMENT") or "production"
    if rv["server_name"] is None and hasattr(socket, "gethostname"):
        rv["server_name"] = socket.gethostname()
    return rv
class _Client(object):
    """The client is internally responsible for capturing the events and
    forwarding them to sentry through the configured transport. It takes
    the client options as keyword arguments and optionally the DSN as first
    argument.
    """

    def __init__(self, *args, **kwargs):
        # type: (*Any, **Any) -> None
        # `get_options` validates kwargs against DEFAULT_OPTIONS and fills
        # in environment-derived defaults (dsn, release, environment, ...).
        self.options = get_options(*args, **kwargs) # type: Dict[str, Any]
        self._init_impl()

    def __getstate__(self):
        # type: () -> Any
        # Only the options survive pickling; transport, session flusher
        # and integrations are rebuilt in __setstate__ via _init_impl().
        return {"options": self.options}

    def __setstate__(self, state):
        # type: (Any) -> None
        self.options = state["options"]
        self._init_impl()

    def _init_impl(self):
        # type: () -> None
        """Build the transport, session flusher and integrations from
        the already-normalized ``self.options``."""
        old_debug = _client_init_debug.get(False)

        def _capture_envelope(envelope):
            # type: (Envelope) -> None
            if self.transport is not None:
                self.transport.capture_envelope(envelope)

        try:
            # While initializing, SDK debug logging follows this client's
            # `debug` option (see _HubBasedClientFilter in debug.py).
            _client_init_debug.set(self.options["debug"])
            self.transport = make_transport(self.options)
            self.session_flusher = SessionFlusher(capture_func=_capture_envelope)

            request_bodies = ("always", "never", "small", "medium")
            if self.options["request_bodies"] not in request_bodies:
                raise ValueError(
                    "Invalid value for request_bodies. Must be one of {}".format(
                        request_bodies
                    )
                )

            self.integrations = setup_integrations(
                self.options["integrations"],
                with_defaults=self.options["default_integrations"],
                with_auto_enabling_integrations=self.options[
                    "auto_enabling_integrations"
                ],
            )
        finally:
            _client_init_debug.set(old_debug)

    @property
    def dsn(self):
        # type: () -> Optional[str]
        """Returns the configured DSN as string."""
        return self.options["dsn"]

    def _prepare_event(
        self,
        event, # type: Event
        hint, # type: Hint
        scope, # type: Optional[Scope]
    ):
        # type: (...) -> Optional[Event]
        """Apply scope data, SDK metadata, serialization and `before_send`
        to `event`.  Returns None when the scope's processors or
        `before_send` drop the event."""
        if event.get("timestamp") is None:
            event["timestamp"] = datetime.utcnow()

        if scope is not None:
            event_ = scope.apply_to_event(event, hint)
            if event_ is None:
                # An event processor on the scope dropped the event.
                return None
            event = event_

        if (
            self.options["attach_stacktrace"]
            and "exception" not in event
            and "stacktrace" not in event
            and "threads" not in event
        ):
            with capture_internal_exceptions():
                event["threads"] = {
                    "values": [
                        {
                            "stacktrace": current_stacktrace(
                                self.options["with_locals"]
                            ),
                            "crashed": False,
                            "current": True,
                        }
                    ]
                }

        # Fill in options-derived defaults without overriding event data.
        for key in "release", "environment", "server_name", "dist":
            if event.get(key) is None and self.options[key] is not None:
                event[key] = text_type(self.options[key]).strip()
        if event.get("sdk") is None:
            sdk_info = dict(SDK_INFO)
            sdk_info["integrations"] = sorted(self.integrations.keys())
            event["sdk"] = sdk_info

        if event.get("platform") is None:
            event["platform"] = "python"

        event = handle_in_app(
            event, self.options["in_app_exclude"], self.options["in_app_include"]
        )

        # Postprocess the event here so that annotated types do
        # generally not surface in before_send
        if event is not None:
            event = serialize(
                event,
                smart_transaction_trimming=self.options["_experiments"].get(
                    "smart_transaction_trimming"
                ),
            )

        # `before_send` only applies to error events, not transactions.
        before_send = self.options["before_send"]
        if before_send is not None and event.get("type") != "transaction":
            new_event = None
            with capture_internal_exceptions():
                new_event = before_send(event, hint or {})
            if new_event is None:
                logger.info("before send dropped event (%s)", event)
            event = new_event # type: ignore

        return event

    def _is_ignored_error(self, event, hint):
        # type: (Event, Hint) -> bool
        """True if the exception in `hint` matches the `ignore_errors`
        option (by class or by type name string)."""
        exc_info = hint.get("exc_info")
        if exc_info is None:
            return False

        type_name = get_type_name(exc_info[0])
        full_name = "%s.%s" % (exc_info[0].__module__, type_name)

        for errcls in self.options["ignore_errors"]:
            # String types are matched against the type name in the
            # exception only
            if isinstance(errcls, string_types):
                if errcls == full_name or errcls == type_name:
                    return True
            else:
                if issubclass(exc_info[0], errcls):
                    return True

        return False

    def _should_capture(
        self,
        event, # type: Event
        hint, # type: Hint
        scope=None, # type: Optional[Scope]
    ):
        # type: (...) -> bool
        """Decide whether to keep `event`, applying scope suppression,
        `sample_rate` and the ignore list."""
        if event.get("type") == "transaction":
            # Transactions are sampled independent of error events.
            return True

        if scope is not None and not scope._should_capture:
            return False

        if (
            self.options["sample_rate"] < 1.0
            and random.random() >= self.options["sample_rate"]
        ):
            # record a lost event if we did not sample this.
            if self.transport:
                self.transport.record_lost_event("sample_rate", data_category="error")
            return False

        if self._is_ignored_error(event, hint):
            return False

        return True

    def _update_session_from_event(
        self,
        session, # type: Session
        event, # type: Event
    ):
        # type: (...) -> None
        """Fold error/crash/user information from `event` into `session`."""
        crashed = False
        errored = False
        user_agent = None

        exceptions = (event.get("exception") or {}).get("values")
        if exceptions:
            errored = True
            for error in exceptions:
                mechanism = error.get("mechanism")
                # An unhandled exception marks the session as crashed.
                if mechanism and mechanism.get("handled") is False:
                    crashed = True
                    break

        user = event.get("user")

        if session.user_agent is None:
            headers = (event.get("request") or {}).get("headers")
            for (k, v) in iteritems(headers or {}):
                if k.lower() == "user-agent":
                    user_agent = v
                    break

        session.update(
            status="crashed" if crashed else None,
            user=user,
            user_agent=user_agent,
            errors=session.errors + (errored or crashed),
        )

    def capture_event(
        self,
        event, # type: Event
        hint=None, # type: Optional[Hint]
        scope=None, # type: Optional[Scope]
    ):
        # type: (...) -> Optional[str]
        """Captures an event.
        :param event: A ready-made event that can be directly sent to Sentry.
        :param hint: Contains metadata about the event that can be read from `before_send`, such as the original exception object or a HTTP request object.
        :returns: An event ID. May be `None` if there is no DSN set or of if the SDK decided to discard the event for other reasons. In such situations setting `debug=True` on `init()` may help.
        """
        if disable_capture_event.get(False):
            return None

        if self.transport is None:
            return None
        if hint is None:
            hint = {}
        event_id = event.get("event_id")
        hint = dict(hint or ()) # type: Hint

        if event_id is None:
            event["event_id"] = event_id = uuid.uuid4().hex
        if not self._should_capture(event, hint, scope):
            return None

        event_opt = self._prepare_event(event, hint, scope)
        if event_opt is None:
            return None

        # whenever we capture an event we also check if the session needs
        # to be updated based on that information.
        session = scope._session if scope else None
        if session:
            self._update_session_from_event(session, event)

        attachments = hint.get("attachments")
        is_transaction = event_opt.get("type") == "transaction"

        # this is outside of the `if` immediately below because even if we don't
        # use the value, we want to make sure we remove it before the event is
        # sent
        raw_tracestate = (
            event_opt.get("contexts", {}).get("trace", {}).pop("tracestate", "")
        )

        # Transactions or events with attachments should go to the /envelope/
        # endpoint.
        if is_transaction or attachments:

            headers = {
                "event_id": event_opt["event_id"],
                "sent_at": format_timestamp(datetime.utcnow()),
            }

            tracestate_data = raw_tracestate and reinflate_tracestate(
                raw_tracestate.replace("sentry=", "")
            )
            if tracestate_data and has_tracestate_enabled():
                headers["trace"] = tracestate_data

            envelope = Envelope(headers=headers)

            if is_transaction:
                envelope.add_transaction(event_opt)
            else:
                envelope.add_event(event_opt)

            for attachment in attachments or ():
                envelope.add_item(attachment.to_envelope_item())
            self.transport.capture_envelope(envelope)
        else:
            # All other events go to the /store/ endpoint.
            self.transport.capture_event(event_opt)
        return event_id

    def capture_session(
        self, session # type: Session
    ):
        # type: (...) -> None
        # Sessions without a release cannot be aggregated server-side.
        if not session.release:
            logger.info("Discarded session update because of missing release")
        else:
            self.session_flusher.add_session(session)

    def close(
        self,
        timeout=None, # type: Optional[float]
        callback=None, # type: Optional[Callable[[int, float], None]]
    ):
        # type: (...) -> None
        """
        Close the client and shut down the transport. Arguments have the same
        semantics as :py:meth:`Client.flush`.
        """
        if self.transport is not None:
            self.flush(timeout=timeout, callback=callback)
            self.session_flusher.kill()
            self.transport.kill()
            self.transport = None

    def flush(
        self,
        timeout=None, # type: Optional[float]
        callback=None, # type: Optional[Callable[[int, float], None]]
    ):
        # type: (...) -> None
        """
        Wait for the current events to be sent.
        :param timeout: Wait for at most `timeout` seconds. If no `timeout` is provided, the `shutdown_timeout` option value is used.
        :param callback: Is invoked with the number of pending events and the configured timeout.
        """
        if self.transport is not None:
            if timeout is None:
                timeout = self.options["shutdown_timeout"]
            self.session_flusher.flush()
            self.transport.flush(timeout=timeout, callback=callback)

    def __enter__(self):
        # type: () -> _Client
        return self

    def __exit__(self, exc_type, exc_value, tb):
        # type: (Any, Any, Any) -> None
        self.close()
from sentry_sdk._types import MYPY
if MYPY:
# Make mypy, PyCharm and other static analyzers think `get_options` is a
# type to have nicer autocompletion for params.
#
# Use `ClientConstructor` to define the argument types of `init` and
# `Dict[str, Any]` to tell static analyzers about the return type.
class get_options(ClientConstructor, Dict[str, Any]): # noqa: N801
pass
class Client(ClientConstructor, _Client):
pass
else:
# Alias `get_options` for actual usage. Go through the lambda indirection
# to throw PyCharm off of the weakly typed signature (it would otherwise
# discover both the weakly typed signature of `_init` and our faked `init`
# type).
get_options = (lambda: _get_options)()
Client = (lambda: _Client)()
sentry-python-1.4.3/sentry_sdk/consts.py 0000664 0000000 0000000 00000007311 14125057761 0020444 0 ustar 00root root 0000000 0000000 from sentry_sdk._types import MYPY
if MYPY:
import sentry_sdk
from typing import Optional
from typing import Callable
from typing import Union
from typing import List
from typing import Type
from typing import Dict
from typing import Any
from typing import Sequence
from typing_extensions import TypedDict
from sentry_sdk.integrations import Integration
from sentry_sdk._types import (
BreadcrumbProcessor,
Event,
EventProcessor,
TracesSampler,
)
# Experiments are feature flags to enable and disable certain unstable SDK
# functionality. Changing them from the defaults (`None`) in production
# code is highly discouraged. They are not subject to any stability
# guarantees such as the ones from semantic versioning.
Experiments = TypedDict(
"Experiments",
{
"max_spans": Optional[int],
"record_sql_params": Optional[bool],
"smart_transaction_trimming": Optional[bool],
"propagate_tracestate": Optional[bool],
},
total=False,
)
DEFAULT_QUEUE_SIZE = 100
DEFAULT_MAX_BREADCRUMBS = 100
# This type exists to trick mypy and PyCharm into thinking `init` and `Client`
# take these arguments (even though they take opaque **kwargs)
class ClientConstructor(object):
    """Typing shim declaring the keyword arguments accepted by `init` and
    `Client` (whose real constructors take opaque ``**kwargs``).  The
    defaults declared here are also harvested by `_get_default_options`
    to build ``DEFAULT_OPTIONS``."""

    def __init__(
        self,
        dsn=None, # type: Optional[str]
        with_locals=True, # type: bool
        max_breadcrumbs=DEFAULT_MAX_BREADCRUMBS, # type: int
        release=None, # type: Optional[str]
        environment=None, # type: Optional[str]
        server_name=None, # type: Optional[str]
        shutdown_timeout=2, # type: int
        integrations=[], # type: Sequence[Integration] # noqa: B006
        in_app_include=[], # type: List[str] # noqa: B006
        in_app_exclude=[], # type: List[str] # noqa: B006
        default_integrations=True, # type: bool
        dist=None, # type: Optional[str]
        transport=None, # type: Optional[Union[sentry_sdk.transport.Transport, Type[sentry_sdk.transport.Transport], Callable[[Event], None]]]
        transport_queue_size=DEFAULT_QUEUE_SIZE, # type: int
        sample_rate=1.0, # type: float
        send_default_pii=False, # type: bool
        http_proxy=None, # type: Optional[str]
        https_proxy=None, # type: Optional[str]
        ignore_errors=[], # type: List[Union[type, str]] # noqa: B006
        request_bodies="medium", # type: str
        before_send=None, # type: Optional[EventProcessor]
        before_breadcrumb=None, # type: Optional[BreadcrumbProcessor]
        debug=False, # type: bool
        attach_stacktrace=False, # type: bool
        ca_certs=None, # type: Optional[str]
        propagate_traces=True, # type: bool
        traces_sample_rate=None, # type: Optional[float]
        traces_sampler=None, # type: Optional[TracesSampler]
        auto_enabling_integrations=True, # type: bool
        auto_session_tracking=True, # type: bool
        send_client_reports=True, # type: bool
        _experiments={}, # type: Experiments # noqa: B006
    ):
        # type: (...) -> None
        pass
def _get_default_options():
    # type: () -> Dict[str, Any]
    """Derive the default options dict from the declared defaults of
    `ClientConstructor.__init__` (keyword name -> default value)."""
    import inspect

    # Python 3 has getfullargspec; fall back to getargspec on Python 2.
    spec_fn = getattr(inspect, "getfullargspec", None)
    if spec_fn is None:
        spec_fn = inspect.getargspec  # type: ignore
    spec = spec_fn(ClientConstructor.__init__)
    defaults = spec.defaults or ()
    # Defaults align with the trailing parameters of the signature.
    names = spec.args[len(spec.args) - len(defaults) :]
    return dict(zip(names, defaults))
DEFAULT_OPTIONS = _get_default_options()
del _get_default_options
VERSION = "1.4.3"
SDK_INFO = {
"name": "sentry.python",
"version": VERSION,
"packages": [{"name": "pypi:sentry-sdk", "version": VERSION}],
}
sentry-python-1.4.3/sentry_sdk/debug.py 0000664 0000000 0000000 00000002154 14125057761 0020221 0 ustar 00root root 0000000 0000000 import sys
import logging
from sentry_sdk import utils
from sentry_sdk.hub import Hub
from sentry_sdk.utils import logger
from sentry_sdk.client import _client_init_debug
from logging import LogRecord
class _HubBasedClientFilter(logging.Filter):
    # Emit SDK debug log records only while client initialization is in
    # progress, or when the current hub's client was configured with
    # `debug=True`.
    def filter(self, record):
        # type: (LogRecord) -> bool
        if _client_init_debug.get(False):
            return True
        hub = Hub.current
        if hub is not None and hub.client is not None:
            return hub.client.options["debug"]
        # No client bound: suppress debug output.
        return False
def init_debug_support():
    # type: () -> None
    # Install the debug logging handler (only once) and expose the current
    # hub to `sentry_sdk.utils` for internal error reporting.
    if not logger.handlers:
        configure_logger()
    configure_debug_hub()


def configure_logger():
    # type: () -> None
    # Route SDK debug output to stderr, gated by _HubBasedClientFilter so
    # it only appears when a client with `debug=True` is active.
    _handler = logging.StreamHandler(sys.stderr)
    _handler.setFormatter(logging.Formatter(" [sentry] %(levelname)s: %(message)s"))
    logger.addHandler(_handler)
    logger.setLevel(logging.DEBUG)
    logger.addFilter(_HubBasedClientFilter())


def configure_debug_hub():
    # type: () -> None
    # Give `sentry_sdk.utils` a way to find the current hub without a
    # circular import at module load time.
    def _get_debug_hub():
        # type: () -> Hub
        return Hub.current

    utils._get_debug_hub = _get_debug_hub
sentry-python-1.4.3/sentry_sdk/envelope.py 0000664 0000000 0000000 00000021234 14125057761 0020750 0 ustar 00root root 0000000 0000000 import io
import json
import mimetypes
from sentry_sdk._compat import text_type, PY2
from sentry_sdk._types import MYPY
from sentry_sdk.session import Session
from sentry_sdk.utils import json_dumps, capture_internal_exceptions
if MYPY:
from typing import Any
from typing import Optional
from typing import Union
from typing import Dict
from typing import List
from typing import Iterator
from sentry_sdk._types import Event, EventDataCategory
def parse_json(data):
    # type: (Union[bytes, text_type]) -> Any
    """Parse a JSON document given as bytes or text."""
    # on some python 3 versions this needs to be bytes
    if isinstance(data, bytes) and not PY2:
        data = data.decode("utf-8", "replace")
    return json.loads(data)
class Envelope(object):
    """A Sentry envelope: a headers dict plus an ordered list of `Item`s.

    Envelopes are the wire container used for transactions, sessions,
    attachments and (via `Client.capture_event`) error events with
    attachments.
    """

    def __init__(
        self,
        headers=None, # type: Optional[Dict[str, Any]]
        items=None, # type: Optional[List[Item]]
    ):
        # type: (...) -> None
        # Copy caller-supplied collections so later mutation is contained.
        if headers is not None:
            headers = dict(headers)
        self.headers = headers or {}
        if items is None:
            items = []
        else:
            items = list(items)
        self.items = items

    @property
    def description(self):
        # type: (...) -> str
        """Short human-readable summary used in logging."""
        return "envelope with %s items (%s)" % (
            len(self.items),
            ", ".join(x.data_category for x in self.items),
        )

    def add_event(
        self, event # type: Event
    ):
        # type: (...) -> None
        self.add_item(Item(payload=PayloadRef(json=event), type="event"))

    def add_transaction(
        self, transaction # type: Event
    ):
        # type: (...) -> None
        self.add_item(Item(payload=PayloadRef(json=transaction), type="transaction"))

    def add_session(
        self, session # type: Union[Session, Any]
    ):
        # type: (...) -> None
        if isinstance(session, Session):
            session = session.to_json()
        self.add_item(Item(payload=PayloadRef(json=session), type="session"))

    def add_sessions(
        self, sessions # type: Any
    ):
        # type: (...) -> None
        self.add_item(Item(payload=PayloadRef(json=sessions), type="sessions"))

    def add_item(
        self, item # type: Item
    ):
        # type: (...) -> None
        self.items.append(item)

    def get_event(self):
        # type: (...) -> Optional[Event]
        """Return the first error event in this envelope, if any."""
        for items in self.items:
            event = items.get_event()
            if event is not None:
                return event
        return None

    def get_transaction_event(self):
        # type: (...) -> Optional[Event]
        """Return the first transaction event in this envelope, if any."""
        for item in self.items:
            event = item.get_transaction_event()
            if event is not None:
                return event
        return None

    def __iter__(self):
        # type: (...) -> Iterator[Item]
        return iter(self.items)

    def serialize_into(
        self, f # type: Any
    ):
        # type: (...) -> None
        # Wire format: JSON headers line, then each item in sequence.
        f.write(json_dumps(self.headers))
        f.write(b"\n")
        for item in self.items:
            item.serialize_into(f)

    def serialize(self):
        # type: (...) -> bytes
        out = io.BytesIO()
        self.serialize_into(out)
        return out.getvalue()

    @classmethod
    def deserialize_from(
        cls, f # type: Any
    ):
        # type: (...) -> Envelope
        headers = parse_json(f.readline())
        items = []
        while 1:
            item = Item.deserialize_from(f)
            if item is None:
                break
            items.append(item)
        return cls(headers=headers, items=items)

    @classmethod
    def deserialize(
        cls, bytes # type: bytes
    ):
        # type: (...) -> Envelope
        return cls.deserialize_from(io.BytesIO(bytes))

    def __repr__(self):
        # type: (...) -> str
        # Fix: the format string was empty, so repr() raised
        # "not all arguments converted during string formatting".
        return "<Envelope headers=%r items=%r>" % (self.headers, self.items)
class PayloadRef(object):
    """A lazily-materialized envelope payload.

    Exactly one of ``bytes``, ``path`` or ``json`` is expected; the raw
    bytes are produced on demand by `get_bytes` (reading the file or
    serializing the JSON) and cached.
    """

    def __init__(
        self,
        bytes=None, # type: Optional[bytes]
        path=None, # type: Optional[Union[bytes, text_type]]
        json=None, # type: Optional[Any]
    ):
        # type: (...) -> None
        self.json = json
        self.bytes = bytes
        self.path = path

    def get_bytes(self):
        # type: (...) -> bytes
        """Materialize (and cache) the payload as bytes."""
        if self.bytes is None:
            if self.path is not None:
                # Best effort: a failed read leaves the payload empty.
                with capture_internal_exceptions():
                    with open(self.path, "rb") as f:
                        self.bytes = f.read()
            elif self.json is not None:
                self.bytes = json_dumps(self.json)
            else:
                self.bytes = b""
        return self.bytes

    @property
    def inferred_content_type(self):
        # type: (...) -> str
        """MIME type guessed from the payload kind or the file name."""
        if self.json is not None:
            return "application/json"
        elif self.path is not None:
            path = self.path
            if isinstance(path, bytes):
                path = path.decode("utf-8", "replace")
            ty = mimetypes.guess_type(path)[0]
            if ty:
                return ty
        return "application/octet-stream"

    def __repr__(self):
        # type: (...) -> str
        # Fix: the format string was empty, so repr() raised
        # "not all arguments converted during string formatting".
        return "<Payload %r>" % (self.inferred_content_type,)
class Item(object):
    """A single envelope item: a headers dict plus a payload.

    ``bytes``/text payloads are wrapped in a :class:`PayloadRef`.  The
    ``filename``, ``type`` and ``content_type`` arguments are stored in
    the item headers; the content type is inferred from the payload when
    not supplied.
    """

    def __init__(
        self,
        payload, # type: Union[bytes, text_type, PayloadRef]
        headers=None, # type: Optional[Dict[str, Any]]
        type=None, # type: Optional[str]
        content_type=None, # type: Optional[str]
        filename=None, # type: Optional[str]
    ):
        # type: (...) -> None
        # Copy the caller's headers so later mutation does not leak out.
        # (Previously this used a redundant `elif headers is None` branch.)
        if headers is not None:
            headers = dict(headers)
        else:
            headers = {}
        self.headers = headers
        if isinstance(payload, bytes):
            payload = PayloadRef(bytes=payload)
        elif isinstance(payload, text_type):
            payload = PayloadRef(bytes=payload.encode("utf-8"))

        if filename is not None:
            headers["filename"] = filename
        if type is not None:
            headers["type"] = type
        if content_type is not None:
            headers["content_type"] = content_type
        elif "content_type" not in headers:
            headers["content_type"] = payload.inferred_content_type

        self.payload = payload

    def __repr__(self):
        # type: (...) -> str
        # Fix: the format string was empty, so repr() raised
        # "not all arguments converted during string formatting".
        return "<Item headers=%r payload=%r data_category=%r>" % (
            self.headers,
            self.payload,
            self.data_category,
        )

    @property
    def type(self):
        # type: (...) -> Optional[str]
        return self.headers.get("type")

    @property
    def data_category(self):
        # type: (...) -> EventDataCategory
        """Map the item type to the rate-limit/client-report category."""
        ty = self.headers.get("type")
        if ty == "session":
            return "session"
        elif ty == "attachment":
            return "attachment"
        elif ty == "transaction":
            return "transaction"
        elif ty == "event":
            return "error"
        elif ty == "client_report":
            return "internal"
        else:
            return "default"

    def get_bytes(self):
        # type: (...) -> bytes
        return self.payload.get_bytes()

    def get_event(self):
        # type: (...) -> Optional[Event]
        """
        Returns an error event if there is one.
        """
        if self.type == "event" and self.payload.json is not None:
            return self.payload.json
        return None

    def get_transaction_event(self):
        # type: (...) -> Optional[Event]
        if self.type == "transaction" and self.payload.json is not None:
            return self.payload.json
        return None

    def serialize_into(
        self, f # type: Any
    ):
        # type: (...) -> None
        # Wire format: JSON headers (with payload length), newline,
        # raw payload bytes, newline.
        headers = dict(self.headers)
        bytes = self.get_bytes()
        headers["length"] = len(bytes)
        f.write(json_dumps(headers))
        f.write(b"\n")
        f.write(bytes)
        f.write(b"\n")

    def serialize(self):
        # type: (...) -> bytes
        out = io.BytesIO()
        self.serialize_into(out)
        return out.getvalue()

    @classmethod
    def deserialize_from(
        cls, f # type: Any
    ):
        # type: (...) -> Optional[Item]
        line = f.readline().rstrip()
        if not line:
            return None
        headers = parse_json(line)
        length = headers["length"]
        payload = f.read(length)
        # Event-like payloads are re-parsed into JSON so `get_event` works.
        if headers.get("type") in ("event", "transaction"):
            rv = cls(headers=headers, payload=PayloadRef(json=parse_json(payload)))
        else:
            rv = cls(headers=headers, payload=payload)
        f.readline()
        return rv

    @classmethod
    def deserialize(
        cls, bytes # type: bytes
    ):
        # type: (...) -> Optional[Item]
        return cls.deserialize_from(io.BytesIO(bytes))
sentry-python-1.4.3/sentry_sdk/hub.py 0000664 0000000 0000000 00000053343 14125057761 0017717 0 ustar 00root root 0000000 0000000 import copy
import sys
from datetime import datetime
from contextlib import contextmanager
from sentry_sdk._compat import with_metaclass
from sentry_sdk.scope import Scope
from sentry_sdk.client import Client
from sentry_sdk.tracing import Span, Transaction
from sentry_sdk.session import Session
from sentry_sdk.utils import (
exc_info_from_error,
event_from_exception,
logger,
ContextVar,
)
from sentry_sdk._types import MYPY
if MYPY:
from typing import Union
from typing import Any
from typing import Optional
from typing import Tuple
from typing import Dict
from typing import List
from typing import Callable
from typing import Generator
from typing import Type
from typing import TypeVar
from typing import overload
from typing import ContextManager
from sentry_sdk.integrations import Integration
from sentry_sdk._types import (
Event,
Hint,
Breadcrumb,
BreadcrumbHint,
ExcInfo,
)
from sentry_sdk.consts import ClientConstructor
T = TypeVar("T")
else:
def overload(x):
# type: (T) -> T
return x
_local = ContextVar("sentry_current_hub")
def _update_scope(base, scope_change, scope_kwargs):
# type: (Scope, Optional[Any], Dict[str, Any]) -> Scope
if scope_change and scope_kwargs:
raise TypeError("cannot provide scope and kwargs")
if scope_change is not None:
final_scope = copy.copy(base)
if callable(scope_change):
scope_change(final_scope)
else:
final_scope.update_from_scope(scope_change)
elif scope_kwargs:
final_scope = copy.copy(base)
final_scope.update_from_kwargs(**scope_kwargs)
else:
final_scope = base
return final_scope
def _should_send_default_pii():
    # type: () -> bool
    # True only when a client is bound to the current hub and it was
    # configured with `send_default_pii=True`; integrations consult this
    # before attaching request/user data to events.
    client = Hub.current.client
    if not client:
        return False
    return client.options["send_default_pii"]
class _InitGuard(object):
def __init__(self, client):
# type: (Client) -> None
self._client = client
def __enter__(self):
# type: () -> _InitGuard
return self
def __exit__(self, exc_type, exc_value, tb):
# type: (Any, Any, Any) -> None
c = self._client
if c is not None:
c.close()
def _init(*args, **kwargs):
    # type: (*Optional[str], **Any) -> ContextManager[Any]
    """Initializes the SDK and optionally integrations.
    This takes the same arguments as the client constructor.
    """
    client = Client(*args, **kwargs) # type: ignore
    # Bind the new client to the current hub so it becomes the default
    # client for subsequent capture calls.
    Hub.current.bind_client(client)
    # The guard closes the client when used as a context manager.
    rv = _InitGuard(client)
    return rv
from sentry_sdk._types import MYPY
if MYPY:
# Make mypy, PyCharm and other static analyzers think `init` is a type to
# have nicer autocompletion for params.
#
# Use `ClientConstructor` to define the argument types of `init` and
# `ContextManager[Any]` to tell static analyzers about the return type.
class init(ClientConstructor, ContextManager[Any]): # noqa: N801
pass
else:
# Alias `init` for actual usage. Go through the lambda indirection to throw
# PyCharm off of the weakly typed signature (it would otherwise discover
# both the weakly typed signature of `_init` and our faked `init` type).
init = (lambda: _init)()
class HubMeta(type):
    # Metaclass providing `Hub.current` / `Hub.main` as class-level
    # properties backed by the `_local` context variable.
    @property
    def current(cls):
        # type: () -> Hub
        """Returns the current instance of the hub."""
        rv = _local.get(None)
        if rv is None:
            # Lazily derive a thread/task-local hub from the global one on
            # first access in this context.
            rv = Hub(GLOBAL_HUB)
            _local.set(rv)
        return rv

    @property
    def main(cls):
        # type: () -> Hub
        """Returns the main instance of the hub."""
        return GLOBAL_HUB
class _ScopeManager(object):
    # Context manager used by `Hub.push_scope`: remembers the hub's stack
    # depth and top layer on creation and verifies on exit that the stack
    # was unwound back to exactly that layer, logging any mismatch.
    def __init__(self, hub):
        # type: (Hub) -> None
        self._hub = hub
        self._original_len = len(hub._stack)
        self._layer = hub._stack[-1]

    def __enter__(self):
        # type: () -> Scope
        scope = self._layer[1]
        assert scope is not None
        return scope

    def __exit__(self, exc_type, exc_value, tb):
        # type: (Any, Any, Any) -> None
        current_len = len(self._hub._stack)
        if current_len < self._original_len:
            # Someone already popped past our layer; nothing left to clean.
            logger.error(
                "Scope popped too soon. Popped %s scopes too many.",
                self._original_len - current_len,
            )
            return
        elif current_len > self._original_len:
            logger.warning(
                "Leaked %s scopes: %s",
                current_len - self._original_len,
                self._hub._stack[self._original_len :],
            )

        layer = self._hub._stack[self._original_len - 1]
        del self._hub._stack[self._original_len - 1 :]

        if layer[1] != self._layer[1]:
            logger.error(
                "Wrong scope found. Meant to pop %s, but popped %s.",
                layer[1],
                self._layer[1],
            )
        elif layer[0] != self._layer[0]:
            warning = (
                "init() called inside of pushed scope. This might be entirely "
                "legitimate but usually occurs when initializing the SDK inside "
                "a request handler or task/job function. Try to initialize the "
                "SDK as early as possible instead."
            )
            logger.warning(warning)
class Hub(with_metaclass(HubMeta)): # type: ignore
"""The hub wraps the concurrency management of the SDK. Each thread has
its own hub but the hub might transfer with the flow of execution if
context vars are available.
If the hub is used with a with statement it's temporarily activated.
"""
_stack = None # type: List[Tuple[Optional[Client], Scope]]
# Mypy doesn't pick up on the metaclass.
if MYPY:
current = None # type: Hub
main = None # type: Hub
def __init__(
self,
client_or_hub=None, # type: Optional[Union[Hub, Client]]
scope=None, # type: Optional[Any]
):
# type: (...) -> None
if isinstance(client_or_hub, Hub):
hub = client_or_hub
client, other_scope = hub._stack[-1]
if scope is None:
scope = copy.copy(other_scope)
else:
client = client_or_hub
if scope is None:
scope = Scope()
self._stack = [(client, scope)]
self._last_event_id = None # type: Optional[str]
self._old_hubs = [] # type: List[Hub]
def __enter__(self):
# type: () -> Hub
self._old_hubs.append(Hub.current)
_local.set(self)
return self
def __exit__(
self,
exc_type, # type: Optional[type]
exc_value, # type: Optional[BaseException]
tb, # type: Optional[Any]
):
# type: (...) -> None
old = self._old_hubs.pop()
_local.set(old)
def run(
self, callback # type: Callable[[], T]
):
# type: (...) -> T
"""Runs a callback in the context of the hub. Alternatively the
with statement can be used on the hub directly.
"""
with self:
return callback()
    def get_integration(
        self, name_or_class  # type: Union[str, Type[Integration]]
    ):
        # type: (...) -> Any
        """Returns the integration for this hub by name or class.  If there
        is no client bound or the client does not have that integration
        then `None` is returned.

        If the return value is not `None` the hub is guaranteed to have a
        client attached.
        """
        if isinstance(name_or_class, str):
            integration_name = name_or_class
        elif name_or_class.identifier is not None:
            # An Integration subclass: look it up by its declared identifier.
            integration_name = name_or_class.identifier
        else:
            raise ValueError("Integration has no name")
        client = self.client
        if client is not None:
            rv = client.integrations.get(integration_name)
            if rv is not None:
                return rv
        # Falls through to an implicit None when there is no client or the
        # integration is not enabled on it.
    @property
    def client(self):
        # type: () -> Optional[Client]
        """Returns the current client on the hub."""
        return self._stack[-1][0]

    @property
    def scope(self):
        # type: () -> Scope
        """Returns the current scope on the hub."""
        return self._stack[-1][1]

    def last_event_id(self):
        # type: () -> Optional[str]
        """Returns the last event ID."""
        return self._last_event_id

    def bind_client(
        self, new  # type: Optional[Client]
    ):
        # type: (...) -> None
        """Binds a new client to the hub."""
        # Replace only the client half of the top layer; the scope is kept.
        top = self._stack[-1]
        self._stack[-1] = (new, top[1])
    def capture_event(
        self,
        event,  # type: Event
        hint=None,  # type: Optional[Hint]
        scope=None,  # type: Optional[Any]
        **scope_args  # type: Any
    ):
        # type: (...) -> Optional[str]
        """Captures an event. Alias of :py:meth:`sentry_sdk.Client.capture_event`."""
        client, top_scope = self._stack[-1]
        # Merge an ad-hoc scope and/or scope kwargs into the top scope.
        scope = _update_scope(top_scope, scope, scope_args)
        if client is not None:
            is_transaction = event.get("type") == "transaction"
            rv = client.capture_event(event, hint, scope)
            # Transactions intentionally do not update last_event_id; it is
            # meant to point at the last *error* event.
            if rv is not None and not is_transaction:
                self._last_event_id = rv
            return rv
        return None
def capture_message(
self,
message, # type: str
level=None, # type: Optional[str]
scope=None, # type: Optional[Any]
**scope_args # type: Any
):
# type: (...) -> Optional[str]
"""Captures a message. The message is just a string. If no level
is provided the default level is `info`.
:returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.Client.capture_event`).
"""
if self.client is None:
return None
if level is None:
level = "info"
return self.capture_event(
{"message": message, "level": level}, scope=scope, **scope_args
)
    def capture_exception(
        self,
        error=None,  # type: Optional[Union[BaseException, ExcInfo]]
        scope=None,  # type: Optional[Any]
        **scope_args  # type: Any
    ):
        # type: (...) -> Optional[str]
        """Captures an exception.

        :param error: An exception to catch. If `None`, `sys.exc_info()` will be used.

        :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.Client.capture_event`).
        """
        client = self.client
        if client is None:
            return None
        if error is not None:
            exc_info = exc_info_from_error(error)
        else:
            # No explicit error: report the exception currently being handled.
            exc_info = sys.exc_info()
        event, hint = event_from_exception(exc_info, client_options=client.options)
        try:
            return self.capture_event(event, hint=hint, scope=scope, **scope_args)
        except Exception:
            # SDK-internal failures must never propagate into user code.
            self._capture_internal_exception(sys.exc_info())
        return None
    def _capture_internal_exception(
        self, exc_info  # type: Any
    ):
        # type: (...) -> Any
        """
        Capture an exception that is likely caused by a bug in the SDK
        itself.

        These exceptions do not end up in Sentry and are just logged instead.
        """
        logger.error("Internal error in sentry_sdk", exc_info=exc_info)
    def add_breadcrumb(
        self,
        crumb=None,  # type: Optional[Breadcrumb]
        hint=None,  # type: Optional[BreadcrumbHint]
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """
        Adds a breadcrumb.

        :param crumb: Dictionary with the data as the sentry v7/v8 protocol expects.

        :param hint: An optional value that can be used by `before_breadcrumb`
            to customize the breadcrumbs that are emitted.
        """
        client, scope = self._stack[-1]
        if client is None:
            logger.info("Dropped breadcrumb because no client bound")
            return

        # Merge positional dict and keyword arguments into one crumb dict.
        crumb = dict(crumb or ())  # type: Breadcrumb
        crumb.update(kwargs)
        if not crumb:
            return

        hint = dict(hint or ())  # type: Hint

        # Fill protocol defaults the caller did not supply.
        if crumb.get("timestamp") is None:
            crumb["timestamp"] = datetime.utcnow()
        if crumb.get("type") is None:
            crumb["type"] = "default"

        # Give the user's before_breadcrumb hook a chance to mutate or drop it.
        if client.options["before_breadcrumb"] is not None:
            new_crumb = client.options["before_breadcrumb"](crumb, hint)
        else:
            new_crumb = crumb

        if new_crumb is not None:
            scope._breadcrumbs.append(new_crumb)
        else:
            logger.info("before breadcrumb dropped breadcrumb (%s)", crumb)

        # Enforce the configured ring-buffer size, evicting oldest first.
        max_breadcrumbs = client.options["max_breadcrumbs"]  # type: int
        while len(scope._breadcrumbs) > max_breadcrumbs:
            scope._breadcrumbs.popleft()
    def start_span(
        self,
        span=None,  # type: Optional[Span]
        **kwargs  # type: Any
    ):
        # type: (...) -> Span
        """
        Create and start timing a new span whose parent is the currently active
        span or transaction, if any. The return value is a span instance,
        typically used as a context manager to start and stop timing in a `with`
        block.

        Only spans contained in a transaction are sent to Sentry. Most
        integrations start a transaction at the appropriate time, for example
        for every incoming HTTP request. Use `start_transaction` to start a new
        transaction when one is not already in progress.
        """
        # TODO: consider removing this in a future release.
        # This is for backwards compatibility with releases before
        # start_transaction existed, to allow for a smoother transition.
        if isinstance(span, Transaction) or "transaction" in kwargs:
            deprecation_msg = (
                "Deprecated: use start_transaction to start transactions and "
                "Transaction.start_child to start spans."
            )
            if isinstance(span, Transaction):
                logger.warning(deprecation_msg)
                return self.start_transaction(span)
            if "transaction" in kwargs:
                logger.warning(deprecation_msg)
                name = kwargs.pop("transaction")
                return self.start_transaction(name=name, **kwargs)
        # A pre-built span is returned as-is.
        if span is not None:
            return span
        kwargs.setdefault("hub", self)
        # Parent the new span under the scope's active span when one exists.
        span = self.scope.span
        if span is not None:
            return span.start_child(**kwargs)
        # No active span: create a detached span (only sent if it ends up
        # inside a transaction).
        return Span(**kwargs)
    def start_transaction(
        self,
        transaction=None,  # type: Optional[Transaction]
        **kwargs  # type: Any
    ):
        # type: (...) -> Transaction
        """
        Start and return a transaction.

        Start an existing transaction if given, otherwise create and start a new
        transaction with kwargs.

        This is the entry point to manual tracing instrumentation.

        A tree structure can be built by adding child spans to the transaction,
        and child spans to other spans. To start a new child span within the
        transaction or any span, call the respective `.start_child()` method.

        Every child span must be finished before the transaction is finished,
        otherwise the unfinished spans are discarded.

        When used as context managers, spans and transactions are automatically
        finished at the end of the `with` block. If not using context managers,
        call the `.finish()` method.

        When the transaction is finished, it will be sent to Sentry with all its
        finished child spans.
        """
        # Extra user-supplied data forwarded to traces_sampler.
        custom_sampling_context = kwargs.pop("custom_sampling_context", {})

        # if we haven't been given a transaction, make one
        if transaction is None:
            kwargs.setdefault("hub", self)
            transaction = Transaction(**kwargs)

        # use traces_sample_rate, traces_sampler, and/or inheritance to make a
        # sampling decision
        sampling_context = {
            "transaction_context": transaction.to_json(),
            "parent_sampled": transaction.parent_sampled,
        }
        sampling_context.update(custom_sampling_context)
        transaction._set_initial_sampling_decision(sampling_context=sampling_context)

        # we don't bother to keep spans if we already know we're not going to
        # send the transaction
        if transaction.sampled:
            max_spans = (
                self.client and self.client.options["_experiments"].get("max_spans")
            ) or 1000
            transaction.init_span_recorder(maxlen=max_spans)

        return transaction
    @overload
    def push_scope(  # noqa: F811
        self, callback=None  # type: Optional[None]
    ):
        # type: (...) -> ContextManager[Scope]
        pass

    @overload
    def push_scope(  # noqa: F811
        self, callback  # type: Callable[[Scope], None]
    ):
        # type: (...) -> None
        pass

    def push_scope(  # noqa
        self, callback=None  # type: Optional[Callable[[Scope], None]]
    ):
        # type: (...) -> Optional[ContextManager[Scope]]
        """
        Pushes a new layer on the scope stack.

        :param callback: If provided, this method pushes a scope, calls
            `callback`, and pops the scope again.

        :returns: If no `callback` is provided, a context manager that should
            be used to pop the scope again.
        """
        if callback is not None:
            # Callback form: push, run callback, pop (via the context manager).
            with self.push_scope() as scope:
                callback(scope)
            return None

        client, scope = self._stack[-1]
        # The new layer shares the client but gets a copy of the scope.
        new_layer = (client, copy.copy(scope))
        self._stack.append(new_layer)

        return _ScopeManager(self)
    def pop_scope_unsafe(self):
        # type: () -> Tuple[Optional[Client], Scope]
        """
        Pops a scope layer from the stack.

        Try to use the context manager :py:meth:`push_scope` instead.
        """
        rv = self._stack.pop()
        # NOTE(review): the invariant is checked *after* popping, so popping
        # the last layer raises AssertionError but still leaves the stack
        # empty (and the check disappears entirely under `python -O`).
        assert self._stack, "stack must have at least one layer"
        return rv
    @overload
    def configure_scope(  # noqa: F811
        self, callback=None  # type: Optional[None]
    ):
        # type: (...) -> ContextManager[Scope]
        pass

    @overload
    def configure_scope(  # noqa: F811
        self, callback  # type: Callable[[Scope], None]
    ):
        # type: (...) -> None
        pass

    def configure_scope(  # noqa
        self, callback=None  # type: Optional[Callable[[Scope], None]]
    ):  # noqa
        # type: (...) -> Optional[ContextManager[Scope]]
        """
        Reconfigures the scope.

        :param callback: If provided, call the callback with the current scope.

        :returns: If no callback is provided, returns a context manager that returns the scope.
        """
        client, scope = self._stack[-1]
        if callback is not None:
            # Only run the callback when events could actually be sent.
            if client is not None:
                callback(scope)
            return None

        @contextmanager
        def inner():
            # type: () -> Generator[Scope, None, None]
            if client is not None:
                yield scope
            else:
                # No client: hand out a throwaway scope so user code still runs.
                yield Scope()

        return inner()
    def start_session(
        self, session_mode="application"  # type: str
    ):
        # type: (...) -> None
        """Starts a new session."""
        # Close any session already in progress before opening a new one.
        self.end_session()
        client, scope = self._stack[-1]
        scope._session = Session(
            release=client.options["release"] if client else None,
            environment=client.options["environment"] if client else None,
            user=scope._user,
            session_mode=session_mode,
        )
def end_session(self):
# type: (...) -> None
"""Ends the current session if there is one."""
client, scope = self._stack[-1]
session = scope._session
self.scope._session = None
if session is not None:
session.close()
if client is not None:
client.capture_session(session)
    def stop_auto_session_tracking(self):
        # type: (...) -> None
        """Stops automatic session tracking.

        This temporarily disables automatic session tracking for the current
        scope when called.  To resume session tracking call
        `resume_auto_session_tracking`.
        """
        # Close any running session before disabling tracking.
        self.end_session()
        client, scope = self._stack[-1]
        # False means "explicitly disabled" (None means "not forced either way").
        scope._force_auto_session_tracking = False
    def resume_auto_session_tracking(self):
        # type: (...) -> None
        """Resumes automatic session tracking for the current scope if
        disabled earlier. This requires that generally automatic session
        tracking is enabled.
        """
        client, scope = self._stack[-1]
        # Reset the override; the global auto-tracking setting applies again.
        scope._force_auto_session_tracking = None
    def flush(
        self,
        timeout=None,  # type: Optional[float]
        callback=None,  # type: Optional[Callable[[int, float], None]]
    ):
        # type: (...) -> None
        """
        Alias for :py:meth:`sentry_sdk.Client.flush`
        """
        client, scope = self._stack[-1]
        # No-op when no client is bound.
        if client is not None:
            return client.flush(timeout=timeout, callback=callback)
    def iter_trace_propagation_headers(self, span=None):
        # type: (Optional[Span]) -> Generator[Tuple[str, str], None, None]
        """
        Return HTTP headers which allow propagation of trace data. Data taken
        from the span representing the request, if available, or the current
        span on the scope if not.
        """
        span = span or self.scope.span
        # Nothing to propagate without an active span.
        if not span:
            return

        client = self._stack[-1][0]
        # Respect the propagate_traces option; yield nothing when disabled.
        propagate_traces = client and client.options["propagate_traces"]
        if not propagate_traces:
            return

        for header in span.iter_headers():
            yield header
# The process-wide fallback hub, installed as the initial context-local hub.
GLOBAL_HUB = Hub()
_local.set(GLOBAL_HUB)
sentry-python-1.4.3/sentry_sdk/integrations/ 0000775 0000000 0000000 00000000000 14125057761 0021265 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/sentry_sdk/integrations/__init__.py 0000664 0000000 0000000 00000014555 14125057761 0023410 0 ustar 00root root 0000000 0000000 """This package"""
from __future__ import absolute_import
from threading import Lock
from sentry_sdk._compat import iteritems
from sentry_sdk.utils import logger
from sentry_sdk._types import MYPY
if MYPY:
from typing import Callable
from typing import Dict
from typing import Iterator
from typing import List
from typing import Set
from typing import Tuple
from typing import Type
_installer_lock = Lock()
_installed_integrations = set() # type: Set[str]
def _generate_default_integrations_iterator(integrations, auto_enabling_integrations):
    # type: (Tuple[str, ...], Tuple[str, ...]) -> Callable[[bool], Iterator[Type[Integration]]]
    """Build the ``iter_default_integrations`` function from dotted-path
    import strings, appending the list of paths to its docstring."""

    def iter_default_integrations(with_auto_enabling_integrations):
        # type: (bool) -> Iterator[Type[Integration]]
        """Returns an iterator of the default integration classes:"""
        from importlib import import_module

        if with_auto_enabling_integrations:
            all_import_strings = integrations + auto_enabling_integrations
        else:
            all_import_strings = integrations

        for import_string in all_import_strings:
            try:
                module, cls = import_string.rsplit(".", 1)
                yield getattr(import_module(module), cls)
            except (DidNotEnable, SyntaxError) as e:
                # Missing framework / unsupported runtime: skip quietly.
                logger.debug(
                    "Did not import default integration %s: %s", import_string, e
                )

    # __doc__ is None under `python -OO`; only extend it when it exists.
    if isinstance(iter_default_integrations.__doc__, str):
        for import_string in integrations:
            iter_default_integrations.__doc__ += "\n- `{}`".format(import_string)

    return iter_default_integrations
# Integrations enabled automatically (when their framework is importable)
# if the user opts into auto_enabling_integrations.
_AUTO_ENABLING_INTEGRATIONS = (
    "sentry_sdk.integrations.django.DjangoIntegration",
    "sentry_sdk.integrations.flask.FlaskIntegration",
    "sentry_sdk.integrations.bottle.BottleIntegration",
    "sentry_sdk.integrations.falcon.FalconIntegration",
    "sentry_sdk.integrations.sanic.SanicIntegration",
    "sentry_sdk.integrations.celery.CeleryIntegration",
    "sentry_sdk.integrations.rq.RqIntegration",
    "sentry_sdk.integrations.aiohttp.AioHttpIntegration",
    "sentry_sdk.integrations.tornado.TornadoIntegration",
    "sentry_sdk.integrations.sqlalchemy.SqlalchemyIntegration",
    "sentry_sdk.integrations.boto3.Boto3Integration",
)

iter_default_integrations = _generate_default_integrations_iterator(
    integrations=(
        # stdlib/base runtime integrations
        "sentry_sdk.integrations.logging.LoggingIntegration",
        "sentry_sdk.integrations.stdlib.StdlibIntegration",
        "sentry_sdk.integrations.excepthook.ExcepthookIntegration",
        "sentry_sdk.integrations.dedupe.DedupeIntegration",
        "sentry_sdk.integrations.atexit.AtexitIntegration",
        "sentry_sdk.integrations.modules.ModulesIntegration",
        "sentry_sdk.integrations.argv.ArgvIntegration",
        "sentry_sdk.integrations.threading.ThreadingIntegration",
    ),
    auto_enabling_integrations=_AUTO_ENABLING_INTEGRATIONS,
)

# The factory is single-use; remove it from the module namespace.
del _generate_default_integrations_iterator
def setup_integrations(
    integrations, with_defaults=True, with_auto_enabling_integrations=False
):
    # type: (List[Integration], bool, bool) -> Dict[str, Integration]
    """Given a list of integration instances this installs them all. When
    `with_defaults` is set to `True` then all default integrations are added
    unless they were already provided before.

    :returns: Mapping of integration identifier to installed instance.
    """
    integrations = dict(
        (integration.identifier, integration) for integration in integrations or ()
    )

    logger.debug("Setting up integrations (with default = %s)", with_defaults)

    # Integrations that are not explicitly set up by the user.
    used_as_default_integration = set()

    if with_defaults:
        for integration_cls in iter_default_integrations(
            with_auto_enabling_integrations
        ):
            if integration_cls.identifier not in integrations:
                instance = integration_cls()
                integrations[instance.identifier] = instance
                used_as_default_integration.add(instance.identifier)

    for identifier, integration in iteritems(integrations):
        # setup_once must run at most once per process, even across clients.
        with _installer_lock:
            if identifier not in _installed_integrations:
                logger.debug(
                    "Setting up previously not enabled integration %s", identifier
                )
                try:
                    type(integration).setup_once()
                except NotImplementedError:
                    # Legacy integrations implemented `install` instead.
                    if getattr(integration, "install", None) is not None:
                        logger.warning(
                            "Integration %s: The install method is "
                            "deprecated. Use `setup_once`.",
                            identifier,
                        )
                        integration.install()
                    else:
                        raise
                except DidNotEnable as e:
                    # Only swallow the failure for integrations the user did
                    # not ask for explicitly.
                    if identifier not in used_as_default_integration:
                        raise

                    logger.debug(
                        "Did not enable default integration %s: %s", identifier, e
                    )

                _installed_integrations.add(identifier)

    for identifier in integrations:
        logger.debug("Enabling integration %s", identifier)

    return integrations
class DidNotEnable(Exception):
    """
    The integration could not be enabled due to a trivial user error like
    `flask` not being installed for the `FlaskIntegration`.

    This exception is silently swallowed for default integrations, but reraised
    for explicitly enabled integrations.
    """


class Integration(object):
    """Baseclass for all integrations.

    To accept options for an integration, implement your own constructor that
    saves those options on `self`.
    """

    # Legacy hook predating setup_once; checked by setup_integrations.
    install = None
    """Legacy method, do not implement."""

    identifier = None  # type: str
    """String unique ID of integration type"""

    @staticmethod
    def setup_once():
        # type: () -> None
        """
        Initialize the integration.

        This function is only called once, ever. Configuration is not available
        at this point, so the only thing to do here is to hook into exception
        handlers, and perhaps do monkeypatches.

        Inside those hooks `Integration.current` can be used to access the
        instance again.
        """
        raise NotImplementedError()
sentry-python-1.4.3/sentry_sdk/integrations/_wsgi_common.py 0000664 0000000 0000000 00000011233 14125057761 0024317 0 ustar 00root root 0000000 0000000 import json
from sentry_sdk.hub import Hub, _should_send_default_pii
from sentry_sdk.utils import AnnotatedValue
from sentry_sdk._compat import text_type, iteritems
from sentry_sdk._types import MYPY
if MYPY:
import sentry_sdk
from typing import Any
from typing import Dict
from typing import Optional
from typing import Union
# WSGI/CGI environ keys whose values may contain PII (client IPs, cookies,
# credentials).  Fix: the original listed "HTTP_X_FORWARDED_FOR" twice; the
# duplicate is removed (membership semantics are unchanged).
SENSITIVE_ENV_KEYS = (
    "REMOTE_ADDR",
    "HTTP_X_FORWARDED_FOR",
    "HTTP_SET_COOKIE",
    "HTTP_COOKIE",
    "HTTP_AUTHORIZATION",
    "HTTP_X_REAL_IP",
)

# The same keys as plain HTTP header names (CGI "HTTP_" prefix stripped).
SENSITIVE_HEADERS = tuple(
    x[len("HTTP_") :] for x in SENSITIVE_ENV_KEYS if x.startswith("HTTP_")
)
def request_body_within_bounds(client, content_length):
    # type: (Optional[sentry_sdk.Client], int) -> bool
    """Decide whether a request body of ``content_length`` bytes may be
    attached to an event, per the client's ``request_bodies`` option
    ("never" | "small" | "medium" | anything else = always)."""
    if client is None:
        return False
    policy = client.options["request_bodies"]
    if policy == "never":
        return False
    if policy == "small":
        return content_length <= 10 ** 3
    if policy == "medium":
        return content_length <= 10 ** 4
    # Any other setting (e.g. "always") places no size limit.
    return True
class RequestExtractor(object):
    """Base class for framework-specific request-data extractors.

    Subclasses implement the raw accessors (``env``, ``cookies``,
    ``raw_data``, ``form``, ``files``, ``size_of_file``); this base class
    turns them into the ``request`` payload of a Sentry event.
    """

    def __init__(self, request):
        # type: (Any) -> None
        # The framework's native request object.
        self.request = request

    def extract_into_event(self, event):
        # type: (Dict[str, Any]) -> None
        """Populate ``event["request"]`` with cookies and body data, honoring
        the client's PII and request-body size settings."""
        client = Hub.current.client
        if client is None:
            return

        data = None  # type: Optional[Union[AnnotatedValue, Dict[str, Any]]]

        content_length = self.content_length()
        request_info = event.get("request", {})

        if _should_send_default_pii():
            request_info["cookies"] = dict(self.cookies())

        if not request_body_within_bounds(client, content_length):
            # Body too large / disabled by config: attach a placeholder that
            # records why it was removed and its original length.
            data = AnnotatedValue(
                "",
                {"rem": [["!config", "x", 0, content_length]], "len": content_length},
            )
        else:
            parsed_body = self.parsed_body()
            if parsed_body is not None:
                data = parsed_body
            elif self.raw_data():
                # A body exists but could not be parsed into a structure.
                data = AnnotatedValue(
                    "",
                    {"rem": [["!raw", "x", 0, content_length]], "len": content_length},
                )
            else:
                data = None

        if data is not None:
            request_info["data"] = data

        event["request"] = request_info

    def content_length(self):
        # type: () -> int
        # Missing or malformed CONTENT_LENGTH counts as 0.
        try:
            return int(self.env().get("CONTENT_LENGTH", 0))
        except ValueError:
            return 0

    def cookies(self):
        # type: () -> Dict[str, Any]
        raise NotImplementedError()

    def raw_data(self):
        # type: () -> Optional[Union[str, bytes]]
        raise NotImplementedError()

    def form(self):
        # type: () -> Optional[Dict[str, Any]]
        raise NotImplementedError()

    def parsed_body(self):
        # type: () -> Optional[Dict[str, Any]]
        """Return the structured body: form fields (with uploads replaced by
        size annotations) when present, otherwise parsed JSON."""
        form = self.form()
        files = self.files()
        if form or files:
            data = dict(iteritems(form))
            for k, v in iteritems(files):
                size = self.size_of_file(v)
                # File contents are never sent; only their size is recorded.
                data[k] = AnnotatedValue(
                    "", {"len": size, "rem": [["!raw", "x", 0, size]]}
                )

            return data

        return self.json()

    def is_json(self):
        # type: () -> bool
        return _is_json_content_type(self.env().get("CONTENT_TYPE"))

    def json(self):
        # type: () -> Optional[Any]
        """Best-effort JSON decode of the raw body; None on any failure."""
        try:
            if not self.is_json():
                return None

            raw_data = self.raw_data()
            if raw_data is None:
                return None

            if isinstance(raw_data, text_type):
                return json.loads(raw_data)
            else:
                return json.loads(raw_data.decode("utf-8"))
        except ValueError:
            pass

        return None

    def files(self):
        # type: () -> Optional[Dict[str, Any]]
        raise NotImplementedError()

    def size_of_file(self, file):
        # type: (Any) -> int
        raise NotImplementedError()

    def env(self):
        # type: () -> Dict[str, Any]
        raise NotImplementedError()
def _is_json_content_type(ct):
# type: (Optional[str]) -> bool
mt = (ct or "").split(";", 1)[0]
return (
mt == "application/json"
or (mt.startswith("application/"))
and mt.endswith("+json")
)
def _filter_headers(headers):
    # type: (Dict[str, str]) -> Dict[str, str]
    """Replace values of sensitive headers with a removal annotation, unless
    the client is configured to send PII (then headers pass unchanged)."""
    if _should_send_default_pii():
        return headers

    return {
        # Normalize "X-Forwarded-For" -> "X_FORWARDED_FOR" for the lookup.
        k: (
            v
            if k.upper().replace("-", "_") not in SENSITIVE_HEADERS
            else AnnotatedValue("", {"rem": [["!config", "x", 0, len(v)]]})
        )
        for k, v in iteritems(headers)
    }
sentry-python-1.4.3/sentry_sdk/integrations/aiohttp.py 0000664 0000000 0000000 00000017420 14125057761 0023313 0 ustar 00root root 0000000 0000000 import sys
import weakref
from sentry_sdk._compat import reraise
from sentry_sdk.hub import Hub
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.integrations.logging import ignore_logger
from sentry_sdk.integrations._wsgi_common import (
_filter_headers,
request_body_within_bounds,
)
from sentry_sdk.tracing import Transaction
from sentry_sdk.utils import (
capture_internal_exceptions,
event_from_exception,
transaction_from_function,
HAS_REAL_CONTEXTVARS,
CONTEXTVARS_ERROR_MESSAGE,
AnnotatedValue,
)
try:
import asyncio
from aiohttp import __version__ as AIOHTTP_VERSION
from aiohttp.web import Application, HTTPException, UrlDispatcher
except ImportError:
raise DidNotEnable("AIOHTTP not installed")
from sentry_sdk._types import MYPY
if MYPY:
from aiohttp.web_request import Request
from aiohttp.abc import AbstractMatchInfo
from typing import Any
from typing import Dict
from typing import Optional
from typing import Tuple
from typing import Callable
from typing import Union
from sentry_sdk.utils import ExcInfo
from sentry_sdk._types import EventProcessor
TRANSACTION_STYLE_VALUES = ("handler_name", "method_and_path_pattern")
class AioHttpIntegration(Integration):
    """Instruments aiohttp servers: per-request hub/scope isolation, error
    capture, and a transaction per handled request."""

    identifier = "aiohttp"

    def __init__(self, transaction_style="handler_name"):
        # type: (str) -> None
        # transaction_style controls how transactions are named: by the
        # handler function or by "METHOD /path/pattern".
        if transaction_style not in TRANSACTION_STYLE_VALUES:
            raise ValueError(
                "Invalid value for transaction_style: %s (must be in %s)"
                % (transaction_style, TRANSACTION_STYLE_VALUES)
            )
        self.transaction_style = transaction_style

    @staticmethod
    def setup_once():
        # type: () -> None
        """Patch aiohttp's Application._handle and UrlDispatcher.resolve."""
        try:
            version = tuple(map(int, AIOHTTP_VERSION.split(".")[:2]))
        except (TypeError, ValueError):
            raise DidNotEnable(
                "AIOHTTP version unparseable: {}".format(AIOHTTP_VERSION)
            )

        if version < (3, 4):
            raise DidNotEnable("AIOHTTP 3.4 or newer required.")

        if not HAS_REAL_CONTEXTVARS:
            # We better have contextvars or we're going to leak state between
            # requests.
            raise DidNotEnable(
                "The aiohttp integration for Sentry requires Python 3.7+ "
                " or aiocontextvars package." + CONTEXTVARS_ERROR_MESSAGE
            )

        # aiohttp's own server logger would double-report captured errors.
        ignore_logger("aiohttp.server")

        old_handle = Application._handle

        async def sentry_app_handle(self, request, *args, **kwargs):
            # type: (Any, Request, *Any, **Any) -> Any
            hub = Hub.current
            if hub.get_integration(AioHttpIntegration) is None:
                return await old_handle(self, request, *args, **kwargs)

            # Weak ref so the event processor doesn't keep requests alive.
            weak_request = weakref.ref(request)

            with Hub(hub) as hub:
                # Scope data will not leak between requests because aiohttp
                # create a task to wrap each request.
                with hub.configure_scope() as scope:
                    scope.clear_breadcrumbs()
                    scope.add_event_processor(_make_request_processor(weak_request))

                transaction = Transaction.continue_from_headers(
                    request.headers,
                    op="http.server",
                    # If this transaction name makes it to the UI, AIOHTTP's
                    # URL resolver did not find a route or died trying.
                    name="generic AIOHTTP request",
                )
                with hub.start_transaction(
                    transaction, custom_sampling_context={"aiohttp_request": request}
                ):
                    try:
                        # NOTE(review): *args/**kwargs are not forwarded on
                        # this call path (unlike the early-return above) —
                        # confirm against aiohttp's _handle signature.
                        response = await old_handle(self, request)
                    except HTTPException as e:
                        # aiohttp signals normal HTTP error responses via
                        # exceptions; record the status and let it propagate.
                        transaction.set_http_status(e.status_code)
                        raise
                    except asyncio.CancelledError:
                        transaction.set_status("cancelled")
                        raise
                    except Exception:
                        # This will probably map to a 500 but seems like we
                        # have no way to tell. Do not set span status.
                        reraise(*_capture_exception(hub))

                    transaction.set_http_status(response.status)
                    return response

        Application._handle = sentry_app_handle

        old_urldispatcher_resolve = UrlDispatcher.resolve

        async def sentry_urldispatcher_resolve(self, request):
            # type: (UrlDispatcher, Request) -> AbstractMatchInfo
            rv = await old_urldispatcher_resolve(self, request)

            hub = Hub.current
            integration = hub.get_integration(AioHttpIntegration)

            name = None

            try:
                if integration.transaction_style == "handler_name":
                    name = transaction_from_function(rv.handler)
                elif integration.transaction_style == "method_and_path_pattern":
                    route_info = rv.get_info()
                    pattern = route_info.get("path") or route_info.get("formatter")
                    name = "{} {}".format(request.method, pattern)
            except Exception:
                # Naming is best-effort; never break routing over it.
                pass

            if name is not None:
                with Hub.current.configure_scope() as scope:
                    scope.transaction = name

            return rv

        UrlDispatcher.resolve = sentry_urldispatcher_resolve
def _make_request_processor(weak_request):
    # type: (Callable[[], Request]) -> EventProcessor
    """Build an event processor that attaches request metadata (URL, query,
    method, headers, body) from a weakly-referenced aiohttp request."""

    def aiohttp_processor(
        event,  # type: Dict[str, Any]
        hint,  # type: Dict[str, Tuple[type, BaseException, Any]]
    ):
        # type: (...) -> Dict[str, Any]
        request = weak_request()
        # The request may already be garbage-collected by the time the
        # event is serialized; attach nothing in that case.
        if request is None:
            return event

        with capture_internal_exceptions():
            request_info = event.setdefault("request", {})

            request_info["url"] = "%s://%s%s" % (
                request.scheme,
                request.host,
                request.path,
            )

            request_info["query_string"] = request.query_string
            request_info["method"] = request.method
            request_info["env"] = {"REMOTE_ADDR": request.remote}

            hub = Hub.current
            request_info["headers"] = _filter_headers(dict(request.headers))

            # Just attach raw data here if it is within bounds, if available.
            # Unfortunately there's no way to get structured data from aiohttp
            # without awaiting on some coroutine.
            request_info["data"] = get_aiohttp_request_data(hub, request)

        return event

    return aiohttp_processor
def _capture_exception(hub):
    # type: (Hub) -> ExcInfo
    """Capture the currently-handled exception on ``hub`` and return its
    exc_info so the caller can re-raise it."""
    exc_info = sys.exc_info()
    event, hint = event_from_exception(
        exc_info,
        client_options=hub.client.options,  # type: ignore
        mechanism={"type": "aiohttp", "handled": False},
    )
    hub.capture_event(event, hint=hint)
    return exc_info
BODY_NOT_READ_MESSAGE = "[Can't show request body due to implementation details.]"
def get_aiohttp_request_data(hub, request):
    # type: (Hub, Request) -> Union[Optional[str], AnnotatedValue]
    """Return the request body as text, a size-limit annotation, a
    placeholder message, or None — without ever awaiting the request."""
    bytes_body = request._read_bytes

    if bytes_body is not None:
        # we have body to show
        if not request_body_within_bounds(hub.client, len(bytes_body)):
            # Over the configured limit: annotate instead of attaching.
            return AnnotatedValue(
                "",
                {"rem": [["!config", "x", 0, len(bytes_body)]], "len": len(bytes_body)},
            )
        encoding = request.charset or "utf-8"
        return bytes_body.decode(encoding, "replace")

    if request.can_read_body:
        # body exists but we can't show it
        return BODY_NOT_READ_MESSAGE

    # request has no body
    return None
sentry-python-1.4.3/sentry_sdk/integrations/argv.py 0000664 0000000 0000000 00000001661 14125057761 0022602 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
import sys
from sentry_sdk.hub import Hub
from sentry_sdk.integrations import Integration
from sentry_sdk.scope import add_global_event_processor
from sentry_sdk._types import MYPY
if MYPY:
from typing import Optional
from sentry_sdk._types import Event, Hint
class ArgvIntegration(Integration):
    """Attaches the process command line (``sys.argv``) to every event
    under ``extra["sys.argv"]``."""

    identifier = "argv"

    @staticmethod
    def setup_once():
        # type: () -> None
        @add_global_event_processor
        def processor(event, hint):
            # type: (Event, Optional[Hint]) -> Optional[Event]
            # Only act when this integration is enabled on the current hub.
            if Hub.current.get_integration(ArgvIntegration) is not None:
                extra = event.setdefault("extra", {})
                # If some event processor decided to set extra to e.g. an
                # `int`, don't crash. Not here.
                if isinstance(extra, dict):
                    extra["sys.argv"] = sys.argv

            return event
sentry-python-1.4.3/sentry_sdk/integrations/asgi.py 0000664 0000000 0000000 00000021414 14125057761 0022564 0 ustar 00root root 0000000 0000000 """
An ASGI middleware.
Based on Tom Christie's `sentry-asgi `_.
"""
import asyncio
import inspect
import urllib
from sentry_sdk._functools import partial
from sentry_sdk._types import MYPY
from sentry_sdk.hub import Hub, _should_send_default_pii
from sentry_sdk.integrations._wsgi_common import _filter_headers
from sentry_sdk.utils import (
ContextVar,
event_from_exception,
transaction_from_function,
HAS_REAL_CONTEXTVARS,
CONTEXTVARS_ERROR_MESSAGE,
)
from sentry_sdk.tracing import Transaction
if MYPY:
from typing import Dict
from typing import Any
from typing import Optional
from typing import Callable
from typing_extensions import Literal
from sentry_sdk._types import Event, Hint
_asgi_middleware_applied = ContextVar("sentry_asgi_middleware_applied")
_DEFAULT_TRANSACTION_NAME = "generic ASGI request"
def _capture_exception(hub, exc):
    # type: (Hub, Any) -> None
    """Report ``exc`` on ``hub`` with the ASGI mechanism, if a client is bound."""
    # Check client here as it might have been unset while streaming response
    if hub.client is not None:
        event, hint = event_from_exception(
            exc,
            client_options=hub.client.options,
            mechanism={"type": "asgi", "handled": False},
        )
        hub.capture_event(event, hint=hint)
def _looks_like_asgi3(app):
# type: (Any) -> bool
"""
Try to figure out if an application object supports ASGI3.
This is how uvicorn figures out the application version as well.
"""
if inspect.isclass(app):
return hasattr(app, "__await__")
elif inspect.isfunction(app):
return asyncio.iscoroutinefunction(app)
else:
call = getattr(app, "__call__", None) # noqa
return asyncio.iscoroutinefunction(call)
class SentryAsgiMiddleware:
__slots__ = ("app", "__call__")
    def __init__(self, app, unsafe_context_data=False):
        # type: (Any, bool) -> None
        """
        Instrument an ASGI application with Sentry. Provides HTTP/websocket
        data to sent events and basic handling for exceptions bubbling up
        through the middleware.

        :param unsafe_context_data: Disable errors when a proper contextvars installation could not be found. We do not recommend changing this from the default.
        """
        if not unsafe_context_data and not HAS_REAL_CONTEXTVARS:
            # We better have contextvars or we're going to leak state between
            # requests.
            raise RuntimeError(
                "The ASGI middleware for Sentry requires Python 3.7+ "
                "or the aiocontextvars package." + CONTEXTVARS_ERROR_MESSAGE
            )
        self.app = app

        # Bind the matching adapter for the app's ASGI protocol version.
        if _looks_like_asgi3(app):
            self.__call__ = self._run_asgi3  # type: Callable[..., Any]
        else:
            self.__call__ = self._run_asgi2
    def _run_asgi2(self, scope):
        # type: (Any) -> Any
        # ASGI2 is a two-callable protocol: app(scope) returns an awaitable
        # taking (receive, send).
        async def inner(receive, send):
            # type: (Any, Any) -> Any
            return await self._run_app(scope, lambda: self.app(scope)(receive, send))

        return inner

    async def _run_asgi3(self, scope, receive, send):
        # type: (Any, Any, Any) -> Any
        # ASGI3 is a single coroutine callable taking all three arguments.
        return await self._run_app(scope, lambda: self.app(scope, receive, send))
    async def _run_app(self, scope, callback):
        # type: (Any, Any) -> Any
        """Run one request through the app with a fresh hub, scope and
        transaction; guards against double-wrapping when the middleware is
        applied more than once in a stack."""
        is_recursive_asgi_middleware = _asgi_middleware_applied.get(False)

        if is_recursive_asgi_middleware:
            # An outer instance already set everything up; just capture errors.
            try:
                return await callback()
            except Exception as exc:
                _capture_exception(Hub.current, exc)
                raise exc from None

        _asgi_middleware_applied.set(True)
        try:
            # Fork the current hub so per-request scope changes stay isolated.
            hub = Hub(Hub.current)
            with hub:
                with hub.configure_scope() as sentry_scope:
                    sentry_scope.clear_breadcrumbs()
                    sentry_scope._name = "asgi"
                    processor = partial(self.event_processor, asgi_scope=scope)
                    sentry_scope.add_event_processor(processor)

                ty = scope["type"]

                if ty in ("http", "websocket"):
                    # Continue a distributed trace from incoming headers.
                    transaction = Transaction.continue_from_headers(
                        self._get_headers(scope),
                        op="{}.server".format(ty),
                    )
                else:
                    transaction = Transaction(op="asgi.server")

                transaction.name = _DEFAULT_TRANSACTION_NAME
                transaction.set_tag("asgi.type", ty)

                with hub.start_transaction(
                    transaction, custom_sampling_context={"asgi_scope": scope}
                ):
                    # XXX: Would be cool to have correct span status, but we
                    # would have to wrap send(). That is a bit hard to do with
                    # the current abstraction over ASGI 2/3.
                    try:
                        return await callback()
                    except Exception as exc:
                        _capture_exception(hub, exc)
                        raise exc from None
        finally:
            # Always clear the guard so later requests are instrumented.
            _asgi_middleware_applied.set(False)
    def event_processor(self, event, hint, asgi_scope):
        # type: (Event, Hint, Any) -> Optional[Event]
        # Enrich an outgoing event with request data from the ASGI scope.
        request_info = event.get("request", {})
        ty = asgi_scope["type"]
        if ty in ("http", "websocket"):
            request_info["method"] = asgi_scope.get("method")
            request_info["headers"] = headers = _filter_headers(
                self._get_headers(asgi_scope)
            )
            request_info["query_string"] = self._get_query(asgi_scope)
            request_info["url"] = self._get_url(
                asgi_scope, "http" if ty == "http" else "ws", headers.get("host")
            )
        client = asgi_scope.get("client")
        if client and _should_send_default_pii():
            # Only attach the client address when PII sending is enabled.
            request_info["env"] = {"REMOTE_ADDR": self._get_ip(asgi_scope)}
        if (
            event.get("transaction", _DEFAULT_TRANSACTION_NAME)
            == _DEFAULT_TRANSACTION_NAME
        ):
            endpoint = asgi_scope.get("endpoint")
            # Webframeworks like Starlette mutate the ASGI env once routing is
            # done, which is sometime after the request has started. If we have
            # an endpoint, overwrite our generic transaction name.
            if endpoint:
                event["transaction"] = transaction_from_function(endpoint)
        event["request"] = request_info
        return event
# Helper functions for extracting request data.
#
# Note: Those functions are not public API. If you want to mutate request
# data to your liking it's recommended to use the `before_send` callback
# for that.
def _get_url(self, scope, default_scheme, host):
# type: (Dict[str, Any], Literal["ws", "http"], Optional[str]) -> str
"""
Extract URL from the ASGI scope, without also including the querystring.
"""
scheme = scope.get("scheme", default_scheme)
server = scope.get("server", None)
path = scope.get("root_path", "") + scope.get("path", "")
if host:
return "%s://%s%s" % (scheme, host, path)
if server is not None:
host, port = server
default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}[scheme]
if port != default_port:
return "%s://%s:%s%s" % (scheme, host, port, path)
return "%s://%s%s" % (scheme, host, path)
return path
def _get_query(self, scope):
# type: (Any) -> Any
"""
Extract querystring from the ASGI scope, in the format that the Sentry protocol expects.
"""
qs = scope.get("query_string")
if not qs:
return None
return urllib.parse.unquote(qs.decode("latin-1"))
def _get_ip(self, scope):
# type: (Any) -> str
"""
Extract IP Address from the ASGI scope based on request headers with fallback to scope client.
"""
headers = self._get_headers(scope)
try:
return headers["x-forwarded-for"].split(",")[0].strip()
except (KeyError, IndexError):
pass
try:
return headers["x-real-ip"]
except KeyError:
pass
return scope.get("client")[0]
def _get_headers(self, scope):
# type: (Any) -> Dict[str, str]
"""
Extract headers from the ASGI scope, in the format that the Sentry protocol expects.
"""
headers = {} # type: Dict[str, str]
for raw_key, raw_value in scope["headers"]:
key = raw_key.decode("latin-1")
value = raw_value.decode("latin-1")
if key in headers:
headers[key] = headers[key] + ", " + value
else:
headers[key] = value
return headers
sentry-python-1.4.3/sentry_sdk/integrations/atexit.py 0000664 0000000 0000000 00000003455 14125057761 0023144 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
import os
import sys
import atexit
from sentry_sdk.hub import Hub
from sentry_sdk.utils import logger
from sentry_sdk.integrations import Integration
from sentry_sdk._types import MYPY
if MYPY:
from typing import Any
from typing import Optional
def default_callback(pending, timeout):
    # type: (int, int) -> None
    """Default shutdown callback set on the options.

    Writes a message to stderr informing the user that some events are
    still pending and that the process is waiting for them to flush out.
    """
    interrupt_key = "Break" if os.name == "nt" else "C"
    messages = [
        "Sentry is attempting to send %i pending error messages" % pending,
        "Waiting up to %s seconds" % timeout,
        "Press Ctrl-%s to quit" % interrupt_key,
    ]
    for message in messages:
        sys.stderr.write(message + "\n")
    sys.stderr.flush()
class AtexitIntegration(Integration):
    """Flushes pending events on interpreter shutdown via an `atexit` hook."""
    identifier = "atexit"
    def __init__(self, callback=None):
        # type: (Optional[Any]) -> None
        # callback(pending, timeout) is invoked while the client drains its
        # queue on shutdown; defaults to printing progress to stderr.
        if callback is None:
            callback = default_callback
        self.callback = callback
    @staticmethod
    def setup_once():
        # type: () -> None
        @atexit.register
        def _shutdown():
            # type: () -> None
            logger.debug("atexit: got shutdown signal")
            hub = Hub.main
            integration = hub.get_integration(AtexitIntegration)
            if integration is not None:
                logger.debug("atexit: shutting down client")
                # If there is a session on the hub, close it now.
                hub.end_session()
                # If an integration is there, a client has to be there.
                client = hub.client  # type: Any
                client.close(callback=integration.callback)
sentry-python-1.4.3/sentry_sdk/integrations/aws_lambda.py 0000664 0000000 0000000 00000035763 14125057761 0023747 0 ustar 00root root 0000000 0000000 from datetime import datetime, timedelta
from os import environ
import sys
from sentry_sdk.hub import Hub, _should_send_default_pii
from sentry_sdk.tracing import Transaction
from sentry_sdk._compat import reraise
from sentry_sdk.utils import (
AnnotatedValue,
capture_internal_exceptions,
event_from_exception,
logger,
TimeoutThread,
)
from sentry_sdk.integrations import Integration
from sentry_sdk.integrations._wsgi_common import _filter_headers
from sentry_sdk._types import MYPY
if MYPY:
from typing import Any
from typing import TypeVar
from typing import Callable
from typing import Optional
from sentry_sdk._types import EventProcessor, Event, Hint
F = TypeVar("F", bound=Callable[..., Any])
# Constants
TIMEOUT_WARNING_BUFFER = 1500 # Buffer time required to send timeout warning to Sentry
MILLIS_TO_SECONDS = 1000.0
def _wrap_init_error(init_error):
    # type: (F) -> F
    # Wrap LambdaRuntimeClient.post_init_error so that errors raised during
    # function initialization are reported to Sentry before being posted.
    def sentry_init_error(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        hub = Hub.current
        integration = hub.get_integration(AwsLambdaIntegration)
        if integration is None:
            return init_error(*args, **kwargs)
        # If an integration is there, a client has to be there.
        client = hub.client  # type: Any
        with capture_internal_exceptions():
            with hub.configure_scope() as scope:
                scope.clear_breadcrumbs()
            # Report the currently in-flight exception, if any.
            exc_info = sys.exc_info()
            if exc_info and all(exc_info):
                sentry_event, hint = event_from_exception(
                    exc_info,
                    client_options=client.options,
                    mechanism={"type": "aws_lambda", "handled": False},
                )
                hub.capture_event(sentry_event, hint=hint)
        return init_error(*args, **kwargs)
    return sentry_init_error  # type: ignore
def _wrap_handler(handler):
    # type: (F) -> F
    # Wrap the user's Lambda handler: per-invocation scope, a request event
    # processor, an optional timeout-warning thread, and a transaction.
    def sentry_handler(aws_event, aws_context, *args, **kwargs):
        # type: (Any, Any, *Any, **Any) -> Any
        # Per https://docs.aws.amazon.com/lambda/latest/dg/python-handler.html,
        # `event` here is *likely* a dictionary, but also might be a number of
        # other types (str, int, float, None).
        #
        # In some cases, it is a list (if the user is batch-invoking their
        # function, for example), in which case we'll use the first entry as a
        # representative from which to try pulling request data. (Presumably it
        # will be the same for all events in the list, since they're all hitting
        # the lambda in the same request.)
        if isinstance(aws_event, list):
            request_data = aws_event[0]
            batch_size = len(aws_event)
        else:
            request_data = aws_event
            batch_size = 1
        if not isinstance(request_data, dict):
            # If we're not dealing with a dictionary, we won't be able to get
            # headers, path, http method, etc in any case, so it's fine that
            # this is empty
            request_data = {}
        hub = Hub.current
        integration = hub.get_integration(AwsLambdaIntegration)
        if integration is None:
            return handler(aws_event, aws_context, *args, **kwargs)
        # If an integration is there, a client has to be there.
        client = hub.client  # type: Any
        configured_time = aws_context.get_remaining_time_in_millis()
        with hub.push_scope() as scope:
            timeout_thread = None
            with capture_internal_exceptions():
                scope.clear_breadcrumbs()
                scope.add_event_processor(
                    _make_request_event_processor(
                        request_data, aws_context, configured_time
                    )
                )
                # The region is the 4th colon-separated field of the ARN.
                scope.set_tag(
                    "aws_region", aws_context.invoked_function_arn.split(":")[3]
                )
                if batch_size > 1:
                    scope.set_tag("batch_request", True)
                    scope.set_tag("batch_size", batch_size)
                # Starting the Timeout thread only if the configured time is greater than Timeout warning
                # buffer and timeout_warning parameter is set True.
                if (
                    integration.timeout_warning
                    and configured_time > TIMEOUT_WARNING_BUFFER
                ):
                    waiting_time = (
                        configured_time - TIMEOUT_WARNING_BUFFER
                    ) / MILLIS_TO_SECONDS
                    timeout_thread = TimeoutThread(
                        waiting_time,
                        configured_time / MILLIS_TO_SECONDS,
                    )
                    # Starting the thread to raise timeout warning exception
                    timeout_thread.start()
            headers = request_data.get("headers")
            # AWS Service may set an explicit `{headers: None}`, we can't rely on `.get()`'s default.
            if headers is None:
                headers = {}
            transaction = Transaction.continue_from_headers(
                headers, op="serverless.function", name=aws_context.function_name
            )
            with hub.start_transaction(
                transaction,
                custom_sampling_context={
                    "aws_event": aws_event,
                    "aws_context": aws_context,
                },
            ):
                try:
                    return handler(aws_event, aws_context, *args, **kwargs)
                except Exception:
                    exc_info = sys.exc_info()
                    sentry_event, hint = event_from_exception(
                        exc_info,
                        client_options=client.options,
                        mechanism={"type": "aws_lambda", "handled": False},
                    )
                    hub.capture_event(sentry_event, hint=hint)
                    reraise(*exc_info)
                finally:
                    # Stop the warning thread so it cannot fire after return.
                    if timeout_thread:
                        timeout_thread.stop()
    return sentry_handler  # type: ignore
def _drain_queue():
    # type: () -> None
    """Flush pending Sentry events before AWS freezes or kills the process."""
    with capture_internal_exceptions():
        hub = Hub.current
        # Flush out the event queue before AWS kills the process — but only
        # when the integration is actually active.
        if hub.get_integration(AwsLambdaIntegration) is not None:
            hub.flush()
class AwsLambdaIntegration(Integration):
    # Patches the AWS Lambda Python runtime's bootstrap module so that
    # handler errors and init errors are reported, and the event queue is
    # drained before the execution environment is frozen.
    identifier = "aws_lambda"
    def __init__(self, timeout_warning=False):
        # type: (bool) -> None
        # When True, a warning is raised shortly before the function's
        # configured timeout is reached (see _wrap_handler).
        self.timeout_warning = timeout_warning
    @staticmethod
    def setup_once():
        # type: () -> None
        lambda_bootstrap = get_lambda_bootstrap()
        if not lambda_bootstrap:
            logger.warning(
                "Not running in AWS Lambda environment, "
                "AwsLambdaIntegration disabled (could not find bootstrap module)"
            )
            return
        if not hasattr(lambda_bootstrap, "handle_event_request"):
            logger.warning(
                "Not running in AWS Lambda environment, "
                "AwsLambdaIntegration disabled (could not find handle_event_request)"
            )
            return
        pre_37 = hasattr(lambda_bootstrap, "handle_http_request")  # Python 3.6 or 2.7
        if pre_37:
            old_handle_event_request = lambda_bootstrap.handle_event_request
            def sentry_handle_event_request(request_handler, *args, **kwargs):
                # type: (Any, *Any, **Any) -> Any
                request_handler = _wrap_handler(request_handler)
                return old_handle_event_request(request_handler, *args, **kwargs)
            lambda_bootstrap.handle_event_request = sentry_handle_event_request
            old_handle_http_request = lambda_bootstrap.handle_http_request
            def sentry_handle_http_request(request_handler, *args, **kwargs):
                # type: (Any, *Any, **Any) -> Any
                request_handler = _wrap_handler(request_handler)
                return old_handle_http_request(request_handler, *args, **kwargs)
            lambda_bootstrap.handle_http_request = sentry_handle_http_request
            # Patch to_json to drain the queue. This should work even when the
            # SDK is initialized inside of the handler
            old_to_json = lambda_bootstrap.to_json
            def sentry_to_json(*args, **kwargs):
                # type: (*Any, **Any) -> Any
                _drain_queue()
                return old_to_json(*args, **kwargs)
            lambda_bootstrap.to_json = sentry_to_json
        else:
            # Python 3.7+ runtime: patch the LambdaRuntimeClient instead.
            lambda_bootstrap.LambdaRuntimeClient.post_init_error = _wrap_init_error(
                lambda_bootstrap.LambdaRuntimeClient.post_init_error
            )
            old_handle_event_request = lambda_bootstrap.handle_event_request
            def sentry_handle_event_request(  # type: ignore
                lambda_runtime_client, request_handler, *args, **kwargs
            ):
                request_handler = _wrap_handler(request_handler)
                return old_handle_event_request(
                    lambda_runtime_client, request_handler, *args, **kwargs
                )
            lambda_bootstrap.handle_event_request = sentry_handle_event_request
            # Patch the runtime client to drain the queue. This should work
            # even when the SDK is initialized inside of the handler
            def _wrap_post_function(f):
                # type: (F) -> F
                def inner(*args, **kwargs):
                    # type: (*Any, **Any) -> Any
                    _drain_queue()
                    return f(*args, **kwargs)
                return inner  # type: ignore
            lambda_bootstrap.LambdaRuntimeClient.post_invocation_result = (
                _wrap_post_function(
                    lambda_bootstrap.LambdaRuntimeClient.post_invocation_result
                )
            )
            lambda_bootstrap.LambdaRuntimeClient.post_invocation_error = (
                _wrap_post_function(
                    lambda_bootstrap.LambdaRuntimeClient.post_invocation_error
                )
            )
def get_lambda_bootstrap():
    # type: () -> Optional[Any]
    """
    Locate the AWS Lambda runtime's bootstrap module, if any.

    Python 2.7: Everything is in `__main__`.

    Python 3.7: If the bootstrap module is *already imported*, it is the
    one we actually want to use (no idea what's in __main__).

    On Python 3.8 bootstrap is also importable, but will be the same file
    as __main__ imported under a different name:

        sys.modules['__main__'].__file__ == sys.modules['bootstrap'].__file__
        sys.modules['__main__'] is not sys.modules['bootstrap']

    On container builds using the `aws-lambda-python-runtime-interface-client`
    (awslamdaric) module, bootstrap is located in sys.modules['__main__'].bootstrap.

    Such a setup would then make all monkeypatches useless.
    """
    if "bootstrap" in sys.modules:
        return sys.modules["bootstrap"]
    main_module = sys.modules.get("__main__")
    if main_module is None:
        return None
    if hasattr(main_module, "bootstrap"):
        # awslambdaric python module in container builds
        return main_module.bootstrap  # type: ignore
    return main_module
def _make_request_event_processor(aws_event, aws_context, configured_timeout):
    # type: (Any, Any, Any) -> EventProcessor
    # Build an event processor that attaches Lambda context, CloudWatch log
    # links, and (API-Gateway-style) request data to outgoing events.
    start_time = datetime.utcnow()
    def event_processor(sentry_event, hint, start_time=start_time):
        # type: (Event, Hint, datetime) -> Optional[Event]
        remaining_time_in_milis = aws_context.get_remaining_time_in_millis()
        exec_duration = configured_timeout - remaining_time_in_milis
        extra = sentry_event.setdefault("extra", {})
        extra["lambda"] = {
            "function_name": aws_context.function_name,
            "function_version": aws_context.function_version,
            "invoked_function_arn": aws_context.invoked_function_arn,
            "aws_request_id": aws_context.aws_request_id,
            "execution_duration_in_millis": exec_duration,
            "remaining_time_in_millis": remaining_time_in_milis,
        }
        extra["cloudwatch logs"] = {
            "url": _get_cloudwatch_logs_url(aws_context, start_time),
            "log_group": aws_context.log_group_name,
            "log_stream": aws_context.log_stream_name,
        }
        request = sentry_event.get("request", {})
        # Only HTTP-shaped events (API Gateway etc.) carry these keys.
        if "httpMethod" in aws_event:
            request["method"] = aws_event["httpMethod"]
        request["url"] = _get_url(aws_event, aws_context)
        if "queryStringParameters" in aws_event:
            request["query_string"] = aws_event["queryStringParameters"]
        if "headers" in aws_event:
            request["headers"] = _filter_headers(aws_event["headers"])
        if _should_send_default_pii():
            # PII enabled: attach caller identity and raw body.
            user_info = sentry_event.setdefault("user", {})
            identity = aws_event.get("identity")
            if identity is None:
                identity = {}
            id = identity.get("userArn")
            if id is not None:
                user_info.setdefault("id", id)
            ip = identity.get("sourceIp")
            if ip is not None:
                user_info.setdefault("ip_address", ip)
            if "body" in aws_event:
                request["data"] = aws_event.get("body", "")
        else:
            if aws_event.get("body", None):
                # Unfortunately couldn't find a way to get structured body from AWS
                # event. Meaning every body is unstructured to us.
                request["data"] = AnnotatedValue("", {"rem": [["!raw", "x", 0, 0]]})
        sentry_event["request"] = request
        return sentry_event
    return event_processor
def _get_url(aws_event, aws_context):
# type: (Any, Any) -> str
path = aws_event.get("path", None)
headers = aws_event.get("headers")
if headers is None:
headers = {}
host = headers.get("Host", None)
proto = headers.get("X-Forwarded-Proto", None)
if proto and host and path:
return "{}://{}{}".format(proto, host, path)
return "awslambda:///{}".format(aws_context.function_name)
def _get_cloudwatch_logs_url(aws_context, start_time):
# type: (Any, datetime) -> str
"""
Generates a CloudWatchLogs console URL based on the context object
Arguments:
aws_context {Any} -- context from lambda handler
Returns:
str -- AWS Console URL to logs.
"""
formatstring = "%Y-%m-%dT%H:%M:%SZ"
region = environ.get("AWS_REGION", "")
url = (
"https://console.{domain}/cloudwatch/home?region={region}"
"#logEventViewer:group={log_group};stream={log_stream}"
";start={start_time};end={end_time}"
).format(
domain="amazonaws.cn" if region.startswith("cn-") else "aws.amazon.com",
region=region,
log_group=aws_context.log_group_name,
log_stream=aws_context.log_stream_name,
start_time=(start_time - timedelta(seconds=1)).strftime(formatstring),
end_time=(datetime.utcnow() + timedelta(seconds=2)).strftime(formatstring),
)
return url
sentry-python-1.4.3/sentry_sdk/integrations/beam.py 0000664 0000000 0000000 00000013036 14125057761 0022546 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
import sys
import types
from sentry_sdk._functools import wraps
from sentry_sdk.hub import Hub
from sentry_sdk._compat import reraise
from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
from sentry_sdk.integrations import Integration
from sentry_sdk.integrations.logging import ignore_logger
from sentry_sdk._types import MYPY
if MYPY:
from typing import Any
from typing import Iterator
from typing import TypeVar
from typing import Optional
from typing import Callable
from sentry_sdk.client import Client
from sentry_sdk._types import ExcInfo
T = TypeVar("T")
F = TypeVar("F", bound=Callable[..., Any])
WRAPPED_FUNC = "_wrapped_{}_"
INSPECT_FUNC = "_inspect_{}" # Required format per apache_beam/transforms/core.py
USED_FUNC = "_sentry_used_"
class BeamIntegration(Integration):
    # Patches Apache Beam's DoFn/ParDo so exceptions raised inside user
    # transforms are reported to Sentry.
    identifier = "beam"
    @staticmethod
    def setup_once():
        # type: () -> None
        from apache_beam.transforms.core import DoFn, ParDo  # type: ignore
        ignore_logger("root")
        ignore_logger("bundle_processor.create")
        function_patches = ["process", "start_bundle", "finish_bundle", "setup"]
        # Install inspect hooks so Beam's argspec introspection still sees
        # the original (unwrapped) user functions.
        for func_name in function_patches:
            setattr(
                DoFn,
                INSPECT_FUNC.format(func_name),
                _wrap_inspect_call(DoFn, func_name),
            )
        old_init = ParDo.__init__
        def sentry_init_pardo(self, fn, *args, **kwargs):
            # type: (ParDo, Any, *Any, **Any) -> Any
            # Do not monkey patch init twice
            if not getattr(self, "_sentry_is_patched", False):
                for func_name in function_patches:
                    if not hasattr(fn, func_name):
                        continue
                    wrapped_func = WRAPPED_FUNC.format(func_name)
                    # Check to see if inspect is set and process is not
                    # to avoid monkey patching process twice.
                    # Check to see if function is part of object for
                    # backwards compatibility.
                    process_func = getattr(fn, func_name)
                    inspect_func = getattr(fn, INSPECT_FUNC.format(func_name))
                    if not getattr(inspect_func, USED_FUNC, False) and not getattr(
                        process_func, USED_FUNC, False
                    ):
                        setattr(fn, wrapped_func, process_func)
                        setattr(fn, func_name, _wrap_task_call(process_func))
                self._sentry_is_patched = True
            old_init(self, fn, *args, **kwargs)
        ParDo.__init__ = sentry_init_pardo
def _wrap_inspect_call(cls, func_name):
    # type: (Any, Any) -> Any
    # Build the replacement for Beam's `_inspect_<name>` hook so that
    # argspec introspection sees the original user function, while the
    # callable itself gets wrapped for exception capture on first use.
    from apache_beam.typehints.decorators import getfullargspec  # type: ignore
    if not hasattr(cls, func_name):
        return None
    def _inspect(self):
        # type: (Any) -> Any
        """
        Inspect function overrides the way Beam gets argspec.
        """
        wrapped_func = WRAPPED_FUNC.format(func_name)
        if hasattr(self, wrapped_func):
            process_func = getattr(self, wrapped_func)
        else:
            process_func = getattr(self, func_name)
            # Wrap lazily here in case ParDo.__init__ never saw this fn.
            setattr(self, func_name, _wrap_task_call(process_func))
            setattr(self, wrapped_func, process_func)
        # getfullargspec is deprecated in more recent beam versions and get_function_args_defaults
        # (which uses Signatures internally) should be used instead.
        try:
            from apache_beam.transforms.core import get_function_args_defaults
            return get_function_args_defaults(process_func)
        except ImportError:
            return getfullargspec(process_func)
    setattr(_inspect, USED_FUNC, True)
    return _inspect
def _wrap_task_call(func):
    # type: (F) -> F
    """
    Wrap task call with a try catch to get exceptions.
    Pass the client on to raise_exception so it can get rebinded.
    """
    # Capture the client at wrap time; worker threads may have a hub with
    # no client bound.
    client = Hub.current.client
    @wraps(func)
    def _inner(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        try:
            gen = func(*args, **kwargs)
        except Exception:
            # raise_exception reports to Sentry and re-raises.
            raise_exception(client)
        if not isinstance(gen, types.GeneratorType):
            return gen
        # Generator results must be wrapped so exceptions raised while
        # iterating are also captured.
        return _wrap_generator_call(gen, client)
    setattr(_inner, USED_FUNC, True)
    return _inner  # type: ignore
def _capture_exception(exc_info, hub):
    # type: (ExcInfo, Hub) -> None
    """
    Send a Beam exception to Sentry, if the integration and a client are
    active on the given hub.
    """
    if hub.get_integration(BeamIntegration) is None:
        return
    client = hub.client
    if client is None:
        return
    event, hint = event_from_exception(
        exc_info,
        client_options=client.options,
        mechanism={"type": "beam", "handled": False},
    )
    hub.capture_event(event, hint=hint)
def raise_exception(client):
    # type: (Optional[Client]) -> None
    """
    Report the in-flight exception to Sentry and re-raise it.

    If the current hub has no client bound (e.g. on a fresh worker), rebind
    the client that was captured at wrap time.
    """
    current_hub = Hub.current
    if current_hub.client is None:
        current_hub.bind_client(client)
    exc = sys.exc_info()
    with capture_internal_exceptions():
        _capture_exception(exc, current_hub)
    reraise(*exc)
def _wrap_generator_call(gen, client):
# type: (Iterator[T], Optional[Client]) -> Iterator[T]
"""
Wrap the generator to handle any failures.
"""
while True:
try:
yield next(gen)
except StopIteration:
break
except Exception:
raise_exception(client)
sentry-python-1.4.3/sentry_sdk/integrations/boto3.py 0000664 0000000 0000000 00000010061 14125057761 0022663 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
from sentry_sdk import Hub
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.tracing import Span
from sentry_sdk._functools import partial
from sentry_sdk._types import MYPY
if MYPY:
from typing import Any
from typing import Dict
from typing import Optional
from typing import Type
try:
from botocore import __version__ as BOTOCORE_VERSION # type: ignore
from botocore.client import BaseClient # type: ignore
from botocore.response import StreamingBody # type: ignore
from botocore.awsrequest import AWSRequest # type: ignore
except ImportError:
raise DidNotEnable("botocore is not installed")
class Boto3Integration(Integration):
    # Instruments botocore clients: one span per AWS request, plus a child
    # span covering streaming-body reads.
    identifier = "boto3"
    @staticmethod
    def setup_once():
        # type: () -> None
        try:
            version = tuple(map(int, BOTOCORE_VERSION.split(".")[:3]))
        except (ValueError, TypeError):
            raise DidNotEnable(
                "Unparsable botocore version: {}".format(BOTOCORE_VERSION)
            )
        if version < (1, 12):
            raise DidNotEnable("Botocore 1.12 or newer is required.")
        orig_init = BaseClient.__init__
        def sentry_patched_init(self, *args, **kwargs):
            # type: (Type[BaseClient], *Any, **Any) -> None
            # Run normal initialization, then hook this client's event bus.
            orig_init(self, *args, **kwargs)
            meta = self.meta
            service_id = meta.service_model.service_id.hyphenize()
            meta.events.register(
                "request-created",
                partial(_sentry_request_created, service_id=service_id),
            )
            meta.events.register("after-call", _sentry_after_call)
            meta.events.register("after-call-error", _sentry_after_call_error)
        BaseClient.__init__ = sentry_patched_init
def _sentry_request_created(service_id, request, operation_name, **kwargs):
    # type: (str, AWSRequest, str, **Any) -> None
    # Botocore "request-created" hook: open a span for the AWS call and
    # stash it on the request context so the after-call hooks can close it.
    hub = Hub.current
    if hub.get_integration(Boto3Integration) is None:
        return
    description = "aws.%s.%s" % (service_id, operation_name)
    span = hub.start_span(
        hub=hub,
        op="aws.request",
        description=description,
    )
    span.set_tag("aws.service_id", service_id)
    span.set_tag("aws.operation_name", operation_name)
    span.set_data("aws.request.url", request.url)
    # We do it in order for subsequent http calls/retries be
    # attached to this span.
    span.__enter__()
    # request.context is an open-ended data-structure
    # where we can add anything useful in request life cycle.
    request.context["_sentrysdk_span"] = span
def _sentry_after_call(context, parsed, **kwargs):
    # type: (Dict[str, Any], Dict[str, Any], **Any) -> None
    # Botocore "after-call" hook: close the request span. If the response
    # body is streamed, open a child span that stays alive until the body
    # is fully read or closed.
    span = context.pop("_sentrysdk_span", None)  # type: Optional[Span]
    # Span could be absent if the integration is disabled.
    if span is None:
        return
    span.__exit__(None, None, None)
    body = parsed.get("Body")
    if not isinstance(body, StreamingBody):
        return
    streaming_span = span.start_child(
        op="aws.request.stream",
        description=span.description,
    )
    orig_read = body.read
    orig_close = body.close
    def sentry_streaming_body_read(*args, **kwargs):
        # type: (*Any, **Any) -> bytes
        try:
            ret = orig_read(*args, **kwargs)
            # An empty read signals end-of-stream.
            if not ret:
                streaming_span.finish()
            return ret
        except Exception:
            streaming_span.finish()
            raise
    body.read = sentry_streaming_body_read
    def sentry_streaming_body_close(*args, **kwargs):
        # type: (*Any, **Any) -> None
        streaming_span.finish()
        orig_close(*args, **kwargs)
    body.close = sentry_streaming_body_close
def _sentry_after_call_error(context, exception, **kwargs):
# type: (Dict[str, Any], Type[BaseException], **Any) -> None
span = context.pop("_sentrysdk_span", None) # type: Optional[Span]
# Span could be absent if the integration is disabled.
if span is None:
return
span.__exit__(type(exception), exception, None)
sentry-python-1.4.3/sentry_sdk/integrations/bottle.py 0000664 0000000 0000000 00000014077 14125057761 0023141 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
from sentry_sdk.hub import Hub
from sentry_sdk.utils import (
capture_internal_exceptions,
event_from_exception,
transaction_from_function,
)
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
from sentry_sdk.integrations._wsgi_common import RequestExtractor
from sentry_sdk._types import MYPY
if MYPY:
from sentry_sdk.integrations.wsgi import _ScopedResponse
from typing import Any
from typing import Dict
from typing import Callable
from typing import Optional
from bottle import FileUpload, FormsDict, LocalRequest # type: ignore
from sentry_sdk._types import EventProcessor
try:
from bottle import (
Bottle,
Route,
request as bottle_request,
HTTPResponse,
__version__ as BOTTLE_VERSION,
)
except ImportError:
raise DidNotEnable("Bottle not installed")
TRANSACTION_STYLE_VALUES = ("endpoint", "url")
class BottleIntegration(Integration):
    """Sentry integration for the Bottle web framework.

    Wraps the WSGI app for request-data capture, installs a per-request
    scope, and patches route callbacks so unhandled exceptions are
    reported to Sentry.
    """
    identifier = "bottle"
    transaction_style = None
    def __init__(self, transaction_style="endpoint"):
        # type: (str) -> None
        # "endpoint": name events after the route callback function.
        # "url": name events after the route's URL rule.
        if transaction_style not in TRANSACTION_STYLE_VALUES:
            raise ValueError(
                "Invalid value for transaction_style: %s (must be in %s)"
                % (transaction_style, TRANSACTION_STYLE_VALUES)
            )
        self.transaction_style = transaction_style
    @staticmethod
    def setup_once():
        # type: () -> None
        try:
            version = tuple(map(int, BOTTLE_VERSION.replace("-dev", "").split(".")))
        except (TypeError, ValueError):
            # BUGFIX: `version` is unbound when the parse above raises, so
            # formatting it here was a NameError; report the raw string.
            raise DidNotEnable("Unparsable Bottle version: {}".format(BOTTLE_VERSION))
        if version < (0, 12):
            raise DidNotEnable("Bottle 0.12 or newer required.")
        # monkey patch method Bottle.__call__
        old_app = Bottle.__call__
        def sentry_patched_wsgi_app(self, environ, start_response):
            # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
            hub = Hub.current
            integration = hub.get_integration(BottleIntegration)
            if integration is None:
                return old_app(self, environ, start_response)
            return SentryWsgiMiddleware(lambda *a, **kw: old_app(self, *a, **kw))(
                environ, start_response
            )
        Bottle.__call__ = sentry_patched_wsgi_app
        # monkey patch method Bottle._handle
        old_handle = Bottle._handle
        def _patched_handle(self, environ):
            # type: (Bottle, Dict[str, Any]) -> Any
            hub = Hub.current
            integration = hub.get_integration(BottleIntegration)
            if integration is None:
                return old_handle(self, environ)
            # create new scope
            scope_manager = hub.push_scope()
            with scope_manager:
                app = self
                with hub.configure_scope() as scope:
                    scope._name = "bottle"
                    scope.add_event_processor(
                        _make_request_event_processor(app, bottle_request, integration)
                    )
                res = old_handle(self, environ)
            # scope cleanup
            return res
        Bottle._handle = _patched_handle
        # monkey patch method Route._make_callback
        old_make_callback = Route._make_callback
        def patched_make_callback(self, *args, **kwargs):
            # type: (Route, *object, **object) -> Any
            hub = Hub.current
            integration = hub.get_integration(BottleIntegration)
            prepared_callback = old_make_callback(self, *args, **kwargs)
            if integration is None:
                return prepared_callback
            # If an integration is there, a client has to be there.
            client = hub.client  # type: Any
            def wrapped_callback(*args, **kwargs):
                # type: (*object, **object) -> Any
                try:
                    res = prepared_callback(*args, **kwargs)
                except HTTPResponse:
                    # Bottle uses HTTPResponse for control flow; not an error.
                    raise
                except Exception as exception:
                    event, hint = event_from_exception(
                        exception,
                        client_options=client.options,
                        mechanism={"type": "bottle", "handled": False},
                    )
                    hub.capture_event(event, hint=hint)
                    raise exception
                return res
            return wrapped_callback
        Route._make_callback = patched_make_callback
class BottleRequestExtractor(RequestExtractor):
    # Adapts Bottle's request object to the generic RequestExtractor API.
    def env(self):
        # type: () -> Dict[str, str]
        return self.request.environ
    def cookies(self):
        # type: () -> Dict[str, str]
        return self.request.cookies
    def raw_data(self):
        # type: () -> bytes
        return self.request.body.read()
    def form(self):
        # type: () -> FormsDict
        # JSON bodies are handled elsewhere; skip form parsing for them.
        if self.is_json():
            return None
        return self.request.forms.decode()
    def files(self):
        # type: () -> Optional[Dict[str, str]]
        if self.is_json():
            return None
        return self.request.files
    def size_of_file(self, file):
        # type: (FileUpload) -> int
        return file.content_length
def _make_request_event_processor(app, request, integration):
    # type: (Bottle, LocalRequest, BottleIntegration) -> EventProcessor
    # Build an event processor that names the transaction according to the
    # configured transaction_style and attaches request data to the event.
    def inner(event, hint):
        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
        try:
            if integration.transaction_style == "endpoint":
                event["transaction"] = request.route.name or transaction_from_function(
                    request.route.callback
                )
            elif integration.transaction_style == "url":
                event["transaction"] = request.route.rule
        except Exception:
            # Routing may not have happened yet; leave the name unchanged.
            pass
        with capture_internal_exceptions():
            BottleRequestExtractor(request).extract_into_event(event)
        return event
    return inner
sentry-python-1.4.3/sentry_sdk/integrations/celery.py 0000664 0000000 0000000 00000022477 14125057761 0023136 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
import sys
from sentry_sdk.hub import Hub
from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
from sentry_sdk.tracing import Transaction
from sentry_sdk._compat import reraise
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.integrations.logging import ignore_logger
from sentry_sdk._types import MYPY
from sentry_sdk._functools import wraps
if MYPY:
from typing import Any
from typing import TypeVar
from typing import Callable
from typing import Optional
from sentry_sdk._types import EventProcessor, Event, Hint, ExcInfo
F = TypeVar("F", bound=Callable[..., Any])
try:
from celery import VERSION as CELERY_VERSION # type: ignore
from celery.exceptions import ( # type: ignore
SoftTimeLimitExceeded,
Retry,
Ignore,
Reject,
)
except ImportError:
raise DidNotEnable("Celery not installed")
CELERY_CONTROL_FLOW_EXCEPTIONS = (Retry, Ignore, Reject)
class CeleryIntegration(Integration):
    # Patches Celery's tracer and Task.apply_async so task failures are
    # reported and trace headers are propagated to workers.
    identifier = "celery"
    def __init__(self, propagate_traces=True):
        # type: (bool) -> None
        # When True, outgoing apply_async calls carry Sentry trace headers.
        self.propagate_traces = propagate_traces
    @staticmethod
    def setup_once():
        # type: () -> None
        if CELERY_VERSION < (3,):
            raise DidNotEnable("Celery 3 or newer required.")
        import celery.app.trace as trace  # type: ignore
        old_build_tracer = trace.build_tracer
        def sentry_build_tracer(name, task, *args, **kwargs):
            # type: (Any, Any, *Any, **Any) -> Any
            if not getattr(task, "_sentry_is_patched", False):
                # Need to patch both methods because older celery sometimes
                # short-circuits to task.run if it thinks it's safe.
                task.__call__ = _wrap_task_call(task, task.__call__)
                task.run = _wrap_task_call(task, task.run)
                # `build_tracer` is apparently called for every task
                # invocation. Can't wrap every celery task for every invocation
                # or we will get infinitely nested wrapper functions.
                task._sentry_is_patched = True
            return _wrap_tracer(task, old_build_tracer(name, task, *args, **kwargs))
        trace.build_tracer = sentry_build_tracer
        from celery.app.task import Task  # type: ignore
        Task.apply_async = _wrap_apply_async(Task.apply_async)
        _patch_worker_exit()
        # This logger logs every status of every task that ran on the worker.
        # Meaning that every task's breadcrumbs are full of stuff like "Task
        # raised unexpected ".
        ignore_logger("celery.worker.job")
        ignore_logger("celery.app.trace")
        # This is stdout/err redirected to a logger, can't deal with this
        # (need event_level=logging.WARN to reproduce)
        ignore_logger("celery.redirected")
def _wrap_apply_async(f):
    # type: (F) -> F
    """Wrap ``Task.apply_async`` so that a ``celery.submit`` span is recorded
    and trace propagation headers are attached to the outgoing task message."""

    @wraps(f)
    def apply_async(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        hub = Hub.current
        integration = hub.get_integration(CeleryIntegration)

        # Integration missing or propagation disabled: call straight through.
        if integration is None or not integration.propagate_traces:
            return f(*args, **kwargs)

        with hub.start_span(op="celery.submit", description=args[0].name) as span:
            with capture_internal_exceptions():
                propagated = dict(hub.iter_trace_propagation_headers(span))

                if propagated:
                    # Note: kwargs can contain headers=None, so no setdefault!
                    # Unsure which backend though.
                    merged = kwargs.get("headers") or {}
                    merged.update(propagated)

                    # https://github.com/celery/celery/issues/4875
                    #
                    # Need to setdefault the inner headers too since other
                    # tracing tools (dd-trace-py) also employ this exact
                    # workaround and we don't want to break them.
                    merged.setdefault("headers", {}).update(propagated)
                    kwargs["headers"] = merged

            return f(*args, **kwargs)

    return apply_async  # type: ignore
def _wrap_tracer(task, f):
    # type: (Any, F) -> F
    # Need to wrap tracer for pushing the scope before prerun is sent, and
    # popping it after postrun is sent.
    #
    # This is the reason we don't use signals for hooking in the first place.
    # Also because in Celery 3, signal dispatch returns early if one handler
    # crashes.
    @wraps(f)
    def _inner(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        hub = Hub.current
        if hub.get_integration(CeleryIntegration) is None:
            # Integration not active: run the original tracer untouched.
            return f(*args, **kwargs)

        with hub.push_scope() as scope:
            scope._name = "celery"
            scope.clear_breadcrumbs()
            scope.add_event_processor(_make_event_processor(task, *args, **kwargs))

            transaction = None

            # Celery task objects are not a thing to be trusted. Even
            # something such as attribute access can fail.
            with capture_internal_exceptions():
                # args[3] is the request dict the tracer receives; its
                # "headers" entry carries the propagated trace context.
                transaction = Transaction.continue_from_headers(
                    args[3].get("headers") or {},
                    op="celery.task",
                    name="unknown celery task",
                )

                transaction.name = task.name
                transaction.set_status("ok")

            if transaction is None:
                # Creating the transaction failed above; run without tracing.
                return f(*args, **kwargs)

            with hub.start_transaction(
                transaction,
                custom_sampling_context={
                    "celery_job": {
                        "task": task.name,
                        # for some reason, args[1] is a list if non-empty but a
                        # tuple if empty
                        "args": list(args[1]),
                        "kwargs": args[2],
                    }
                },
            ):
                return f(*args, **kwargs)

    return _inner  # type: ignore
def _wrap_task_call(task, f):
    # type: (Any, F) -> F
    # Need to wrap task call because the exception is caught before we get to
    # see it. Also celery's reported stacktrace is untrustworthy.

    # functools.wraps is important here because celery-once looks at this
    # method's name.
    # https://github.com/getsentry/sentry-python/issues/421
    @wraps(f)
    def _inner(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        try:
            return f(*args, **kwargs)
        except Exception:
            # Capture first, then re-raise with the original traceback so
            # celery's own error handling still sees the exception.
            exc_info = sys.exc_info()
            with capture_internal_exceptions():
                _capture_exception(task, exc_info)
            reraise(*exc_info)

    return _inner  # type: ignore
def _make_event_processor(task, uuid, args, kwargs, request=None):
    # type: (Any, Any, Any, Any, Optional[Any]) -> EventProcessor
    """Build an event processor that tags events with the celery task id and
    attaches the job's name/args/kwargs as extra context."""

    def event_processor(event, hint):
        # type: (Event, Hint) -> Optional[Event]
        with capture_internal_exceptions():
            event.setdefault("tags", {})["celery_task_id"] = uuid
            event.setdefault("extra", {})["celery-job"] = {
                "task_name": task.name,
                "args": args,
                "kwargs": kwargs,
            }

        if "exc_info" in hint:
            with capture_internal_exceptions():
                if issubclass(hint["exc_info"][0], SoftTimeLimitExceeded):
                    # Group every soft-timeout of the same task together.
                    event["fingerprint"] = [
                        "celery",
                        "SoftTimeLimitExceeded",
                        getattr(task, "name", task),
                    ]

        return event

    return event_processor
def _capture_exception(task, exc_info):
    # type: (Any, ExcInfo) -> None
    """Send a task exception to Sentry, unless it is a celery control-flow
    exception or a type the task declares via ``throws``."""
    hub = Hub.current

    if hub.get_integration(CeleryIntegration) is None:
        return
    if isinstance(exc_info[1], CELERY_CONTROL_FLOW_EXCEPTIONS):
        # ??? Doesn't map to anything
        _set_status(hub, "aborted")
        return

    _set_status(hub, "internal_error")

    if hasattr(task, "throws") and isinstance(exc_info[1], task.throws):
        # Exceptions listed in `throws` are expected; don't report them.
        return

    # If an integration is there, a client has to be there.
    client = hub.client  # type: Any

    event, hint = event_from_exception(
        exc_info,
        client_options=client.options,
        mechanism={"type": "celery", "handled": False},
    )

    hub.capture_event(event, hint=hint)
def _set_status(hub, status):
    # type: (Hub, str) -> None
    """Best-effort: set ``status`` on the scope's current span, if any."""
    with capture_internal_exceptions():
        with hub.configure_scope() as scope:
            span = scope.span
            if span is not None:
                span.set_status(status)
def _patch_worker_exit():
    # type: () -> None
    # Need to flush queue before worker shutdown because a crashing worker will
    # call os._exit
    from billiard.pool import Worker  # type: ignore

    old_workloop = Worker.workloop

    def sentry_workloop(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        try:
            return old_workloop(*args, **kwargs)
        finally:
            # Flush pending events even when the workloop exits abnormally.
            with capture_internal_exceptions():
                hub = Hub.current
                if hub.get_integration(CeleryIntegration) is not None:
                    hub.flush()

    Worker.workloop = sentry_workloop
sentry-python-1.4.3/sentry_sdk/integrations/chalice.py 0000664 0000000 0000000 00000011002 14125057761 0023221 0 ustar 00root root 0000000 0000000 import sys
from sentry_sdk._compat import reraise
from sentry_sdk.hub import Hub
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.integrations.aws_lambda import _make_request_event_processor
from sentry_sdk.utils import (
capture_internal_exceptions,
event_from_exception,
)
from sentry_sdk._types import MYPY
from sentry_sdk._functools import wraps
import chalice # type: ignore
from chalice import Chalice, ChaliceViewError
from chalice.app import EventSourceHandler as ChaliceEventSourceHandler # type: ignore
if MYPY:
from typing import Any
from typing import Dict
from typing import TypeVar
from typing import Callable
F = TypeVar("F", bound=Callable[..., Any])
try:
from chalice import __version__ as CHALICE_VERSION
except ImportError:
raise DidNotEnable("Chalice is not installed")
class EventSourceHandler(ChaliceEventSourceHandler):  # type: ignore
    """Chalice event-source handler that reports unhandled exceptions to
    Sentry (then re-raises) and attaches Lambda request context to events."""

    def __call__(self, event, context):
        # type: (Any, Any) -> Any
        hub = Hub.current
        client = hub.client  # type: Any

        with hub.push_scope() as scope:
            with capture_internal_exceptions():
                configured_time = context.get_remaining_time_in_millis()
                scope.add_event_processor(
                    _make_request_event_processor(event, context, configured_time)
                )
            try:
                return ChaliceEventSourceHandler.__call__(self, event, context)
            except Exception:
                exc_info = sys.exc_info()
                # NOTE: shadows the `event` parameter from here on.
                event, hint = event_from_exception(
                    exc_info,
                    client_options=client.options,
                    mechanism={"type": "chalice", "handled": False},
                )
                hub.capture_event(event, hint=hint)
                # Flush synchronously: Lambda may freeze the process right
                # after the handler returns/raises.
                hub.flush()
                reraise(*exc_info)
def _get_view_function_response(app, view_function, function_args):
    # type: (Any, F, Any) -> F
    """Wrap a Chalice view function so exceptions are sent to Sentry.

    ``ChaliceViewError`` is re-raised untouched because Chalice converts it
    into a proper HTTP error response itself.
    """

    @wraps(view_function)
    def wrapped_view_function(**function_args):
        # type: (**Any) -> Any
        hub = Hub.current
        client = hub.client  # type: Any
        with hub.push_scope() as scope:
            with capture_internal_exceptions():
                configured_time = app.lambda_context.get_remaining_time_in_millis()
                scope.transaction = app.lambda_context.function_name
                scope.add_event_processor(
                    _make_request_event_processor(
                        app.current_request.to_dict(),
                        app.lambda_context,
                        configured_time,
                    )
                )
            try:
                return view_function(**function_args)
            except Exception as exc:
                if isinstance(exc, ChaliceViewError):
                    # Handled by Chalice's own error machinery.
                    raise
                exc_info = sys.exc_info()
                event, hint = event_from_exception(
                    exc_info,
                    client_options=client.options,
                    mechanism={"type": "chalice", "handled": False},
                )
                hub.capture_event(event, hint=hint)
                # Flush before Lambda may freeze the process.
                hub.flush()
                raise

    return wrapped_view_function  # type: ignore
class ChaliceIntegration(Integration):
    """Sentry integration for AWS Chalice applications."""

    identifier = "chalice"

    @staticmethod
    def setup_once():
        # type: () -> None
        try:
            version = tuple(map(int, CHALICE_VERSION.split(".")[:3]))
        except (ValueError, TypeError):
            raise DidNotEnable("Unparsable Chalice version: {}".format(CHALICE_VERSION))

        if version < (1, 20):
            # Chalice < 1.20 exposes the hook on the app class itself.
            old_get_view_function_response = Chalice._get_view_function_response
        else:
            # Chalice >= 1.20 moved it onto RestAPIEventHandler.
            from chalice.app import RestAPIEventHandler

            old_get_view_function_response = (
                RestAPIEventHandler._get_view_function_response
            )

        def sentry_event_response(app, view_function, function_args):
            # type: (Any, F, Dict[str, Any]) -> Any
            wrapped_view_function = _get_view_function_response(
                app, view_function, function_args
            )

            return old_get_view_function_response(
                app, wrapped_view_function, function_args
            )

        if version < (1, 20):
            Chalice._get_view_function_response = sentry_event_response
        else:
            RestAPIEventHandler._get_view_function_response = sentry_event_response
        # for everything else (like events)
        chalice.app.EventSourceHandler = EventSourceHandler
sentry-python-1.4.3/sentry_sdk/integrations/dedupe.py 0000664 0000000 0000000 00000002216 14125057761 0023106 0 ustar 00root root 0000000 0000000 from sentry_sdk.hub import Hub
from sentry_sdk.utils import ContextVar
from sentry_sdk.integrations import Integration
from sentry_sdk.scope import add_global_event_processor
from sentry_sdk._types import MYPY
if MYPY:
from typing import Optional
from sentry_sdk._types import Event, Hint
class DedupeIntegration(Integration):
    """Drops an event when its exception object was already captured once,
    preventing the same exception from being reported twice (e.g. by an
    explicit capture and again by a framework's error handler)."""

    identifier = "dedupe"

    def __init__(self):
        # type: () -> None
        # Context-local reference to the most recently captured exception.
        self._last_seen = ContextVar("last-seen")

    @staticmethod
    def setup_once():
        # type: () -> None
        @add_global_event_processor
        def processor(event, hint):
            # type: (Event, Optional[Hint]) -> Optional[Event]
            if hint is None:
                return event

            integration = Hub.current.get_integration(DedupeIntegration)

            if integration is None:
                return event

            exc_info = hint.get("exc_info", None)
            if exc_info is None:
                return event

            exc = exc_info[1]
            # Identity comparison: only the exact same exception object
            # counts as a duplicate.
            if integration._last_seen.get(None) is exc:
                return None
            integration._last_seen.set(exc)
            return event
sentry-python-1.4.3/sentry_sdk/integrations/django/ 0000775 0000000 0000000 00000000000 14125057761 0022527 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/sentry_sdk/integrations/django/__init__.py 0000664 0000000 0000000 00000037757 14125057761 0024663 0 ustar 00root root 0000000 0000000 # -*- coding: utf-8 -*-
from __future__ import absolute_import
import sys
import threading
import weakref
from sentry_sdk._types import MYPY
from sentry_sdk.hub import Hub, _should_send_default_pii
from sentry_sdk.scope import add_global_event_processor
from sentry_sdk.serializer import add_global_repr_processor
from sentry_sdk.tracing_utils import record_sql_queries
from sentry_sdk.utils import (
HAS_REAL_CONTEXTVARS,
CONTEXTVARS_ERROR_MESSAGE,
logger,
capture_internal_exceptions,
event_from_exception,
transaction_from_function,
walk_exception_chain,
)
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.integrations.logging import ignore_logger
from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
from sentry_sdk.integrations._wsgi_common import RequestExtractor
try:
from django import VERSION as DJANGO_VERSION
from django.core import signals
try:
from django.urls import resolve
except ImportError:
from django.core.urlresolvers import resolve
except ImportError:
raise DidNotEnable("Django not installed")
from sentry_sdk.integrations.django.transactions import LEGACY_RESOLVER
from sentry_sdk.integrations.django.templates import (
get_template_frame_from_exception,
patch_templates,
)
from sentry_sdk.integrations.django.middleware import patch_django_middlewares
from sentry_sdk.integrations.django.views import patch_views
if MYPY:
from typing import Any
from typing import Callable
from typing import Dict
from typing import Optional
from typing import Union
from typing import List
from django.core.handlers.wsgi import WSGIRequest
from django.http.response import HttpResponse
from django.http.request import QueryDict
from django.utils.datastructures import MultiValueDict
from sentry_sdk.integrations.wsgi import _ScopedResponse
from sentry_sdk._types import Event, Hint, EventProcessor, NotImplementedType
if DJANGO_VERSION < (1, 10):

    def is_authenticated(request_user):
        # type: (Any) -> bool
        # Django < 1.10: `is_authenticated` is a method.
        return request_user.is_authenticated()

else:

    def is_authenticated(request_user):
        # type: (Any) -> bool
        # Django >= 1.10: `is_authenticated` is a property.
        return request_user.is_authenticated
TRANSACTION_STYLE_VALUES = ("function_name", "url")
class DjangoIntegration(Integration):
    """Sentry integration for Django.

    Hooks the WSGI/ASGI handlers, the ``got_request_exception`` signal, SQL
    cursors, middlewares, views and templates.
    """

    identifier = "django"

    transaction_style = None
    middleware_spans = None

    def __init__(self, transaction_style="url", middleware_spans=True):
        # type: (str, bool) -> None
        # transaction_style: "url" (legacy URL-pattern resolver) or
        # "function_name" (dotted path of the view callable).
        if transaction_style not in TRANSACTION_STYLE_VALUES:
            raise ValueError(
                "Invalid value for transaction_style: %s (must be in %s)"
                % (transaction_style, TRANSACTION_STYLE_VALUES)
            )
        self.transaction_style = transaction_style
        self.middleware_spans = middleware_spans

    @staticmethod
    def setup_once():
        # type: () -> None
        """Apply all Django patches once per process.

        Raises:
            DidNotEnable: if Django is older than 1.6.
        """
        if DJANGO_VERSION < (1, 6):
            raise DidNotEnable("Django 1.6 or newer is required.")

        install_sql_hook()
        # Patch in our custom middleware.

        # logs an error for every 500
        ignore_logger("django.server")
        ignore_logger("django.request")

        from django.core.handlers.wsgi import WSGIHandler

        old_app = WSGIHandler.__call__

        def sentry_patched_wsgi_handler(self, environ, start_response):
            # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
            if Hub.current.get_integration(DjangoIntegration) is None:
                return old_app(self, environ, start_response)

            bound_old_app = old_app.__get__(self, WSGIHandler)

            from django.conf import settings

            use_x_forwarded_for = settings.USE_X_FORWARDED_HOST

            return SentryWsgiMiddleware(bound_old_app, use_x_forwarded_for)(
                environ, start_response
            )

        WSGIHandler.__call__ = sentry_patched_wsgi_handler

        _patch_get_response()

        _patch_django_asgi_handler()

        signals.got_request_exception.connect(_got_request_exception)

        @add_global_event_processor
        def process_django_templates(event, hint):
            # type: (Event, Optional[Hint]) -> Optional[Event]
            # Injects a synthetic template frame into the stacktrace when the
            # exception originated inside a Django template.
            if hint is None:
                return event

            exc_info = hint.get("exc_info", None)

            if exc_info is None:
                return event

            exception = event.get("exception", None)

            if exception is None:
                return event

            values = exception.get("values", None)

            if values is None:
                return event

            for exception, (_, exc_value, _) in zip(
                reversed(values), walk_exception_chain(exc_info)
            ):
                frame = get_template_frame_from_exception(exc_value)
                if frame is not None:
                    frames = exception.get("stacktrace", {}).get("frames", [])

                    # Insert right after the innermost template-machinery
                    # frame; otherwise append at the end.
                    for i in reversed(range(len(frames))):
                        f = frames[i]
                        if (
                            f.get("function") in ("Parser.parse", "parse", "render")
                            and f.get("module") == "django.template.base"
                        ):
                            i += 1
                            break
                    else:
                        i = len(frames)

                    frames.insert(i, frame)

            return event

        @add_global_repr_processor
        def _django_queryset_repr(value, hint):
            # type: (Any, Dict[str, Any]) -> Union[NotImplementedType, str]
            try:
                # Django 1.6 can fail to import `QuerySet` when Django settings
                # have not yet been initialized.
                #
                # If we fail to import, return `NotImplemented`. It's at least
                # unlikely that we have a query set in `value` when importing
                # `QuerySet` fails.
                from django.db.models.query import QuerySet
            except Exception:
                return NotImplemented

            if not isinstance(value, QuerySet) or value._result_cache:
                return NotImplemented

            # Do not call Hub.get_integration here. It is intentional that
            # running under a new hub does not suddenly start executing
            # querysets. This might be surprising to the user but it's likely
            # less annoying.

            return u"<%s from %s at 0x%x>" % (
                value.__class__.__name__,
                value.__module__,
                id(value),
            )

        _patch_channels()
        patch_django_middlewares()
        patch_views()
        patch_templates()
_DRF_PATCHED = False
_DRF_PATCH_LOCK = threading.Lock()
def _patch_drf():
    # type: () -> None
    """
    Patch Django Rest Framework for more/better request data. DRF's request
    type is a wrapper around Django's request type. The attribute we're
    interested in is `request.data`, which is a cached property containing a
    parsed request body. Reading a request body from that property is more
    reliable than reading from any of Django's own properties, as those don't
    hold payloads in memory and therefore can only be accessed once.

    We patch the Django request object to include a weak backreference to the
    DRF request object, such that we can later use either in
    `DjangoRequestExtractor`.

    This function is not called directly on SDK setup, because importing almost
    any part of Django Rest Framework will try to access Django settings (where
    `sentry_sdk.init()` might be called from in the first place). Instead we
    run this function on every request and do the patching on the first
    request.
    """
    global _DRF_PATCHED

    if _DRF_PATCHED:
        # Double-checked locking
        return

    with _DRF_PATCH_LOCK:
        if _DRF_PATCHED:
            return

        # We set this regardless of whether the code below succeeds or fails.
        # There is no point in trying to patch again on the next request.
        _DRF_PATCHED = True

        with capture_internal_exceptions():
            try:
                from rest_framework.views import APIView  # type: ignore
            except ImportError:
                pass
            else:
                old_drf_initial = APIView.initial

                def sentry_patched_drf_initial(self, request, *args, **kwargs):
                    # type: (APIView, Any, *Any, **Any) -> Any
                    with capture_internal_exceptions():
                        # Weak reference avoids a cycle between the DRF
                        # request and the underlying Django request.
                        # (Removed a stray dead `pass` statement here.)
                        request._request._sentry_drf_request_backref = weakref.ref(
                            request
                        )
                    return old_drf_initial(self, request, *args, **kwargs)

                APIView.initial = sentry_patched_drf_initial
def _patch_channels():
    # type: () -> None
    """Patch the django-channels ASGI handler, if channels is installed."""
    try:
        from channels.http import AsgiHandler  # type: ignore
    except ImportError:
        return

    if not HAS_REAL_CONTEXTVARS:
        # We better have contextvars or we're going to leak state between
        # requests.
        #
        # We cannot hard-raise here because channels may not be used at all in
        # the current process. That is the case when running traditional WSGI
        # workers in gunicorn+gevent and the websocket stuff in a separate
        # process.
        logger.warning(
            "We detected that you are using Django channels 2.0."
            + CONTEXTVARS_ERROR_MESSAGE
        )

    from sentry_sdk.integrations.django.asgi import patch_channels_asgi_handler_impl

    patch_channels_asgi_handler_impl(AsgiHandler)
def _patch_django_asgi_handler():
    # type: () -> None
    """Patch Django's own ASGI handler (Django >= 3.0), if available."""
    try:
        from django.core.handlers.asgi import ASGIHandler
    except ImportError:
        return

    if not HAS_REAL_CONTEXTVARS:
        # We better have contextvars or we're going to leak state between
        # requests.
        #
        # We cannot hard-raise here because Django's ASGI stuff may not be used
        # at all.
        logger.warning(
            "We detected that you are using Django 3." + CONTEXTVARS_ERROR_MESSAGE
        )

    from sentry_sdk.integrations.django.asgi import patch_django_asgi_handler_impl

    patch_django_asgi_handler_impl(ASGIHandler)
def _before_get_response(request):
    # type: (WSGIRequest) -> None
    """Per-request hook: set the scope transaction name (according to the
    configured transaction_style) and attach the request event processor."""
    hub = Hub.current
    integration = hub.get_integration(DjangoIntegration)
    if integration is None:
        return

    _patch_drf()

    with hub.configure_scope() as scope:
        # Rely on WSGI middleware to start a trace
        try:
            if integration.transaction_style == "function_name":
                fn = resolve(request.path).func
                scope.transaction = transaction_from_function(
                    getattr(fn, "view_class", fn)
                )
            elif integration.transaction_style == "url":
                scope.transaction = LEGACY_RESOLVER.resolve(request.path_info)
        except Exception:
            # Resolution failures must never break request handling.
            pass

        scope.add_event_processor(
            _make_event_processor(weakref.ref(request), integration)
        )
def _patch_get_response():
    # type: () -> None
    """
    patch get_response, because at that point we have the Django request object
    """
    from django.core.handlers.base import BaseHandler

    old_get_response = BaseHandler.get_response

    def sentry_patched_get_response(self, request):
        # type: (Any, WSGIRequest) -> Union[HttpResponse, BaseException]
        _before_get_response(request)
        return old_get_response(self, request)

    BaseHandler.get_response = sentry_patched_get_response

    # Django >= 3.1 also has an async variant of get_response.
    if hasattr(BaseHandler, "get_response_async"):
        from sentry_sdk.integrations.django.asgi import patch_get_response_async

        patch_get_response_async(BaseHandler, _before_get_response)
def _make_event_processor(weak_request, integration):
    # type: (Callable[[], WSGIRequest], DjangoIntegration) -> EventProcessor
    """Build an event processor that enriches events with request data
    (and user info when PII sending is enabled)."""

    def event_processor(event, hint):
        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
        # if the request is gone we are fine not logging the data from
        # it. This might happen if the processor is pushed away to
        # another thread.
        request = weak_request()
        if request is None:
            return event

        try:
            # Prefer the DRF request wrapper when present (set by _patch_drf);
            # its `.data` is a more reliable parsed body.
            drf_request = request._sentry_drf_request_backref()
            if drf_request is not None:
                request = drf_request
        except AttributeError:
            pass

        with capture_internal_exceptions():
            DjangoRequestExtractor(request).extract_into_event(event)

        if _should_send_default_pii():
            with capture_internal_exceptions():
                _set_user_info(request, event)

        return event

    return event_processor
def _got_request_exception(request=None, **kwargs):
    # type: (WSGIRequest, **Any) -> None
    """Receiver for Django's ``got_request_exception`` signal: captures the
    exception currently being handled and sends it to Sentry."""
    hub = Hub.current
    integration = hub.get_integration(DjangoIntegration)

    if integration is None:
        return

    # If an integration is there, a client has to be there.
    client = hub.client  # type: Any

    event, hint = event_from_exception(
        sys.exc_info(),
        client_options=client.options,
        mechanism={"type": "django", "handled": False},
    )
    hub.capture_event(event, hint=hint)
class DjangoRequestExtractor(RequestExtractor):
    """RequestExtractor adapter for Django's request object (and, via
    ``parsed_body``, for DRF's wrapper when it is what got passed in)."""

    def env(self):
        # type: () -> Dict[str, str]
        return self.request.META

    def cookies(self):
        # type: () -> Dict[str, str]
        return self.request.COOKIES

    def raw_data(self):
        # type: () -> bytes
        return self.request.body

    def form(self):
        # type: () -> QueryDict
        return self.request.POST

    def files(self):
        # type: () -> MultiValueDict
        return self.request.FILES

    def size_of_file(self, file):
        # type: (Any) -> int
        return file.size

    def parsed_body(self):
        # type: () -> Optional[Dict[str, Any]]
        try:
            # DRF requests expose a parsed body as `.data`; plain Django
            # requests don't, so fall back to the base implementation.
            return self.request.data
        except AttributeError:
            return RequestExtractor.parsed_body(self)
def _set_user_info(request, event):
    # type: (WSGIRequest, Dict[str, Any]) -> None
    """Populate ``event["user"]`` from the request's authenticated user,
    without overwriting fields that are already set."""
    user_info = event.setdefault("user", {})

    user = getattr(request, "user", None)

    if user is None or not is_authenticated(user):
        return

    # Each attribute access may raise on exotic user models; fill in what we
    # can and silently skip the rest.
    for key, getter in (
        ("id", lambda: str(user.pk)),
        ("email", lambda: user.email),
        ("username", lambda: user.get_username()),
    ):
        try:
            user_info.setdefault(key, getter())
        except Exception:
            pass
def install_sql_hook():
    # type: () -> None
    """If installed this causes Django's queries to be captured."""
    try:
        from django.db.backends.utils import CursorWrapper
    except ImportError:
        # Older Django keeps CursorWrapper in `util` (no "s").
        from django.db.backends.util import CursorWrapper

    try:
        real_execute = CursorWrapper.execute
        real_executemany = CursorWrapper.executemany
    except AttributeError:
        # This won't work on Django versions < 1.6
        return

    def execute(self, sql, params=None):
        # type: (CursorWrapper, Any, Optional[Any]) -> Any
        hub = Hub.current
        if hub.get_integration(DjangoIntegration) is None:
            return real_execute(self, sql, params)

        with record_sql_queries(
            hub, self.cursor, sql, params, paramstyle="format", executemany=False
        ):
            return real_execute(self, sql, params)

    def executemany(self, sql, param_list):
        # type: (CursorWrapper, Any, List[Any]) -> Any
        hub = Hub.current
        if hub.get_integration(DjangoIntegration) is None:
            return real_executemany(self, sql, param_list)

        with record_sql_queries(
            hub, self.cursor, sql, param_list, paramstyle="format", executemany=True
        ):
            return real_executemany(self, sql, param_list)

    CursorWrapper.execute = execute
    CursorWrapper.executemany = executemany
    # This logger would otherwise duplicate every query as a breadcrumb.
    ignore_logger("django.db.backends")
sentry-python-1.4.3/sentry_sdk/integrations/django/asgi.py 0000664 0000000 0000000 00000011274 14125057761 0024031 0 ustar 00root root 0000000 0000000 """
Instrumentation for Django 3.0
Since this file contains `async def` it is conditionally imported in
`sentry_sdk.integrations.django` (depending on the existence of
`django.core.handlers.asgi`.
"""
import asyncio
from sentry_sdk import Hub, _functools
from sentry_sdk._types import MYPY
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
if MYPY:
from typing import Any
from typing import Union
from typing import Callable
from django.http.response import HttpResponse
def patch_django_asgi_handler_impl(cls):
    # type: (Any) -> None
    """Wrap the given ASGI handler class's ``__call__`` with
    SentryAsgiMiddleware (ASGI 3 signature)."""
    from sentry_sdk.integrations.django import DjangoIntegration

    old_app = cls.__call__

    async def sentry_patched_asgi_handler(self, scope, receive, send):
        # type: (Any, Any, Any, Any) -> Any
        if Hub.current.get_integration(DjangoIntegration) is None:
            return await old_app(self, scope, receive, send)

        middleware = SentryAsgiMiddleware(
            old_app.__get__(self, cls), unsafe_context_data=True
        )._run_asgi3
        return await middleware(scope, receive, send)

    cls.__call__ = sentry_patched_asgi_handler
def patch_get_response_async(cls, _before_get_response):
    # type: (Any, Any) -> None
    """Patch the async ``get_response_async`` (Django >= 3.1) so the
    per-request hook runs before the response is produced."""
    old_get_response_async = cls.get_response_async

    async def sentry_patched_get_response_async(self, request):
        # type: (Any, Any) -> Union[HttpResponse, BaseException]
        _before_get_response(request)
        return await old_get_response_async(self, request)

    cls.get_response_async = sentry_patched_get_response_async
def patch_channels_asgi_handler_impl(cls):
    # type: (Any) -> None
    """Patch the channels ASGI handler class to run requests through
    SentryAsgiMiddleware.

    Channels < 3 used an ``(self, receive, send)`` call signature with the
    scope stored on the instance; Channels >= 3 matches Django's own ASGI
    handler, so the generic Django patch applies unchanged.
    """
    import channels  # type: ignore

    from sentry_sdk.integrations.django import DjangoIntegration

    # Compare numeric version components instead of the raw string:
    # lexicographically "10.0.0" < "3.0.0" is True, which would apply the
    # legacy patch to Channels >= 10 and break its call signature.
    version_parts = []
    for part in channels.__version__.split("."):
        if not part.isdigit():
            break
        version_parts.append(int(part))

    if tuple(version_parts) < (3,):
        old_app = cls.__call__

        async def sentry_patched_asgi_handler(self, receive, send):
            # type: (Any, Any, Any) -> Any
            if Hub.current.get_integration(DjangoIntegration) is None:
                return await old_app(self, receive, send)

            middleware = SentryAsgiMiddleware(
                lambda _scope: old_app.__get__(self, cls), unsafe_context_data=True
            )

            return await middleware(self.scope)(receive, send)

        cls.__call__ = sentry_patched_asgi_handler

    else:
        # The ASGI handler in Channels >= 3 has the same signature as
        # the Django handler.
        patch_django_asgi_handler_impl(cls)
def wrap_async_view(hub, callback):
    # type: (Hub, Any) -> Any
    """Wrap an async Django view so it executes inside a `django.view` span."""

    @_functools.wraps(callback)
    async def sentry_wrapped_callback(request, *args, **kwargs):
        # type: (Any, *Any, **Any) -> Any
        with hub.start_span(
            op="django.view", description=request.resolver_match.view_name
        ):
            return await callback(request, *args, **kwargs)

    return sentry_wrapped_callback
def _asgi_middleware_mixin_factory(_check_middleware_span):
    # type: (Callable[..., Any]) -> Any
    """
    Mixin class factory that generates a middleware mixin for handling requests
    in async mode.
    """

    class SentryASGIMixin:
        if MYPY:
            _inner = None

        def __init__(self, get_response):
            # type: (Callable[..., Any]) -> None
            self.get_response = get_response
            # Cached async entry point, resolved lazily in __acall__.
            self._acall_method = None
            self._async_check()

        def _async_check(self):
            # type: () -> None
            """
            If get_response is a coroutine function, turns us into async mode so
            a thread is not consumed during a whole request.
            Taken from django.utils.deprecation::MiddlewareMixin._async_check
            """
            if asyncio.iscoroutinefunction(self.get_response):
                self._is_coroutine = asyncio.coroutines._is_coroutine  # type: ignore

        def async_route_check(self):
            # type: () -> bool
            """
            Function that checks if we are in async mode,
            and if we are forwards the handling of requests to __acall__
            """
            return asyncio.iscoroutinefunction(self.get_response)

        async def __acall__(self, *args, **kwargs):
            # type: (*Any, **Any) -> Any
            f = self._acall_method
            if f is None:
                # Prefer the wrapped middleware's own async entry point,
                # otherwise await the instance itself.
                if hasattr(self._inner, "__acall__"):
                    self._acall_method = f = self._inner.__acall__  # type: ignore
                else:
                    self._acall_method = f = self._inner

            middleware_span = _check_middleware_span(old_method=f)

            if middleware_span is None:
                return await f(*args, **kwargs)

            with middleware_span:
                return await f(*args, **kwargs)

    return SentryASGIMixin
sentry-python-1.4.3/sentry_sdk/integrations/django/middleware.py 0000664 0000000 0000000 00000013264 14125057761 0025224 0 ustar 00root root 0000000 0000000 """
Create spans from Django middleware invocations
"""
from django import VERSION as DJANGO_VERSION
from sentry_sdk import Hub
from sentry_sdk._functools import wraps
from sentry_sdk._types import MYPY
from sentry_sdk.utils import (
ContextVar,
transaction_from_function,
capture_internal_exceptions,
)
if MYPY:
from typing import Any
from typing import Callable
from typing import Optional
from typing import TypeVar
from sentry_sdk.tracing import Span
F = TypeVar("F", bound=Callable[..., Any])
_import_string_should_wrap_middleware = ContextVar(
"import_string_should_wrap_middleware"
)
if DJANGO_VERSION < (1, 7):
import_string_name = "import_by_path"
else:
import_string_name = "import_string"
if DJANGO_VERSION < (3, 1):
_asgi_middleware_mixin_factory = lambda _: object
else:
from .asgi import _asgi_middleware_mixin_factory
def patch_django_middlewares():
    # type: () -> None
    """Patch Django's middleware import machinery so middlewares loaded by
    ``load_middleware`` are wrapped for span recording."""
    from django.core.handlers import base

    old_import_string = getattr(base, import_string_name)

    def sentry_patched_import_string(dotted_path):
        # type: (str) -> Any
        rv = old_import_string(dotted_path)

        # Only wrap imports that happen while load_middleware is running
        # (guarded by the context var below).
        if _import_string_should_wrap_middleware.get(None):
            rv = _wrap_middleware(rv, dotted_path)

        return rv

    setattr(base, import_string_name, sentry_patched_import_string)

    old_load_middleware = base.BaseHandler.load_middleware

    def sentry_patched_load_middleware(*args, **kwargs):
        # type: (Any, Any) -> Any
        _import_string_should_wrap_middleware.set(True)
        try:
            return old_load_middleware(*args, **kwargs)
        finally:
            _import_string_should_wrap_middleware.set(False)

    base.BaseHandler.load_middleware = sentry_patched_load_middleware
def _wrap_middleware(middleware, middleware_name):
    # type: (Any, str) -> Any
    """Return a class that wraps ``middleware`` so each of its methods runs
    inside a `django.middleware` span (when middleware spans are enabled)."""
    from sentry_sdk.integrations.django import DjangoIntegration

    def _check_middleware_span(old_method):
        # type: (Callable[..., Any]) -> Optional[Span]
        # Returns a started span for this middleware call, or None when the
        # integration is absent or middleware spans are disabled.
        hub = Hub.current
        integration = hub.get_integration(DjangoIntegration)
        if integration is None or not integration.middleware_spans:
            return None

        function_name = transaction_from_function(old_method)

        description = middleware_name
        function_basename = getattr(old_method, "__name__", None)
        if function_basename:
            description = "{}.{}".format(description, function_basename)

        middleware_span = hub.start_span(
            op="django.middleware", description=description
        )
        middleware_span.set_tag("django.function_name", function_name)
        middleware_span.set_tag("django.middleware_name", middleware_name)

        return middleware_span

    def _get_wrapped_method(old_method):
        # type: (F) -> F
        with capture_internal_exceptions():

            def sentry_wrapped_method(*args, **kwargs):
                # type: (*Any, **Any) -> Any
                middleware_span = _check_middleware_span(old_method)

                if middleware_span is None:
                    return old_method(*args, **kwargs)

                with middleware_span:
                    return old_method(*args, **kwargs)

            try:
                # fails for __call__ of function on Python 2 (see py2.7-django-1.11)
                sentry_wrapped_method = wraps(old_method)(sentry_wrapped_method)

                # Necessary for Django 3.1
                sentry_wrapped_method.__self__ = old_method.__self__  # type: ignore
            except Exception:
                pass

            return sentry_wrapped_method  # type: ignore

        # Only reached when wrapping raised inside capture_internal_exceptions:
        # fall back to the unwrapped method.
        return old_method

    class SentryWrappingMiddleware(
        _asgi_middleware_mixin_factory(_check_middleware_span)  # type: ignore
    ):

        async_capable = getattr(middleware, "async_capable", False)

        def __init__(self, get_response=None, *args, **kwargs):
            # type: (Optional[Callable[..., Any]], *Any, **Any) -> None
            if get_response:
                self._inner = middleware(get_response, *args, **kwargs)
            else:
                self._inner = middleware(*args, **kwargs)
            self.get_response = get_response
            self._call_method = None
            if self.async_capable:
                super(SentryWrappingMiddleware, self).__init__(get_response)

        # We need correct behavior for `hasattr()`, which we can only determine
        # when we have an instance of the middleware we're wrapping.
        def __getattr__(self, method_name):
            # type: (str) -> Any
            if method_name not in (
                "process_request",
                "process_view",
                "process_template_response",
                "process_response",
                "process_exception",
            ):
                raise AttributeError()

            old_method = getattr(self._inner, method_name)
            rv = _get_wrapped_method(old_method)
            # Cache so __getattr__ only fires once per method name.
            self.__dict__[method_name] = rv
            return rv

        def __call__(self, *args, **kwargs):
            # type: (*Any, **Any) -> Any
            if hasattr(self, "async_route_check") and self.async_route_check():
                return self.__acall__(*args, **kwargs)

            f = self._call_method
            if f is None:
                self._call_method = f = self._inner.__call__

            middleware_span = _check_middleware_span(old_method=f)

            if middleware_span is None:
                return f(*args, **kwargs)

            with middleware_span:
                return f(*args, **kwargs)

    if hasattr(middleware, "__name__"):
        SentryWrappingMiddleware.__name__ = middleware.__name__

    return SentryWrappingMiddleware
sentry-python-1.4.3/sentry_sdk/integrations/django/templates.py 0000664 0000000 0000000 00000012422 14125057761 0025100 0 ustar 00root root 0000000 0000000 from django.template import TemplateSyntaxError
from django import VERSION as DJANGO_VERSION
from sentry_sdk import _functools, Hub
from sentry_sdk._types import MYPY
if MYPY:
from typing import Any
from typing import Dict
from typing import Optional
from typing import Iterator
from typing import Tuple
try:
# support Django 1.9
from django.template.base import Origin
except ImportError:
# backward compatibility
from django.template.loader import LoaderOrigin as Origin
def get_template_frame_from_exception(exc_value):
    # type: (Optional[BaseException]) -> Optional[Dict[str, Any]]
    """Build a synthetic stacktrace frame for the Django template that raised
    ``exc_value``, or return None when no template info is attached."""

    # As of Django 1.9 or so the new template debug thing showed up.
    if hasattr(exc_value, "template_debug"):
        return _get_template_frame_from_debug(exc_value.template_debug)  # type: ignore

    # As of r16833 (Django) all exceptions may contain a
    # ``django_template_source`` attribute (rather than the legacy
    # ``TemplateSyntaxError.source`` check)
    if hasattr(exc_value, "django_template_source"):
        return _get_template_frame_from_source(
            exc_value.django_template_source  # type: ignore
        )

    if isinstance(exc_value, TemplateSyntaxError) and hasattr(exc_value, "source"):
        source = exc_value.source
        if isinstance(source, (tuple, list)) and isinstance(source[0], Origin):
            return _get_template_frame_from_source(source)  # type: ignore

    return None
def _get_template_name_description(template_name):
# type: (str) -> str
if isinstance(template_name, (list, tuple)):
if template_name:
return "[{}, ...]".format(template_name[0])
else:
return template_name
def patch_templates():
    # type: () -> None
    """Instrument Django template rendering with ``django.template.render``
    spans (both SimpleTemplateResponse and django.shortcuts.render)."""
    from django.template.response import SimpleTemplateResponse
    from sentry_sdk.integrations.django import DjangoIntegration

    real_rendered_content = SimpleTemplateResponse.rendered_content

    @property  # type: ignore
    def rendered_content(self):
        # type: (SimpleTemplateResponse) -> str
        hub = Hub.current
        if hub.get_integration(DjangoIntegration) is None:
            # Integration disabled: fall back to the original property.
            return real_rendered_content.fget(self)

        with hub.start_span(
            op="django.template.render",
            description=_get_template_name_description(self.template_name),
        ) as span:
            span.set_data("context", self.context_data)
            return real_rendered_content.fget(self)

    SimpleTemplateResponse.rendered_content = rendered_content

    # django.shortcuts.render only has this signature from Django 1.7 on.
    if DJANGO_VERSION < (1, 7):
        return

    import django.shortcuts

    real_render = django.shortcuts.render

    @_functools.wraps(real_render)
    def render(request, template_name, context=None, *args, **kwargs):
        # type: (django.http.HttpRequest, str, Optional[Dict[str, Any]], *Any, **Any) -> django.http.HttpResponse
        hub = Hub.current
        if hub.get_integration(DjangoIntegration) is None:
            return real_render(request, template_name, context, *args, **kwargs)

        with hub.start_span(
            op="django.template.render",
            description=_get_template_name_description(template_name),
        ) as span:
            span.set_data("context", context)
            return real_render(request, template_name, context, *args, **kwargs)

    django.shortcuts.render = render
def _get_template_frame_from_debug(debug):
# type: (Dict[str, Any]) -> Dict[str, Any]
if debug is None:
return None
lineno = debug["line"]
filename = debug["name"]
if filename is None:
filename = ""
pre_context = []
post_context = []
context_line = None
for i, line in debug["source_lines"]:
if i < lineno:
pre_context.append(line)
elif i > lineno:
post_context.append(line)
else:
context_line = line
return {
"filename": filename,
"lineno": lineno,
"pre_context": pre_context[-5:],
"post_context": post_context[:5],
"context_line": context_line,
"in_app": True,
}
def _linebreak_iter(template_source):
# type: (str) -> Iterator[int]
yield 0
p = template_source.find("\n")
while p >= 0:
yield p + 1
p = template_source.find("\n", p + 1)
def _get_template_frame_from_source(source):
    # type: (Tuple[Origin, Tuple[int, int]]) -> Optional[Dict[str, Any]]
    """Build a Sentry stacktrace frame from a legacy Django template
    ``source`` tuple of ``(origin, (start, end))`` character offsets.

    Returns None when ``source`` is falsy or no line encloses the span.
    """
    if not source:
        return None

    origin, (start, end) = source
    filename = getattr(origin, "loadname", None)
    if filename is None:
        # Placeholder used when the template has no backing file (the
        # original "<template>" literal had been stripped as an HTML tag).
        filename = "<template>"
    template_source = origin.reload()
    lineno = None
    upto = 0
    pre_context = []
    post_context = []
    context_line = None

    # Walk line-start offsets; note the first iteration yields an empty
    # slice, which conveniently makes line numbers 1-based (legacy behavior).
    for num, line_start in enumerate(_linebreak_iter(template_source)):
        line = template_source[upto:line_start]
        if start >= upto and end <= line_start:
            lineno = num
            context_line = line
        elif lineno is None:
            pre_context.append(line)
        else:
            post_context.append(line)
        upto = line_start

    if context_line is None or lineno is None:
        return None

    return {
        "filename": filename,
        "lineno": lineno,
        # Keep at most five lines of context on each side.
        "pre_context": pre_context[-5:],
        "post_context": post_context[:5],
        "context_line": context_line,
    }
sentry-python-1.4.3/sentry_sdk/integrations/django/transactions.py 0000664 0000000 0000000 00000007777 14125057761 0025633 0 ustar 00root root 0000000 0000000 """
Copied from raven-python. Used for
`DjangoIntegration(transaction_fron="raven_legacy")`.
"""
from __future__ import absolute_import
import re
from sentry_sdk._types import MYPY
if MYPY:
from django.urls.resolvers import URLResolver
from typing import Dict
from typing import List
from typing import Optional
from django.urls.resolvers import URLPattern
from typing import Tuple
from typing import Union
from re import Pattern
try:
from django.urls import get_resolver
except ImportError:
from django.core.urlresolvers import get_resolver
def get_regex(resolver_or_pattern):
    # type: (Union[URLPattern, URLResolver]) -> Pattern[str]
    """Utility method for django's deprecated resolver.regex.

    Django < 2.0 exposes ``.regex`` directly; newer versions nest it under
    ``.pattern.regex``.
    """
    if hasattr(resolver_or_pattern, "regex"):
        return resolver_or_pattern.regex
    return resolver_or_pattern.pattern.regex
class RavenResolver(object):
    """Resolves a request path to a human-readable route template.

    Port of raven-python's legacy transaction naming; used when
    ``DjangoIntegration(transaction_style="url")`` walks the URLconf.
    """

    _optional_group_matcher = re.compile(r"\(\?\:([^\)]+)\)")
    _named_group_matcher = re.compile(r"\(\?P<(\w+)>[^\)]+\)+")
    _non_named_group_matcher = re.compile(r"\([^\)]+\)")
    # [foo|bar|baz]
    _either_option_matcher = re.compile(r"\[([^\]]+)\|([^\]]+)\]")
    _camel_re = re.compile(r"([A-Z]+)([a-z])")

    # Shared across instances: maps a URL pattern object to its simplified
    # route string so repeated lookups are cheap.
    _cache = {}  # type: Dict[URLPattern, str]

    def _simplify(self, pattern):
        # type: (str) -> str
        r"""
        Clean up urlpattern regexes into something readable by humans:

        From:
        > "^(?P<sport_slug>\w+)/athletes/(?P<athlete_slug>\w+)/$"

        To:
        > "{sport_slug}/athletes/{athlete_slug}/"
        """
        # remove optional params
        # TODO(dcramer): it'd be nice to change these into [%s] but it currently
        # conflicts with the other rules because we're doing regexp matches
        # rather than parsing tokens
        result = self._optional_group_matcher.sub(lambda m: "%s" % m.group(1), pattern)

        # handle named groups first
        result = self._named_group_matcher.sub(lambda m: "{%s}" % m.group(1), result)

        # handle non-named groups
        result = self._non_named_group_matcher.sub("{var}", result)

        # handle optional params
        result = self._either_option_matcher.sub(lambda m: m.group(1), result)

        # clean up any outstanding regex-y characters.
        result = (
            result.replace("^", "")
            .replace("$", "")
            .replace("?", "")
            .replace("//", "/")
            .replace("\\", "")
        )

        return result

    def _resolve(self, resolver, path, parents=None):
        # type: (URLResolver, str, Optional[List[URLResolver]]) -> Optional[str]
        """Recursively match ``path`` against ``resolver``'s patterns and
        return the simplified route, or None when nothing matches."""
        match = get_regex(resolver).search(path)  # Django < 2.0

        if not match:
            return None

        if parents is None:
            parents = [resolver]
        elif resolver not in parents:
            parents = parents + [resolver]

        # Strip the prefix this resolver consumed before descending.
        new_path = path[match.end() :]
        for pattern in resolver.url_patterns:
            # this is an include()
            if not pattern.callback:
                match_ = self._resolve(pattern, new_path, parents)
                if match_:
                    return match_
                continue
            elif not get_regex(pattern).search(new_path):
                continue

            try:
                return self._cache[pattern]
            except KeyError:
                pass

            # Concatenate the simplified regexes of all enclosing resolvers
            # with this pattern's own to form the full route.
            prefix = "".join(self._simplify(get_regex(p).pattern) for p in parents)
            result = prefix + self._simplify(get_regex(pattern).pattern)
            if not result.startswith("/"):
                result = "/" + result
            self._cache[pattern] = result
            return result

        return None

    def resolve(
        self,
        path,  # type: str
        urlconf=None,  # type: Union[None, Tuple[URLPattern, URLPattern, URLResolver], Tuple[URLPattern]]
    ):
        # type: (...) -> str
        """Return the readable route for ``path``; falls back to the raw
        path when no pattern matches."""
        resolver = get_resolver(urlconf)
        match = self._resolve(resolver, path)
        return match or path
LEGACY_RESOLVER = RavenResolver()
sentry-python-1.4.3/sentry_sdk/integrations/django/views.py 0000664 0000000 0000000 00000004007 14125057761 0024237 0 ustar 00root root 0000000 0000000 from sentry_sdk.hub import Hub
from sentry_sdk._types import MYPY
from sentry_sdk import _functools
if MYPY:
from typing import Any
try:
from asyncio import iscoroutinefunction
except ImportError:
iscoroutinefunction = None # type: ignore
try:
from sentry_sdk.integrations.django.asgi import wrap_async_view
except (ImportError, SyntaxError):
wrap_async_view = None # type: ignore
def patch_views():
    # type: () -> None
    """Patch Django's BaseHandler so every view callable runs in a
    ``django.view`` span (sync or async as appropriate)."""
    from django.core.handlers.base import BaseHandler
    from sentry_sdk.integrations.django import DjangoIntegration

    old_make_view_atomic = BaseHandler.make_view_atomic

    @_functools.wraps(old_make_view_atomic)
    def sentry_patched_make_view_atomic(self, *args, **kwargs):
        # type: (Any, *Any, **Any) -> Any
        callback = old_make_view_atomic(self, *args, **kwargs)

        # XXX: The wrapper function is created for every request. Find more
        # efficient way to wrap views (or build a cache?)

        hub = Hub.current
        integration = hub.get_integration(DjangoIntegration)

        if integration is not None and integration.middleware_spans:
            # Coroutine views need the async-aware wrapper; both
            # iscoroutinefunction and wrap_async_view may be unavailable on
            # old Python versions (see the guarded imports at module top).
            if (
                iscoroutinefunction is not None
                and wrap_async_view is not None
                and iscoroutinefunction(callback)
            ):
                sentry_wrapped_callback = wrap_async_view(hub, callback)
            else:
                sentry_wrapped_callback = _wrap_sync_view(hub, callback)

        else:
            sentry_wrapped_callback = callback

        return sentry_wrapped_callback

    BaseHandler.make_view_atomic = sentry_patched_make_view_atomic
def _wrap_sync_view(hub, callback):
    # type: (Hub, Any) -> Any
    """Return ``callback`` wrapped so every invocation runs inside a
    ``django.view`` span on the given hub."""

    @_functools.wraps(callback)
    def sentry_wrapped_callback(request, *args, **kwargs):
        # type: (Any, *Any, **Any) -> Any
        view_name = request.resolver_match.view_name
        with hub.start_span(op="django.view", description=view_name):
            return callback(request, *args, **kwargs)

    return sentry_wrapped_callback
sentry-python-1.4.3/sentry_sdk/integrations/excepthook.py 0000664 0000000 0000000 00000004216 14125057761 0024013 0 ustar 00root root 0000000 0000000 import sys
from sentry_sdk.hub import Hub
from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
from sentry_sdk.integrations import Integration
from sentry_sdk._types import MYPY
if MYPY:
from typing import Callable
from typing import Any
from typing import Type
from types import TracebackType
Excepthook = Callable[
[Type[BaseException], BaseException, TracebackType],
Any,
]
class ExcepthookIntegration(Integration):
    """Reports unhandled exceptions that reach ``sys.excepthook``."""

    identifier = "excepthook"

    # When False, reporting is suppressed in interactive shells
    # (see _should_send).
    always_run = False

    def __init__(self, always_run=False):
        # type: (bool) -> None
        if not isinstance(always_run, bool):
            raise ValueError(
                "Invalid value for always_run: %s (must be type boolean)"
                % (always_run,)
            )
        self.always_run = always_run

    @staticmethod
    def setup_once():
        # type: () -> None
        # Chain onto whatever excepthook is currently installed.
        sys.excepthook = _make_excepthook(sys.excepthook)
def _make_excepthook(old_excepthook):
    # type: (Excepthook) -> Excepthook
    """Build an excepthook that reports to Sentry and then defers to the
    previously installed hook."""

    def sentry_sdk_excepthook(type_, value, traceback):
        # type: (Type[BaseException], BaseException, TracebackType) -> None
        hub = Hub.current
        integration = hub.get_integration(ExcepthookIntegration)

        if integration is not None and _should_send(integration.always_run):
            # If an integration is there, a client has to be there.
            client = hub.client  # type: Any

            with capture_internal_exceptions():
                event, hint = event_from_exception(
                    (type_, value, traceback),
                    client_options=client.options,
                    mechanism={"type": "excepthook", "handled": False},
                )
                hub.capture_event(event, hint=hint)

        # Always chain to the original hook so default printing still happens.
        return old_excepthook(type_, value, traceback)

    return sentry_sdk_excepthook
def _should_send(always_run=False):
# type: (bool) -> bool
if always_run:
return True
if hasattr(sys, "ps1"):
# Disable the excepthook for interactive Python shells, otherwise
# every typo gets sent to Sentry.
return False
return True
sentry-python-1.4.3/sentry_sdk/integrations/executing.py 0000664 0000000 0000000 00000003747 14125057761 0023645 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
from sentry_sdk import Hub
from sentry_sdk._types import MYPY
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.scope import add_global_event_processor
from sentry_sdk.utils import walk_exception_chain, iter_stacks
if MYPY:
from typing import Optional
from sentry_sdk._types import Event, Hint
try:
import executing
except ImportError:
raise DidNotEnable("executing is not installed")
class ExecutingIntegration(Integration):
    """Uses the ``executing`` package to replace frame function names in
    reported stacktraces with fully qualified names (e.g. ``Cls.method``)."""

    identifier = "executing"

    @staticmethod
    def setup_once():
        # type: () -> None
        @add_global_event_processor
        def add_executing_info(event, hint):
            # type: (Event, Optional[Hint]) -> Optional[Event]
            if Hub.current.get_integration(ExecutingIntegration) is None:
                return event

            if hint is None:
                return event

            exc_info = hint.get("exc_info", None)

            if exc_info is None:
                return event

            exception = event.get("exception", None)

            if exception is None:
                return event

            values = exception.get("values", None)

            if values is None:
                return event

            # Pair each serialized exception with its live traceback; events
            # store exceptions oldest-first, hence reversed() to line up with
            # walk_exception_chain's order.
            for exception, (_exc_type, _exc_value, exc_tb) in zip(
                reversed(values), walk_exception_chain(exc_info)
            ):
                sentry_frames = [
                    frame
                    for frame in exception.get("stacktrace", {}).get("frames", [])
                    if frame.get("function")
                ]
                tbs = list(iter_stacks(exc_tb))
                if len(sentry_frames) != len(tbs):
                    # Frames don't line up 1:1 with the traceback; don't guess.
                    continue

                for sentry_frame, tb in zip(sentry_frames, tbs):
                    frame = tb.tb_frame
                    source = executing.Source.for_frame(frame)
                    sentry_frame["function"] = source.code_qualname(frame.f_code)

            return event
sentry-python-1.4.3/sentry_sdk/integrations/falcon.py 0000664 0000000 0000000 00000015214 14125057761 0023104 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
from sentry_sdk.hub import Hub
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.integrations._wsgi_common import RequestExtractor
from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
from sentry_sdk._types import MYPY
if MYPY:
from typing import Any
from typing import Dict
from typing import Optional
from sentry_sdk._types import EventProcessor
try:
import falcon # type: ignore
import falcon.api_helpers # type: ignore
from falcon import __version__ as FALCON_VERSION
except ImportError:
raise DidNotEnable("Falcon not installed")
class FalconRequestExtractor(RequestExtractor):
    """Extracts request data (env, cookies, body) from a falcon.Request."""

    def env(self):
        # type: () -> Dict[str, Any]
        return self.request.env

    def cookies(self):
        # type: () -> Dict[str, Any]
        return self.request.cookies

    def form(self):
        # type: () -> None
        return None  # No such concept in Falcon

    def files(self):
        # type: () -> None
        return None  # No such concept in Falcon

    def raw_data(self):
        # type: () -> Optional[str]
        # As request data can only be read once we won't make this available
        # to Sentry. Just send back a dummy string in case there was a
        # content length.
        # TODO(jmagnusson): Figure out if there's a way to support this
        content_length = self.content_length()
        if content_length > 0:
            return "[REQUEST_CONTAINING_RAW_DATA]"
        else:
            return None

    def json(self):
        # type: () -> Optional[Dict[str, Any]]
        try:
            return self.request.media
        except falcon.errors.HTTPBadRequest:
            # NOTE(jmagnusson): We return `falcon.Request._media` here because
            # falcon 1.4 doesn't do proper type checking in
            # `falcon.Request.media`. This has been fixed in 2.0.
            # Relevant code: https://github.com/falconry/falcon/blob/1.4.1/falcon/request.py#L953
            return self.request._media
class SentryFalconMiddleware(object):
    """Captures exceptions in Falcon requests and sends them to Sentry."""

    def process_request(self, req, resp, *args, **kwargs):
        # type: (Any, Any, *Any, **Any) -> None
        hub = Hub.current
        integration = hub.get_integration(FalconIntegration)
        if integration is None:
            return

        with hub.configure_scope() as scope:
            # Mark events from this scope as coming from the falcon integration.
            scope._name = "falcon"
            scope.add_event_processor(_make_request_event_processor(req, integration))
TRANSACTION_STYLE_VALUES = ("uri_template", "path")
class FalconIntegration(Integration):
    """Enables Sentry for Falcon apps: wraps the WSGI app, captures
    unhandled exceptions, and prepends a request-data middleware."""

    identifier = "falcon"

    # "uri_template" names transactions by route; "path" by the raw URL path.
    transaction_style = None

    def __init__(self, transaction_style="uri_template"):
        # type: (str) -> None
        if transaction_style not in TRANSACTION_STYLE_VALUES:
            raise ValueError(
                "Invalid value for transaction_style: %s (must be in %s)"
                % (transaction_style, TRANSACTION_STYLE_VALUES)
            )
        self.transaction_style = transaction_style

    @staticmethod
    def setup_once():
        # type: () -> None
        try:
            # Naive numeric parse; non-numeric components (e.g. "2.0rc1")
            # raise and disable the integration below.
            version = tuple(map(int, FALCON_VERSION.split(".")))
        except (ValueError, TypeError):
            raise DidNotEnable("Unparsable Falcon version: {}".format(FALCON_VERSION))

        if version < (1, 4):
            raise DidNotEnable("Falcon 1.4 or newer required.")

        _patch_wsgi_app()
        _patch_handle_exception()
        _patch_prepare_middleware()
def _patch_wsgi_app():
    # type: () -> None
    """Wrap falcon.API.__call__ with the Sentry WSGI middleware."""
    original_wsgi_app = falcon.API.__call__

    def sentry_patched_wsgi_app(self, env, start_response):
        # type: (falcon.API, Any, Any) -> Any
        hub = Hub.current
        integration = hub.get_integration(FalconIntegration)
        if integration is None:
            return original_wsgi_app(self, env, start_response)

        # Bind `self` into the WSGI callable handed to the middleware.
        sentry_wrapped = SentryWsgiMiddleware(
            lambda envi, start_resp: original_wsgi_app(self, envi, start_resp)
        )

        return sentry_wrapped(env, start_response)

    falcon.API.__call__ = sentry_patched_wsgi_app
def _patch_handle_exception():
    # type: () -> None
    """Patch falcon's internal exception handler so non-HTTP errors are
    reported to Sentry after falcon has processed them."""
    original_handle_exception = falcon.API._handle_exception

    def sentry_patched_handle_exception(self, *args):
        # type: (falcon.API, *Any) -> Any
        # NOTE(jmagnusson): falcon 2.0 changed falcon.API._handle_exception
        # method signature from `(ex, req, resp, params)` to
        # `(req, resp, ex, params)`
        if isinstance(args[0], Exception):
            ex = args[0]
        else:
            ex = args[2]

        was_handled = original_handle_exception(self, *args)

        hub = Hub.current
        integration = hub.get_integration(FalconIntegration)

        # Deliberate HTTP error responses are not crashes; skip them.
        if integration is not None and not _is_falcon_http_error(ex):
            # If an integration is there, a client has to be there.
            client = hub.client  # type: Any

            event, hint = event_from_exception(
                ex,
                client_options=client.options,
                mechanism={"type": "falcon", "handled": False},
            )
            hub.capture_event(event, hint=hint)

        return was_handled

    falcon.API._handle_exception = sentry_patched_handle_exception
def _patch_prepare_middleware():
    # type: () -> None
    """Prepend SentryFalconMiddleware to every Falcon app's middleware list."""
    original_prepare_middleware = falcon.api_helpers.prepare_middleware

    def sentry_patched_prepare_middleware(
        middleware=None, independent_middleware=False
    ):
        # type: (Any, Any) -> Any
        hub = Hub.current
        integration = hub.get_integration(FalconIntegration)
        if integration is not None:
            # Run Sentry's middleware first so the scope is set up before
            # user middleware executes.
            middleware = [SentryFalconMiddleware()] + (middleware or [])
        return original_prepare_middleware(middleware, independent_middleware)

    falcon.api_helpers.prepare_middleware = sentry_patched_prepare_middleware
def _is_falcon_http_error(ex):
    # type: (BaseException) -> bool
    # HTTPError/HTTPStatus are intentional HTTP responses, not crashes.
    return isinstance(ex, (falcon.HTTPError, falcon.http_status.HTTPStatus))
def _make_request_event_processor(req, integration):
    # type: (falcon.Request, FalconIntegration) -> EventProcessor
    """Create an event processor that names the transaction per the
    configured style and attaches request data from ``req`` to events."""

    def inner(event, hint):
        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
        if integration.transaction_style == "uri_template":
            event["transaction"] = req.uri_template
        elif integration.transaction_style == "path":
            event["transaction"] = req.path

        with capture_internal_exceptions():
            FalconRequestExtractor(req).extract_into_event(event)

        return event

    return inner
sentry-python-1.4.3/sentry_sdk/integrations/flask.py 0000664 0000000 0000000 00000016457 14125057761 0022754 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
from sentry_sdk.hub import Hub, _should_send_default_pii
from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
from sentry_sdk.integrations._wsgi_common import RequestExtractor
from sentry_sdk._types import MYPY
if MYPY:
from sentry_sdk.integrations.wsgi import _ScopedResponse
from typing import Any
from typing import Dict
from werkzeug.datastructures import ImmutableMultiDict
from werkzeug.datastructures import FileStorage
from typing import Union
from typing import Callable
from sentry_sdk._types import EventProcessor
try:
import flask_login # type: ignore
except ImportError:
flask_login = None
try:
from flask import ( # type: ignore
Request,
Flask,
_request_ctx_stack,
_app_ctx_stack,
__version__ as FLASK_VERSION,
)
from flask.signals import (
got_request_exception,
request_started,
)
except ImportError:
raise DidNotEnable("Flask is not installed")
try:
import blinker # noqa
except ImportError:
raise DidNotEnable("blinker is not installed")
TRANSACTION_STYLE_VALUES = ("endpoint", "url")
class FlaskIntegration(Integration):
    """Enables Sentry for Flask apps via WSGI wrapping and Flask signals."""

    identifier = "flask"

    # "endpoint" names transactions by view endpoint; "url" by URL rule.
    transaction_style = None

    def __init__(self, transaction_style="endpoint"):
        # type: (str) -> None
        if transaction_style not in TRANSACTION_STYLE_VALUES:
            raise ValueError(
                "Invalid value for transaction_style: %s (must be in %s)"
                % (transaction_style, TRANSACTION_STYLE_VALUES)
            )
        self.transaction_style = transaction_style

    @staticmethod
    def setup_once():
        # type: () -> None

        # This version parsing is absolutely naive but the alternative is to
        # import pkg_resources which slows down the SDK a lot.
        try:
            version = tuple(map(int, FLASK_VERSION.split(".")[:3]))
        except (ValueError, TypeError):
            # It's probably a release candidate, we assume it's fine.
            pass
        else:
            if version < (0, 10):
                raise DidNotEnable("Flask 0.10 or newer is required.")

        request_started.connect(_request_started)
        got_request_exception.connect(_capture_exception)

        old_app = Flask.__call__

        def sentry_patched_wsgi_app(self, environ, start_response):
            # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
            if Hub.current.get_integration(FlaskIntegration) is None:
                return old_app(self, environ, start_response)

            return SentryWsgiMiddleware(lambda *a, **kw: old_app(self, *a, **kw))(
                environ, start_response
            )

        Flask.__call__ = sentry_patched_wsgi_app  # type: ignore
def _request_started(sender, **kwargs):
    # type: (Flask, **Any) -> None
    """Flask ``request_started`` signal handler: names the transaction and
    registers the request-data event processor on the current scope."""
    hub = Hub.current
    integration = hub.get_integration(FlaskIntegration)
    if integration is None:
        return

    app = _app_ctx_stack.top.app
    with hub.configure_scope() as scope:
        request = _request_ctx_stack.top.request

        # Set the transaction name here, but rely on WSGI middleware to actually
        # start the transaction
        try:
            if integration.transaction_style == "endpoint":
                scope.transaction = request.url_rule.endpoint
            elif integration.transaction_style == "url":
                scope.transaction = request.url_rule.rule
        except Exception:
            # e.g. url_rule may be None for unmatched routes; keep default name.
            pass

        evt_processor = _make_request_event_processor(app, request, integration)
        scope.add_event_processor(evt_processor)
class FlaskRequestExtractor(RequestExtractor):
    """Extracts request data (env, cookies, body, form, files) from a
    flask.Request for attachment to events."""

    def env(self):
        # type: () -> Dict[str, str]
        return self.request.environ

    def cookies(self):
        # type: () -> Dict[Any, Any]
        # Flatten single-element lists so cookies render as plain values.
        return {
            k: v[0] if isinstance(v, list) and len(v) == 1 else v
            for k, v in self.request.cookies.items()
        }

    def raw_data(self):
        # type: () -> bytes
        return self.request.get_data()

    def form(self):
        # type: () -> ImmutableMultiDict[str, Any]
        return self.request.form

    def files(self):
        # type: () -> ImmutableMultiDict[str, Any]
        return self.request.files

    def is_json(self):
        # type: () -> bool
        return self.request.is_json

    def json(self):
        # type: () -> Any
        return self.request.get_json()

    def size_of_file(self, file):
        # type: (FileStorage) -> int
        return file.content_length
def _make_request_event_processor(app, request, integration):
    # type: (Flask, Callable[[], Request], FlaskIntegration) -> EventProcessor
    """Create an event processor attaching ``request`` data (and the current
    user, when PII is allowed) to outgoing events."""

    def inner(event, hint):
        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]

        # if the request is gone we are fine not logging the data from
        # it. This might happen if the processor is pushed away to
        # another thread.
        if request is None:
            return event

        with capture_internal_exceptions():
            FlaskRequestExtractor(request).extract_into_event(event)

        if _should_send_default_pii():
            with capture_internal_exceptions():
                _add_user_to_event(event)

        return event

    return inner
def _capture_exception(sender, exception, **kwargs):
    # type: (Flask, Union[ValueError, BaseException], **Any) -> None
    """Flask ``got_request_exception`` signal handler: reports the error."""
    hub = Hub.current
    if hub.get_integration(FlaskIntegration) is None:
        return

    # If an integration is there, a client has to be there.
    client = hub.client  # type: Any

    event, hint = event_from_exception(
        exception,
        client_options=client.options,
        mechanism={"type": "flask", "handled": False},
    )

    hub.capture_event(event, hint=hint)
def _add_user_to_event(event):
    # type: (Dict[str, Any]) -> None
    """Attach the Flask-Login current user's id/email/username to ``event``.

    Best-effort: every attribute access is guarded so a misconfigured or
    absent login extension never breaks event capture.
    """
    if flask_login is None:
        return

    user = flask_login.current_user
    if user is None:
        return

    with capture_internal_exceptions():
        # Access this object as late as possible as accessing the user
        # is relatively costly

        user_info = event.setdefault("user", {})

        try:
            user_info.setdefault("id", user.get_id())
            # TODO: more configurable user attrs here
        except AttributeError:
            # might happen if:
            # - flask_login could not be imported
            # - flask_login is not configured
            # - no user is logged in
            pass

        # The following attribute accesses are ineffective for the general
        # Flask-Login case, because the User interface of Flask-Login does not
        # care about anything but the ID. However, Flask-User (based on
        # Flask-Login) documents a few optional extra attributes.
        #
        # https://github.com/lingthio/Flask-User/blob/a379fa0a281789618c484b459cb41236779b95b1/docs/source/data_models.rst#fixed-data-model-property-names

        try:
            user_info.setdefault("email", user.email)
        except Exception:
            pass

        try:
            user_info.setdefault("username", user.username)
        except Exception:
            pass
        # NOTE: a second `setdefault("username", user.email)` used to follow
        # here; it was dead code (a no-op after a successful setdefault, and
        # unreachable when `user.username` raised) and has been removed.
sentry-python-1.4.3/sentry_sdk/integrations/gcp.py 0000664 0000000 0000000 00000017536 14125057761 0022424 0 ustar 00root root 0000000 0000000 from datetime import datetime, timedelta
from os import environ
import sys
from sentry_sdk.hub import Hub, _should_send_default_pii
from sentry_sdk.tracing import Transaction
from sentry_sdk._compat import reraise
from sentry_sdk.utils import (
AnnotatedValue,
capture_internal_exceptions,
event_from_exception,
logger,
TimeoutThread,
)
from sentry_sdk.integrations import Integration
from sentry_sdk.integrations._wsgi_common import _filter_headers
from sentry_sdk._types import MYPY
# Constants
TIMEOUT_WARNING_BUFFER = 1.5 # Buffer time required to send timeout warning to Sentry
MILLIS_TO_SECONDS = 1000.0
if MYPY:
from typing import Any
from typing import TypeVar
from typing import Callable
from typing import Optional
from sentry_sdk._types import EventProcessor, Event, Hint
F = TypeVar("F", bound=Callable[..., Any])
def _wrap_func(func):
    # type: (F) -> F
    """Wrap a GCP Cloud Function handler with Sentry instrumentation:
    scoped event processor, optional timeout warning thread, a transaction
    around the invocation, error capture, and a final flush."""

    def sentry_func(functionhandler, gcp_event, *args, **kwargs):
        # type: (Any, Any, *Any, **Any) -> Any
        hub = Hub.current
        integration = hub.get_integration(GcpIntegration)
        if integration is None:
            return func(functionhandler, gcp_event, *args, **kwargs)

        # If an integration is there, a client has to be there.
        client = hub.client  # type: Any

        configured_time = environ.get("FUNCTION_TIMEOUT_SEC")
        if not configured_time:
            # Without the timeout we cannot schedule the warning; run
            # uninstrumented rather than guessing.
            logger.debug(
                "The configured timeout could not be fetched from Cloud Functions configuration."
            )
            return func(functionhandler, gcp_event, *args, **kwargs)

        configured_time = int(configured_time)

        initial_time = datetime.utcnow()

        with hub.push_scope() as scope:
            with capture_internal_exceptions():
                scope.clear_breadcrumbs()
                scope.add_event_processor(
                    _make_request_event_processor(
                        gcp_event, configured_time, initial_time
                    )
                )
                scope.set_tag("gcp_region", environ.get("FUNCTION_REGION"))

                timeout_thread = None
                if (
                    integration.timeout_warning
                    and configured_time > TIMEOUT_WARNING_BUFFER
                ):
                    # Warn TIMEOUT_WARNING_BUFFER seconds before the deadline.
                    waiting_time = configured_time - TIMEOUT_WARNING_BUFFER

                    timeout_thread = TimeoutThread(waiting_time, configured_time)

                    # Starting the thread to raise timeout warning exception
                    timeout_thread.start()

            headers = {}
            if hasattr(gcp_event, "headers"):
                headers = gcp_event.headers

            # Continue an incoming distributed trace if headers carry one.
            transaction = Transaction.continue_from_headers(
                headers, op="serverless.function", name=environ.get("FUNCTION_NAME", "")
            )
            sampling_context = {
                "gcp_env": {
                    "function_name": environ.get("FUNCTION_NAME"),
                    "function_entry_point": environ.get("ENTRY_POINT"),
                    "function_identity": environ.get("FUNCTION_IDENTITY"),
                    "function_region": environ.get("FUNCTION_REGION"),
                    "function_project": environ.get("GCP_PROJECT"),
                },
                "gcp_event": gcp_event,
            }
            with hub.start_transaction(
                transaction, custom_sampling_context=sampling_context
            ):
                try:
                    return func(functionhandler, gcp_event, *args, **kwargs)
                except Exception:
                    exc_info = sys.exc_info()
                    sentry_event, hint = event_from_exception(
                        exc_info,
                        client_options=client.options,
                        mechanism={"type": "gcp", "handled": False},
                    )
                    hub.capture_event(sentry_event, hint=hint)
                    reraise(*exc_info)
                finally:
                    if timeout_thread:
                        timeout_thread.stop()
                    # Flush out the event queue
                    hub.flush()

    return sentry_func  # type: ignore
class GcpIntegration(Integration):
    """Sentry integration for Google Cloud Functions (patches the Python 3.7
    runtime's worker to wrap user function invocations)."""

    identifier = "gcp"

    def __init__(self, timeout_warning=False):
        # type: (bool) -> None
        # When true, report a warning shortly before the function times out.
        self.timeout_warning = timeout_warning

    @staticmethod
    def setup_once():
        # type: () -> None
        # The GCP Python 3.7 runtime exposes its worker module via __main__.
        import __main__ as gcp_functions  # type: ignore

        if not hasattr(gcp_functions, "worker_v1"):
            logger.warning(
                "GcpIntegration currently supports only Python 3.7 runtime environment."
            )
            return

        worker1 = gcp_functions.worker_v1

        worker1.FunctionHandler.invoke_user_function = _wrap_func(
            worker1.FunctionHandler.invoke_user_function
        )
def _make_request_event_processor(gcp_event, configured_timeout, initial_time):
    # type: (Any, Any, Any) -> EventProcessor
    """Create an event processor adding GCP runtime metadata, a logs-console
    URL, and request info (method, query string, headers, body) to events."""

    def event_processor(event, hint):
        # type: (Event, Hint) -> Optional[Event]

        final_time = datetime.utcnow()
        time_diff = final_time - initial_time

        # Use the full elapsed time. The previous `time_diff.microseconds`
        # only exposed the sub-second microsecond component, which wraps
        # every second and under-reported any execution over one second.
        execution_duration_in_millis = time_diff.total_seconds() * MILLIS_TO_SECONDS

        extra = event.setdefault("extra", {})
        extra["google cloud functions"] = {
            "function_name": environ.get("FUNCTION_NAME"),
            "function_entry_point": environ.get("ENTRY_POINT"),
            "function_identity": environ.get("FUNCTION_IDENTITY"),
            "function_region": environ.get("FUNCTION_REGION"),
            "function_project": environ.get("GCP_PROJECT"),
            "execution_duration_in_millis": execution_duration_in_millis,
            "configured_timeout_in_seconds": configured_timeout,
        }

        extra["google cloud logs"] = {
            "url": _get_google_cloud_logs_url(final_time),
        }

        request = event.get("request", {})

        request["url"] = "gcp:///{}".format(environ.get("FUNCTION_NAME"))

        if hasattr(gcp_event, "method"):
            request["method"] = gcp_event.method

        if hasattr(gcp_event, "query_string"):
            request["query_string"] = gcp_event.query_string.decode("utf-8")

        if hasattr(gcp_event, "headers"):
            request["headers"] = _filter_headers(gcp_event.headers)

        if _should_send_default_pii():
            if hasattr(gcp_event, "data"):
                request["data"] = gcp_event.data
        else:
            if hasattr(gcp_event, "data"):
                # Unfortunately couldn't find a way to get structured body from GCP
                # event. Meaning every body is unstructured to us.
                request["data"] = AnnotatedValue("", {"rem": [["!raw", "x", 0, 0]]})

        event["request"] = request

        return event

    return event_processor
def _get_google_cloud_logs_url(final_time):
# type: (datetime) -> str
"""
Generates a Google Cloud Logs console URL based on the environment variables
Arguments:
final_time {datetime} -- Final time
Returns:
str -- Google Cloud Logs Console URL to logs.
"""
hour_ago = final_time - timedelta(hours=1)
formatstring = "%Y-%m-%dT%H:%M:%SZ"
url = (
"https://console.cloud.google.com/logs/viewer?project={project}&resource=cloud_function"
"%2Ffunction_name%2F{function_name}%2Fregion%2F{region}&minLogLevel=0&expandAll=false"
"×tamp={timestamp_end}&customFacets=&limitCustomFacetWidth=true"
"&dateRangeStart={timestamp_start}&dateRangeEnd={timestamp_end}"
"&interval=PT1H&scrollTimestamp={timestamp_end}"
).format(
project=environ.get("GCP_PROJECT"),
function_name=environ.get("FUNCTION_NAME"),
region=environ.get("FUNCTION_REGION"),
timestamp_end=final_time.strftime(formatstring),
timestamp_start=hour_ago.strftime(formatstring),
)
return url
sentry-python-1.4.3/sentry_sdk/integrations/gnu_backtrace.py 0000664 0000000 0000000 00000005540 14125057761 0024433 0 ustar 00root root 0000000 0000000 import re
from sentry_sdk.hub import Hub
from sentry_sdk.integrations import Integration
from sentry_sdk.scope import add_global_event_processor
from sentry_sdk.utils import capture_internal_exceptions
from sentry_sdk._types import MYPY
if MYPY:
from typing import Any
from typing import Dict
# Token patterns for one line of a GNU backtrace, e.g.:
#   1. /lib64/libc.so.6(main+0xf5) [0x7f1d8a1e3445]
MODULE_RE = r"[a-zA-Z0-9/._:\\-]+"
TYPE_RE = r"[a-zA-Z0-9._:<>,-]+"
HEXVAL_RE = r"[A-Fa-f0-9]+"

# BUG FIX: the named-group names had been stripped from this pattern
# (e.g. "(?P\d+)" instead of "(?P<index>\d+)"), which makes re.compile
# raise and leaves _process_gnu_backtrace's match.group("index") /
# .group("package") / .group("function") calls unreachable. Restore the
# group names that the rest of this module references.
FRAME_RE = r"""
^(?P<index>\d+)\.\s
(?P<package>{MODULE_RE})\(
    (?P<retval>{TYPE_RE}\ )?
    ((?P<function>{TYPE_RE})
        (?P<args>\(.*\))?
    )?
    ((?P<constoffset>\ const)?\+0x(?P<offset>{HEXVAL_RE}))?
\)\s
\[0x(?P<retaddr>{HEXVAL_RE})\]$
""".format(
    MODULE_RE=MODULE_RE, HEXVAL_RE=HEXVAL_RE, TYPE_RE=TYPE_RE
)

FRAME_RE = re.compile(FRAME_RE, re.MULTILINE | re.VERBOSE)
class GnuBacktraceIntegration(Integration):
    # Registers a global event processor that parses GNU-style backtrace
    # lines out of exception messages (see _process_gnu_backtrace below).
    identifier = "gnu_backtrace"

    @staticmethod
    def setup_once():
        # type: () -> None
        @add_global_event_processor
        def process_gnu_backtrace(event, hint):
            # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
            # NOTE(review): if _process_gnu_backtrace raises, the context
            # manager swallows the error and this processor returns None,
            # which presumably drops the event -- confirm this is intended.
            with capture_internal_exceptions():
                return _process_gnu_backtrace(event, hint)
def _process_gnu_backtrace(event, hint):
    # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
    """For each exception in the event, move any GNU backtrace lines found in
    its message into the stacktrace frames, keeping unparsable lines in the
    message."""
    if Hub.current.get_integration(GnuBacktraceIntegration) is None:
        return event

    if hint.get("exc_info", None) is None:
        return event

    exc_container = event.get("exception", None)
    if exc_container is None:
        return event

    exc_values = exc_container.get("values", None)
    if exc_values is None:
        return event

    for single_exc in exc_values:
        frames = single_exc.get("stacktrace", {}).get("frames", [])
        message = single_exc.get("value", None)
        if not frames or not message:
            continue

        parsed_frames = []
        leftover_lines = []
        for raw_line in message.splitlines():
            parsed = FRAME_RE.match(raw_line)
            if parsed is None:
                # Put garbage lines back into message, not sure what to do with them.
                leftover_lines.append(raw_line)
            else:
                parsed_frames.append(
                    (
                        int(parsed.group("index")),
                        {
                            "package": parsed.group("package") or None,
                            "function": parsed.group("function") or None,
                            "platform": "native",
                        },
                    )
                )

        if parsed_frames:
            # Append frames ordered by backtrace index, highest first.
            parsed_frames.sort(key=lambda item: item[0], reverse=True)
            frames.extend(frame for _, frame in parsed_frames)
            # Trailing "" preserves the final newline the original code kept.
            leftover_lines.append("")
            single_exc["value"] = "\n".join(leftover_lines)

    return event
sentry-python-1.4.3/sentry_sdk/integrations/httpx.py 0000664 0000000 0000000 00000006051 14125057761 0023010 0 ustar 00root root 0000000 0000000 from sentry_sdk import Hub
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.utils import logger
from sentry_sdk._types import MYPY
if MYPY:
from typing import Any
try:
from httpx import AsyncClient, Client, Request, Response # type: ignore
except ImportError:
raise DidNotEnable("httpx is not installed")
__all__ = ["HttpxIntegration"]
class HttpxIntegration(Integration):
    # Traces outgoing HTTP requests made through httpx's sync and async clients.
    identifier = "httpx"

    @staticmethod
    def setup_once():
        # type: () -> None
        """
        httpx has its own transport layer and can be customized when needed,
        so patch Client.send and AsyncClient.send to support both synchronous and async interfaces.
        """
        _install_httpx_client()
        _install_httpx_async_client()
def _install_httpx_client():
    # type: () -> None
    """Monkeypatch ``httpx.Client.send`` to wrap each request in a span and
    inject Sentry trace-propagation headers."""
    original_send = Client.send

    def send(self, request, **kwargs):
        # type: (Client, Request, **Any) -> Response
        hub = Hub.current
        if hub.get_integration(HttpxIntegration) is None:
            # Integration disabled: fall straight through to the real method.
            return original_send(self, request, **kwargs)

        span_description = "%s %s" % (request.method, request.url)
        with hub.start_span(op="http", description=span_description) as span:
            span.set_data("method", request.method)
            span.set_data("url", str(request.url))

            # Attach distributed-tracing headers to the outgoing request.
            for header_key, header_value in hub.iter_trace_propagation_headers():
                logger.debug(
                    "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format(
                        key=header_key, value=header_value, url=request.url
                    )
                )
                request.headers[header_key] = header_value

            response = original_send(self, request, **kwargs)

            span.set_data("status_code", response.status_code)
            span.set_http_status(response.status_code)
            span.set_data("reason", response.reason_phrase)
            return response

    Client.send = send
def _install_httpx_async_client():
    # type: () -> None
    """Monkeypatch ``httpx.AsyncClient.send`` to wrap each request in a span
    and inject Sentry trace-propagation headers."""
    original_send = AsyncClient.send

    async def send(self, request, **kwargs):
        # type: (AsyncClient, Request, **Any) -> Response
        hub = Hub.current
        if hub.get_integration(HttpxIntegration) is None:
            # Integration disabled: fall straight through to the real method.
            return await original_send(self, request, **kwargs)

        span_description = "%s %s" % (request.method, request.url)
        with hub.start_span(op="http", description=span_description) as span:
            span.set_data("method", request.method)
            span.set_data("url", str(request.url))

            # Attach distributed-tracing headers to the outgoing request.
            for header_key, header_value in hub.iter_trace_propagation_headers():
                logger.debug(
                    "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format(
                        key=header_key, value=header_value, url=request.url
                    )
                )
                request.headers[header_key] = header_value

            response = await original_send(self, request, **kwargs)

            span.set_data("status_code", response.status_code)
            span.set_http_status(response.status_code)
            span.set_data("reason", response.reason_phrase)
            return response

    AsyncClient.send = send
sentry-python-1.4.3/sentry_sdk/integrations/logging.py 0000664 0000000 0000000 00000017054 14125057761 0023274 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
import logging
import datetime
from fnmatch import fnmatch
from sentry_sdk.hub import Hub
from sentry_sdk.utils import (
to_string,
event_from_exception,
current_stacktrace,
capture_internal_exceptions,
)
from sentry_sdk.integrations import Integration
from sentry_sdk._compat import iteritems
from sentry_sdk._types import MYPY
if MYPY:
from logging import LogRecord
from typing import Any
from typing import Dict
from typing import Optional
# Minimum record level captured as breadcrumbs by default.
DEFAULT_LEVEL = logging.INFO
# Minimum record level turned into Sentry events by default.
DEFAULT_EVENT_LEVEL = logging.ERROR
# Capturing events from those loggers causes recursion errors. We cannot allow
# the user to unconditionally create events from those loggers under any
# circumstances.
#
# Note: Ignoring by logger name here is better than mucking with thread-locals.
# We do not necessarily know whether thread-locals work 100% correctly in the user's environment.
_IGNORED_LOGGERS = set(
["sentry_sdk.errors", "urllib3.connectionpool", "urllib3.connection"]
)
def ignore_logger(
name, # type: str
):
# type: (...) -> None
"""This disables recording (both in breadcrumbs and as events) calls to
a logger of a specific name. Among other uses, many of our integrations
use this to prevent their actions being recorded as breadcrumbs. Exposed
to users as a way to quiet spammy loggers.
:param name: The name of the logger to ignore (same string you would pass to ``logging.getLogger``).
"""
_IGNORED_LOGGERS.add(name)
class LoggingIntegration(Integration):
    """Forwards stdlib ``logging`` records to Sentry as breadcrumbs and/or events."""

    identifier = "logging"

    def __init__(self, level=DEFAULT_LEVEL, event_level=DEFAULT_EVENT_LEVEL):
        # type: (Optional[int], Optional[int]) -> None
        # ``level`` gates breadcrumb recording; ``event_level`` gates event
        # creation. Passing None disables the respective handler entirely.
        self._handler = None
        self._breadcrumb_handler = None

        if level is not None:
            self._breadcrumb_handler = BreadcrumbHandler(level=level)

        if event_level is not None:
            self._handler = EventHandler(level=event_level)

    def _handle_record(self, record):
        # type: (LogRecord) -> None
        # Dispatch one record to each configured handler whose level
        # threshold it meets.
        if self._handler is not None and record.levelno >= self._handler.level:
            self._handler.handle(record)

        if (
            self._breadcrumb_handler is not None
            and record.levelno >= self._breadcrumb_handler.level
        ):
            self._breadcrumb_handler.handle(record)

    @staticmethod
    def setup_once():
        # type: () -> None
        # Patch Logger.callHandlers so every record that reaches any logger
        # also flows through the integration's handlers.
        old_callhandlers = logging.Logger.callHandlers  # type: ignore

        def sentry_patched_callhandlers(self, record):
            # type: (Any, LogRecord) -> Any
            try:
                return old_callhandlers(self, record)
            finally:
                # This check is done twice, once also here before we even get
                # the integration. Otherwise we have a high chance of getting
                # into a recursion error when the integration is resolved
                # (this also is slower).
                if record.name not in _IGNORED_LOGGERS:
                    integration = Hub.current.get_integration(LoggingIntegration)
                    if integration is not None:
                        integration._handle_record(record)

        logging.Logger.callHandlers = sentry_patched_callhandlers  # type: ignore
def _can_record(record):
    # type: (LogRecord) -> bool
    """Prevents ignored loggers from recording: returns False when the
    record's logger name matches any ignored-logger pattern."""
    return not any(
        fnmatch(record.name, pattern) for pattern in _IGNORED_LOGGERS
    )
def _breadcrumb_from_record(record):
    # type: (LogRecord) -> Dict[str, Any]
    """Convert a LogRecord into a Sentry breadcrumb dict."""
    return {
        "type": "log",
        "level": _logging_to_event_level(record.levelname),
        "category": record.name,
        # record.message is only populated after Handler.format() has run on
        # the record (BreadcrumbHandler.emit calls self.format first).
        "message": record.message,
        "timestamp": datetime.datetime.utcfromtimestamp(record.created),
        "data": _extra_from_record(record),
    }
def _logging_to_event_level(levelname):
# type: (str) -> str
return {"critical": "fatal"}.get(levelname.lower(), levelname.lower())
# Attributes every LogRecord carries (or that Sentry handles specially);
# anything NOT in this set is treated as user-supplied "extra" data by
# _extra_from_record below.
# NOTE(review): "linenno" looks like a typo for "lineno" (which is also
# listed); it is kept as-is because removing it would change which record
# attributes end up in event extras -- confirm against upstream.
COMMON_RECORD_ATTRS = frozenset(
    (
        "args",
        "created",
        "exc_info",
        "exc_text",
        "filename",
        "funcName",
        "levelname",
        "levelno",
        "linenno",
        "lineno",
        "message",
        "module",
        "msecs",
        "msg",
        "name",
        "pathname",
        "process",
        "processName",
        "relativeCreated",
        "stack",
        "tags",
        "thread",
        "threadName",
        "stack_info",
    )
)
def _extra_from_record(record):
    # type: (LogRecord) -> Dict[str, Any]
    # (type comment corrected from Dict[str, None]: values are arbitrary
    # user-supplied objects, not None)
    """Collect non-standard, non-underscore LogRecord attributes to be used
    as the event's "extra" data."""
    return {
        k: v
        for k, v in iteritems(vars(record))
        if k not in COMMON_RECORD_ATTRS
        and (not isinstance(k, str) or not k.startswith("_"))
    }
class EventHandler(logging.Handler, object):
    """
    A logging handler that emits Sentry events for each log record

    Note that you do not have to use this class if the logging integration is enabled, which it is by default.
    """

    def emit(self, record):
        # type: (LogRecord) -> Any
        with capture_internal_exceptions():
            # format() populates record.message; failures in our own code
            # must never break the user's logging.
            self.format(record)
            return self._emit(record)

    def _emit(self, record):
        # type: (LogRecord) -> None
        if not _can_record(record):
            return

        hub = Hub.current
        if hub.client is None:
            # No client bound to the current hub: nothing to send to.
            return

        client_options = hub.client.options

        # exc_info might be None or (None, None, None)
        #
        # exc_info may also be any falsy value due to Python stdlib being
        # liberal with what it receives and Celery's billiard being "liberal"
        # with what it sends. See
        # https://github.com/getsentry/sentry-python/issues/904
        if record.exc_info and record.exc_info[0] is not None:
            # A real exception is attached: build a full exception event.
            event, hint = event_from_exception(
                record.exc_info,
                client_options=client_options,
                mechanism={"type": "logging", "handled": True},
            )
        elif record.exc_info and record.exc_info[0] is None:
            # exc_info was requested but there is no active exception
            # (e.g. logger.exception() outside an except block): attach the
            # current stacktrace instead.
            event = {}
            hint = {}
            with capture_internal_exceptions():
                event["threads"] = {
                    "values": [
                        {
                            "stacktrace": current_stacktrace(
                                client_options["with_locals"]
                            ),
                            "crashed": False,
                            "current": True,
                        }
                    ]
                }
        else:
            # Plain log message without exception information.
            event = {}
            hint = {}

        hint["log_record"] = record

        event["level"] = _logging_to_event_level(record.levelname)
        event["logger"] = record.name
        event["logentry"] = {"message": to_string(record.msg), "params": record.args}
        event["extra"] = _extra_from_record(record)

        hub.capture_event(event, hint=hint)


# Legacy name
SentryHandler = EventHandler
class BreadcrumbHandler(logging.Handler, object):
    """
    A logging handler that records breadcrumbs for each log record.

    Note that you do not have to use this class if the logging integration is enabled, which it is by default.
    """

    def emit(self, record):
        # type: (LogRecord) -> Any
        with capture_internal_exceptions():
            # format() populates record.message, which the breadcrumb reads.
            self.format(record)
            return self._emit(record)

    def _emit(self, record):
        # type: (LogRecord) -> None
        if not _can_record(record):
            return

        Hub.current.add_breadcrumb(
            _breadcrumb_from_record(record), hint={"log_record": record}
        )
sentry-python-1.4.3/sentry_sdk/integrations/modules.py 0000664 0000000 0000000 00000002561 14125057761 0023313 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
from sentry_sdk.hub import Hub
from sentry_sdk.integrations import Integration
from sentry_sdk.scope import add_global_event_processor
from sentry_sdk._types import MYPY
if MYPY:
from typing import Any
from typing import Dict
from typing import Tuple
from typing import Iterator
from sentry_sdk._types import Event
_installed_modules = None
def _generate_installed_modules():
# type: () -> Iterator[Tuple[str, str]]
try:
import pkg_resources
except ImportError:
return
for info in pkg_resources.working_set:
yield info.key, info.version
def _get_installed_modules():
# type: () -> Dict[str, str]
global _installed_modules
if _installed_modules is None:
_installed_modules = dict(_generate_installed_modules())
return _installed_modules
class ModulesIntegration(Integration):
    """Attaches the list of installed Python packages to events."""

    identifier = "modules"

    @staticmethod
    def setup_once():
        # type: () -> None
        @add_global_event_processor
        def processor(event, hint):
            # type: (Event, Any) -> Dict[str, Any]
            # Skip transaction payloads; only error events get the module list.
            if event.get("type") == "transaction":
                return event

            if Hub.current.get_integration(ModulesIntegration) is None:
                return event

            event["modules"] = _get_installed_modules()
            return event
sentry-python-1.4.3/sentry_sdk/integrations/pure_eval.py 0000664 0000000 0000000 00000010650 14125057761 0023623 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
import ast
from sentry_sdk import Hub, serializer
from sentry_sdk._types import MYPY
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.scope import add_global_event_processor
from sentry_sdk.utils import walk_exception_chain, iter_stacks
if MYPY:
from typing import Optional, Dict, Any, Tuple, List
from types import FrameType
from sentry_sdk._types import Event, Hint
try:
import executing
except ImportError:
raise DidNotEnable("executing is not installed")
try:
import pure_eval
except ImportError:
raise DidNotEnable("pure_eval is not installed")
try:
# Used implicitly, just testing it's available
import asttokens # noqa
except ImportError:
raise DidNotEnable("asttokens is not installed")
class PureEvalIntegration(Integration):
    """Enriches stacktrace frames with expression values computed by pure_eval."""

    identifier = "pure_eval"

    @staticmethod
    def setup_once():
        # type: () -> None
        @add_global_event_processor
        def add_executing_info(event, hint):
            # type: (Event, Optional[Hint]) -> Optional[Event]
            if Hub.current.get_integration(PureEvalIntegration) is None:
                return event

            if hint is None:
                return event

            exc_info = hint.get("exc_info", None)

            if exc_info is None:
                return event

            exception = event.get("exception", None)

            if exception is None:
                return event

            values = exception.get("values", None)

            if values is None:
                return event

            # reversed(): the event's exception values and the walked
            # exception chain are presumably in opposite orders -- pairing
            # them up requires reversing one side. TODO(review): confirm.
            for exception, (_exc_type, _exc_value, exc_tb) in zip(
                reversed(values), walk_exception_chain(exc_info)
            ):
                # Only frames with a function name participate in pairing.
                sentry_frames = [
                    frame
                    for frame in exception.get("stacktrace", {}).get("frames", [])
                    if frame.get("function")
                ]
                tbs = list(iter_stacks(exc_tb))
                if len(sentry_frames) != len(tbs):
                    # Frame lists don't line up; pairing would attach
                    # variables to the wrong frames, so skip this exception.
                    continue
                for sentry_frame, tb in zip(sentry_frames, tbs):
                    sentry_frame["vars"] = (
                        pure_eval_frame(tb.tb_frame) or sentry_frame["vars"]
                    )
            return event
def pure_eval_frame(frame):
    # type: (FrameType) -> Dict[str, Any]
    """Evaluate side-effect-free expressions around the executing statement of
    ``frame`` and return a mapping of source text -> value (empty dict when
    source or statements cannot be determined)."""
    source = executing.Source.for_frame(frame)
    if not source.tree:
        return {}

    statements = source.statements_at_line(frame.f_lineno)
    if not statements:
        return {}

    scope = stmt = list(statements)[0]
    while True:
        # Get the parent first in case the original statement is already
        # a function definition, e.g. if we're calling a decorator
        # In that case we still want the surrounding scope, not that function
        scope = scope.parent
        if isinstance(scope, (ast.FunctionDef, ast.ClassDef, ast.Module)):
            break

    evaluator = pure_eval.Evaluator.from_frame(frame)
    expressions = evaluator.interesting_expressions_grouped(scope)

    def closeness(expression):
        # type: (Tuple[List[Any], Any]) -> Tuple[int, int]
        # Prioritise expressions with a node closer to the statement executed
        # without being after that statement
        # A higher return value is better - the expression will appear
        # earlier in the list of values and is less likely to be trimmed
        nodes, _value = expression

        def start(n):
            # type: (ast.expr) -> Tuple[int, int]
            # (lineno, col_offset) sorts in source order.
            return (n.lineno, n.col_offset)

        nodes_before_stmt = [
            node for node in nodes if start(node) < stmt.last_token.end
        ]
        if nodes_before_stmt:
            # The position of the last node before or in the statement
            return max(start(node) for node in nodes_before_stmt)
        else:
            # The position of the first node after the statement
            # Negative means it's always lower priority than nodes that come before
            # Less negative means closer to the statement and higher priority
            lineno, col_offset = min(start(node) for node in nodes)
            return (-lineno, -col_offset)

    # This adds the first_token and last_token attributes to nodes
    atok = source.asttokens()
    expressions.sort(key=closeness, reverse=True)
    return {
        atok.get_text(nodes[0]): value
        for nodes, value in expressions[: serializer.MAX_DATABAG_BREADTH]
    }
sentry-python-1.4.3/sentry_sdk/integrations/pyramid.py 0000664 0000000 0000000 00000015640 14125057761 0023312 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
import os
import sys
import weakref
from pyramid.httpexceptions import HTTPException
from pyramid.request import Request
from sentry_sdk.hub import Hub, _should_send_default_pii
from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
from sentry_sdk._compat import reraise, iteritems
from sentry_sdk.integrations import Integration
from sentry_sdk.integrations._wsgi_common import RequestExtractor
from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
from sentry_sdk._types import MYPY
if MYPY:
from pyramid.response import Response
from typing import Any
from sentry_sdk.integrations.wsgi import _ScopedResponse
from typing import Callable
from typing import Dict
from typing import Optional
from webob.cookies import RequestCookies # type: ignore
from webob.compat import cgi_FieldStorage # type: ignore
from sentry_sdk.utils import ExcInfo
from sentry_sdk._types import EventProcessor
# Pyramid >= 1.5 exposes the authenticated user id as a request property;
# older versions provide a function in pyramid.security instead.
if getattr(Request, "authenticated_userid", None):

    def authenticated_userid(request):
        # type: (Request) -> Optional[Any]
        return request.authenticated_userid

else:
    # bw-compat for pyramid < 1.5
    from pyramid.security import authenticated_userid  # type: ignore

# Allowed values for PyramidIntegration(transaction_style=...).
TRANSACTION_STYLE_VALUES = ("route_name", "route_pattern")
class PyramidIntegration(Integration):
    """Captures view exceptions and request data for Pyramid applications."""

    identifier = "pyramid"

    transaction_style = None

    def __init__(self, transaction_style="route_name"):
        # type: (str) -> None
        # transaction_style picks which attribute of the matched route names
        # the Sentry transaction: its name or its URL pattern.
        if transaction_style not in TRANSACTION_STYLE_VALUES:
            raise ValueError(
                "Invalid value for transaction_style: %s (must be in %s)"
                % (transaction_style, TRANSACTION_STYLE_VALUES)
            )
        self.transaction_style = transaction_style

    @staticmethod
    def setup_once():
        # type: () -> None
        from pyramid import router
        from pyramid.request import Request

        # Patch view invocation to set the transaction name and register the
        # per-request event processor.
        old_call_view = router._call_view

        def sentry_patched_call_view(registry, request, *args, **kwargs):
            # type: (Any, Request, *Any, **Any) -> Response
            hub = Hub.current
            integration = hub.get_integration(PyramidIntegration)

            if integration is not None:
                with hub.configure_scope() as scope:
                    try:
                        if integration.transaction_style == "route_name":
                            scope.transaction = request.matched_route.name
                        elif integration.transaction_style == "route_pattern":
                            scope.transaction = request.matched_route.pattern
                    except Exception:
                        # matched_route may be absent; keep the default name.
                        pass

                    scope.add_event_processor(
                        _make_event_processor(weakref.ref(request), integration)
                    )

            return old_call_view(registry, request, *args, **kwargs)

        router._call_view = sentry_patched_call_view

        # Patch exception-view invocation so errors that Pyramid turns into
        # 500 responses are still reported.
        if hasattr(Request, "invoke_exception_view"):
            old_invoke_exception_view = Request.invoke_exception_view

            def sentry_patched_invoke_exception_view(self, *args, **kwargs):
                # type: (Request, *Any, **Any) -> Any
                rv = old_invoke_exception_view(self, *args, **kwargs)

                if (
                    self.exc_info
                    and all(self.exc_info)
                    and rv.status_int == 500
                    and Hub.current.get_integration(PyramidIntegration) is not None
                ):
                    _capture_exception(self.exc_info)

                return rv

            Request.invoke_exception_view = sentry_patched_invoke_exception_view

        # Wrap the WSGI entry point so uncaught exceptions are captured and
        # the generic WSGI middleware handles scoping.
        old_wsgi_call = router.Router.__call__

        def sentry_patched_wsgi_call(self, environ, start_response):
            # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
            hub = Hub.current
            integration = hub.get_integration(PyramidIntegration)
            if integration is None:
                return old_wsgi_call(self, environ, start_response)

            def sentry_patched_inner_wsgi_call(environ, start_response):
                # type: (Dict[str, Any], Callable[..., Any]) -> Any
                try:
                    return old_wsgi_call(self, environ, start_response)
                except Exception:
                    einfo = sys.exc_info()
                    _capture_exception(einfo)
                    reraise(*einfo)

            return SentryWsgiMiddleware(sentry_patched_inner_wsgi_call)(
                environ, start_response
            )

        router.Router.__call__ = sentry_patched_wsgi_call
def _capture_exception(exc_info):
    # type: (ExcInfo) -> None
    """Report an exception to Sentry, skipping Pyramid HTTPExceptions (those
    are regular HTTP responses, not errors)."""
    if exc_info[0] is None or issubclass(exc_info[0], HTTPException):
        return
    hub = Hub.current
    if hub.get_integration(PyramidIntegration) is None:
        return

    # If an integration is there, a client has to be there.
    client = hub.client  # type: Any

    event, hint = event_from_exception(
        exc_info,
        client_options=client.options,
        mechanism={"type": "pyramid", "handled": False},
    )

    hub.capture_event(event, hint=hint)
class PyramidRequestExtractor(RequestExtractor):
    """Pulls request metadata out of a Pyramid ``Request`` for Sentry events."""

    def url(self):
        # type: () -> str
        return self.request.path_url

    def env(self):
        # type: () -> Dict[str, str]
        return self.request.environ

    def cookies(self):
        # type: () -> RequestCookies
        return self.request.cookies

    def raw_data(self):
        # type: () -> str
        return self.request.text

    def form(self):
        # type: () -> Dict[str, str]
        # POST entries without a ``filename`` attribute are plain form fields.
        fields = {}
        for field_name, field_value in iteritems(self.request.POST):
            if not getattr(field_value, "filename", None):
                fields[field_name] = field_value
        return fields

    def files(self):
        # type: () -> Dict[str, cgi_FieldStorage]
        # POST entries carrying a ``filename`` attribute are file uploads.
        uploads = {}
        for field_name, field_value in iteritems(self.request.POST):
            if getattr(field_value, "filename", None):
                uploads[field_name] = field_value
        return uploads

    def size_of_file(self, postdata):
        # type: (cgi_FieldStorage) -> int
        upload = postdata.file
        try:
            return os.fstat(upload.fileno()).st_size
        except Exception:
            # Best effort: report zero when the size cannot be determined.
            return 0
def _make_event_processor(weak_request, integration):
    # type: (Callable[[], Request], PyramidIntegration) -> EventProcessor
    """Build an event processor bound to a weakly-referenced Pyramid request."""
    def event_processor(event, hint):
        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
        request = weak_request()
        if request is None:
            # Request already garbage-collected; nothing to attach.
            return event

        with capture_internal_exceptions():
            PyramidRequestExtractor(request).extract_into_event(event)

        if _should_send_default_pii():
            with capture_internal_exceptions():
                # Only fill in the user id if nothing set it already.
                user_info = event.setdefault("user", {})
                user_info.setdefault("id", authenticated_userid(request))

        return event

    return event_processor
sentry-python-1.4.3/sentry_sdk/integrations/redis.py 0000664 0000000 0000000 00000006004 14125057761 0022745 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
from sentry_sdk import Hub
from sentry_sdk.utils import capture_internal_exceptions, logger
from sentry_sdk.integrations import Integration
from sentry_sdk._types import MYPY
if MYPY:
from typing import Any
# Commands whose first argument is the single key being operated on.
_SINGLE_KEY_COMMANDS = frozenset(
    ["decr", "decrby", "get", "incr", "incrby", "pttl", "set", "setex", "setnx", "ttl"]
)
# Variadic-key commands; the key is only tagged when exactly one is given.
_MULTI_KEY_COMMANDS = frozenset(["del", "touch", "unlink"])
def _patch_rediscluster():
    # type: () -> None
    """Instrument redis-py-cluster clients, if the library is importable."""
    try:
        import rediscluster  # type: ignore
    except ImportError:
        return

    patch_redis_client(rediscluster.RedisCluster)

    # up to v1.3.6, __version__ attribute is a tuple
    # from v2.0.0, __version__ is a string and VERSION a tuple
    version = getattr(rediscluster, "VERSION", rediscluster.__version__)

    # StrictRedisCluster was introduced in v0.2.0 and removed in v2.0.0
    # https://github.com/Grokzen/redis-py-cluster/blob/master/docs/release-notes.rst
    if (0, 2, 0) < version < (2, 0, 0):
        patch_redis_client(rediscluster.StrictRedisCluster)
class RedisIntegration(Integration):
    """Creates tracing spans for redis commands issued through supported clients."""

    identifier = "redis"

    @staticmethod
    def setup_once():
        # type: () -> None
        import redis

        patch_redis_client(redis.StrictRedis)

        # rb ("redis blaster") ships several client flavors; patch them all.
        try:
            import rb.clients  # type: ignore
        except ImportError:
            pass
        else:
            patch_redis_client(rb.clients.FanoutClient)
            patch_redis_client(rb.clients.MappingClient)
            patch_redis_client(rb.clients.RoutingClient)

        # rediscluster patching is best-effort; never break SDK setup over it.
        try:
            _patch_rediscluster()
        except Exception:
            logger.exception("Error occurred while patching `rediscluster` library")
def patch_redis_client(cls):
    # type: (Any) -> None
    """
    This function can be used to instrument custom redis client classes or
    subclasses.
    """
    original_execute_command = cls.execute_command

    def sentry_patched_execute_command(self, name, *args, **kwargs):
        # type: (Any, str, *Any, **Any) -> Any
        hub = Hub.current

        if hub.get_integration(RedisIntegration) is None:
            return original_execute_command(self, name, *args, **kwargs)

        # Fall back to the bare command name if building the full
        # description fails for any reason.
        description = name
        with capture_internal_exceptions():
            # Include at most the first 11 arguments in the description.
            description = " ".join(
                [name] + [repr(argument) for argument in args[:11]]
            )

        with hub.start_span(op="redis", description=description) as span:
            if name:
                span.set_tag("redis.command", name)

            if name and args:
                command = name.lower()
                if command in _SINGLE_KEY_COMMANDS or (
                    command in _MULTI_KEY_COMMANDS and len(args) == 1
                ):
                    span.set_tag("redis.key", args[0])

            return original_execute_command(self, name, *args, **kwargs)

    cls.execute_command = sentry_patched_execute_command
sentry-python-1.4.3/sentry_sdk/integrations/rq.py 0000664 0000000 0000000 00000011532 14125057761 0022263 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
import weakref
from sentry_sdk.hub import Hub
from sentry_sdk.integrations import DidNotEnable, Integration
from sentry_sdk.integrations.logging import ignore_logger
from sentry_sdk.tracing import Transaction
from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
try:
from rq.queue import Queue
from rq.timeouts import JobTimeoutException
from rq.version import VERSION as RQ_VERSION
from rq.worker import Worker
except ImportError:
raise DidNotEnable("RQ not installed")
from sentry_sdk._types import MYPY
if MYPY:
from typing import Any, Callable, Dict
from sentry_sdk._types import EventProcessor
from sentry_sdk.utils import ExcInfo
from rq.job import Job
class RqIntegration(Integration):
    """Captures job failures and traces job execution for the RQ task queue."""

    identifier = "rq"

    @staticmethod
    def setup_once():
        # type: () -> None
        try:
            version = tuple(map(int, RQ_VERSION.split(".")[:3]))
        except (ValueError, TypeError):
            raise DidNotEnable("Unparsable RQ version: {}".format(RQ_VERSION))

        if version < (0, 6):
            raise DidNotEnable("RQ 0.6 or newer is required.")

        # Wrap job execution in its own scope + transaction.
        old_perform_job = Worker.perform_job

        def sentry_patched_perform_job(self, job, *args, **kwargs):
            # type: (Any, Job, *Queue, **Any) -> bool
            hub = Hub.current
            integration = hub.get_integration(RqIntegration)

            if integration is None:
                return old_perform_job(self, job, *args, **kwargs)

            client = hub.client
            assert client is not None

            with hub.push_scope() as scope:
                scope.clear_breadcrumbs()
                scope.add_event_processor(_make_event_processor(weakref.ref(job)))

                # Continue the trace started by the enqueuing process, if any
                # (see sentry_patched_enqueue_job below).
                transaction = Transaction.continue_from_headers(
                    job.meta.get("_sentry_trace_headers") or {},
                    op="rq.task",
                    name="unknown RQ task",
                )

                with capture_internal_exceptions():
                    transaction.name = job.func_name

                with hub.start_transaction(
                    transaction, custom_sampling_context={"rq_job": job}
                ):
                    rv = old_perform_job(self, job, *args, **kwargs)

            if self.is_horse:
                # We're inside of a forked process and RQ is
                # about to call `os._exit`. Make sure that our
                # events get sent out.
                client.flush()

            return rv

        Worker.perform_job = sentry_patched_perform_job

        # Capture the exception whenever a job ends up failed.
        old_handle_exception = Worker.handle_exception

        def sentry_patched_handle_exception(self, job, *exc_info, **kwargs):
            # type: (Worker, Any, *Any, **Any) -> Any
            if job.is_failed:
                _capture_exception(exc_info)  # type: ignore

            return old_handle_exception(self, job, *exc_info, **kwargs)

        Worker.handle_exception = sentry_patched_handle_exception

        # Stash trace-propagation headers on the job when it is enqueued.
        old_enqueue_job = Queue.enqueue_job

        def sentry_patched_enqueue_job(self, job, **kwargs):
            # type: (Queue, Any, **Any) -> Any
            hub = Hub.current
            if hub.get_integration(RqIntegration) is not None:
                job.meta["_sentry_trace_headers"] = dict(
                    hub.iter_trace_propagation_headers()
                )

            return old_enqueue_job(self, job, **kwargs)

        Queue.enqueue_job = sentry_patched_enqueue_job

        ignore_logger("rq.worker")
def _make_event_processor(weak_job):
    # type: (Callable[[], Job]) -> EventProcessor
    """Build an event processor that attaches RQ job metadata to events."""
    def event_processor(event, hint):
        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
        job = weak_job()
        if job is not None:
            with capture_internal_exceptions():
                extra = event.setdefault("extra", {})
                extra["rq-job"] = {
                    "job_id": job.id,
                    "func": job.func_name,
                    "args": job.args,
                    "kwargs": job.kwargs,
                    "description": job.description,
                }

        if "exc_info" in hint:
            with capture_internal_exceptions():
                # Group all timeouts of the same task under one fingerprint.
                # NOTE(review): job may be None here; the resulting
                # AttributeError on job.func_name is swallowed by
                # capture_internal_exceptions.
                if issubclass(hint["exc_info"][0], JobTimeoutException):
                    event["fingerprint"] = ["rq", "JobTimeoutException", job.func_name]

        return event

    return event_processor
def _capture_exception(exc_info, **kwargs):
    # type: (ExcInfo, **Any) -> None
    """Report a job failure to Sentry with the "rq" mechanism."""
    hub = Hub.current
    if hub.get_integration(RqIntegration) is None:
        return

    # If an integration is there, a client has to be there.
    client = hub.client  # type: Any

    event, hint = event_from_exception(
        exc_info,
        client_options=client.options,
        mechanism={"type": "rq", "handled": False},
    )

    hub.capture_event(event, hint=hint)
sentry-python-1.4.3/sentry_sdk/integrations/sanic.py 0000664 0000000 0000000 00000020707 14125057761 0022742 0 ustar 00root root 0000000 0000000 import sys
import weakref
from inspect import isawaitable
from sentry_sdk._compat import urlparse, reraise
from sentry_sdk.hub import Hub
from sentry_sdk.utils import (
capture_internal_exceptions,
event_from_exception,
HAS_REAL_CONTEXTVARS,
CONTEXTVARS_ERROR_MESSAGE,
)
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.integrations._wsgi_common import RequestExtractor, _filter_headers
from sentry_sdk.integrations.logging import ignore_logger
from sentry_sdk._types import MYPY
if MYPY:
from typing import Any
from typing import Callable
from typing import Optional
from typing import Union
from typing import Tuple
from typing import Dict
from sanic.request import Request, RequestParameters
from sentry_sdk._types import Event, EventProcessor, Hint
try:
from sanic import Sanic, __version__ as SANIC_VERSION
from sanic.exceptions import SanicException
from sanic.router import Router
from sanic.handlers import ErrorHandler
except ImportError:
raise DidNotEnable("Sanic not installed")
class SanicIntegration(Integration):
    """Captures errors and request data for Sanic applications."""

    identifier = "sanic"

    @staticmethod
    def setup_once():
        # type: () -> None
        try:
            version = tuple(map(int, SANIC_VERSION.split(".")))
        except (TypeError, ValueError):
            raise DidNotEnable("Unparsable Sanic version: {}".format(SANIC_VERSION))

        if version < (0, 8):
            raise DidNotEnable("Sanic 0.8 or newer required.")

        if not HAS_REAL_CONTEXTVARS:
            # We better have contextvars or we're going to leak state between
            # requests.
            raise DidNotEnable(
                "The sanic integration for Sentry requires Python 3.7+ "
                " or the aiocontextvars package." + CONTEXTVARS_ERROR_MESSAGE
            )

        if SANIC_VERSION.startswith("0.8."):
            # Sanic 0.8 and older creates a logger named "root" and puts a
            # stringified version of every exception in there (without exc_info),
            # which our error deduplication can't detect.
            #
            # We explicitly check the version here because it is a very
            # invasive step to ignore this logger and not necessary in newer
            # versions at all.
            #
            # https://github.com/huge-success/sanic/issues/1332
            ignore_logger("root")

        # Wrap request handling in a fresh hub/scope per request.
        old_handle_request = Sanic.handle_request

        async def sentry_handle_request(self, request, *args, **kwargs):
            # type: (Any, Request, *Any, **Any) -> Any
            hub = Hub.current
            if hub.get_integration(SanicIntegration) is None:
                # NOTE(review): this returns the un-awaited result of the
                # original coroutine function; presumably the caller awaits
                # the returned value -- confirm against Sanic internals.
                return old_handle_request(self, request, *args, **kwargs)

            weak_request = weakref.ref(request)

            with Hub(hub) as hub:
                with hub.configure_scope() as scope:
                    scope.clear_breadcrumbs()
                    scope.add_event_processor(_make_request_processor(weak_request))

                response = old_handle_request(self, request, *args, **kwargs)
                if isawaitable(response):
                    response = await response

                return response

        Sanic.handle_request = sentry_handle_request

        # Patch routing to name the transaction after the matched route.
        old_router_get = Router.get

        def sentry_router_get(self, *args):
            # type: (Any, Union[Any, Request]) -> Any
            rv = old_router_get(self, *args)
            hub = Hub.current
            if hub.get_integration(SanicIntegration) is not None:
                with capture_internal_exceptions():
                    with hub.configure_scope() as scope:
                        if version >= (21, 3):
                            # Sanic versions above and including 21.3 append the app name to the
                            # route name, and so we need to remove it from Route name so the
                            # transaction name is consistent across all versions
                            sanic_app_name = self.ctx.app.name
                            sanic_route = rv[0].name

                            if sanic_route.startswith("%s." % sanic_app_name):
                                # We add a 1 to the len of the sanic_app_name because there is a dot
                                # that joins app name and the route name
                                # Format: app_name.route_name
                                sanic_route = sanic_route[len(sanic_app_name) + 1 :]

                            scope.transaction = sanic_route
                        else:
                            scope.transaction = rv[0].__name__
            return rv

        Router.get = sentry_router_get

        # Capture exceptions before Sanic's own error handlers run, and also
        # wrap those handlers so errors raised inside them are reported.
        old_error_handler_lookup = ErrorHandler.lookup

        def sentry_error_handler_lookup(self, exception):
            # type: (Any, Exception) -> Optional[object]
            _capture_exception(exception)
            old_error_handler = old_error_handler_lookup(self, exception)

            if old_error_handler is None:
                return None

            if Hub.current.get_integration(SanicIntegration) is None:
                return old_error_handler

            async def sentry_wrapped_error_handler(request, exception):
                # type: (Request, Exception) -> Any
                try:
                    response = old_error_handler(request, exception)
                    if isawaitable(response):
                        response = await response
                    return response
                except Exception:
                    # Report errors that occur in Sanic error handler. These
                    # exceptions will not even show up in Sanic's
                    # `sanic.exceptions` logger.
                    exc_info = sys.exc_info()
                    _capture_exception(exc_info)
                    reraise(*exc_info)

            return sentry_wrapped_error_handler

        ErrorHandler.lookup = sentry_error_handler_lookup
def _capture_exception(exception):
    # type: (Union[Tuple[Optional[type], Optional[BaseException], Any], BaseException]) -> None
    """Report an exception (or exc_info tuple) raised in a Sanic app.

    No-op unless the Sanic integration is active on the current hub.
    """
    current_hub = Hub.current
    if current_hub.get_integration(SanicIntegration) is None:
        return

    # An active integration implies an attached client.
    sdk_client = current_hub.client  # type: Any
    with capture_internal_exceptions():
        event, hint = event_from_exception(
            exception,
            client_options=sdk_client.options,
            mechanism={"type": "sanic", "handled": False},
        )
        current_hub.capture_event(event, hint=hint)
def _make_request_processor(weak_request):
    # type: (Callable[[], Request]) -> EventProcessor
    """Build an event processor that enriches events with Sanic request data.

    The request is held through a weakref so the processor does not keep the
    request (and its body) alive after Sanic is done with it.
    """

    def sanic_processor(event, hint):
        # type: (Event, Optional[Hint]) -> Optional[Event]
        # Drop events whose exception is a SanicException subclass —
        # presumably because Sanic handles those itself (TODO confirm).
        try:
            if hint and issubclass(hint["exc_info"][0], SanicException):
                return None
        except KeyError:
            pass

        request = weak_request()
        if request is None:
            # Request already garbage-collected; nothing to attach.
            return event

        with capture_internal_exceptions():
            extractor = SanicRequestExtractor(request)
            extractor.extract_into_event(event)

            request_info = event["request"]
            urlparts = urlparse.urlsplit(request.url)

            # URL is stored without the query string; the query is kept
            # separately under "query_string".
            request_info["url"] = "%s://%s%s" % (
                urlparts.scheme,
                urlparts.netloc,
                urlparts.path,
            )

            request_info["query_string"] = urlparts.query
            request_info["method"] = request.method
            request_info["env"] = {"REMOTE_ADDR": request.remote_addr}
            request_info["headers"] = _filter_headers(dict(request.headers))

        return event

    return sanic_processor
class SanicRequestExtractor(RequestExtractor):
    """RequestExtractor implementation backed by a Sanic request object."""

    def content_length(self):
        # type: () -> int
        # Sanic exposes a body of None when no payload was sent.
        if self.request.body is None:
            return 0
        return len(self.request.body)

    def cookies(self):
        # type: () -> Dict[str, str]
        return dict(self.request.cookies)

    def raw_data(self):
        # type: () -> bytes
        return self.request.body

    def form(self):
        # type: () -> RequestParameters
        return self.request.form

    def is_json(self):
        # type: () -> bool
        # NOTE(review): deliberately unimplemented — json() below is used
        # without a content-type check; confirm against RequestExtractor.
        raise NotImplementedError()

    def json(self):
        # type: () -> Optional[Any]
        return self.request.json

    def files(self):
        # type: () -> RequestParameters
        return self.request.files

    def size_of_file(self, file):
        # type: (Any) -> int
        return len(file.body or ())
sentry-python-1.4.3/sentry_sdk/integrations/serverless.py 0000664 0000000 0000000 00000003663 14125057761 0024044 0 ustar 00root root 0000000 0000000 import sys
from sentry_sdk.hub import Hub
from sentry_sdk.utils import event_from_exception
from sentry_sdk._compat import reraise
from sentry_sdk._functools import wraps
from sentry_sdk._types import MYPY
if MYPY:
    from typing import Any
    from typing import Callable
    from typing import TypeVar
    from typing import Union
    from typing import Optional
    from typing import overload

    # Any callable; used to type the decorator below.
    F = TypeVar("F", bound=Callable[..., Any])

else:

    def overload(x):
        # type: (F) -> F
        # Runtime stand-in for typing.overload so the decorated stubs below
        # are harmless when type checking is off.
        return x


# The two overloads describe both usages: bare @serverless_function and
# parameterized @serverless_function(flush=False).
@overload
def serverless_function(f, flush=True):  # noqa: F811
    # type: (F, bool) -> F
    pass


@overload
def serverless_function(f=None, flush=True):  # noqa: F811
    # type: (None, bool) -> Callable[[F], F]
    pass
def serverless_function(f=None, flush=True):  # noqa
    # type: (Optional[F], bool) -> Union[F, Callable[[F], F]]
    """Decorator that reports unhandled exceptions from a serverless handler.

    Works both bare (``@serverless_function``) and parameterized
    (``@serverless_function(flush=False)``).  Each call runs on a fresh
    scoped hub with cleared breadcrumbs; when ``flush`` is true, pending
    events are flushed before the wrapped function returns or raises.
    """

    def decorate(func):
        # type: (F) -> F
        @wraps(func)
        def wrapped(*args, **kwargs):
            # type: (*Any, **Any) -> Any
            with Hub(Hub.current) as hub:
                with hub.configure_scope() as scope:
                    scope.clear_breadcrumbs()

                try:
                    return func(*args, **kwargs)
                except Exception:
                    _capture_and_reraise()
                finally:
                    if flush:
                        _flush_client()

        return wrapped  # type: ignore

    # Bare decoration passes no ``f``; parameterized decoration passes it.
    return decorate if f is None else decorate(f)
def _capture_and_reraise():
    # type: () -> None
    """Send the currently-handled exception to Sentry, then re-raise it."""
    exc_info = sys.exc_info()
    hub = Hub.current
    client = hub.client
    if client is not None:
        event, hint = event_from_exception(
            exc_info,
            client_options=client.options,
            mechanism={"type": "serverless", "handled": False},
        )
        hub.capture_event(event, hint=hint)

    # Always propagate, whether or not a client was attached.
    reraise(*exc_info)
def _flush_client():
    # type: () -> None
    """Flush pending events on the current hub's transport."""
    return Hub.current.flush()
sentry-python-1.4.3/sentry_sdk/integrations/spark/ 0000775 0000000 0000000 00000000000 14125057761 0022405 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/sentry_sdk/integrations/spark/__init__.py 0000664 0000000 0000000 00000000320 14125057761 0024511 0 ustar 00root root 0000000 0000000 from sentry_sdk.integrations.spark.spark_driver import SparkIntegration
from sentry_sdk.integrations.spark.spark_worker import SparkWorkerIntegration
__all__ = ["SparkIntegration", "SparkWorkerIntegration"]
sentry-python-1.4.3/sentry_sdk/integrations/spark/spark_driver.py 0000664 0000000 0000000 00000020421 14125057761 0025451 0 ustar 00root root 0000000 0000000 from sentry_sdk import configure_scope
from sentry_sdk.hub import Hub
from sentry_sdk.integrations import Integration
from sentry_sdk.utils import capture_internal_exceptions
from sentry_sdk._types import MYPY
if MYPY:
from typing import Any
from typing import Optional
from sentry_sdk._types import Event, Hint
class SparkIntegration(Integration):
    """Sentry integration for the Spark driver process."""

    identifier = "spark"

    @staticmethod
    def setup_once():
        # type: () -> None
        # All instrumentation happens by patching SparkContext._do_init.
        patch_spark_context_init()
def _set_app_properties():
    # type: () -> None
    """Propagate driver identity to worker processes via local properties.

    Workers read ``sentry_app_name`` / ``sentry_application_id`` back out of
    the task context, giving the worker integration access to the app name
    and application id.
    """
    from pyspark import SparkContext

    active_context = SparkContext._active_spark_context
    if not active_context:
        return

    active_context.setLocalProperty("sentry_app_name", active_context.appName)
    active_context.setLocalProperty(
        "sentry_application_id", active_context.applicationId
    )
def _start_sentry_listener(sc):
    # type: (Any) -> None
    """
    Start java gateway server to add custom `SparkListener`
    """
    from pyspark.java_gateway import ensure_callback_server_started

    gw = sc._gateway
    # The callback server lets the JVM invoke our Python SentryListener.
    ensure_callback_server_started(gw)
    listener = SentryListener()
    sc._jsc.sc().addSparkListener(listener)
def patch_spark_context_init():
    # type: () -> None
    """Wrap SparkContext._do_init to start the Sentry listener and install an
    event processor that tags events with Spark driver metadata."""
    from pyspark import SparkContext

    spark_context_init = SparkContext._do_init

    def _sentry_patched_spark_context_init(self, *args, **kwargs):
        # type: (SparkContext, *Any, **Any) -> Optional[Any]
        init = spark_context_init(self, *args, **kwargs)

        if Hub.current.get_integration(SparkIntegration) is None:
            return init

        _start_sentry_listener(self)
        _set_app_properties()

        with configure_scope() as scope:

            @scope.add_event_processor
            def process_event(event, hint):
                # type: (Event, Hint) -> Optional[Event]
                with capture_internal_exceptions():
                    if Hub.current.get_integration(SparkIntegration) is None:
                        return event

                    # setdefault throughout so user-provided values win.
                    event.setdefault("user", {}).setdefault("id", self.sparkUser())

                    event.setdefault("tags", {}).setdefault(
                        "executor.id", self._conf.get("spark.executor.id")
                    )
                    event["tags"].setdefault(
                        "spark-submit.deployMode",
                        self._conf.get("spark.submit.deployMode"),
                    )
                    event["tags"].setdefault(
                        "driver.host", self._conf.get("spark.driver.host")
                    )
                    event["tags"].setdefault(
                        "driver.port", self._conf.get("spark.driver.port")
                    )
                    event["tags"].setdefault("spark_version", self.version)
                    event["tags"].setdefault("app_name", self.appName)
                    event["tags"].setdefault("application_id", self.applicationId)
                    event["tags"].setdefault("master", self.master)
                    event["tags"].setdefault("spark_home", self.sparkHome)

                    event.setdefault("extra", {}).setdefault("web_url", self.uiWebUrl)

                return event

        return init

    SparkContext._do_init = _sentry_patched_spark_context_init
class SparkListener(object):
    """No-op Python implementation of Spark's SparkListenerInterface.

    py4j dispatches every listener callback declared by the Java interface
    (see the nested ``Java`` class) to this object; subclasses override only
    the events they care about.
    """

    def onApplicationEnd(self, applicationEnd):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onApplicationStart(self, applicationStart):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onBlockManagerAdded(self, blockManagerAdded):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onBlockManagerRemoved(self, blockManagerRemoved):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onBlockUpdated(self, blockUpdated):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onEnvironmentUpdate(self, environmentUpdate):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onExecutorAdded(self, executorAdded):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onExecutorBlacklisted(self, executorBlacklisted):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onExecutorBlacklistedForStage(  # noqa: N802
        self, executorBlacklistedForStage  # noqa: N803
    ):
        # type: (Any) -> None
        pass

    def onExecutorMetricsUpdate(self, executorMetricsUpdate):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onExecutorRemoved(self, executorRemoved):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onJobEnd(self, jobEnd):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onJobStart(self, jobStart):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onNodeBlacklisted(self, nodeBlacklisted):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onNodeBlacklistedForStage(self, nodeBlacklistedForStage):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onNodeUnblacklisted(self, nodeUnblacklisted):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onOtherEvent(self, event):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onSpeculativeTaskSubmitted(self, speculativeTask):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onStageCompleted(self, stageCompleted):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onStageSubmitted(self, stageSubmitted):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onTaskEnd(self, taskEnd):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onTaskGettingResult(self, taskGettingResult):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onTaskStart(self, taskStart):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onUnpersistRDD(self, unpersistRDD):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    class Java:
        # Tells py4j which Java interface this Python class implements.
        implements = ["org.apache.spark.scheduler.SparkListenerInterface"]
class SentryListener(SparkListener):
    """SparkListener that records job/stage lifecycle events as breadcrumbs."""

    def __init__(self):
        # type: () -> None
        self.hub = Hub.current

    def onJobStart(self, jobStart):  # noqa: N802,N803
        # type: (Any) -> None
        self.hub.add_breadcrumb(
            level="info", message="Job {} Started".format(jobStart.jobId())
        )
        _set_app_properties()

    def onJobEnd(self, jobEnd):  # noqa: N802,N803
        # type: (Any) -> None
        result = jobEnd.jobResult().toString()
        if result == "JobSucceeded":
            level = "info"
            message = "Job {} Ended".format(jobEnd.jobId())
        else:
            level = "warning"
            message = "Job {} Failed".format(jobEnd.jobId())
        self.hub.add_breadcrumb(level=level, message=message, data={"result": result})

    def onStageSubmitted(self, stageSubmitted):  # noqa: N802,N803
        # type: (Any) -> None
        info = stageSubmitted.stageInfo()
        self.hub.add_breadcrumb(
            level="info",
            message="Stage {} Submitted".format(info.stageId()),
            data={"attemptId": info.attemptId(), "name": info.name()},
        )
        _set_app_properties()

    def onStageCompleted(self, stageCompleted):  # noqa: N802,N803
        # type: (Any) -> None
        from py4j.protocol import Py4JJavaError  # type: ignore

        info = stageCompleted.stageInfo()
        data = {"attemptId": info.attemptId(), "name": info.name()}
        # stageInfo.failureReason() is a Scala Option; calling .get() on an
        # empty Option raises through py4j, which signals a successful stage.
        try:
            data["reason"] = info.failureReason().get()
            message = "Stage {} Failed".format(info.stageId())
            level = "warning"
        except Py4JJavaError:
            message = "Stage {} Completed".format(info.stageId())
            level = "info"
        self.hub.add_breadcrumb(level=level, message=message, data=data)
sentry-python-1.4.3/sentry_sdk/integrations/spark/spark_worker.py 0000664 0000000 0000000 00000007614 14125057761 0025500 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
import sys
from sentry_sdk import configure_scope
from sentry_sdk.hub import Hub
from sentry_sdk.integrations import Integration
from sentry_sdk.utils import (
capture_internal_exceptions,
exc_info_from_error,
single_exception_from_error_tuple,
walk_exception_chain,
event_hint_with_exc_info,
)
from sentry_sdk._types import MYPY
if MYPY:
from typing import Any
from typing import Optional
from sentry_sdk._types import ExcInfo, Event, Hint
class SparkWorkerIntegration(Integration):
    """Sentry integration for Spark worker (executor) processes."""

    identifier = "spark_worker"

    @staticmethod
    def setup_once():
        # type: () -> None
        import pyspark.daemon as original_daemon

        # Replace the daemon's worker entry point so crashes are reported.
        original_daemon.worker_main = _sentry_worker_main
def _capture_exception(exc_info, hub):
    # type: (ExcInfo, Hub) -> None
    """Build and send an error event for a crashed Spark worker."""
    client = hub.client
    client_options = client.options  # type: ignore
    mechanism = {"type": "spark", "handled": False}

    exc_info = exc_info_from_error(exc_info)
    exc_type, exc_value, tb = exc_info

    exceptions = []
    # On failure the worker calls sys.exit(-1), so shutdown-related exception
    # types carry no signal and are skipped.
    for exc_type, exc_value, tb in walk_exception_chain(exc_info):
        if exc_type in (SystemExit, EOFError, ConnectionResetError):
            continue
        exceptions.append(
            single_exception_from_error_tuple(
                exc_type, exc_value, tb, client_options, mechanism
            )
        )

    if exceptions:
        exceptions.reverse()
        hint = event_hint_with_exc_info(exc_info)
        event = {"level": "error", "exception": {"values": exceptions}}

        _tag_task_context()

        hub.capture_event(event, hint=hint)
def _tag_task_context():
    # type: () -> None
    """Add an event processor that tags events with Spark task metadata
    (stage/partition/attempt ids and properties propagated from the driver)."""
    from pyspark.taskcontext import TaskContext

    with configure_scope() as scope:

        @scope.add_event_processor
        def process_event(event, hint):
            # type: (Event, Hint) -> Optional[Event]
            with capture_internal_exceptions():
                integration = Hub.current.get_integration(SparkWorkerIntegration)
                task_context = TaskContext.get()

                # TaskContext.get() returns None outside of a running task.
                if integration is None or task_context is None:
                    return event

                event.setdefault("tags", {}).setdefault(
                    "stageId", str(task_context.stageId())
                )
                event["tags"].setdefault("partitionId", str(task_context.partitionId()))
                event["tags"].setdefault(
                    "attemptNumber", str(task_context.attemptNumber())
                )
                event["tags"].setdefault(
                    "taskAttemptId", str(task_context.taskAttemptId())
                )

                if task_context._localProperties:
                    # These keys are set by the driver-side integration
                    # (_set_app_properties in spark_driver.py).
                    if "sentry_app_name" in task_context._localProperties:
                        event["tags"].setdefault(
                            "app_name", task_context._localProperties["sentry_app_name"]
                        )
                        event["tags"].setdefault(
                            "application_id",
                            task_context._localProperties["sentry_application_id"],
                        )

                    if "callSite.short" in task_context._localProperties:
                        event.setdefault("extra", {}).setdefault(
                            "callSite", task_context._localProperties["callSite.short"]
                        )

            return event
def _sentry_worker_main(*args, **kwargs):
    # type: (*Optional[Any], **Optional[Any]) -> None
    """Replacement for pyspark.daemon.worker_main that reports crashes."""
    import pyspark.worker as original_worker

    try:
        original_worker.main(*args, **kwargs)
    except SystemExit:
        # Workers exit via sys.exit on failure; report the underlying error
        # if the integration is active, then let SystemExit propagate.
        if Hub.current.get_integration(SparkWorkerIntegration) is not None:
            hub = Hub.current
            exc_info = sys.exc_info()
            with capture_internal_exceptions():
                _capture_exception(exc_info, hub)
sentry-python-1.4.3/sentry_sdk/integrations/sqlalchemy.py 0000664 0000000 0000000 00000005601 14125057761 0024003 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
from sentry_sdk._types import MYPY
from sentry_sdk.hub import Hub
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.tracing_utils import record_sql_queries
try:
from sqlalchemy.engine import Engine # type: ignore
from sqlalchemy.event import listen # type: ignore
from sqlalchemy import __version__ as SQLALCHEMY_VERSION # type: ignore
except ImportError:
raise DidNotEnable("SQLAlchemy not installed.")
if MYPY:
from typing import Any
from typing import ContextManager
from typing import Optional
from sentry_sdk.tracing import Span
class SqlalchemyIntegration(Integration):
    """Instruments SQLAlchemy Engine cursor execution with tracing spans."""

    identifier = "sqlalchemy"

    @staticmethod
    def setup_once():
        # type: () -> None
        """Validate the installed SQLAlchemy version and attach engine hooks.

        Raises:
            DidNotEnable: if the version cannot be parsed at all, or the
                installed SQLAlchemy is older than 1.2.
        """
        try:
            # Keep only the leading numeric portion of each dot-separated
            # component so pre-release versions such as "1.4.0b1" or
            # "2.0.0rc1" parse correctly.  The previous split("b") approach
            # only understood "b" beta markers and raised DidNotEnable for
            # "rc"/"dev" style suffixes.
            parts = []
            for part in SQLALCHEMY_VERSION.split("."):
                digits = ""
                for char in part:
                    if not char.isdigit():
                        break
                    digits += char
                if not digits:
                    break
                parts.append(int(digits))
            version = tuple(parts)
            if not version:
                raise ValueError
        except (TypeError, ValueError):
            raise DidNotEnable(
                "Unparsable SQLAlchemy version: {}".format(SQLALCHEMY_VERSION)
            )

        if version < (1, 2):
            raise DidNotEnable("SQLAlchemy 1.2 or newer required.")

        listen(Engine, "before_cursor_execute", _before_cursor_execute)
        listen(Engine, "after_cursor_execute", _after_cursor_execute)
        listen(Engine, "handle_error", _handle_error)
def _before_cursor_execute(
    conn, cursor, statement, parameters, context, executemany, *args
):
    # type: (Any, Any, Any, Any, Any, bool, *Any) -> None
    """Engine hook: open a tracing span just before a statement executes."""
    hub = Hub.current
    if hub.get_integration(SqlalchemyIntegration) is None:
        return

    ctx_mgr = record_sql_queries(
        hub,
        cursor,
        statement,
        parameters,
        paramstyle=context and context.dialect and context.dialect.paramstyle or None,
        executemany=executemany,
    )
    # Stash the manager on the connection so _after_cursor_execute /
    # _handle_error can close the span for this statement.
    conn._sentry_sql_span_manager = ctx_mgr

    span = ctx_mgr.__enter__()

    if span is not None:
        conn._sentry_sql_span = span
def _after_cursor_execute(conn, cursor, statement, *args):
    # type: (Any, Any, Any, *Any) -> None
    """Engine hook: close the span opened in _before_cursor_execute."""
    ctx_mgr = getattr(
        conn, "_sentry_sql_span_manager", None
    )  # type: ContextManager[Any]

    if ctx_mgr is not None:
        # Clear the slot before exiting so the manager is only closed once.
        conn._sentry_sql_span_manager = None
        ctx_mgr.__exit__(None, None, None)
def _handle_error(context, *args):
    # type: (Any, *Any) -> None
    """Engine hook: mark the active SQL span failed and close it."""
    conn = context.connection
    span = getattr(conn, "_sentry_sql_span", None)  # type: Optional[Span]

    if span is not None:
        span.set_status("internal_error")

    # _after_cursor_execute does not get called for crashing SQL stmts. Judging
    # from SQLAlchemy codebase it does seem like any error coming into this
    # handler is going to be fatal.
    ctx_mgr = getattr(
        conn, "_sentry_sql_span_manager", None
    )  # type: ContextManager[Any]

    if ctx_mgr is not None:
        conn._sentry_sql_span_manager = None
        ctx_mgr.__exit__(None, None, None)
sentry-python-1.4.3/sentry_sdk/integrations/stdlib.py 0000664 0000000 0000000 00000016744 14125057761 0023134 0 ustar 00root root 0000000 0000000 import os
import subprocess
import sys
import platform
from sentry_sdk.hub import Hub
from sentry_sdk.integrations import Integration
from sentry_sdk.scope import add_global_event_processor
from sentry_sdk.tracing_utils import EnvironHeaders
from sentry_sdk.utils import capture_internal_exceptions, logger, safe_repr
from sentry_sdk._types import MYPY
if MYPY:
from typing import Any
from typing import Callable
from typing import Dict
from typing import Optional
from typing import List
from sentry_sdk._types import Event, Hint
try:
from httplib import HTTPConnection # type: ignore
except ImportError:
from http.client import HTTPConnection
# Static description of the running interpreter, attached to events as the
# "runtime" context by StdlibIntegration below.
_RUNTIME_CONTEXT = {
    "name": platform.python_implementation(),
    "version": "%s.%s.%s" % (sys.version_info[:3]),
    "build": sys.version,
}
class StdlibIntegration(Integration):
    """Instruments stdlib HTTP clients and subprocess, and tags the runtime."""

    identifier = "stdlib"

    @staticmethod
    def setup_once():
        # type: () -> None
        _install_httplib()
        _install_subprocess()

        @add_global_event_processor
        def add_python_runtime_context(event, hint):
            # type: (Event, Hint) -> Optional[Event]
            # Attach interpreter info unless the event already carries a
            # "runtime" context.
            if Hub.current.get_integration(StdlibIntegration) is not None:
                contexts = event.setdefault("contexts", {})
                if isinstance(contexts, dict) and "runtime" not in contexts:
                    contexts["runtime"] = _RUNTIME_CONTEXT

            return event
def _install_httplib():
    # type: () -> None
    """Patch HTTPConnection so each outgoing request gets a tracing span and
    trace propagation headers."""
    real_putrequest = HTTPConnection.putrequest
    real_getresponse = HTTPConnection.getresponse

    def putrequest(self, method, url, *args, **kwargs):
        # type: (HTTPConnection, str, str, *Any, **Any) -> Any
        hub = Hub.current
        if hub.get_integration(StdlibIntegration) is None:
            return real_putrequest(self, method, url, *args, **kwargs)

        host = self.host
        port = self.port
        default_port = self.default_port

        # Reconstruct an absolute URL; `url` may be just the path component.
        real_url = url
        if not real_url.startswith(("http://", "https://")):
            real_url = "%s://%s%s%s" % (
                default_port == 443 and "https" or "http",
                host,
                port != default_port and ":%s" % port or "",
                url,
            )

        span = hub.start_span(op="http", description="%s %s" % (method, real_url))

        span.set_data("method", method)
        span.set_data("url", real_url)

        rv = real_putrequest(self, method, url, *args, **kwargs)

        for key, value in hub.iter_trace_propagation_headers(span):
            logger.debug(
                "[Tracing] Adding `{key}` header {value} to outgoing request to {real_url}.".format(
                    key=key, value=value, real_url=real_url
                )
            )
            self.putheader(key, value)

        # The span is finished later, in getresponse().
        self._sentrysdk_span = span

        return rv

    def getresponse(self, *args, **kwargs):
        # type: (HTTPConnection, *Any, **Any) -> Any
        span = getattr(self, "_sentrysdk_span", None)

        if span is None:
            return real_getresponse(self, *args, **kwargs)

        rv = real_getresponse(self, *args, **kwargs)

        span.set_data("status_code", rv.status)
        span.set_http_status(int(rv.status))
        span.set_data("reason", rv.reason)
        span.finish()

        return rv

    HTTPConnection.putrequest = putrequest
    HTTPConnection.getresponse = getresponse
def _init_argument(args, kwargs, name, position, setdefault_callback=None):
# type: (List[Any], Dict[Any, Any], str, int, Optional[Callable[[Any], Any]]) -> Any
"""
given (*args, **kwargs) of a function call, retrieve (and optionally set a
default for) an argument by either name or position.
This is useful for wrapping functions with complex type signatures and
extracting a few arguments without needing to redefine that function's
entire type signature.
"""
if name in kwargs:
rv = kwargs[name]
if setdefault_callback is not None:
rv = setdefault_callback(rv)
if rv is not None:
kwargs[name] = rv
elif position < len(args):
rv = args[position]
if setdefault_callback is not None:
rv = setdefault_callback(rv)
if rv is not None:
args[position] = rv
else:
rv = setdefault_callback and setdefault_callback(None)
if rv is not None:
kwargs[name] = rv
return rv
def _install_subprocess():
    # type: () -> None
    """Patch subprocess.Popen (__init__/wait/communicate) with tracing spans
    and trace propagation via SUBPROCESS_* environment variables."""
    old_popen_init = subprocess.Popen.__init__

    def sentry_patched_popen_init(self, *a, **kw):
        # type: (subprocess.Popen[Any], *Any, **Any) -> None
        hub = Hub.current
        if hub.get_integration(StdlibIntegration) is None:
            return old_popen_init(self, *a, **kw)  # type: ignore

        # Convert from tuple to list to be able to set values.
        a = list(a)

        # Positions 0/9/10 correspond to args/cwd/env in Popen's signature.
        args = _init_argument(a, kw, "args", 0) or []
        cwd = _init_argument(a, kw, "cwd", 9)

        # if args is not a list or tuple (and e.g. some iterator instead),
        # let's not use it at all. There are too many things that can go wrong
        # when trying to collect an iterator into a list and setting that list
        # into `a` again.
        #
        # Also invocations where `args` is not a sequence are not actually
        # legal. They just happen to work under CPython.
        description = None

        if isinstance(args, (list, tuple)) and len(args) < 100:
            with capture_internal_exceptions():
                description = " ".join(map(str, args))

        if description is None:
            description = safe_repr(args)

        env = None

        with hub.start_span(op="subprocess", description=description) as span:
            # Propagate the trace to the child process via SUBPROCESS_* env
            # vars (read back by get_subprocess_traceparent_headers()).
            for k, v in hub.iter_trace_propagation_headers(span):
                if env is None:
                    env = _init_argument(
                        a, kw, "env", 10, lambda x: dict(x or os.environ)
                    )
                env["SUBPROCESS_" + k.upper().replace("-", "_")] = v

            if cwd:
                span.set_data("subprocess.cwd", cwd)

            rv = old_popen_init(self, *a, **kw)  # type: ignore

            span.set_tag("subprocess.pid", self.pid)
            return rv

    subprocess.Popen.__init__ = sentry_patched_popen_init  # type: ignore

    old_popen_wait = subprocess.Popen.wait

    def sentry_patched_popen_wait(self, *a, **kw):
        # type: (subprocess.Popen[Any], *Any, **Any) -> Any
        hub = Hub.current

        if hub.get_integration(StdlibIntegration) is None:
            return old_popen_wait(self, *a, **kw)

        with hub.start_span(op="subprocess.wait") as span:
            span.set_tag("subprocess.pid", self.pid)
            return old_popen_wait(self, *a, **kw)

    subprocess.Popen.wait = sentry_patched_popen_wait  # type: ignore

    old_popen_communicate = subprocess.Popen.communicate

    def sentry_patched_popen_communicate(self, *a, **kw):
        # type: (subprocess.Popen[Any], *Any, **Any) -> Any
        hub = Hub.current

        if hub.get_integration(StdlibIntegration) is None:
            return old_popen_communicate(self, *a, **kw)

        with hub.start_span(op="subprocess.communicate") as span:
            span.set_tag("subprocess.pid", self.pid)
            return old_popen_communicate(self, *a, **kw)

    subprocess.Popen.communicate = sentry_patched_popen_communicate  # type: ignore
def get_subprocess_traceparent_headers():
    # type: () -> EnvironHeaders
    """Read trace propagation headers from the SUBPROCESS_* environment
    variables injected by sentry_patched_popen_init in the parent process."""
    return EnvironHeaders(os.environ, prefix="SUBPROCESS_")
sentry-python-1.4.3/sentry_sdk/integrations/threading.py 0000664 0000000 0000000 00000005450 14125057761 0023610 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
import sys
from threading import Thread, current_thread
from sentry_sdk import Hub
from sentry_sdk._compat import reraise
from sentry_sdk._types import MYPY
from sentry_sdk.integrations import Integration
from sentry_sdk.utils import event_from_exception, capture_internal_exceptions
if MYPY:
from typing import Any
from typing import TypeVar
from typing import Callable
from typing import Optional
from sentry_sdk._types import ExcInfo
F = TypeVar("F", bound=Callable[..., Any])
class ThreadingIntegration(Integration):
    """Captures unhandled exceptions raised inside threading.Thread.run.

    With ``propagate_hub=True`` the spawning thread's hub is carried into the
    new thread; otherwise each thread uses its own current hub.
    """

    identifier = "threading"

    def __init__(self, propagate_hub=False):
        # type: (bool) -> None
        self.propagate_hub = propagate_hub

    @staticmethod
    def setup_once():
        # type: () -> None
        old_start = Thread.start

        def sentry_start(self, *a, **kw):
            # type: (Thread, *Any, **Any) -> Any
            hub = Hub.current
            integration = hub.get_integration(ThreadingIntegration)
            if integration is not None:
                if not integration.propagate_hub:
                    hub_ = None
                else:
                    hub_ = Hub(hub)
                # Patching instance methods in `start()` creates a reference cycle if
                # done in a naive way. See
                # https://github.com/getsentry/sentry-python/pull/434
                #
                # In threading module, using current_thread API will access current thread instance
                # without holding it to avoid a reference cycle in an easier way.
                with capture_internal_exceptions():
                    new_run = _wrap_run(hub_, getattr(self.run, "__func__", self.run))
                    self.run = new_run  # type: ignore

            return old_start(self, *a, **kw)  # type: ignore

        Thread.start = sentry_start  # type: ignore
def _wrap_run(parent_hub, old_run_func):
    # type: (Optional[Hub], F) -> F
    """Wrap Thread.run so exceptions raised in the thread are captured.

    ``parent_hub`` is the spawning thread's hub when propagate_hub is
    enabled; when None, the thread's own current hub is used.
    """

    def run(*a, **kw):
        # type: (*Any, **Any) -> Any
        hub = parent_hub or Hub.current
        with hub:
            try:
                # Fetch `self` via current_thread() instead of closing over
                # the thread object, avoiding a reference cycle (see the
                # comment in ThreadingIntegration.setup_once).
                self = current_thread()
                return old_run_func(self, *a, **kw)
            except Exception:
                reraise(*_capture_exception())

    return run  # type: ignore
def _capture_exception():
    # type: () -> ExcInfo
    """Report the in-flight exception to Sentry and return its exc_info."""
    exc_info = sys.exc_info()
    hub = Hub.current

    if hub.get_integration(ThreadingIntegration) is None:
        return exc_info

    # An active integration implies an attached client.
    client = hub.client  # type: Any
    event, hint = event_from_exception(
        exc_info,
        client_options=client.options,
        mechanism={"type": "threading", "handled": False},
    )
    hub.capture_event(event, hint=hint)
    return exc_info
sentry-python-1.4.3/sentry_sdk/integrations/tornado.py 0000664 0000000 0000000 00000016022 14125057761 0023306 0 ustar 00root root 0000000 0000000 import weakref
import contextlib
from inspect import iscoroutinefunction
from sentry_sdk.hub import Hub, _should_send_default_pii
from sentry_sdk.tracing import Transaction
from sentry_sdk.utils import (
HAS_REAL_CONTEXTVARS,
CONTEXTVARS_ERROR_MESSAGE,
event_from_exception,
capture_internal_exceptions,
transaction_from_function,
)
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.integrations._wsgi_common import (
RequestExtractor,
_filter_headers,
_is_json_content_type,
)
from sentry_sdk.integrations.logging import ignore_logger
from sentry_sdk._compat import iteritems
try:
from tornado import version_info as TORNADO_VERSION # type: ignore
from tornado.web import RequestHandler, HTTPError
from tornado.gen import coroutine
except ImportError:
raise DidNotEnable("Tornado not installed")
from sentry_sdk._types import MYPY
if MYPY:
from typing import Any
from typing import Optional
from typing import Dict
from typing import Callable
from typing import Generator
from sentry_sdk._types import EventProcessor
class TornadoIntegration(Integration):
    """Instruments Tornado's RequestHandler for error capture and tracing."""

    identifier = "tornado"

    @staticmethod
    def setup_once():
        # type: () -> None
        if TORNADO_VERSION < (5, 0):
            raise DidNotEnable("Tornado 5+ required")

        if not HAS_REAL_CONTEXTVARS:
            # Tornado is async. We better have contextvars or we're going to leak
            # state between requests.
            raise DidNotEnable(
                "The tornado integration for Sentry requires Python 3.7+ or the aiocontextvars package"
                + CONTEXTVARS_ERROR_MESSAGE
            )

        ignore_logger("tornado.access")

        old_execute = RequestHandler._execute  # type: ignore

        awaitable = iscoroutinefunction(old_execute)

        if awaitable:
            # Starting Tornado 6 RequestHandler._execute method is a standard Python coroutine (async/await)
            # In that case our method should be a coroutine function too
            async def sentry_execute_request_handler(self, *args, **kwargs):
                # type: (RequestHandler, *Any, **Any) -> Any
                with _handle_request_impl(self):
                    return await old_execute(self, *args, **kwargs)

        else:

            @coroutine  # type: ignore
            def sentry_execute_request_handler(self, *args, **kwargs):  # type: ignore
                # type: (RequestHandler, *Any, **Any) -> Any
                with _handle_request_impl(self):
                    result = yield from old_execute(self, *args, **kwargs)
                    return result

        RequestHandler._execute = sentry_execute_request_handler  # type: ignore

        old_log_exception = RequestHandler.log_exception

        def sentry_log_exception(self, ty, value, tb, *args, **kwargs):
            # type: (Any, type, BaseException, Any, *Any, **Any) -> Optional[Any]
            # Tornado funnels request exceptions through log_exception, which
            # makes it a reliable capture point.
            _capture_exception(ty, value, tb)
            return old_log_exception(self, ty, value, tb, *args, **kwargs)  # type: ignore

        RequestHandler.log_exception = sentry_log_exception  # type: ignore
@contextlib.contextmanager
def _handle_request_impl(self):
    # type: (RequestHandler) -> Generator[None, None, None]
    """Context manager wrapping a single Tornado request for Sentry.

    Pushes a scoped hub with cleared breadcrumbs, installs a request event
    processor and runs the request inside a transaction.  When the
    integration is not active, the request runs untouched.
    """
    hub = Hub.current
    integration = hub.get_integration(TornadoIntegration)

    if integration is None:
        yield
        # Stop here: without this `return`, contextlib would resume the
        # generator on exit, hit the second `yield` below, and raise
        # RuntimeError("generator didn't stop").
        return

    weak_handler = weakref.ref(self)

    with Hub(hub) as hub:
        with hub.configure_scope() as scope:
            scope.clear_breadcrumbs()
            processor = _make_event_processor(weak_handler)  # type: ignore
            scope.add_event_processor(processor)

        transaction = Transaction.continue_from_headers(
            self.request.headers,
            op="http.server",
            # Like with all other integrations, this is our
            # fallback transaction in case there is no route.
            # sentry_urldispatcher_resolve is responsible for
            # setting a transaction name later.
            name="generic Tornado request",
        )

        with hub.start_transaction(
            transaction, custom_sampling_context={"tornado_request": self.request}
        ):
            yield
def _capture_exception(ty, value, tb):
    # type: (type, BaseException, Any) -> None
    """Send an unhandled Tornado exception to Sentry.

    HTTPError is skipped: it is Tornado's mechanism for returning a status
    code, not a crash.
    """
    hub = Hub.current
    if hub.get_integration(TornadoIntegration) is None or isinstance(
        value, HTTPError
    ):
        return

    # An active integration implies an attached client.
    client = hub.client  # type: Any
    event, hint = event_from_exception(
        (ty, value, tb),
        client_options=client.options,
        mechanism={"type": "tornado", "handled": False},
    )
    hub.capture_event(event, hint=hint)
def _make_event_processor(weak_handler):
    # type: (Callable[[], RequestHandler]) -> EventProcessor
    """Build an event processor that attaches Tornado request data.

    The handler is referenced weakly so events processed after the request
    has finished do not keep it alive.
    """

    def tornado_processor(event, hint):
        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
        handler = weak_handler()
        if handler is None:
            # Handler already garbage-collected; return the event unchanged.
            return event

        request = handler.request

        with capture_internal_exceptions():
            # Name the transaction after the handler method (e.g. get/post).
            method = getattr(handler, handler.request.method.lower())  # type: ignore
            event["transaction"] = transaction_from_function(method)

        with capture_internal_exceptions():
            extractor = TornadoRequestExtractor(request)
            extractor.extract_into_event(event)

            request_info = event["request"]

            request_info["url"] = "%s://%s%s" % (
                request.protocol,
                request.host,
                request.path,
            )

            request_info["query_string"] = request.query
            request_info["method"] = request.method
            request_info["env"] = {"REMOTE_ADDR": request.remote_ip}
            request_info["headers"] = _filter_headers(dict(request.headers))

        with capture_internal_exceptions():
            # Only record the authentication flag when the user opted into
            # sending default PII.
            if handler.current_user and _should_send_default_pii():
                event.setdefault("user", {}).setdefault("is_authenticated", True)

        return event

    return tornado_processor
class TornadoRequestExtractor(RequestExtractor):
    """RequestExtractor implementation backed by a Tornado request object."""

    def content_length(self):
        # type: () -> int
        if self.request.body is None:
            return 0
        return len(self.request.body)

    def cookies(self):
        # type: () -> Dict[str, str]
        # Cookies arrive as Morsel objects; flatten to their plain values.
        return {k: v.value for k, v in iteritems(self.request.cookies)}

    def raw_data(self):
        # type: () -> bytes
        return self.request.body

    def form(self):
        # type: () -> Dict[str, Any]
        # Body arguments are lists of bytes; decode with replacement so
        # extraction never raises on unexpected encodings.
        return {
            k: [v.decode("latin1", "replace") for v in vs]
            for k, vs in iteritems(self.request.body_arguments)
        }

    def is_json(self):
        # type: () -> bool
        return _is_json_content_type(self.request.headers.get("content-type"))

    def files(self):
        # type: () -> Dict[str, Any]
        # Only the first uploaded file per field is recorded.
        return {k: v[0] for k, v in iteritems(self.request.files) if v}

    def size_of_file(self, file):
        # type: (Any) -> int
        return len(file.body or ())
sentry-python-1.4.3/sentry_sdk/integrations/trytond.py 0000664 0000000 0000000 00000003300 14125057761 0023336 0 ustar 00root root 0000000 0000000 import sentry_sdk.hub
import sentry_sdk.utils
import sentry_sdk.integrations
import sentry_sdk.integrations.wsgi
from sentry_sdk._types import MYPY
from trytond.exceptions import TrytonException # type: ignore
from trytond.wsgi import app # type: ignore
if MYPY:
from typing import Any
# TODO: trytond-worker, trytond-cron and trytond-admin integrations
class TrytondWSGIIntegration(sentry_sdk.integrations.Integration):
    """Captures unhandled exceptions raised by Tryton's WSGI application.

    Wraps the global ``trytond.wsgi.app`` in the Sentry WSGI middleware and
    registers an error handler that reports non-Tryton exceptions to Sentry.
    """

    identifier = "trytond_wsgi"

    def __init__(self):  # type: () -> None
        pass

    @staticmethod
    def setup_once():  # type: () -> None
        # Wrap the global Tryton WSGI app so requests get Sentry scoping,
        # transactions and exception capture.
        app.wsgi_app = sentry_sdk.integrations.wsgi.SentryWsgiMiddleware(app.wsgi_app)

        def error_handler(e):  # type: (Exception) -> None
            hub = sentry_sdk.hub.Hub.current

            if hub.get_integration(TrytondWSGIIntegration) is None:
                return
            elif isinstance(e, TrytonException):
                # Tryton exceptions are part of the application's normal flow
                # (user errors, warnings) and are not reported.
                return
            else:
                # If an integration is there, a client has to be there.
                client = hub.client  # type: Any

                event, hint = sentry_sdk.utils.event_from_exception(
                    e,
                    client_options=client.options,
                    mechanism={"type": "trytond", "handled": False},
                )
                hub.capture_event(event, hint=hint)

        # Expected error handlers signature was changed
        # when the error_handler decorator was introduced
        # in Tryton-5.4
        if hasattr(app, "error_handler"):

            @app.error_handler
            def _(app, request, e):  # type: ignore
                error_handler(e)

        else:
            app.error_handlers.append(error_handler)
sentry-python-1.4.3/sentry_sdk/integrations/wsgi.py 0000664 0000000 0000000 00000025615 14125057761 0022621 0 ustar 00root root 0000000 0000000 import sys
from sentry_sdk._functools import partial
from sentry_sdk.hub import Hub, _should_send_default_pii
from sentry_sdk.utils import (
ContextVar,
capture_internal_exceptions,
event_from_exception,
)
from sentry_sdk._compat import PY2, reraise, iteritems
from sentry_sdk.tracing import Transaction
from sentry_sdk.sessions import auto_session_tracking
from sentry_sdk.integrations._wsgi_common import _filter_headers
from sentry_sdk._types import MYPY
if MYPY:
from typing import Callable
from typing import Dict
from typing import Iterator
from typing import Any
from typing import Tuple
from typing import Optional
from typing import TypeVar
from typing import Protocol
from sentry_sdk.utils import ExcInfo
from sentry_sdk._types import EventProcessor
WsgiResponseIter = TypeVar("WsgiResponseIter")
WsgiResponseHeaders = TypeVar("WsgiResponseHeaders")
WsgiExcInfo = TypeVar("WsgiExcInfo")
    class StartResponse(Protocol):
        """Typing protocol describing the WSGI ``start_response`` callable."""

        def __call__(self, status, response_headers, exc_info=None):
            # type: (str, WsgiResponseHeaders, Optional[WsgiExcInfo]) -> WsgiResponseIter
            pass
# Flag tracking whether a SentryWsgiMiddleware is already handling the
# current request, so nested middlewares do not double-instrument it.
_wsgi_middleware_applied = ContextVar("sentry_wsgi_middleware_applied")


if PY2:

    def wsgi_decoding_dance(s, charset="utf-8", errors="replace"):
        # type: (str, str, str) -> str
        # On Python 2, WSGI "native strings" are byte strings; decode directly.
        return s.decode(charset, errors)


else:

    def wsgi_decoding_dance(s, charset="utf-8", errors="replace"):
        # type: (str, str, str) -> str
        # On Python 3, WSGI "native strings" are latin1-decoded text; round-trip
        # through latin1 to recover the raw bytes, then decode with the charset.
        return s.encode("latin1").decode(charset, errors)
def get_host(environ, use_x_forwarded_for=False):
    # type: (Dict[str, str], bool) -> str
    """Return the host for the given WSGI environment. Yanked from Werkzeug."""

    def _strip_default_port(host):
        # type: (str) -> str
        # Drop an explicit :80/:443 suffix when it matches the scheme's default.
        scheme = environ["wsgi.url_scheme"]
        if scheme == "http" and host.endswith(":80"):
            return host[:-3]
        if scheme == "https" and host.endswith(":443"):
            return host[:-4]
        return host

    if use_x_forwarded_for and "HTTP_X_FORWARDED_HOST" in environ:
        return _strip_default_port(environ["HTTP_X_FORWARDED_HOST"])

    if environ.get("HTTP_HOST"):
        return _strip_default_port(environ["HTTP_HOST"])

    if environ.get("SERVER_NAME"):
        host = environ["SERVER_NAME"]
        if (environ["wsgi.url_scheme"], environ["SERVER_PORT"]) not in (
            ("https", "443"),
            ("http", "80"),
        ):
            host += ":" + environ["SERVER_PORT"]
        return host

    # In spite of the WSGI spec, SERVER_NAME might not be present.
    return "unknown"
def get_request_url(environ, use_x_forwarded_for=False):
    # type: (Dict[str, str], bool) -> str
    """Return the absolute URL without query string for the given WSGI
    environment."""
    scheme = environ.get("wsgi.url_scheme")
    host = get_host(environ, use_x_forwarded_for)
    path = wsgi_decoding_dance(environ.get("PATH_INFO") or "").lstrip("/")
    return "%s://%s/%s" % (scheme, host, path)
class SentryWsgiMiddleware(object):
    """WSGI middleware that creates a Sentry hub per request, starts a
    transaction for it and captures unhandled exceptions.
    """

    __slots__ = ("app", "use_x_forwarded_for")

    def __init__(self, app, use_x_forwarded_for=False):
        # type: (Callable[[Dict[str, str], Callable[..., Any]], Any], bool) -> None
        self.app = app
        self.use_x_forwarded_for = use_x_forwarded_for

    def __call__(self, environ, start_response):
        # type: (Dict[str, str], Callable[..., Any]) -> _ScopedResponse
        # Guard against double instrumentation: if an outer
        # SentryWsgiMiddleware is already handling this request, delegate
        # straight to the wrapped app.
        if _wsgi_middleware_applied.get(False):
            return self.app(environ, start_response)

        _wsgi_middleware_applied.set(True)
        try:
            # Derive a fresh hub so request-local scope data does not leak
            # between concurrently handled requests.
            hub = Hub(Hub.current)
            with auto_session_tracking(hub, session_mode="request"):
                with hub:
                    with capture_internal_exceptions():
                        with hub.configure_scope() as scope:
                            scope.clear_breadcrumbs()
                            scope._name = "wsgi"
                            scope.add_event_processor(
                                _make_wsgi_event_processor(
                                    environ, self.use_x_forwarded_for
                                )
                            )
                    # Continue an incoming trace from the environ's headers,
                    # if present; otherwise start a new transaction.
                    transaction = Transaction.continue_from_environ(
                        environ, op="http.server", name="generic WSGI request"
                    )
                    with hub.start_transaction(
                        transaction, custom_sampling_context={"wsgi_environ": environ}
                    ):
                        try:
                            rv = self.app(
                                environ,
                                partial(
                                    _sentry_start_response, start_response, transaction
                                ),
                            )
                        except BaseException:
                            # Capture and re-raise so the server still sees it.
                            reraise(*_capture_exception(hub))
        finally:
            _wsgi_middleware_applied.set(False)

        # Wrap the response iterable so errors raised while streaming the
        # body are captured too.
        return _ScopedResponse(hub, rv)
def _sentry_start_response(
    old_start_response,  # type: StartResponse
    transaction,  # type: Transaction
    status,  # type: str
    response_headers,  # type: WsgiResponseHeaders
    exc_info=None,  # type: Optional[WsgiExcInfo]
):
    # type: (...) -> WsgiResponseIter
    """Record the HTTP status on the transaction, then defer to the real
    ``start_response`` callable."""
    with capture_internal_exceptions():
        # The status line looks like "200 OK"; the leading token is the code.
        transaction.set_http_status(int(status.split(" ", 1)[0]))

    if exc_info is None:
        # The Django Rest Framework WSGI test client, and likely other
        # (incorrect) implementations, cannot deal with the exc_info argument
        # if one is present. Avoid providing a third argument if not necessary.
        return old_start_response(status, response_headers)

    return old_start_response(status, response_headers, exc_info)
def _get_environ(environ):
    # type: (Dict[str, str]) -> Iterator[Tuple[str, str]]
    """
    Returns our explicitly included environment variables we want to
    capture (server name, port and remote addr if pii is enabled).
    """
    wanted = ("SERVER_NAME", "SERVER_PORT")
    if _should_send_default_pii():
        # make debugging of proxy setup easier. Proxy headers are
        # in headers.
        wanted += ("REMOTE_ADDR",)

    for key in wanted:
        if key in environ:
            yield key, environ[key]
# `get_headers` comes from `werkzeug.datastructures.EnvironHeaders`
#
# We need this function because Django does not give us a "pure" http header
# dict. So we might as well use it for all WSGI integrations.
def _get_headers(environ):
    # type: (Dict[str, str]) -> Iterator[Tuple[str, str]]
    """
    Returns only proper HTTP headers.
    """
    for raw_key, value in iteritems(environ):
        key = str(raw_key)
        if key in ("CONTENT_TYPE", "CONTENT_LENGTH"):
            # These two appear in the environ without the HTTP_ prefix.
            yield key.replace("_", "-").title(), value
        elif key.startswith("HTTP_") and key not in (
            "HTTP_CONTENT_TYPE",
            "HTTP_CONTENT_LENGTH",
        ):
            yield key[len("HTTP_") :].replace("_", "-").title(), value
def get_client_ip(environ):
    # type: (Dict[str, str]) -> Optional[Any]
    """
    Infer the user IP address from various headers. This cannot be used in
    security sensitive situations since the value may be forged from a client,
    but it's good enough for the event payload.
    """
    if "HTTP_X_FORWARDED_FOR" in environ:
        # The first entry in the comma-separated list is the originating client.
        first_hop = environ["HTTP_X_FORWARDED_FOR"].split(",")[0]
        return first_hop.strip()

    if "HTTP_X_REAL_IP" in environ:
        return environ["HTTP_X_REAL_IP"]

    return environ.get("REMOTE_ADDR")
def _capture_exception(hub):
    # type: (Hub) -> ExcInfo
    """Capture the exception currently being handled on the hub and return
    its ``sys.exc_info()`` triple so callers can re-raise it."""
    exc_info = sys.exc_info()

    # Check client here as it might have been unset while streaming response
    if hub.client is not None:
        exc = exc_info[1]
        # SystemExit(0) is the only uncaught exception that is expected behavior
        expected_exit = isinstance(exc, SystemExit) and exc.code in (0, None)
        if not expected_exit:
            event, hint = event_from_exception(
                exc_info,
                client_options=hub.client.options,
                mechanism={"type": "wsgi", "handled": False},
            )
            hub.capture_event(event, hint=hint)

    return exc_info
class _ScopedResponse(object):
    """Wraps a WSGI response iterable so that each chunk is produced inside
    the request's hub, and exceptions raised while streaming or closing the
    response are captured and re-raised.
    """

    __slots__ = ("_response", "_hub")

    def __init__(self, hub, response):
        # type: (Hub, Iterator[bytes]) -> None
        self._hub = hub
        self._response = response

    def __iter__(self):
        # type: () -> Iterator[bytes]
        iterator = iter(self._response)

        while True:
            # Pull each chunk with the request's hub active so any error
            # raised by the underlying iterable is attributed correctly.
            with self._hub:
                try:
                    chunk = next(iterator)
                except StopIteration:
                    break
                except BaseException:
                    reraise(*_capture_exception(self._hub))

            # NOTE: the chunk is yielded outside the `with` block, so the hub
            # is not bound while the server consumes it.
            yield chunk

    def close(self):
        # type: () -> None
        with self._hub:
            try:
                self._response.close()  # type: ignore
            except AttributeError:
                # The underlying response has no close(); nothing to do.
                pass
            except BaseException:
                reraise(*_capture_exception(self._hub))
def _make_wsgi_event_processor(environ, use_x_forwarded_for):
    # type: (Dict[str, str], bool) -> EventProcessor
    """Build an event processor that attaches request data taken from ``environ``."""
    # It's a bit unfortunate that we have to extract and parse the request data
    # from the environ so eagerly, but there are a few good reasons for this.
    #
    # We might be in a situation where the scope/hub never gets torn down
    # properly. In that case we will have an unnecessary strong reference to
    # all objects in the environ (some of which may take a lot of memory) when
    # we're really just interested in a few of them.
    #
    # Keeping the environment around for longer than the request lifecycle is
    # also not necessarily something uWSGI can deal with:
    # https://github.com/unbit/uwsgi/issues/1950
    peer_ip = get_client_ip(environ)
    absolute_url = get_request_url(environ, use_x_forwarded_for)
    raw_query = environ.get("QUERY_STRING")
    http_method = environ.get("REQUEST_METHOD")
    env_subset = dict(_get_environ(environ))
    filtered_headers = _filter_headers(dict(_get_headers(environ)))

    def event_processor(event, hint):
        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
        with capture_internal_exceptions():
            # if the code below fails halfway through we at least have some data
            request_info = event.setdefault("request", {})

            if _should_send_default_pii():
                user_info = event.setdefault("user", {})
                if peer_ip:
                    user_info.setdefault("ip_address", peer_ip)

            request_info["url"] = absolute_url
            request_info["query_string"] = raw_query
            request_info["method"] = http_method
            request_info["env"] = env_subset
            request_info["headers"] = filtered_headers

        return event

    return event_processor
sentry-python-1.4.3/sentry_sdk/py.typed 0000664 0000000 0000000 00000000000 14125057761 0020244 0 ustar 00root root 0000000 0000000 sentry-python-1.4.3/sentry_sdk/scope.py 0000664 0000000 0000000 00000036556 14125057761 0020261 0 ustar 00root root 0000000 0000000 from copy import copy
from collections import deque
from itertools import chain
from sentry_sdk._functools import wraps
from sentry_sdk._types import MYPY
from sentry_sdk.utils import logger, capture_internal_exceptions
from sentry_sdk.tracing import Transaction
from sentry_sdk.attachments import Attachment
if MYPY:
from typing import Any
from typing import Dict
from typing import Optional
from typing import Deque
from typing import List
from typing import Callable
from typing import TypeVar
from sentry_sdk._types import (
Breadcrumb,
Event,
EventProcessor,
ErrorProcessor,
ExcInfo,
Hint,
Type,
)
from sentry_sdk.tracing import Span
from sentry_sdk.session import Session
F = TypeVar("F", bound=Callable[..., Any])
T = TypeVar("T")
# Event processors applied to events from every scope, in registration order.
global_event_processors = []  # type: List[EventProcessor]


def add_global_event_processor(processor):
    # type: (EventProcessor) -> None
    """Register an event processor that is applied to events from all scopes."""
    global_event_processors.append(processor)
def _attr_setter(fn):
# type: (Any) -> Any
return property(fset=fn, doc=fn.__doc__)
def _disable_capture(fn):
# type: (F) -> F
@wraps(fn)
def wrapper(self, *args, **kwargs):
# type: (Any, *Dict[str, Any], **Any) -> Any
if not self._should_capture:
return
try:
self._should_capture = False
return fn(self, *args, **kwargs)
finally:
self._should_capture = True
return wrapper # type: ignore
class Scope(object):
    """The scope holds extra information that should be sent with all
    events that belong to it.
    """

    # NOTE: Even though it should not happen, the scope needs to not crash when
    # accessed by multiple threads. It's fine if it's full of races, but those
    # races should never make the user application crash.
    #
    # The same needs to hold for any accesses of the scope the SDK makes.

    __slots__ = (
        "_level",
        "_name",
        "_fingerprint",
        # note that for legacy reasons, _transaction is the transaction *name*,
        # not a Transaction object (the object is stored in _span)
        "_transaction",
        "_user",
        "_tags",
        "_contexts",
        "_extras",
        "_breadcrumbs",
        "_event_processors",
        "_error_processors",
        "_should_capture",
        "_span",
        "_session",
        "_attachments",
        "_force_auto_session_tracking",
    )

    def __init__(self):
        # type: () -> None
        """Create an empty scope; all data attributes are reset via clear()."""
        self._event_processors = []  # type: List[EventProcessor]
        self._error_processors = []  # type: List[ErrorProcessor]
        self._name = None  # type: Optional[str]
        self.clear()

    def clear(self):
        # type: () -> None
        """Clears the entire scope."""
        self._level = None  # type: Optional[str]
        self._fingerprint = None  # type: Optional[List[str]]
        self._transaction = None  # type: Optional[str]
        self._user = None  # type: Optional[Dict[str, Any]]
        self._tags = {}  # type: Dict[str, Any]
        self._contexts = {}  # type: Dict[str, Dict[str, Any]]
        self._extras = {}  # type: Dict[str, Any]
        self._attachments = []  # type: List[Attachment]
        self.clear_breadcrumbs()
        self._should_capture = True
        self._span = None  # type: Optional[Span]
        self._session = None  # type: Optional[Session]
        self._force_auto_session_tracking = None  # type: Optional[bool]

    @_attr_setter
    def level(self, value):
        # type: (Optional[str]) -> None
        """When set this overrides the level. Deprecated in favor of set_level."""
        self._level = value

    def set_level(self, value):
        # type: (Optional[str]) -> None
        """Sets the level for the scope."""
        self._level = value

    @_attr_setter
    def fingerprint(self, value):
        # type: (Optional[List[str]]) -> None
        """When set this overrides the default fingerprint."""
        self._fingerprint = value

    @property
    def transaction(self):
        # type: () -> Any
        # would be type: () -> Optional[Transaction], see https://github.com/python/mypy/issues/3004
        """Return the transaction (root span) in the scope, if any."""
        # there is no span/transaction on the scope
        if self._span is None:
            return None
        # there is an orphan span on the scope
        if self._span.containing_transaction is None:
            return None
        # there is either a transaction (which is its own containing
        # transaction) or a non-orphan span on the scope
        return self._span.containing_transaction

    @transaction.setter
    def transaction(self, value):
        # type: (Any) -> None
        # would be type: (Optional[str]) -> None, see https://github.com/python/mypy/issues/3004
        """When set this forces a specific transaction name to be set."""
        # XXX: the docstring above is misleading. The implementation of
        # apply_to_event prefers an existing value of event.transaction over
        # anything set in the scope.
        # XXX: note that with the introduction of the Scope.transaction getter,
        # there is a semantic and type mismatch between getter and setter. The
        # getter returns a Transaction, the setter sets a transaction name.
        # Without breaking version compatibility, we could make the setter set a
        # transaction name or transaction (self._span) depending on the type of
        # the value argument.
        self._transaction = value
        span = self._span
        if span and isinstance(span, Transaction):
            span.name = value

    @_attr_setter
    def user(self, value):
        # type: (Optional[Dict[str, Any]]) -> None
        """When set a specific user is bound to the scope. Deprecated in favor of set_user."""
        self.set_user(value)

    def set_user(self, value):
        # type: (Optional[Dict[str, Any]]) -> None
        """Sets a user for the scope."""
        self._user = value
        # Keep the active session's user in sync with the scope's user.
        if self._session is not None:
            self._session.update(user=value)

    @property
    def span(self):
        # type: () -> Optional[Span]
        """Get/set current tracing span or transaction."""
        return self._span

    @span.setter
    def span(self, span):
        # type: (Optional[Span]) -> None
        self._span = span
        # XXX: this differs from the implementation in JS, there Scope.setSpan
        # does not set Scope._transactionName.
        if isinstance(span, Transaction):
            transaction = span
            if transaction.name:
                self._transaction = transaction.name

    def set_tag(
        self,
        key,  # type: str
        value,  # type: Any
    ):
        # type: (...) -> None
        """Sets a tag for a key to a specific value."""
        self._tags[key] = value

    def remove_tag(
        self, key  # type: str
    ):
        # type: (...) -> None
        """Removes a specific tag."""
        self._tags.pop(key, None)

    def set_context(
        self,
        key,  # type: str
        value,  # type: Dict[str, Any]
    ):
        # type: (...) -> None
        """Binds a context at a certain key to a specific value."""
        self._contexts[key] = value

    def remove_context(
        self, key  # type: str
    ):
        # type: (...) -> None
        """Removes a context."""
        self._contexts.pop(key, None)

    def set_extra(
        self,
        key,  # type: str
        value,  # type: Any
    ):
        # type: (...) -> None
        """Sets an extra key to a specific value."""
        self._extras[key] = value

    def remove_extra(
        self, key  # type: str
    ):
        # type: (...) -> None
        """Removes a specific extra key."""
        self._extras.pop(key, None)

    def clear_breadcrumbs(self):
        # type: () -> None
        """Clears breadcrumb buffer."""
        self._breadcrumbs = deque()  # type: Deque[Breadcrumb]

    def add_attachment(
        self,
        bytes=None,  # type: Optional[bytes]
        filename=None,  # type: Optional[str]
        path=None,  # type: Optional[str]
        content_type=None,  # type: Optional[str]
        add_to_transactions=False,  # type: bool
    ):
        # type: (...) -> None
        """Adds an attachment to future events sent."""
        self._attachments.append(
            Attachment(
                bytes=bytes,
                path=path,
                filename=filename,
                content_type=content_type,
                add_to_transactions=add_to_transactions,
            )
        )

    def add_event_processor(
        self, func  # type: EventProcessor
    ):
        # type: (...) -> None
        """Register a scope local event processor on the scope.
        :param func: This function behaves like `before_send.`
        """
        # Safety valve: an ever-growing processor list usually indicates a
        # leak (processors added per-request on a long-lived scope).
        if len(self._event_processors) > 20:
            logger.warning(
                "Too many event processors on scope! Clearing list to free up some memory: %r",
                self._event_processors,
            )
            del self._event_processors[:]
        self._event_processors.append(func)

    def add_error_processor(
        self,
        func,  # type: ErrorProcessor
        cls=None,  # type: Optional[Type[BaseException]]
    ):
        # type: (...) -> None
        """Register a scope local error processor on the scope.
        :param func: A callback that works similar to an event processor but is invoked with the original exception info triple as second argument.
        :param cls: Optionally, only process exceptions of this type.
        """
        if cls is not None:
            # Wrap the callback so it only fires for instances of `cls`.
            cls_ = cls  # For mypy.
            real_func = func

            def func(event, exc_info):
                # type: (Event, ExcInfo) -> Optional[Event]
                try:
                    is_inst = isinstance(exc_info[1], cls_)
                except Exception:
                    is_inst = False
                if is_inst:
                    return real_func(event, exc_info)
                return event

        self._error_processors.append(func)

    @_disable_capture
    def apply_to_event(
        self,
        event,  # type: Event
        hint,  # type: Hint
    ):
        # type: (...) -> Optional[Event]
        """Applies the information contained on the scope to the given event."""

        def _drop(event, cause, ty):
            # type: (Dict[str, Any], Any, str) -> Optional[Any]
            logger.info("%s (%s) dropped event (%s)", ty, cause, event)
            return None

        is_transaction = event.get("type") == "transaction"

        # put all attachments into the hint. This lets callbacks play around
        # with attachments. We also later pull this out of the hint when we
        # create the envelope.
        attachments_to_send = hint.get("attachments") or []
        for attachment in self._attachments:
            if not is_transaction or attachment.add_to_transactions:
                attachments_to_send.append(attachment)
        hint["attachments"] = attachments_to_send

        if self._level is not None:
            event["level"] = self._level

        if not is_transaction:
            event.setdefault("breadcrumbs", {}).setdefault("values", []).extend(
                self._breadcrumbs
            )

        # Scope values only fill in gaps; existing event values win.
        if event.get("user") is None and self._user is not None:
            event["user"] = self._user

        if event.get("transaction") is None and self._transaction is not None:
            event["transaction"] = self._transaction

        if event.get("fingerprint") is None and self._fingerprint is not None:
            event["fingerprint"] = self._fingerprint

        if self._extras:
            event.setdefault("extra", {}).update(self._extras)

        if self._tags:
            event.setdefault("tags", {}).update(self._tags)

        if self._contexts:
            event.setdefault("contexts", {}).update(self._contexts)

        if self._span is not None:
            contexts = event.setdefault("contexts", {})
            if not contexts.get("trace"):
                contexts["trace"] = self._span.get_trace_context()

        # Error processors run first (only when exc_info is available); any
        # processor returning None drops the event entirely.
        exc_info = hint.get("exc_info")
        if exc_info is not None:
            for error_processor in self._error_processors:
                new_event = error_processor(event, exc_info)
                if new_event is None:
                    return _drop(event, error_processor, "error processor")
                event = new_event

        for event_processor in chain(global_event_processors, self._event_processors):
            new_event = event
            with capture_internal_exceptions():
                new_event = event_processor(event, hint)
            if new_event is None:
                return _drop(event, event_processor, "event processor")
            event = new_event

        return event

    def update_from_scope(self, scope):
        # type: (Scope) -> None
        """Merge the data set on another scope into this one."""
        if scope._level is not None:
            self._level = scope._level
        if scope._fingerprint is not None:
            self._fingerprint = scope._fingerprint
        if scope._transaction is not None:
            self._transaction = scope._transaction
        if scope._user is not None:
            self._user = scope._user
        if scope._tags:
            self._tags.update(scope._tags)
        if scope._contexts:
            self._contexts.update(scope._contexts)
        if scope._extras:
            self._extras.update(scope._extras)
        if scope._breadcrumbs:
            self._breadcrumbs.extend(scope._breadcrumbs)
        if scope._span:
            self._span = scope._span
        if scope._attachments:
            self._attachments.extend(scope._attachments)

    def update_from_kwargs(
        self,
        user=None,  # type: Optional[Any]
        level=None,  # type: Optional[str]
        extras=None,  # type: Optional[Dict[str, Any]]
        contexts=None,  # type: Optional[Dict[str, Any]]
        tags=None,  # type: Optional[Dict[str, str]]
        fingerprint=None,  # type: Optional[List[str]]
    ):
        # type: (...) -> None
        """Merge the given keyword data into the scope; None values are ignored."""
        if level is not None:
            self._level = level
        if user is not None:
            self._user = user
        if extras is not None:
            self._extras.update(extras)
        if contexts is not None:
            self._contexts.update(contexts)
        if tags is not None:
            self._tags.update(tags)
        if fingerprint is not None:
            self._fingerprint = fingerprint

    def __copy__(self):
        # type: () -> Scope
        """Return a copy of this scope; container attributes are copied
        shallowly so the copy can be mutated independently."""
        rv = object.__new__(self.__class__)  # type: Scope
        rv._level = self._level
        rv._name = self._name
        rv._fingerprint = self._fingerprint
        rv._transaction = self._transaction
        rv._user = self._user
        rv._tags = dict(self._tags)
        rv._contexts = dict(self._contexts)
        rv._extras = dict(self._extras)
        rv._breadcrumbs = copy(self._breadcrumbs)
        rv._event_processors = list(self._event_processors)
        rv._error_processors = list(self._error_processors)
        rv._should_capture = self._should_capture
        rv._span = self._span
        rv._session = self._session
        rv._force_auto_session_tracking = self._force_auto_session_tracking
        rv._attachments = list(self._attachments)
        return rv

    def __repr__(self):
        # type: () -> str
        return "<%s id=%s name=%s>" % (
            self.__class__.__name__,
            hex(id(self)),
            self._name,
        )
sentry-python-1.4.3/sentry_sdk/serializer.py 0000664 0000000 0000000 00000037663 14125057761 0021321 0 ustar 00root root 0000000 0000000 import sys
import math
from datetime import datetime
from sentry_sdk.utils import (
AnnotatedValue,
capture_internal_exception,
disable_capture_event,
format_timestamp,
json_dumps,
safe_repr,
strip_string,
)
import sentry_sdk.utils
from sentry_sdk._compat import text_type, PY2, string_types, number_types, iteritems
from sentry_sdk._types import MYPY
if MYPY:
from datetime import timedelta
from types import TracebackType
from typing import Any
from typing import Callable
from typing import ContextManager
from typing import Dict
from typing import List
from typing import Optional
from typing import Tuple
from typing import Type
from typing import Union
from sentry_sdk._types import NotImplementedType, Event
Span = Dict[str, Any]
ReprProcessor = Callable[[Any, Dict[str, Any]], Union[NotImplementedType, str]]
Segment = Union[str, int]
if PY2:
# Importing ABCs from collections is deprecated, and will stop working in 3.8
# https://github.com/python/cpython/blob/master/Lib/collections/__init__.py#L49
from collections import Mapping, Sequence, Set
serializable_str_types = string_types
else:
# New in 3.3
# https://docs.python.org/3/library/collections.abc.html
from collections.abc import Mapping, Sequence, Set
# Bytes are technically not strings in Python 3, but we can serialize them
serializable_str_types = (str, bytes)
# Maximum length of JSON-serialized event payloads that can be safely sent
# before the server may reject the event due to its size. This is not intended
# to reflect actual values defined server-side, but rather only be an upper
# bound for events sent by the SDK.
#
# Can be overwritten if wanting to send more bytes, e.g. with a custom server.
# When changing this, keep in mind that events may be a little bit larger than
# this value due to attached metadata, so keep the number conservative.
MAX_EVENT_BYTES = 10 ** 6
MAX_DATABAG_DEPTH = 5
MAX_DATABAG_BREADTH = 10
CYCLE_MARKER = u""
# Repr processors consulted (in registration order) when serializing databag
# values; the first one not returning NotImplemented wins.
global_repr_processors = []  # type: List[ReprProcessor]


def add_global_repr_processor(processor):
    # type: (ReprProcessor) -> None
    """Register a repr processor applied during serialization of all events."""
    global_repr_processors.append(processor)
class Memo(object):
    """Context-manager based cycle detector.

    ``with memo.memoize(obj) as seen:`` yields True when ``obj`` is already
    being visited somewhere up the stack (a reference cycle) and False
    otherwise; the object is tracked only for the duration of the block.
    """

    __slots__ = ("_ids", "_objs")

    def __init__(self):
        # type: () -> None
        self._ids = {}  # type: Dict[int, Any]
        self._objs = []  # type: List[Any]

    def memoize(self, obj):
        # type: (Any) -> ContextManager[bool]
        self._objs.append(obj)
        return self

    def __enter__(self):
        # type: () -> bool
        current = self._objs[-1]
        key = id(current)
        if key in self._ids:
            return True
        # Keep a reference so the id() stays unique while tracked.
        self._ids[key] = current
        return False

    def __exit__(
        self,
        ty,  # type: Optional[Type[BaseException]]
        value,  # type: Optional[BaseException]
        tb,  # type: Optional[TracebackType]
    ):
        # type: (...) -> None
        finished = self._objs.pop()
        self._ids.pop(id(finished), None)
def serialize(event, smart_transaction_trimming=False, **kwargs):
# type: (Event, bool, **Any) -> Event
memo = Memo()
path = [] # type: List[Segment]
meta_stack = [] # type: List[Dict[str, Any]]
span_description_bytes = [] # type: List[int]
def _annotate(**meta):
# type: (**Any) -> None
while len(meta_stack) <= len(path):
try:
segment = path[len(meta_stack) - 1]
node = meta_stack[-1].setdefault(text_type(segment), {})
except IndexError:
node = {}
meta_stack.append(node)
meta_stack[-1].setdefault("", {}).update(meta)
def _should_repr_strings():
# type: () -> Optional[bool]
"""
By default non-serializable objects are going through
safe_repr(). For certain places in the event (local vars) we
want to repr() even things that are JSON-serializable to
make their type more apparent. For example, it's useful to
see the difference between a unicode-string and a bytestring
when viewing a stacktrace.
For container-types we still don't do anything different.
Generally we just try to make the Sentry UI present exactly
what a pretty-printed repr would look like.
:returns: `True` if we are somewhere in frame variables, and `False` if
we are in a position where we will never encounter frame variables
when recursing (for example, we're in `event.extra`). `None` if we
are not (yet) in frame variables, but might encounter them when
recursing (e.g. we're in `event.exception`)
"""
try:
p0 = path[0]
if p0 == "stacktrace" and path[1] == "frames" and path[3] == "vars":
return True
if (
p0 in ("threads", "exception")
and path[1] == "values"
and path[3] == "stacktrace"
and path[4] == "frames"
and path[6] == "vars"
):
return True
except IndexError:
return None
return False
def _is_databag():
# type: () -> Optional[bool]
"""
A databag is any value that we need to trim.
:returns: Works like `_should_repr_strings()`. `True` for "yes",
`False` for :"no", `None` for "maybe soon".
"""
try:
rv = _should_repr_strings()
if rv in (True, None):
return rv
p0 = path[0]
if p0 == "request" and path[1] == "data":
return True
if p0 == "breadcrumbs" and path[1] == "values":
path[2]
return True
if p0 == "extra":
return True
except IndexError:
return None
return False
def _serialize_node(
obj, # type: Any
is_databag=None, # type: Optional[bool]
should_repr_strings=None, # type: Optional[bool]
segment=None, # type: Optional[Segment]
remaining_breadth=None, # type: Optional[int]
remaining_depth=None, # type: Optional[int]
):
# type: (...) -> Any
if segment is not None:
path.append(segment)
try:
with memo.memoize(obj) as result:
if result:
return CYCLE_MARKER
return _serialize_node_impl(
obj,
is_databag=is_databag,
should_repr_strings=should_repr_strings,
remaining_depth=remaining_depth,
remaining_breadth=remaining_breadth,
)
except BaseException:
capture_internal_exception(sys.exc_info())
if is_databag:
return u""
return None
finally:
if segment is not None:
path.pop()
del meta_stack[len(path) + 1 :]
def _flatten_annotated(obj):
# type: (Any) -> Any
if isinstance(obj, AnnotatedValue):
_annotate(**obj.metadata)
obj = obj.value
return obj
def _serialize_node_impl(
obj, is_databag, should_repr_strings, remaining_depth, remaining_breadth
):
# type: (Any, Optional[bool], Optional[bool], Optional[int], Optional[int]) -> Any
if should_repr_strings is None:
should_repr_strings = _should_repr_strings()
if is_databag is None:
is_databag = _is_databag()
if is_databag and remaining_depth is None:
remaining_depth = MAX_DATABAG_DEPTH
if is_databag and remaining_breadth is None:
remaining_breadth = MAX_DATABAG_BREADTH
obj = _flatten_annotated(obj)
if remaining_depth is not None and remaining_depth <= 0:
_annotate(rem=[["!limit", "x"]])
if is_databag:
return _flatten_annotated(strip_string(safe_repr(obj)))
return None
if is_databag and global_repr_processors:
hints = {"memo": memo, "remaining_depth": remaining_depth}
for processor in global_repr_processors:
result = processor(obj, hints)
if result is not NotImplemented:
return _flatten_annotated(result)
if obj is None or isinstance(obj, (bool, number_types)):
if should_repr_strings or (
isinstance(obj, float) and (math.isinf(obj) or math.isnan(obj))
):
return safe_repr(obj)
else:
return obj
elif isinstance(obj, datetime):
return (
text_type(format_timestamp(obj))
if not should_repr_strings
else safe_repr(obj)
)
elif isinstance(obj, Mapping):
# Create temporary copy here to avoid calling too much code that
# might mutate our dictionary while we're still iterating over it.
obj = dict(iteritems(obj))
rv_dict = {} # type: Dict[str, Any]
i = 0
for k, v in iteritems(obj):
if remaining_breadth is not None and i >= remaining_breadth:
_annotate(len=len(obj))
break
str_k = text_type(k)
v = _serialize_node(
v,
segment=str_k,
should_repr_strings=should_repr_strings,
is_databag=is_databag,
remaining_depth=remaining_depth - 1
if remaining_depth is not None
else None,
remaining_breadth=remaining_breadth,
)
rv_dict[str_k] = v
i += 1
return rv_dict
elif not isinstance(obj, serializable_str_types) and isinstance(
obj, (Set, Sequence)
):
rv_list = []
for i, v in enumerate(obj):
if remaining_breadth is not None and i >= remaining_breadth:
_annotate(len=len(obj))
break
rv_list.append(
_serialize_node(
v,
segment=i,
should_repr_strings=should_repr_strings,
is_databag=is_databag,
remaining_depth=remaining_depth - 1
if remaining_depth is not None
else None,
remaining_breadth=remaining_breadth,
)
)
return rv_list
if should_repr_strings:
obj = safe_repr(obj)
else:
if isinstance(obj, bytes):
obj = obj.decode("utf-8", "replace")
if not isinstance(obj, string_types):
obj = safe_repr(obj)
# Allow span descriptions to be longer than other strings.
#
# For database auto-instrumented spans, the description contains
# potentially long SQL queries that are most useful when not truncated.
# Because arbitrarily large events may be discarded by the server as a
# protection mechanism, we dynamically limit the description length
# later in _truncate_span_descriptions.
if (
smart_transaction_trimming
and len(path) == 3
and path[0] == "spans"
and path[-1] == "description"
):
span_description_bytes.append(len(obj))
return obj
return _flatten_annotated(strip_string(obj))
    def _truncate_span_descriptions(serialized_event, event, excess_bytes):
        # type: (Event, Event, int) -> None
        """
        Modifies serialized_event in-place trying to remove excess_bytes from
        span descriptions. The original event is used read-only to access the
        span timestamps (represented as RFC3339-formatted strings in
        serialized_event).

        It uses heuristics to prioritize preserving the description of spans
        that might be the most interesting ones in terms of understanding and
        optimizing performance.

        NOTE(review): relies on the enclosing scope's ``path`` and
        ``meta_stack`` closure variables to record truncation annotations at
        the right location in the event's ``_meta`` structure.
        """
        # When truncating a description, preserve a small prefix.
        min_length = 10

        def shortest_duration_longest_description_first(args):
            # type: (Tuple[int, Span]) -> Tuple[timedelta, int]
            # Sort key: shortest spans first, ties broken by longest
            # description first (hence the negated length) -- those are the
            # least interesting descriptions, so they get truncated first.
            i, serialized_span = args
            span = event["spans"][i]
            # Fall back to "now" for missing timestamps so the subtraction
            # below always succeeds.
            now = datetime.utcnow()
            start = span.get("start_timestamp") or now
            end = span.get("timestamp") or now
            duration = end - start
            description = serialized_span.get("description") or ""
            return (duration, -len(description))

        # Note: for simplicity we sort spans by exact duration and description
        # length. If ever needed, we could have a more involved heuristic, e.g.
        # replacing exact durations with "buckets" and/or looking at other span
        # properties.
        path.append("spans")
        for i, span in sorted(
            enumerate(serialized_event.get("spans") or []),
            key=shortest_duration_longest_description_first,
        ):
            description = span.get("description") or ""
            if len(description) <= min_length:
                # Already at or below the preserved prefix; nothing to
                # reclaim from this span.
                continue
            excess_bytes -= len(description) - min_length
            path.extend([i, "description"])
            # Note: the last time we call strip_string we could preserve a few
            # more bytes up to a total length of MAX_EVENT_BYTES. Since that's
            # not strictly required, we leave it out for now for simplicity.
            span["description"] = _flatten_annotated(
                strip_string(description, max_length=min_length)
            )
            del path[-2:]
            del meta_stack[len(path) + 1 :]

            if excess_bytes <= 0:
                # Reclaimed enough bytes; stop truncating further spans.
                break
        path.pop()
        del meta_stack[len(path) + 1 :]
    # Guard against recursive event capture while we serialize this event.
    disable_capture_event.set(True)
    try:
        rv = _serialize_node(event, **kwargs)
        if meta_stack and isinstance(rv, dict):
            rv["_meta"] = meta_stack[0]

        sum_span_description_bytes = sum(span_description_bytes)
        if smart_transaction_trimming and sum_span_description_bytes > 0:
            span_count = len(event.get("spans") or [])
            # This is an upper bound of how many bytes all descriptions would
            # consume if the usual string truncation in _serialize_node_impl
            # would have taken place, not accounting for the metadata attached
            # as event["_meta"].
            descriptions_budget_bytes = span_count * sentry_sdk.utils.MAX_STRING_LENGTH

            # If by not truncating descriptions we ended up with more bytes than
            # per the usual string truncation, check if the event is too large
            # and we need to truncate some descriptions.
            #
            # This is guarded with an if statement to avoid JSON-encoding the
            # event unnecessarily.
            if sum_span_description_bytes > descriptions_budget_bytes:
                original_bytes = len(json_dumps(rv))
                excess_bytes = original_bytes - MAX_EVENT_BYTES
                if excess_bytes > 0:
                    # Event is too large, will likely be discarded by the
                    # server. Trim it down before sending.
                    _truncate_span_descriptions(rv, event, excess_bytes)

                    # Span descriptions truncated, set or reset _meta.
                    #
                    # We run the same code earlier because we want to account
                    # for _meta when calculating original_bytes, the number of
                    # bytes in the JSON-encoded event.
                    if meta_stack and isinstance(rv, dict):
                        rv["_meta"] = meta_stack[0]
        return rv
    finally:
        # Always re-enable capture, even if serialization raised.
        disable_capture_event.set(False)
sentry-python-1.4.3/sentry_sdk/session.py 0000664 0000000 0000000 00000012647 14125057761 0020626 0 ustar 00root root 0000000 0000000 import uuid
from datetime import datetime
from sentry_sdk._types import MYPY
from sentry_sdk.utils import format_timestamp
if MYPY:
from typing import Optional
from typing import Union
from typing import Any
from typing import Dict
from sentry_sdk._types import SessionStatus
def _minute_trunc(ts):
# type: (datetime) -> datetime
return ts.replace(second=0, microsecond=0)
def _make_uuid(
val, # type: Union[str, uuid.UUID]
):
# type: (...) -> uuid.UUID
if isinstance(val, uuid.UUID):
return val
return uuid.UUID(val)
class Session(object):
    """In-memory representation of a release-health session.

    Tracks identity (``sid``/``did``), lifetime (``started``/``timestamp``/
    ``duration``), outcome (``status``/``errors``) and environment metadata.
    ``session_mode`` distinguishes long-lived "application" sessions from
    short-lived "request" sessions (the latter are aggregated by the
    SessionFlusher rather than sent individually).
    """

    def __init__(
        self,
        sid=None,  # type: Optional[Union[str, uuid.UUID]]
        did=None,  # type: Optional[str]
        timestamp=None,  # type: Optional[datetime]
        started=None,  # type: Optional[datetime]
        duration=None,  # type: Optional[float]
        status=None,  # type: Optional[SessionStatus]
        release=None,  # type: Optional[str]
        environment=None,  # type: Optional[str]
        user_agent=None,  # type: Optional[str]
        ip_address=None,  # type: Optional[str]
        errors=None,  # type: Optional[int]
        user=None,  # type: Optional[Any]
        session_mode="application",  # type: str
    ):
        # type: (...) -> None
        # Fill in defaults: a fresh random sid, "now" as start time, and an
        # "ok" status for a newly started session.
        if sid is None:
            sid = uuid.uuid4()
        if started is None:
            started = datetime.utcnow()
        if status is None:
            status = "ok"
        self.status = status
        self.did = None  # type: Optional[str]
        self.started = started
        self.release = None  # type: Optional[str]
        self.environment = None  # type: Optional[str]
        self.duration = None  # type: Optional[float]
        self.user_agent = None  # type: Optional[str]
        self.ip_address = None  # type: Optional[str]
        self.session_mode = session_mode  # type: str
        self.errors = 0

        # Delegate to update() so __init__ and later mutations share the
        # same assignment/derivation logic (e.g. deriving did from user).
        self.update(
            sid=sid,
            did=did,
            timestamp=timestamp,
            duration=duration,
            release=release,
            environment=environment,
            user_agent=user_agent,
            ip_address=ip_address,
            errors=errors,
            user=user,
        )

    @property
    def truncated_started(self):
        # type: (...) -> datetime
        """Start time truncated to the minute (used as an aggregation bucket)."""
        return _minute_trunc(self.started)

    def update(
        self,
        sid=None,  # type: Optional[Union[str, uuid.UUID]]
        did=None,  # type: Optional[str]
        timestamp=None,  # type: Optional[datetime]
        started=None,  # type: Optional[datetime]
        duration=None,  # type: Optional[float]
        status=None,  # type: Optional[SessionStatus]
        release=None,  # type: Optional[str]
        environment=None,  # type: Optional[str]
        user_agent=None,  # type: Optional[str]
        ip_address=None,  # type: Optional[str]
        errors=None,  # type: Optional[int]
        user=None,  # type: Optional[Any]
    ):
        # type: (...) -> None
        """Update any subset of session fields; ``None`` args leave the
        current value untouched (except ``timestamp``, which defaults to now).
        """
        # If a user is supplied we pull some data from it
        if user:
            if ip_address is None:
                ip_address = user.get("ip_address")
            if did is None:
                did = user.get("id") or user.get("email") or user.get("username")

        if sid is not None:
            self.sid = _make_uuid(sid)
        if did is not None:
            self.did = str(did)
        # Every update bumps the "last seen" timestamp.
        if timestamp is None:
            timestamp = datetime.utcnow()
        self.timestamp = timestamp
        if started is not None:
            self.started = started
        if duration is not None:
            self.duration = duration
        if release is not None:
            self.release = release
        if environment is not None:
            self.environment = environment
        if ip_address is not None:
            self.ip_address = ip_address
        if user_agent is not None:
            self.user_agent = user_agent
        if errors is not None:
            self.errors = errors
        if status is not None:
            self.status = status

    def close(
        self, status=None  # type: Optional[SessionStatus]
    ):
        # type: (...) -> Any
        """End the session; an "ok" session without an explicit status
        becomes "exited"."""
        if status is None and self.status == "ok":
            status = "exited"
        if status is not None:
            self.update(status=status)

    def get_json_attrs(
        self, with_user_info=True  # type: Optional[bool]
    ):
        # type: (...) -> Any
        """Return the optional "attrs" mapping for the session payload,
        omitting user-identifying fields when ``with_user_info`` is false."""
        attrs = {}
        if self.release is not None:
            attrs["release"] = self.release
        if self.environment is not None:
            attrs["environment"] = self.environment
        if with_user_info:
            if self.ip_address is not None:
                attrs["ip_address"] = self.ip_address
            if self.user_agent is not None:
                attrs["user_agent"] = self.user_agent
        return attrs

    def to_json(self):
        # type: (...) -> Any
        """Serialize the session to the wire format, skipping unset
        optional fields."""
        rv = {
            "sid": str(self.sid),
            "init": True,
            "started": format_timestamp(self.started),
            "timestamp": format_timestamp(self.timestamp),
            "status": self.status,
        }  # type: Dict[str, Any]
        if self.errors:
            rv["errors"] = self.errors
        if self.did is not None:
            rv["did"] = self.did
        if self.duration is not None:
            rv["duration"] = self.duration
        attrs = self.get_json_attrs()
        if attrs:
            rv["attrs"] = attrs
        return rv
sentry-python-1.4.3/sentry_sdk/sessions.py 0000664 0000000 0000000 00000013277 14125057761 0021011 0 ustar 00root root 0000000 0000000 import os
import time
from threading import Thread, Lock
from contextlib import contextmanager
import sentry_sdk
from sentry_sdk.envelope import Envelope
from sentry_sdk.session import Session
from sentry_sdk._types import MYPY
from sentry_sdk.utils import format_timestamp
if MYPY:
from typing import Callable
from typing import Optional
from typing import Any
from typing import Dict
from typing import List
from typing import Generator
def is_auto_session_tracking_enabled(hub=None):
    # type: (Optional[sentry_sdk.Hub]) -> bool
    """Utility function to find out if session tracking is enabled.

    Resolution order: an explicit per-scope override
    (``scope._force_auto_session_tracking``) wins; otherwise the client's
    ``auto_session_tracking`` option is used, defaulting to ``True`` when
    there is no client or the option is absent (matching the SDK default).
    """
    if hub is None:
        hub = sentry_sdk.Hub.current

    should_track = hub.scope._force_auto_session_tracking

    if should_track is None:
        client_options = hub.client.options if hub.client else {}
        # Use .get() with the SDK default instead of a bare key lookup so a
        # hub without a client (client_options == {}) cannot raise KeyError.
        should_track = client_options.get("auto_session_tracking", True)

    return should_track
@contextmanager
def auto_session_tracking(hub=None, session_mode="application"):
    # type: (Optional[sentry_sdk.Hub], str) -> Generator[None, None, None]
    """Starts and stops a session automatically around a block.

    The tracking decision is captured once up front so that a session
    started here is always ended, even if the setting changes inside the
    block or the block raises.
    """
    if hub is None:
        hub = sentry_sdk.Hub.current
    track = is_auto_session_tracking_enabled(hub)
    if track:
        hub.start_session(session_mode=session_mode)
    try:
        yield
    finally:
        if track:
            hub.end_session()
# Session statuses that mean the session is over and will not be updated again.
TERMINAL_SESSION_STATES = ("exited", "abnormal", "crashed")
MAX_ENVELOPE_ITEMS = 100


def make_aggregate_envelope(aggregate_states, attrs):
    # type: (Any, Any) -> Any
    """Build the payload for an aggregated-sessions envelope item from the
    bucketed session counters and their shared attributes."""
    return {
        "attrs": dict(attrs),
        "aggregates": list(aggregate_states.values()),
    }
class SessionFlusher(object):
    """Collects sessions and periodically flushes them via ``capture_func``.

    Individual ("application") sessions are buffered as JSON dicts;
    "request" sessions are aggregated into per-attribute, per-minute
    buckets. A daemon thread flushes every ``flush_interval`` seconds and
    is restarted after fork (tracked via ``_thread_for_pid``).
    """

    def __init__(
        self,
        capture_func,  # type: Callable[[Envelope], None]
        flush_interval=60,  # type: int
    ):
        # type: (...) -> None
        self.capture_func = capture_func
        self.flush_interval = flush_interval
        self.pending_sessions = []  # type: List[Any]
        self.pending_aggregates = {}  # type: Dict[Any, Any]
        self._thread = None  # type: Optional[Thread]
        self._thread_lock = Lock()
        self._aggregate_lock = Lock()
        self._thread_for_pid = None  # type: Optional[int]
        self._running = True

    def flush(self):
        # type: (...) -> None
        """Send all pending sessions/aggregates, batching at most
        MAX_ENVELOPE_ITEMS items per envelope."""
        # Swap out the buffers so new sessions accumulate in fresh ones
        # while we serialize the old contents.
        # NOTE(review): pending_sessions is swapped without a lock (appends
        # rely on list.append being atomic); aggregates use a lock because
        # their updates are multi-step.
        pending_sessions = self.pending_sessions
        self.pending_sessions = []

        with self._aggregate_lock:
            pending_aggregates = self.pending_aggregates
            self.pending_aggregates = {}

        envelope = Envelope()
        for session in pending_sessions:
            if len(envelope.items) == MAX_ENVELOPE_ITEMS:
                self.capture_func(envelope)
                envelope = Envelope()

            envelope.add_session(session)

        for (attrs, states) in pending_aggregates.items():
            if len(envelope.items) == MAX_ENVELOPE_ITEMS:
                self.capture_func(envelope)
                envelope = Envelope()

            envelope.add_sessions(make_aggregate_envelope(states, attrs))

        # Only send the final envelope if it actually has content.
        if len(envelope.items) > 0:
            self.capture_func(envelope)

    def _ensure_running(self):
        # type: (...) -> None
        """Start the background flusher thread if it is not running in this
        process (double-checked under the lock; re-spawned after fork)."""
        if self._thread_for_pid == os.getpid() and self._thread is not None:
            return None
        with self._thread_lock:
            if self._thread_for_pid == os.getpid() and self._thread is not None:
                return None

            def _thread():
                # type: (...) -> None
                # Sleep first, then flush; checks _running twice so kill()
                # takes effect promptly after a sleep.
                while self._running:
                    time.sleep(self.flush_interval)
                    if self._running:
                        self.flush()

            thread = Thread(target=_thread)
            thread.daemon = True
            thread.start()
            self._thread = thread
            self._thread_for_pid = os.getpid()

        return None

    def add_aggregate_session(
        self, session  # type: Session
    ):
        # type: (...) -> None
        """Fold a (request-mode) session into the per-attrs/per-minute
        aggregate counters instead of buffering it individually."""
        # NOTE on `session.did`:
        # the protocol can deal with buckets that have a distinct-id, however
        # in practice we expect the python SDK to have an extremely high cardinality
        # here, effectively making aggregation useless, therefore we do not
        # aggregate per-did.

        # For this part we can get away with using the global interpreter lock
        with self._aggregate_lock:
            attrs = session.get_json_attrs(with_user_info=False)
            primary_key = tuple(sorted(attrs.items()))
            secondary_key = session.truncated_started  # (, session.did)
            states = self.pending_aggregates.setdefault(primary_key, {})
            state = states.setdefault(secondary_key, {})

            if "started" not in state:
                state["started"] = format_timestamp(session.truncated_started)
            # if session.did is not None:
            #     state["did"] = session.did
            # Bucket counters: crashed/abnormal take precedence over
            # errored, which takes precedence over a clean exit.
            if session.status == "crashed":
                state["crashed"] = state.get("crashed", 0) + 1
            elif session.status == "abnormal":
                state["abnormal"] = state.get("abnormal", 0) + 1
            elif session.errors > 0:
                state["errored"] = state.get("errored", 0) + 1
            else:
                state["exited"] = state.get("exited", 0) + 1

    def add_session(
        self, session  # type: Session
    ):
        # type: (...) -> None
        """Queue a session for flushing, aggregating request-mode sessions."""
        if session.session_mode == "request":
            self.add_aggregate_session(session)
        else:
            self.pending_sessions.append(session.to_json())
        self._ensure_running()

    def kill(self):
        # type: (...) -> None
        # Signal the flusher thread to exit after its current sleep.
        self._running = False

    def __del__(self):
        # type: (...) -> None
        self.kill()
sentry-python-1.4.3/sentry_sdk/tracing.py 0000664 0000000 0000000 00000061005 14125057761 0020562 0 ustar 00root root 0000000 0000000 import uuid
import random
import time
from datetime import datetime, timedelta
import sentry_sdk
from sentry_sdk.utils import logger
from sentry_sdk._types import MYPY
if MYPY:
import typing
from typing import Optional
from typing import Any
from typing import Dict
from typing import List
from typing import Tuple
from typing import Iterator
from sentry_sdk._types import SamplingContext
class _SpanRecorder(object):
"""Limits the number of spans recorded in a transaction."""
__slots__ = ("maxlen", "spans")
def __init__(self, maxlen):
# type: (int) -> None
# FIXME: this is `maxlen - 1` only to preserve historical behavior
# enforced by tests.
# Either this should be changed to `maxlen` or the JS SDK implementation
# should be changed to match a consistent interpretation of what maxlen
# limits: either transaction+spans or only child spans.
self.maxlen = maxlen - 1
self.spans = [] # type: List[Span]
def add(self, span):
# type: (Span) -> None
if len(self.spans) > self.maxlen:
span._span_recorder = None
else:
self.spans.append(span)
class Span(object):
    """A single timed unit of work.

    Carries trace/span identifiers, timing, status, tags and data, and a
    back-reference to its containing Transaction. Can be used as a context
    manager, which installs it as the current span on the scope.
    """

    __slots__ = (
        "trace_id",
        "span_id",
        "parent_span_id",
        "same_process_as_parent",
        "sampled",
        "op",
        "description",
        "start_timestamp",
        "_start_timestamp_monotonic",
        "status",
        "timestamp",
        "_tags",
        "_data",
        "_span_recorder",
        "hub",
        "_context_manager_state",
        "_containing_transaction",
    )

    def __new__(cls, **kwargs):
        # type: (**Any) -> Any
        """
        Backwards-compatible implementation of Span and Transaction
        creation.
        """
        # TODO: consider removing this in a future release.
        # This is for backwards compatibility with releases before Transaction
        # existed, to allow for a smoother transition.
        if "transaction" in kwargs:
            return object.__new__(Transaction)
        return object.__new__(cls)

    def __init__(
        self,
        trace_id=None,  # type: Optional[str]
        span_id=None,  # type: Optional[str]
        parent_span_id=None,  # type: Optional[str]
        same_process_as_parent=True,  # type: bool
        sampled=None,  # type: Optional[bool]
        op=None,  # type: Optional[str]
        description=None,  # type: Optional[str]
        hub=None,  # type: Optional[sentry_sdk.Hub]
        status=None,  # type: Optional[str]
        transaction=None,  # type: Optional[str] # deprecated
        containing_transaction=None,  # type: Optional[Transaction]
    ):
        # type: (...) -> None
        # Generate random ids when not inherited: full 32-hex trace id,
        # 16-hex span id (second half of a uuid4 hex).
        self.trace_id = trace_id or uuid.uuid4().hex
        self.span_id = span_id or uuid.uuid4().hex[16:]
        self.parent_span_id = parent_span_id
        self.same_process_as_parent = same_process_as_parent
        self.sampled = sampled
        self.op = op
        self.description = description
        self.status = status
        self.hub = hub
        self._tags = {}  # type: Dict[str, str]
        self._data = {}  # type: Dict[str, Any]
        self._containing_transaction = containing_transaction
        self.start_timestamp = datetime.utcnow()
        try:
            # TODO: For Python 3.7+, we could use a clock with ns resolution:
            # self._start_timestamp_monotonic = time.perf_counter_ns()

            # Python 3.3+
            self._start_timestamp_monotonic = time.perf_counter()
        except AttributeError:
            pass

        #: End timestamp of span
        self.timestamp = None  # type: Optional[datetime]

        self._span_recorder = None  # type: Optional[_SpanRecorder]

    # TODO this should really live on the Transaction class rather than the Span
    # class
    def init_span_recorder(self, maxlen):
        # type: (int) -> None
        if self._span_recorder is None:
            self._span_recorder = _SpanRecorder(maxlen)

    def __repr__(self):
        # type: () -> str
        return "<%s(op=%r, description:%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r)>" % (
            self.__class__.__name__,
            self.op,
            self.description,
            self.trace_id,
            self.span_id,
            self.parent_span_id,
            self.sampled,
        )

    def __enter__(self):
        # type: () -> Span
        # Install this span as the scope's current span, remembering the
        # previous one so __exit__ can restore it.
        hub = self.hub or sentry_sdk.Hub.current

        _, scope = hub._stack[-1]
        old_span = scope.span
        scope.span = self
        self._context_manager_state = (hub, scope, old_span)
        return self

    def __exit__(self, ty, value, tb):
        # type: (Optional[Any], Optional[Any], Optional[Any]) -> None
        # An exception escaping the block marks the span as errored.
        if value is not None:
            self.set_status("internal_error")

        hub, scope, old_span = self._context_manager_state
        del self._context_manager_state

        self.finish(hub)
        scope.span = old_span

    @property
    def containing_transaction(self):
        # type: () -> Optional[Transaction]
        # this is a getter rather than a regular attribute so that transactions
        # can return `self` here instead (as a way to prevent them circularly
        # referencing themselves)
        return self._containing_transaction

    def start_child(self, **kwargs):
        # type: (**Any) -> Span
        """
        Start a sub-span from the current span or transaction.

        Takes the same arguments as the initializer of :py:class:`Span`. The
        trace id, sampling decision, transaction pointer, and span recorder are
        inherited from the current span/transaction.
        """
        kwargs.setdefault("sampled", self.sampled)

        child = Span(
            trace_id=self.trace_id,
            parent_span_id=self.span_id,
            containing_transaction=self.containing_transaction,
            **kwargs
        )

        span_recorder = (
            self.containing_transaction and self.containing_transaction._span_recorder
        )
        if span_recorder:
            span_recorder.add(child)
        return child

    def new_span(self, **kwargs):
        # type: (**Any) -> Span
        """Deprecated: use start_child instead."""
        logger.warning("Deprecated: use Span.start_child instead of Span.new_span.")
        return self.start_child(**kwargs)

    @classmethod
    def continue_from_environ(
        cls,
        environ,  # type: typing.Mapping[str, str]
        **kwargs  # type: Any
    ):
        # type: (...) -> Transaction
        """
        Create a Transaction with the given params, then add in data pulled from
        the 'sentry-trace' and 'tracestate' headers from the environ (if any)
        before returning the Transaction.

        This is different from `continue_from_headers` in that it assumes header
        names in the form "HTTP_HEADER_NAME" - such as you would get from a wsgi
        environ - rather than the form "header-name".
        """
        if cls is Span:
            logger.warning(
                "Deprecated: use Transaction.continue_from_environ "
                "instead of Span.continue_from_environ."
            )
        return Transaction.continue_from_headers(EnvironHeaders(environ), **kwargs)

    @classmethod
    def continue_from_headers(
        cls,
        headers,  # type: typing.Mapping[str, str]
        **kwargs  # type: Any
    ):
        # type: (...) -> Transaction
        """
        Create a transaction with the given params (including any data pulled from
        the 'sentry-trace' and 'tracestate' headers).
        """
        # TODO move this to the Transaction class
        if cls is Span:
            logger.warning(
                "Deprecated: use Transaction.continue_from_headers "
                "instead of Span.continue_from_headers."
            )

        kwargs.update(extract_sentrytrace_data(headers.get("sentry-trace")))
        kwargs.update(extract_tracestate_data(headers.get("tracestate")))

        transaction = Transaction(**kwargs)
        # A continued trace implies the parent lives in another process.
        transaction.same_process_as_parent = False

        return transaction

    def iter_headers(self):
        # type: () -> Iterator[Tuple[str, str]]
        """
        Creates a generator which returns the span's `sentry-trace` and
        `tracestate` headers.

        If the span's containing transaction doesn't yet have a
        `sentry_tracestate` value, this will cause one to be generated and
        stored.
        """
        yield "sentry-trace", self.to_traceparent()

        tracestate = self.to_tracestate() if has_tracestate_enabled(self) else None
        # `tracestate` will only be `None` if there's no client or no DSN
        # TODO (kmclb) the above will be true once the feature is no longer
        # behind a flag
        if tracestate:
            yield "tracestate", tracestate

    @classmethod
    def from_traceparent(
        cls,
        traceparent,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> Optional[Transaction]
        """
        DEPRECATED: Use Transaction.continue_from_headers(headers, **kwargs)

        Create a Transaction with the given params, then add in data pulled from
        the given 'sentry-trace' header value before returning the Transaction.
        """
        logger.warning(
            "Deprecated: Use Transaction.continue_from_headers(headers, **kwargs) "
            "instead of from_traceparent(traceparent, **kwargs)"
        )

        if not traceparent:
            return None

        return cls.continue_from_headers({"sentry-trace": traceparent}, **kwargs)

    def to_traceparent(self):
        # type: () -> str
        """Render the `sentry-trace` header value: trace_id-span_id-sampled,
        with the sampled flag omitted while the decision is still pending."""
        sampled = ""
        if self.sampled is True:
            sampled = "1"
        if self.sampled is False:
            sampled = "0"
        return "%s-%s-%s" % (self.trace_id, self.span_id, sampled)

    def to_tracestate(self):
        # type: () -> Optional[str]
        """
        Computes the `tracestate` header value using data from the containing
        transaction.

        If the containing transaction doesn't yet have a `sentry_tracestate`
        value, this will cause one to be generated and stored.

        If there is no containing transaction, a value will be generated but not
        stored.

        Returns None if there's no client and/or no DSN.
        """
        sentry_tracestate = self.get_or_set_sentry_tracestate()
        third_party_tracestate = (
            self.containing_transaction._third_party_tracestate
            if self.containing_transaction
            else None
        )

        if not sentry_tracestate:
            return None

        header_value = sentry_tracestate

        if third_party_tracestate:
            header_value = header_value + "," + third_party_tracestate

        return header_value

    def get_or_set_sentry_tracestate(self):
        # type: (Span) -> Optional[str]
        """
        Read sentry tracestate off of the span's containing transaction.

        If the transaction doesn't yet have a `_sentry_tracestate` value,
        compute one and store it.
        """
        transaction = self.containing_transaction

        if transaction:
            if not transaction._sentry_tracestate:
                transaction._sentry_tracestate = compute_tracestate_entry(self)

            return transaction._sentry_tracestate

        # orphan span - nowhere to store the value, so just return it
        return compute_tracestate_entry(self)

    def set_tag(self, key, value):
        # type: (str, Any) -> None
        self._tags[key] = value

    def set_data(self, key, value):
        # type: (str, Any) -> None
        self._data[key] = value

    def set_status(self, value):
        # type: (str) -> None
        self.status = value

    def set_http_status(self, http_status):
        # type: (int) -> None
        """Tag the span with the HTTP status code and map it to a span
        status string."""
        self.set_tag("http.status_code", str(http_status))

        if http_status < 400:
            self.set_status("ok")
        elif 400 <= http_status < 500:
            if http_status == 403:
                self.set_status("permission_denied")
            elif http_status == 404:
                self.set_status("not_found")
            elif http_status == 429:
                self.set_status("resource_exhausted")
            elif http_status == 413:
                self.set_status("failed_precondition")
            elif http_status == 401:
                self.set_status("unauthenticated")
            elif http_status == 409:
                self.set_status("already_exists")
            else:
                self.set_status("invalid_argument")
        elif 500 <= http_status < 600:
            if http_status == 504:
                self.set_status("deadline_exceeded")
            elif http_status == 501:
                self.set_status("unimplemented")
            elif http_status == 503:
                self.set_status("unavailable")
            else:
                self.set_status("internal_error")
        else:
            self.set_status("unknown_error")

    def is_success(self):
        # type: () -> bool
        return self.status == "ok"

    def finish(self, hub=None):
        # type: (Optional[sentry_sdk.Hub]) -> Optional[str]
        # XXX: would be type: (Optional[sentry_sdk.Hub]) -> None, but that leads
        # to incompatible return types for Span.finish and Transaction.finish.
        if self.timestamp is not None:
            # This span is already finished, ignore.
            return None

        hub = hub or self.hub or sentry_sdk.Hub.current

        try:
            # Compute the end time from the monotonic clock so wall-clock
            # adjustments during the span don't skew the duration.
            duration_seconds = time.perf_counter() - self._start_timestamp_monotonic
            self.timestamp = self.start_timestamp + timedelta(seconds=duration_seconds)
        except AttributeError:
            self.timestamp = datetime.utcnow()

        maybe_create_breadcrumbs_from_span(hub, self)
        return None

    def to_json(self):
        # type: () -> Dict[str, Any]
        """Serialize the span for inclusion in a transaction event."""
        rv = {
            "trace_id": self.trace_id,
            "span_id": self.span_id,
            "parent_span_id": self.parent_span_id,
            "same_process_as_parent": self.same_process_as_parent,
            "op": self.op,
            "description": self.description,
            "start_timestamp": self.start_timestamp,
            "timestamp": self.timestamp,
        }  # type: Dict[str, Any]

        if self.status:
            self._tags["status"] = self.status

        tags = self._tags
        if tags:
            rv["tags"] = tags

        data = self._data
        if data:
            rv["data"] = data

        return rv

    def get_trace_context(self):
        # type: () -> Any
        """Build the "trace" context for the event payload."""
        rv = {
            "trace_id": self.trace_id,
            "span_id": self.span_id,
            "parent_span_id": self.parent_span_id,
            "op": self.op,
            "description": self.description,
        }
        if self.status:
            rv["status"] = self.status

        # if the transaction didn't inherit a tracestate value, and no outgoing
        # requests - whose need for headers would have caused a tracestate value
        # to be created - were made as part of the transaction, the transaction
        # still won't have a tracestate value, so compute one now
        sentry_tracestate = self.get_or_set_sentry_tracestate()
        if sentry_tracestate:
            rv["tracestate"] = sentry_tracestate

        return rv
class Transaction(Span):
    """The root span of a trace: carries a name, the parent sampling
    decision, and tracestate data, and is captured as a "transaction"
    event when finished."""

    __slots__ = (
        "name",
        "parent_sampled",
        # the sentry portion of the `tracestate` header used to transmit
        # correlation context for server-side dynamic sampling, of the form
        # `sentry=xxxxx`, where `xxxxx` is the base64-encoded json of the
        # correlation context data, missing trailing any =
        "_sentry_tracestate",
        # tracestate data from other vendors, of the form `dogs=yes,cats=maybe`
        "_third_party_tracestate",
    )

    def __init__(
        self,
        name="",  # type: str
        parent_sampled=None,  # type: Optional[bool]
        sentry_tracestate=None,  # type: Optional[str]
        third_party_tracestate=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        # TODO: consider removing this in a future release.
        # This is for backwards compatibility with releases before Transaction
        # existed, to allow for a smoother transition.
        if not name and "transaction" in kwargs:
            logger.warning(
                "Deprecated: use Transaction(name=...) to create transactions "
                "instead of Span(transaction=...)."
            )
            name = kwargs.pop("transaction")
        Span.__init__(self, **kwargs)
        self.name = name
        self.parent_sampled = parent_sampled
        # if tracestate isn't inherited and set here, it will get set lazily,
        # either the first time an outgoing request needs it for a header or the
        # first time an event needs it for inclusion in the captured data
        self._sentry_tracestate = sentry_tracestate
        self._third_party_tracestate = third_party_tracestate

    def __repr__(self):
        # type: () -> str
        return "<%s(name=%r, op=%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r)>" % (
            self.__class__.__name__,
            self.name,
            self.op,
            self.trace_id,
            self.span_id,
            self.parent_span_id,
            self.sampled,
        )

    @property
    def containing_transaction(self):
        # type: () -> Transaction
        # Transactions (as spans) belong to themselves (as transactions). This
        # is a getter rather than a regular attribute to avoid having a circular
        # reference.
        return self

    def finish(self, hub=None):
        # type: (Optional[sentry_sdk.Hub]) -> Optional[str]
        """Finish the transaction and capture it as an event.

        Returns the captured event id, or None if the transaction was
        discarded (already finished, unsampled, or no client)."""
        if self.timestamp is not None:
            # This transaction is already finished, ignore.
            return None

        hub = hub or self.hub or sentry_sdk.Hub.current
        client = hub.client

        # This is a de facto proxy for checking if sampled = False
        if self._span_recorder is None:
            logger.debug("Discarding transaction because sampled = False")

            # This is not entirely accurate because discards here are not
            # exclusively based on sample rate but also traces sampler, but
            # we handle this the same here.
            if client and client.transport:
                client.transport.record_lost_event(
                    "sample_rate", data_category="transaction"
                )

            return None

        if client is None:
            # We have no client and therefore nowhere to send this transaction.
            return None

        if not self.name:
            # BUGFIX: restore the `<unlabeled transaction>` fallback label;
            # previously the warning promised a fallback but the name was
            # set to the empty string.
            logger.warning(
                "Transaction has no name, falling back to "
                "`<unlabeled transaction>`."
            )
            self.name = "<unlabeled transaction>"

        Span.finish(self, hub)

        if not self.sampled:
            # At this point a `sampled = None` should have already been resolved
            # to a concrete decision.
            if self.sampled is None:
                logger.warning("Discarding transaction without sampling decision.")
            return None

        finished_spans = [
            span.to_json()
            for span in self._span_recorder.spans
            if span.timestamp is not None
        ]

        # we do this to break the circular reference of transaction -> span
        # recorder -> span -> containing transaction (which is where we started)
        # before either the spans or the transaction goes out of scope and has
        # to be garbage collected
        self._span_recorder = None

        return hub.capture_event(
            {
                "type": "transaction",
                "transaction": self.name,
                "contexts": {"trace": self.get_trace_context()},
                "tags": self._tags,
                "timestamp": self.timestamp,
                "start_timestamp": self.start_timestamp,
                "spans": finished_spans,
            }
        )

    def to_json(self):
        # type: () -> Dict[str, Any]
        rv = super(Transaction, self).to_json()

        rv["name"] = self.name
        rv["sampled"] = self.sampled

        return rv

    def _set_initial_sampling_decision(self, sampling_context):
        # type: (SamplingContext) -> None
        """
        Sets the transaction's sampling decision, according to the following
        precedence rules:

        1. If a sampling decision is passed to `start_transaction`
        (`start_transaction(name: "my transaction", sampled: True)`), that
        decision will be used, regardless of anything else

        2. If `traces_sampler` is defined, its decision will be used. It can
        choose to keep or ignore any parent sampling decision, or use the
        sampling context data to make its own decision or to choose a sample
        rate for the transaction.

        3. If `traces_sampler` is not defined, but there's a parent sampling
        decision, the parent sampling decision will be used.

        4. If `traces_sampler` is not defined and there's no parent sampling
        decision, `traces_sample_rate` will be used.
        """
        hub = self.hub or sentry_sdk.Hub.current
        client = hub.client
        options = (client and client.options) or {}
        transaction_description = "{op}transaction <{name}>".format(
            op=("<" + self.op + "> " if self.op else ""), name=self.name
        )

        # nothing to do if there's no client or if tracing is disabled
        if not client or not has_tracing_enabled(options):
            self.sampled = False
            return

        # if the user has forced a sampling decision by passing a `sampled`
        # value when starting the transaction, go with that
        if self.sampled is not None:
            return

        # we would have bailed already if neither `traces_sampler` nor
        # `traces_sample_rate` were defined, so one of these should work; prefer
        # the hook if so
        sample_rate = (
            options["traces_sampler"](sampling_context)
            if callable(options.get("traces_sampler"))
            else (
                # default inheritance behavior
                sampling_context["parent_sampled"]
                if sampling_context["parent_sampled"] is not None
                else options["traces_sample_rate"]
            )
        )

        # Since this is coming from the user (or from a function provided by the
        # user), who knows what we might get. (The only valid values are
        # booleans or numbers between 0 and 1.)
        if not is_valid_sample_rate(sample_rate):
            logger.warning(
                "[Tracing] Discarding {transaction_description} because of invalid sample rate.".format(
                    transaction_description=transaction_description,
                )
            )
            self.sampled = False
            return

        # if the function returned 0 (or false), or if `traces_sample_rate` is
        # 0, it's a sign the transaction should be dropped
        if not sample_rate:
            logger.debug(
                "[Tracing] Discarding {transaction_description} because {reason}".format(
                    transaction_description=transaction_description,
                    reason=(
                        "traces_sampler returned 0 or False"
                        if callable(options.get("traces_sampler"))
                        else "traces_sample_rate is set to 0"
                    ),
                )
            )
            self.sampled = False
            return

        # Now we roll the dice. random.random is inclusive of 0, but not of 1,
        # so strict < is safe here. In case sample_rate is a boolean, cast it
        # to a float (True becomes 1.0 and False becomes 0.0)
        self.sampled = random.random() < float(sample_rate)

        if self.sampled:
            logger.debug(
                "[Tracing] Starting {transaction_description}".format(
                    transaction_description=transaction_description,
                )
            )
        else:
            logger.debug(
                "[Tracing] Discarding {transaction_description} because it's not included in the random sample (sampling rate = {sample_rate})".format(
                    transaction_description=transaction_description,
                    sample_rate=float(sample_rate),
                )
            )
# Circular imports
from sentry_sdk.tracing_utils import (
EnvironHeaders,
compute_tracestate_entry,
extract_sentrytrace_data,
extract_tracestate_data,
has_tracestate_enabled,
has_tracing_enabled,
is_valid_sample_rate,
maybe_create_breadcrumbs_from_span,
)
sentry-python-1.4.3/sentry_sdk/tracing_utils.py 0000664 0000000 0000000 00000030547 14125057761 0022011 0 ustar 00root root 0000000 0000000 import re
import contextlib
import json
import math
from numbers import Real
import sentry_sdk
from sentry_sdk.utils import (
capture_internal_exceptions,
Dsn,
logger,
to_base64,
to_string,
from_base64,
)
from sentry_sdk._compat import PY2
from sentry_sdk._types import MYPY
if PY2:
from collections import Mapping
else:
from collections.abc import Mapping
if MYPY:
import typing
from typing import Generator
from typing import Optional
from typing import Any
from typing import Dict
from typing import Union
# Matches a `sentry-trace` header: "<trace_id>-<span_id>-<sampled>", where
# every component is optional (see extract_sentrytrace_data below).
SENTRY_TRACE_REGEX = re.compile(
    "^[ \t]*"  # whitespace
    "([0-9a-f]{32})?"  # trace_id
    "-?([0-9a-f]{16})?"  # span_id
    "-?([01])?"  # sampled
    "[ \t]*$"  # whitespace
)

# This is a normal base64 regex, modified to reflect that fact that we strip the
# trailing = or == off
base64_stripped = (
    # any of the characters in the base64 "alphabet", in multiples of 4
    "([a-zA-Z0-9+/]{4})*"
    # either nothing or 2 or 3 base64-alphabet characters (see
    # https://en.wikipedia.org/wiki/Base64#Decoding_Base64_without_padding for
    # why there's never only 1 extra character)
    "([a-zA-Z0-9+/]{2,3})?"
)

# comma-delimited list of entries of the form `xxx=yyy`
tracestate_entry = "[^=]+=[^=]+"
TRACESTATE_ENTRIES_REGEX = re.compile(
    # one or more xxxxx=yyyy entries
    "^({te})+"
    # each entry except the last must be followed by a comma
    "(,|$)".format(te=tracestate_entry)
)

# this doesn't check that the value is valid, just that there's something there
# of the form `sentry=xxxx`
SENTRY_TRACESTATE_ENTRY_REGEX = re.compile(
    # either sentry is the first entry or there's stuff immediately before it,
    # ending in a commma (this prevents matching something like `coolsentry=xxx`)
    "(?:^|.+,)"
    # sentry's part, not including the potential comma
    "(sentry=[^,]*)"
    # either there's a comma and another vendor's entry or we end
    "(?:,.+|$)"
)
class EnvironHeaders(Mapping):  # type: ignore
    """Read-only Mapping view over a WSGI ``environ`` that exposes HTTP
    headers under header-style names (``"sentry-trace"`` instead of
    ``"HTTP_SENTRY_TRACE"``)."""

    def __init__(
        self,
        environ,  # type: typing.Mapping[str, str]
        prefix="HTTP_",  # type: str
    ):
        # type: (...) -> None
        self.environ = environ
        self.prefix = prefix

    def __getitem__(self, key):
        # type: (str) -> Optional[Any]
        # "sentry-trace" -> "HTTP_SENTRY_TRACE"
        normalized = key.replace("-", "_").upper()
        return self.environ[self.prefix + normalized]

    def __len__(self):
        # type: () -> int
        return sum(1 for _ in self)

    def __iter__(self):
        # type: () -> Generator[str, None, None]
        for raw_key in self.environ:
            # Non-string keys cannot be headers; skip them.
            if not isinstance(raw_key, str):
                continue
            normalized = raw_key.replace("-", "_").upper()
            if normalized.startswith(self.prefix):
                yield normalized[len(self.prefix) :]
def has_tracing_enabled(options):
    # type: (Dict[str, Any]) -> bool
    """
    Returns True if either traces_sample_rate or traces_sampler is
    non-zero/defined, False otherwise.
    """
    # A rate of 0 still counts as "enabled" here; only a missing/None value
    # disables tracing.
    rate_configured = options.get("traces_sample_rate") is not None
    sampler_configured = options.get("traces_sampler") is not None
    return rate_configured or sampler_configured
def is_valid_sample_rate(rate):
    # type: (Any) -> bool
    """
    Checks the given sample rate to make sure it is valid type and value (a
    boolean or a number between 0 and 1, inclusive).
    """
    # Booleans are instances of Real, so the isinstance check covers them,
    # too; NaN is a Real and therefore has to be rejected separately.
    if not isinstance(rate, Real) or math.isnan(rate):
        logger.warning(
            "[Tracing] Given sample rate is invalid. Sample rate must be a boolean or a number between 0 and 1. Got {rate} of type {type}.".format(
                rate=rate, type=type(rate)
            )
        )
        return False

    # Booleans become 1.0 (True) or 0.0 (False) here.
    numeric_rate = float(rate)
    if not (0 <= numeric_rate <= 1):
        logger.warning(
            "[Tracing] Given sample rate is invalid. Sample rate must be between 0 and 1. Got {rate}.".format(
                rate=numeric_rate
            )
        )
        return False

    return True
@contextlib.contextmanager
def record_sql_queries(
    hub,  # type: sentry_sdk.Hub
    cursor,  # type: Any
    query,  # type: Any
    params_list,  # type: Any
    paramstyle,  # type: Optional[str]
    executemany,  # type: bool
):
    # type: (...) -> Generator[Span, None, None]
    """Record a DB query as both a breadcrumb and a `db` span.

    Params/paramstyle are only attached when the `record_sql_params`
    experiment is enabled on the client. Yields the started span so the
    caller can attach more data to it.
    """
    # TODO: Bring back capturing of params by default
    if hub.client and hub.client.options["_experiments"].get(
        "record_sql_params", False
    ):
        # Normalize "no params" to None so nothing gets attached below.
        if not params_list or params_list == [None]:
            params_list = None
        # Map the "pyformat" paramstyle onto plain "format".
        if paramstyle == "pyformat":
            paramstyle = "format"
    else:
        # Experiment disabled: never attach query parameters.
        params_list = None
        paramstyle = None
    query = _format_sql(cursor, query)
    data = {}
    if params_list is not None:
        data["db.params"] = params_list
    if paramstyle is not None:
        data["db.paramstyle"] = paramstyle
    if executemany:
        data["db.executemany"] = True
    # Breadcrumb creation must never break the instrumented query.
    with capture_internal_exceptions():
        hub.add_breadcrumb(message=query, category="query", data=data)
    with hub.start_span(op="db", description=query) as span:
        for k, v in data.items():
            span.set_data(k, v)
        yield span
def maybe_create_breadcrumbs_from_span(hub, span):
    # type: (sentry_sdk.Hub, Span) -> None
    """Mirror a span as a breadcrumb for the ops that map to one
    (redis, http, subprocess); other ops produce no breadcrumb."""
    op = span.op
    if op == "redis":
        hub.add_breadcrumb(
            message=span.description, type="redis", category="redis", data=span._tags
        )
        return
    if op == "http":
        hub.add_breadcrumb(type="http", category="httplib", data=span._data)
        return
    if op == "subprocess":
        hub.add_breadcrumb(
            type="subprocess",
            category="subprocess",
            message=span.description,
            data=span._data,
        )
def extract_sentrytrace_data(header):
    # type: (Optional[str]) -> typing.Mapping[str, Union[str, bool, None]]
    """
    Given a `sentry-trace` header string, return a dictionary of data.
    """
    trace_id = None  # type: Optional[str]
    parent_span_id = None  # type: Optional[str]
    parent_sampled = None  # type: Optional[bool]

    if header:
        # Tolerate traceparent-style wrapping: "00-<data>-00".
        if header.startswith("00-") and header.endswith("-00"):
            header = header[3:-3]

        match = SENTRY_TRACE_REGEX.match(header)
        if match:
            trace_id, parent_span_id, sampled_str = match.groups()
            if trace_id:
                # Zero-pad to the canonical 32-hex-digit form.
                trace_id = "{:032x}".format(int(trace_id, 16))
            if parent_span_id:
                parent_span_id = "{:016x}".format(int(parent_span_id, 16))
            if sampled_str:
                # "0" means explicitly unsampled; anything else means sampled.
                parent_sampled = sampled_str != "0"

    return {
        "trace_id": trace_id,
        "parent_span_id": parent_span_id,
        "parent_sampled": parent_sampled,
    }
def extract_tracestate_data(header):
    # type: (Optional[str]) -> typing.Mapping[str, Optional[str]]
    """
    Extracts the sentry tracestate value and any third-party data from the given
    tracestate header, returning a dictionary of data.

    `sentry_tracestate` is the full `sentry=...` entry (or None when absent
    or invalid); `third_party_tracestate` is the remaining entries rejoined
    with commas (or None when there are none).
    """
    sentry_entry = third_party_entry = None
    before = after = ""
    if header:
        # find sentry's entry, if any
        sentry_match = SENTRY_TRACESTATE_ENTRY_REGEX.search(header)
        if sentry_match:
            sentry_entry = sentry_match.group(1)
            # remove the commas after the split so we don't end up with
            # `xxx=yyy,,zzz=qqq` (double commas) when we put them back together
            before, after = map(lambda s: s.strip(","), header.split(sentry_entry))
            # extract sentry's value from its entry and test to make sure it's
            # valid; if it isn't, discard the entire entry so that a new one
            # will be created
            sentry_value = sentry_entry.replace("sentry=", "")
            if not re.search("^{b64}$".format(b64=base64_stripped), sentry_value):
                sentry_entry = None
        else:
            # No sentry entry: the whole header is third-party data.
            after = header
    # if either part is invalid or empty, remove it before gluing them together
    third_party_entry = (
        ",".join(filter(TRACESTATE_ENTRIES_REGEX.search, [before, after])) or None
    )
    return {
        "sentry_tracestate": sentry_entry,
        "third_party_tracestate": third_party_entry,
    }
def compute_tracestate_value(data):
    # type: (typing.Mapping[str, str]) -> str
    """
    Computes a new tracestate value using the given data.

    Note: Returns just the base64-encoded data, NOT the full `sentry=...`
    tracestate entry.
    """
    # Base64 output is padded to a multiple of 4 with `=` signs, but the
    # tracestate standard uses `=` between vendor name and value
    # (`sentry=xxx,dogsaregreat=yyy`), so strip the padding to avoid
    # confusion.
    encoded = to_base64(json.dumps(data)) or ""
    return encoded.rstrip("=")
def compute_tracestate_entry(span):
    # type: (Span) -> Optional[str]
    """
    Computes a new sentry tracestate for the span. Includes the `sentry=`.

    Will return `None` if there's no client and/or no DSN.
    """
    data = {}
    # Prefer the hub the span is bound to; fall back to the current hub.
    hub = span.hub or sentry_sdk.Hub.current
    client = hub.client
    scope = hub.scope
    if client and client.options.get("dsn"):
        options = client.options
        user = scope._user
        data = {
            "trace_id": span.trace_id,
            "environment": options["environment"],
            "release": options.get("release"),
            "public_key": Dsn(options["dsn"]).public_key,
        }
        # Only attach user data when at least one of id/segment is set.
        if user and (user.get("id") or user.get("segment")):
            user_data = {}
            if user.get("id"):
                user_data["id"] = user["id"]
            if user.get("segment"):
                user_data["segment"] = user["segment"]
            data["user"] = user_data
        if span.containing_transaction:
            data["transaction"] = span.containing_transaction.name
        return "sentry=" + compute_tracestate_value(data)
    return None
def reinflate_tracestate(encoded_tracestate):
    # type: (str) -> typing.Optional[Mapping[str, str]]
    """
    Given a sentry tracestate value in its encoded form, translate it back into
    a dictionary of data.

    Returns None when the input is empty or cannot be decoded/parsed
    (a warning is logged in that case).
    """
    inflated_tracestate = None
    if encoded_tracestate:
        # Base64-encoded strings always come out with a length which is a
        # multiple of 4. In order to achieve this, the end is padded with one or
        # more `=` signs. Because the tracestate standard calls for using `=`
        # signs between vendor name and value (`sentry=xxx,dogsaregreat=yyy`),
        # to avoid confusion we strip the `=` when the data is initially
        # encoded. Python's decoding function requires they be put back.
        # Fortunately, it doesn't complain if there are too many, so we just
        # attach two `=` on spec (there will never be more than 2, see
        # https://en.wikipedia.org/wiki/Base64#Decoding_Base64_without_padding).
        tracestate_json = from_base64(encoded_tracestate + "==")
        try:
            # from_base64 can return None; the assert funnels that case into
            # the same warning path as malformed JSON.
            assert tracestate_json is not None
            inflated_tracestate = json.loads(tracestate_json)
        except Exception as err:
            logger.warning(
                (
                    "Unable to attach tracestate data to envelope header: {err}"
                    + "\nTracestate value is {encoded_tracestate}"
                ).format(err=err, encoded_tracestate=encoded_tracestate),
            )
    return inflated_tracestate
def _format_sql(cursor, sql):
# type: (Any, str) -> Optional[str]
real_sql = None
# If we're using psycopg2, it could be that we're
# looking at a query that uses Composed objects. Use psycopg2's mogrify
# function to format the query. We lose per-parameter trimming but gain
# accuracy in formatting.
try:
if hasattr(cursor, "mogrify"):
real_sql = cursor.mogrify(sql)
if isinstance(real_sql, bytes):
real_sql = real_sql.decode(cursor.connection.encoding)
except Exception:
real_sql = None
return real_sql or to_string(sql)
def has_tracestate_enabled(span=None):
    # type: (Optional[Span]) -> bool
    """True when the `propagate_tracestate` experiment is enabled on the
    client owning `span` (or, lacking a span/hub, the current hub)."""
    # Prefer the hub the span is bound to; fall back to the current hub.
    client = ((span and span.hub) or sentry_sdk.Hub.current).client
    options = client and client.options
    return bool(options and options["_experiments"].get("propagate_tracestate"))
# Circular imports
if MYPY:
from sentry_sdk.tracing import Span
sentry-python-1.4.3/sentry_sdk/transport.py 0000664 0000000 0000000 00000041346 14125057761 0021175 0 ustar 00root root 0000000 0000000 from __future__ import print_function
import io
import urllib3 # type: ignore
import certifi
import gzip
import time
from datetime import datetime, timedelta
from collections import defaultdict
from sentry_sdk.utils import Dsn, logger, capture_internal_exceptions, json_dumps
from sentry_sdk.worker import BackgroundWorker
from sentry_sdk.envelope import Envelope, Item, PayloadRef
from sentry_sdk._types import MYPY
if MYPY:
from typing import Any
from typing import Callable
from typing import Dict
from typing import Iterable
from typing import Optional
from typing import Tuple
from typing import Type
from typing import Union
from typing import DefaultDict
from urllib3.poolmanager import PoolManager # type: ignore
from urllib3.poolmanager import ProxyManager
from sentry_sdk._types import Event, EndpointType
DataCategory = Optional[str]
try:
from urllib.request import getproxies
except ImportError:
from urllib import getproxies # type: ignore
class Transport(object):
    """Baseclass for all transports.

    A transport is used to send an event to sentry.
    """

    parsed_dsn = None  # type: Optional[Dsn]

    def __init__(
        self, options=None  # type: Optional[Dict[str, Any]]
    ):
        # type: (...) -> None
        self.options = options
        # Both checks matter: a missing/None DSN and an empty-string DSN
        # each leave the transport without a parsed DSN.
        if options and options["dsn"] is not None and options["dsn"]:
            self.parsed_dsn = Dsn(options["dsn"])
        else:
            self.parsed_dsn = None

    def capture_event(
        self, event  # type: Event
    ):
        # type: (...) -> None
        """
        This gets invoked with the event dictionary when an event should
        be sent to sentry.
        """
        raise NotImplementedError()

    def capture_envelope(
        self, envelope  # type: Envelope
    ):
        # type: (...) -> None
        """
        Send an envelope to Sentry.

        Envelopes are a data container format that can hold any type of data
        submitted to Sentry. We use it for transactions and sessions, but
        regular "error" events should go through `capture_event` for backwards
        compat.
        """
        raise NotImplementedError()

    def flush(
        self,
        timeout,  # type: float
        callback=None,  # type: Optional[Any]
    ):
        # type: (...) -> None
        """Wait `timeout` seconds for the current events to be sent out."""
        pass

    def kill(self):
        # type: () -> None
        """Forcefully kills the transport."""
        pass

    def record_lost_event(
        self,
        reason,  # type: str
        data_category=None,  # type: Optional[str]
        item=None,  # type: Optional[Item]
    ):
        # type: (...) -> None
        """This increments a counter for event loss by reason and
        data category. Base implementation is a no-op.
        """
        return None

    def __del__(self):
        # type: () -> None
        # Best-effort cleanup; never raise from a destructor.
        try:
            self.kill()
        except Exception:
            pass
def _parse_rate_limits(header, now=None):
# type: (Any, Optional[datetime]) -> Iterable[Tuple[DataCategory, datetime]]
if now is None:
now = datetime.utcnow()
for limit in header.split(","):
try:
retry_after, categories, _ = limit.strip().split(":", 2)
retry_after = now + timedelta(seconds=int(retry_after))
for category in categories and categories.split(";") or (None,):
yield category, retry_after
except (LookupError, ValueError):
continue
class HttpTransport(Transport):
    """The default HTTP transport.

    Payloads are gzip-compressed and POSTed to the DSN's API endpoints from
    a single background worker thread; server rate limits are honored and
    dropped events are tallied into periodic client reports.
    """

    def __init__(
        self, options  # type: Dict[str, Any]
    ):
        # type: (...) -> None
        from sentry_sdk.consts import VERSION

        Transport.__init__(self, options)
        assert self.parsed_dsn is not None
        self.options = options  # type: Dict[str, Any]
        # All network I/O happens on this single background worker thread.
        self._worker = BackgroundWorker(queue_size=options["transport_queue_size"])
        self._auth = self.parsed_dsn.to_auth("sentry.python/%s" % VERSION)
        # Rate-limit expiry per data category; the None key means "all".
        self._disabled_until = {}  # type: Dict[DataCategory, datetime]
        self._retry = urllib3.util.Retry()
        # Drop counters keyed by (category, reason), flushed via client reports.
        self._discarded_events = defaultdict(
            int
        )  # type: DefaultDict[Tuple[str, str], int]
        self._last_client_report_sent = time.time()
        self._pool = self._make_pool(
            self.parsed_dsn,
            http_proxy=options["http_proxy"],
            https_proxy=options["https_proxy"],
            ca_certs=options["ca_certs"],
        )
        # NOTE(review): imported at call time, presumably to avoid a circular
        # import with the package root — confirm before moving to module scope.
        from sentry_sdk import Hub

        self.hub_cls = Hub

    def record_lost_event(
        self,
        reason,  # type: str
        data_category=None,  # type: Optional[str]
        item=None,  # type: Optional[Item]
    ):
        # type: (...) -> None
        """Count a lost event for the next client report; no-op when the
        `send_client_reports` option is disabled."""
        if not self.options["send_client_reports"]:
            return
        quantity = 1
        if item is not None:
            # The item itself determines the category; attachments are
            # counted by size, not by item count.
            data_category = item.data_category
            if data_category == "attachment":
                # quantity of 0 is actually 1 as we do not want to count
                # empty attachments as actually empty.
                quantity = len(item.get_bytes()) or 1
        elif data_category is None:
            raise TypeError("data category not provided")
        self._discarded_events[data_category, reason] += quantity

    def _update_rate_limits(self, response):
        # type: (urllib3.HTTPResponse) -> None
        """Refresh `_disabled_until` from the response's rate-limit info."""
        # new sentries with more rate limit insights. We honor this header
        # no matter of the status code to update our internal rate limits.
        header = response.headers.get("x-sentry-rate-limits")
        if header:
            logger.warning("Rate-limited via x-sentry-rate-limits")
            self._disabled_until.update(_parse_rate_limits(header))
        # old sentries only communicate global rate limit hits via the
        # retry-after header on 429. This header can also be emitted on new
        # sentries if a proxy in front wants to globally slow things down.
        elif response.status == 429:
            logger.warning("Rate-limited via 429")
            self._disabled_until[None] = datetime.utcnow() + timedelta(
                seconds=self._retry.get_retry_after(response) or 60
            )

    def _send_request(
        self,
        body,  # type: bytes
        headers,  # type: Dict[str, str]
        endpoint_type="store",  # type: EndpointType
        envelope=None,  # type: Optional[Envelope]
    ):
        # type: (...) -> None
        """POST `body` to the given endpoint; updates rate limits from the
        response and records losses on network/server errors."""
        def record_loss(reason):
            # type: (str) -> None
            # No envelope means we were sending a plain error event.
            if envelope is None:
                self.record_lost_event(reason, data_category="error")
            else:
                for item in envelope.items:
                    self.record_lost_event(reason, item=item)
        headers.update(
            {
                "User-Agent": str(self._auth.client),
                "X-Sentry-Auth": str(self._auth.to_header()),
            }
        )
        try:
            response = self._pool.request(
                "POST",
                str(self._auth.get_api_url(endpoint_type)),
                body=body,
                headers=headers,
            )
        except Exception:
            self.on_dropped_event("network")
            record_loss("network_error")
            raise
        try:
            self._update_rate_limits(response)
            if response.status == 429:
                # if we hit a 429. Something was rate limited but we already
                # acted on this in `self._update_rate_limits`. Note that we
                # do not want to record event loss here as we will have recorded
                # an outcome in relay already.
                self.on_dropped_event("status_429")
                pass
            elif response.status >= 300 or response.status < 200:
                logger.error(
                    "Unexpected status code: %s (body: %s)",
                    response.status,
                    response.data,
                )
                self.on_dropped_event("status_{}".format(response.status))
                record_loss("network_error")
        finally:
            # Return the connection to the pool in every case.
            response.close()

    def on_dropped_event(self, reason):
        # type: (str) -> None
        # Hook invoked whenever an event/envelope is dropped; the default
        # implementation does nothing (subclasses may override).
        return None

    def _fetch_pending_client_report(self, force=False, interval=60):
        # type: (bool, int) -> Optional[Item]
        """Build a `client_report` item from the accumulated drop counters.

        Returns None when reporting is disabled, the interval has not
        elapsed (unless `force`), or nothing was discarded.
        """
        if not self.options["send_client_reports"]:
            return None
        if not (force or self._last_client_report_sent < time.time() - interval):
            return None
        # Take ownership of the current counters and start a fresh dict so
        # new drops accumulate separately.
        discarded_events = self._discarded_events
        self._discarded_events = defaultdict(int)
        self._last_client_report_sent = time.time()
        if not discarded_events:
            return None
        return Item(
            PayloadRef(
                json={
                    "timestamp": time.time(),
                    "discarded_events": [
                        {"reason": reason, "category": category, "quantity": quantity}
                        for (
                            (category, reason),
                            quantity,
                        ) in discarded_events.items()
                    ],
                }
            ),
            type="client_report",
        )

    def _flush_client_reports(self, force=False):
        # type: (bool) -> None
        """Send any pending client report in its own envelope."""
        client_report = self._fetch_pending_client_report(force=force, interval=60)
        if client_report is not None:
            self.capture_envelope(Envelope(items=[client_report]))

    def _check_disabled(self, category):
        # type: (str) -> bool
        """True if `category` (or all categories) is currently rate-limited."""
        def _disabled(bucket):
            # type: (Any) -> bool
            ts = self._disabled_until.get(bucket)
            return ts is not None and ts > datetime.utcnow()
        # The None bucket is the catch-all limit covering every category.
        return _disabled(category) or _disabled(None)

    def _send_event(
        self, event  # type: Event
    ):
        # type: (...) -> None
        """Gzip and POST a single error event to the store endpoint."""
        if self._check_disabled("error"):
            self.on_dropped_event("self_rate_limits")
            self.record_lost_event("ratelimit_backoff", data_category="error")
            return None
        body = io.BytesIO()
        with gzip.GzipFile(fileobj=body, mode="w") as f:
            f.write(json_dumps(event))
        assert self.parsed_dsn is not None
        logger.debug(
            "Sending event, type:%s level:%s event_id:%s project:%s host:%s"
            % (
                event.get("type") or "null",
                event.get("level") or "null",
                event.get("event_id") or "null",
                self.parsed_dsn.project_id,
                self.parsed_dsn.host,
            )
        )
        self._send_request(
            body.getvalue(),
            headers={"Content-Type": "application/json", "Content-Encoding": "gzip"},
        )
        return None

    def _send_envelope(
        self, envelope  # type: Envelope
    ):
        # type: (...) -> None
        """Gzip and POST an envelope, first dropping rate-limited items and
        piggy-backing any pending client report."""
        # remove all items from the envelope which are over quota
        new_items = []
        for item in envelope.items:
            if self._check_disabled(item.data_category):
                if item.data_category in ("transaction", "error", "default"):
                    self.on_dropped_event("self_rate_limits")
                self.record_lost_event("ratelimit_backoff", item=item)
            else:
                new_items.append(item)
        # Since we're modifying the envelope here make a copy so that others
        # that hold references do not see their envelope modified.
        envelope = Envelope(headers=envelope.headers, items=new_items)
        if not envelope.items:
            return None
        # since we're already in the business of sending out an envelope here
        # check if we have one pending for the stats session envelopes so we
        # can attach it to this enveloped scheduled for sending. This will
        # currently typically attach the client report to the most recent
        # session update.
        client_report_item = self._fetch_pending_client_report(interval=30)
        if client_report_item is not None:
            envelope.items.append(client_report_item)
        body = io.BytesIO()
        with gzip.GzipFile(fileobj=body, mode="w") as f:
            envelope.serialize_into(f)
        assert self.parsed_dsn is not None
        logger.debug(
            "Sending envelope [%s] project:%s host:%s",
            envelope.description,
            self.parsed_dsn.project_id,
            self.parsed_dsn.host,
        )
        self._send_request(
            body.getvalue(),
            headers={
                "Content-Type": "application/x-sentry-envelope",
                "Content-Encoding": "gzip",
            },
            endpoint_type="envelope",
            envelope=envelope,
        )
        return None

    def _get_pool_options(self, ca_certs):
        # type: (Optional[Any]) -> Dict[str, Any]
        # TLS verification is always on; fall back to certifi's CA bundle
        # when no custom bundle is configured.
        return {
            "num_pools": 2,
            "cert_reqs": "CERT_REQUIRED",
            "ca_certs": ca_certs or certifi.where(),
        }

    def _in_no_proxy(self, parsed_dsn):
        # type: (Dsn) -> bool
        """True if the DSN's host matches an entry of the `no_proxy`
        environment setting."""
        no_proxy = getproxies().get("no")
        if not no_proxy:
            return False
        for host in no_proxy.split(","):
            host = host.strip()
            if parsed_dsn.host.endswith(host) or parsed_dsn.netloc.endswith(host):
                return True
        return False

    def _make_pool(
        self,
        parsed_dsn,  # type: Dsn
        http_proxy,  # type: Optional[str]
        https_proxy,  # type: Optional[str]
        ca_certs,  # type: Optional[Any]
    ):
        # type: (...) -> Union[PoolManager, ProxyManager]
        """Build the urllib3 pool, resolving explicit and environment proxy
        settings; an explicit empty-string proxy disables that lookup."""
        proxy = None
        no_proxy = self._in_no_proxy(parsed_dsn)
        # try HTTPS first
        if parsed_dsn.scheme == "https" and (https_proxy != ""):
            proxy = https_proxy or (not no_proxy and getproxies().get("https"))
        # maybe fallback to HTTP proxy
        if not proxy and (http_proxy != ""):
            proxy = http_proxy or (not no_proxy and getproxies().get("http"))
        opts = self._get_pool_options(ca_certs)
        if proxy:
            return urllib3.ProxyManager(proxy, **opts)
        else:
            return urllib3.PoolManager(**opts)

    def capture_event(
        self, event  # type: Event
    ):
        # type: (...) -> None
        """Queue an error event for sending on the worker thread."""
        hub = self.hub_cls.current

        def send_event_wrapper():
            # type: () -> None
            # NOTE(review): `with hub` presumably makes the submitting hub
            # current on the worker thread — confirm against Hub semantics.
            with hub:
                with capture_internal_exceptions():
                    self._send_event(event)
                    self._flush_client_reports()

        if not self._worker.submit(send_event_wrapper):
            # Worker queue full: the event is lost.
            self.on_dropped_event("full_queue")
            self.record_lost_event("queue_overflow", data_category="error")

    def capture_envelope(
        self, envelope  # type: Envelope
    ):
        # type: (...) -> None
        """Queue an envelope for sending on the worker thread."""
        hub = self.hub_cls.current

        def send_envelope_wrapper():
            # type: () -> None
            with hub:
                with capture_internal_exceptions():
                    self._send_envelope(envelope)
                    self._flush_client_reports()

        if not self._worker.submit(send_envelope_wrapper):
            # Worker queue full: every item in the envelope is lost.
            self.on_dropped_event("full_queue")
            for item in envelope.items:
                self.record_lost_event("queue_overflow", item=item)

    def flush(
        self,
        timeout,  # type: float
        callback=None,  # type: Optional[Any]
    ):
        # type: (...) -> None
        """Force out pending client reports and wait up to `timeout` seconds
        for the worker queue to drain."""
        logger.debug("Flushing HTTP transport")
        if timeout > 0:
            self._worker.submit(lambda: self._flush_client_reports(force=True))
            self._worker.flush(timeout, callback)

    def kill(self):
        # type: () -> None
        """Forcefully kills the transport's background worker."""
        logger.debug("Killing HTTP transport")
        self._worker.kill()
class _FunctionTransport(Transport):
    """Transport adapter that forwards each error event to a plain callable.

    Used by `make_transport` when the `transport` option is a function.
    `capture_envelope` is inherited from `Transport` and therefore raises
    NotImplementedError.
    """

    def __init__(
        self, func  # type: Callable[[Event], None]
    ):
        # type: (...) -> None
        # No options are passed, so the base class leaves parsed_dsn as None.
        Transport.__init__(self)
        self._func = func

    def capture_event(
        self, event  # type: Event
    ):
        # type: (...) -> None
        # Hand the event dict straight to the user-supplied callable.
        self._func(event)
        return None
def make_transport(options):
    # type: (Dict[str, Any]) -> Optional[Transport]
    """Create the transport configured by ``options["transport"]``.

    Accepts ``None`` (use the default ``HttpTransport``), a ``Transport``
    instance (returned as-is), a ``Transport`` subclass (instantiated with
    the options), or a callable (wrapped in ``_FunctionTransport``).

    Returns ``None`` when no DSN is configured, since there is nowhere to
    send events. Raises ``TypeError`` for any other ``transport`` value.
    """
    ref_transport = options["transport"]

    # If no transport is given, we use the http transport class
    if ref_transport is None:
        transport_cls = HttpTransport  # type: Type[Transport]
    elif isinstance(ref_transport, Transport):
        return ref_transport
    elif isinstance(ref_transport, type) and issubclass(ref_transport, Transport):
        transport_cls = ref_transport
    elif callable(ref_transport):
        return _FunctionTransport(ref_transport)  # type: ignore
    else:
        # Previously this fell through and later crashed with an opaque
        # UnboundLocalError on `transport_cls`; fail early and clearly.
        raise TypeError("Unknown transport %r" % (ref_transport,))

    # if a transport class is given only instantiate it if the dsn is not
    # empty or None
    if options["dsn"]:
        return transport_cls(options)

    return None
sentry-python-1.4.3/sentry_sdk/utils.py 0000664 0000000 0000000 00000067671 14125057761 0020312 0 ustar 00root root 0000000 0000000 import base64
import json
import linecache
import logging
import os
import sys
import threading
import subprocess
import re
from datetime import datetime
import sentry_sdk
from sentry_sdk._compat import urlparse, text_type, implements_str, PY2
from sentry_sdk._types import MYPY
if MYPY:
from types import FrameType
from types import TracebackType
from typing import Any
from typing import Callable
from typing import Dict
from typing import ContextManager
from typing import Iterator
from typing import List
from typing import Optional
from typing import Set
from typing import Tuple
from typing import Union
from typing import Type
from sentry_sdk._types import ExcInfo, EndpointType
# Unix epoch as a naive datetime; `to_timestamp` below subtracts against it.
epoch = datetime(1970, 1, 1)
# The logger is created here but initialized in the debug support module
logger = logging.getLogger("sentry_sdk.errors")
# NOTE(review): truncation limits — the use sites (e.g. a strip_string
# helper) are outside this chunk; confirm semantics there.
MAX_STRING_LENGTH = 512
MAX_FORMAT_PARAM_LENGTH = 128
# Matches (possibly `=`-padded) base64 content.
BASE64_ALPHABET = re.compile(r"^[a-zA-Z0-9/+=]*$")
def json_dumps(data):
    # type: (Any) -> bytes
    """Serialize data into a compact JSON representation encoded as UTF-8."""
    # No spaces after separators, and NaN/Infinity are rejected outright.
    compact = json.dumps(data, allow_nan=False, separators=(",", ":"))
    return compact.encode("utf-8")
def _get_debug_hub():
    # type: () -> Optional[sentry_sdk.Hub]
    # This function is replaced by debug.py
    # (deliberate stub: returns None until debug support swaps it out)
    pass
def get_default_release():
    # type: () -> Optional[str]
    """Try to guess a default release."""
    # 1. An explicit SENTRY_RELEASE env var always wins.
    release = os.environ.get("SENTRY_RELEASE")
    if release:
        return release

    # 2. Ask git for the current commit, sending git's own output/errors
    #    to the null device.
    with open(os.path.devnull, "w+") as null:
        try:
            proc = subprocess.Popen(
                ["git", "rev-parse", "HEAD"],
                stdout=subprocess.PIPE,
                stderr=null,
                stdin=null,
            )
            release = proc.communicate()[0].strip().decode("utf-8")
        except (OSError, IOError):
            # git missing or not runnable; fall through to the env vars.
            pass

    if release:
        return release

    # 3. Fall back to well-known CI/PaaS environment variables.
    for var in (
        "HEROKU_SLUG_COMMIT",
        "SOURCE_VERSION",
        "CODEBUILD_RESOLVED_SOURCE_VERSION",
        "CIRCLE_SHA1",
        "GAE_DEPLOYMENT_ID",
    ):
        release = os.environ.get(var)
        if release:
            return release

    return None
class CaptureInternalException(object):
    """Stateless context manager that swallows any exception, forwarding
    real ones to the SDK's internal exception handling."""

    __slots__ = ()

    def __enter__(self):
        # type: () -> ContextManager[Any]
        return self

    def __exit__(self, ty, value, tb):
        # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]) -> bool
        # Report genuine exceptions; returning True suppresses them either way.
        if ty is not None and value is not None:
            capture_internal_exception((ty, value, tb))
        return True
# Shared singleton: the context manager keeps no state (`__slots__ = ()`),
# so one instance can safely serve every call site.
_CAPTURE_INTERNAL_EXCEPTION = CaptureInternalException()
def capture_internal_exceptions():
    # type: () -> ContextManager[Any]
    """Context manager that suppresses exceptions and reports them to the
    SDK's internal error handling."""
    return _CAPTURE_INTERNAL_EXCEPTION
def capture_internal_exception(exc_info):
    # type: (ExcInfo) -> None
    """Report an exception raised inside the SDK itself. No-op unless a
    debug hub has been installed (see `_get_debug_hub`)."""
    hub = _get_debug_hub()
    if hub is not None:
        hub._capture_internal_exception(exc_info)
def to_timestamp(value):
    # type: (datetime) -> float
    """Seconds since the Unix epoch. `value` must be a naive datetime
    (the module-level `epoch` it is subtracted from is naive)."""
    return (value - epoch).total_seconds()
def format_timestamp(value):
    # type: (datetime) -> str
    """Render a datetime as ISO-8601 with microseconds and a `Z` suffix,
    e.g. ``2021-03-04T05:06:07.123456Z``."""
    # datetime.__format__ delegates to strftime, so %f works here too.
    return "{:%Y-%m-%dT%H:%M:%S.%fZ}".format(value)
def event_hint_with_exc_info(exc_info=None):
    # type: (Optional[ExcInfo]) -> Dict[str, Optional[ExcInfo]]
    """Creates a hint with the exc info filled in."""
    if exc_info is None:
        # No explicit error given: pick up whatever is currently being handled.
        exc_info = sys.exc_info()
    else:
        # Normalize a raw exception/tuple into a (type, value, tb) triple.
        exc_info = exc_info_from_error(exc_info)
    # sys.exc_info() yields (None, None, None) outside an except block, in
    # which case the hint carries no exc_info at all.
    return {"exc_info": exc_info if exc_info[0] is not None else None}
class BadDsn(ValueError):
    """Raised on invalid DSNs (bad scheme, missing host/public key, or a
    non-numeric project id)."""
@implements_str
class Dsn(object):
    """Represents a DSN.

    Parses ``scheme://public_key[:secret_key]@host[:port]/[path/]project_id``
    and exposes the components as attributes.
    """

    def __init__(self, value):
        # type: (Union[Dsn, str]) -> None
        """Parse a DSN string, or copy another `Dsn`. Raises `BadDsn`."""
        if isinstance(value, Dsn):
            # Copy constructor: clone the already-parsed attributes.
            self.__dict__ = dict(value.__dict__)
            return
        parts = urlparse.urlsplit(text_type(value))

        if parts.scheme not in (u"http", u"https"):
            raise BadDsn("Unsupported scheme %r" % parts.scheme)
        self.scheme = parts.scheme

        if parts.hostname is None:
            raise BadDsn("Missing hostname")

        self.host = parts.hostname

        if parts.port is None:
            # Default port follows the scheme.
            self.port = self.scheme == "https" and 443 or 80
        else:
            self.port = parts.port

        if not parts.username:
            raise BadDsn("Missing public key")

        self.public_key = parts.username
        self.secret_key = parts.password

        path = parts.path.rsplit("/", 1)

        try:
            # The last path segment must be the numeric project id.
            self.project_id = text_type(int(path.pop()))
        except (ValueError, TypeError):
            raise BadDsn("Invalid project in DSN (%r)" % (parts.path or "")[1:])

        self.path = "/".join(path) + "/"

    @property
    def netloc(self):
        # type: () -> str
        """The netloc part of a DSN."""
        rv = self.host
        # Only include the port when it differs from the scheme's default.
        if (self.scheme, self.port) not in (("http", 80), ("https", 443)):
            rv = "%s:%s" % (rv, self.port)
        return rv

    def to_auth(self, client=None):
        # type: (Optional[Any]) -> Auth
        """Returns the auth info object for this dsn."""
        return Auth(
            scheme=self.scheme,
            host=self.netloc,
            path=self.path,
            project_id=self.project_id,
            public_key=self.public_key,
            secret_key=self.secret_key,
            client=client,
        )

    def __str__(self):
        # type: () -> str
        # BUGFIX: the secret key belongs in the userinfo as "pk:sk@host".
        # The previous "@" separator produced "pk@sk@host", which
        # urlsplit cannot parse back (password comes only after ":"),
        # so Dsn(str(dsn)) failed to round-trip.
        return "%s://%s%s@%s%s%s" % (
            self.scheme,
            self.public_key,
            self.secret_key and ":" + self.secret_key or "",
            self.netloc,
            self.path,
            self.project_id,
        )
class Auth(object):
    """Helper object that represents the auth info."""

    def __init__(
        self,
        scheme,
        host,
        project_id,
        public_key,
        secret_key=None,
        version=7,
        client=None,
        path="/",
    ):
        # type: (str, str, str, str, Optional[str], int, Optional[Any], str) -> None
        self.scheme = scheme
        self.host = host
        self.path = path
        self.project_id = project_id
        self.public_key = public_key
        self.secret_key = secret_key
        self.version = version
        self.client = client

    @property
    def store_api_url(self):
        # type: () -> str
        """Returns the API url for storing events.

        Deprecated: use get_api_url instead.
        """
        return self.get_api_url(type="store")

    def get_api_url(
        self, type="store"  # type: EndpointType
    ):
        # type: (...) -> str
        """Returns the API url for the given endpoint type."""
        return "{0}://{1}{2}api/{3}/{4}/".format(
            self.scheme,
            self.host,
            self.path,
            self.project_id,
            type,
        )

    def to_header(self, timestamp=None):
        # type: (Optional[datetime]) -> str
        """Returns the auth header a string."""
        # Mandatory fields first; the rest only when actually configured.
        fields = [("sentry_key", self.public_key), ("sentry_version", self.version)]
        if timestamp is not None:
            fields.append(("sentry_timestamp", str(to_timestamp(timestamp))))
        if self.client is not None:
            fields.append(("sentry_client", self.client))
        if self.secret_key is not None:
            fields.append(("sentry_secret", self.secret_key))
        rendered = u", ".join(u"%s=%s" % (key, value) for key, value in fields)
        return u"Sentry " + rendered
class AnnotatedValue(object):
    """Pairs a value with metadata describing it (e.g. how it was trimmed)."""

    __slots__ = ("value", "metadata")

    def __init__(self, value, metadata):
        # type: (Optional[Any], Dict[str, Any]) -> None
        self.value = value
        self.metadata = metadata
if MYPY:
from typing import TypeVar
T = TypeVar("T")
Annotated = Union[AnnotatedValue, T]
def get_type_name(cls):
    # type: (Optional[type]) -> Optional[str]
    """Best available name for a class: `__qualname__`, then `__name__`."""
    qualname = getattr(cls, "__qualname__", None)
    return qualname or getattr(cls, "__name__", None)
def get_type_module(cls):
    # type: (Optional[type]) -> Optional[str]
    """Module a class came from, suppressing the uninteresting builtins."""
    module = getattr(cls, "__module__", None)
    if module in (None, "builtins", "__builtins__"):
        return None
    return module
def should_hide_frame(frame):
    # type: (FrameType) -> bool
    """True for frames that should not appear in reported stack traces:
    SDK-internal frames and frames carrying a hide marker in their locals."""
    try:
        if frame.f_globals["__name__"].startswith("sentry_sdk."):
            return True
    except (AttributeError, KeyError):
        pass

    # Honor the conventional "hide me" flags when set truthy in the locals.
    for flag_name in ("__traceback_hide__", "__tracebackhide__"):
        try:
            if frame.f_locals[flag_name]:
                return True
        except Exception:
            pass

    return False
def iter_stacks(tb):
    # type: (Optional[TracebackType]) -> Iterator[TracebackType]
    """Walk a traceback chain, yielding only entries whose frame is not
    filtered out by `should_hide_frame`."""
    current = tb  # type: Optional[TracebackType]
    while current is not None:
        if not should_hide_frame(current.tb_frame):
            yield current
        current = current.tb_next
def get_lines_from_file(
    filename,  # type: str
    lineno,  # type: int
    loader=None,  # type: Optional[Any]
    module=None,  # type: Optional[str]
):
    # type: (...) -> Tuple[List[Annotated[str]], Optional[Annotated[str]], List[Annotated[str]]]
    """Return (pre_context, context_line, post_context) around the 0-based
    `lineno`, with up to 5 lines of context on each side.

    Source is read via the module loader when possible, falling back to
    linecache. Returns ([], None, []) when the source is unavailable.
    """
    context_lines = 5
    source = None
    # Prefer the loader's view of the source (works for zipimports etc.).
    if loader is not None and hasattr(loader, "get_source"):
        try:
            source_str = loader.get_source(module)  # type: Optional[str]
        except (ImportError, IOError):
            source_str = None
        if source_str is not None:
            source = source_str.splitlines()
    if source is None:
        try:
            source = linecache.getlines(filename)
        except (OSError, IOError):
            return [], None, []
    if not source:
        return [], None, []
    # Clamp the context window to the file boundaries.
    lower_bound = max(0, lineno - context_lines)
    upper_bound = min(lineno + 1 + context_lines, len(source))
    try:
        pre_context = [
            strip_string(line.strip("\r\n")) for line in source[lower_bound:lineno]
        ]
        context_line = strip_string(source[lineno].strip("\r\n"))
        post_context = [
            strip_string(line.strip("\r\n"))
            for line in source[(lineno + 1) : upper_bound]
        ]
        return pre_context, context_line, post_context
    except IndexError:
        # the file may have changed since it was loaded into memory
        return [], None, []
def get_source_context(
    frame,  # type: FrameType
    tb_lineno,  # type: int
):
    # type: (...) -> Tuple[List[Annotated[str]], Optional[Annotated[str]], List[Annotated[str]]]
    """Resolve source context lines for a frame.

    ``tb_lineno`` is the 1-based line number reported by the traceback; it
    is converted to the 0-based index ``get_lines_from_file`` expects.
    Returns ``([], None, [])`` whenever the source cannot be resolved.
    """
    try:
        abs_path = frame.f_code.co_filename  # type: Optional[str]
    except Exception:
        abs_path = None

    try:
        module = frame.f_globals["__name__"]
    except Exception:
        # Without a module name we cannot resolve a loader; give up early.
        return [], None, []

    try:
        loader = frame.f_globals["__loader__"]
    except Exception:
        loader = None

    source_lineno = tb_lineno - 1
    if abs_path:
        return get_lines_from_file(abs_path, source_lineno, loader, module)
    return [], None, []
def safe_str(value):
    # type: (Any) -> str
    """str() that never raises: falls back to ``safe_repr`` on any error."""
    try:
        return text_type(value)
    except Exception:
        return safe_repr(value)
if PY2:

    def safe_repr(value):
        # type: (Any) -> str
        """repr() that never raises and renders unicode escapes readably (py2)."""
        try:
            rv = repr(value).decode("utf-8", "replace")

            # At this point `rv` contains a bunch of literal escape codes, like
            # this (exaggerated example):
            #
            # u"\\x2f"
            #
            # But we want to show this string as:
            #
            # u"/"
            try:
                # unicode-escape does this job, but can only decode latin1. So we
                # attempt to encode in latin1.
                return rv.encode("latin1").decode("unicode-escape")
            except Exception:
                # Since usually strings aren't latin1 this can break. In those
                # cases we just give up.
                return rv
        except Exception:
            # If e.g. the call to `repr` already fails
            return u"<broken repr>"

else:

    def safe_repr(value):
        # type: (Any) -> str
        """repr() that never raises (py3)."""
        try:
            return repr(value)
        except Exception:
            return "<broken repr>"
def filename_for_module(module, abs_path):
    # type: (Optional[str], Optional[str]) -> Optional[str]
    """Compute a short, project-relative filename for a frame.

    Strips the base package's parent directory from ``abs_path``; falls back
    to the unmodified ``abs_path`` on any failure.
    """
    if not abs_path or not module:
        return abs_path

    try:
        if abs_path.endswith(".pyc"):
            # Point at the source file rather than the compiled artifact.
            abs_path = abs_path[:-1]

        base_module = module.split(".", 1)[0]
        if base_module == module:
            # Top-level module: just use the file's own name.
            return os.path.basename(abs_path)

        base_module_path = sys.modules[base_module].__file__
        package_root = base_module_path.rsplit(os.sep, 2)[0]
        return abs_path.split(package_root, 1)[-1].lstrip(os.sep)
    except Exception:
        return abs_path
def serialize_frame(frame, tb_lineno=None, with_locals=True):
    # type: (FrameType, Optional[int], bool) -> Dict[str, Any]
    """Convert a Python frame object into a Sentry protocol frame dict.

    ``tb_lineno`` overrides the frame's own current line number (a traceback
    knows the line at raise time, which can differ from ``frame.f_lineno``).
    When ``with_locals`` is set, the raw ``f_locals`` mapping is attached
    under "vars"; it is serialized/trimmed later in the pipeline.
    """
    f_code = getattr(frame, "f_code", None)
    if not f_code:
        abs_path = None
        function = None
    else:
        abs_path = frame.f_code.co_filename
        function = frame.f_code.co_name
    try:
        module = frame.f_globals["__name__"]
    except Exception:
        module = None
    if tb_lineno is None:
        tb_lineno = frame.f_lineno
    pre_context, context_line, post_context = get_source_context(frame, tb_lineno)
    rv = {
        "filename": filename_for_module(module, abs_path) or None,
        "abs_path": os.path.abspath(abs_path) if abs_path else None,
        "function": function or "",
        "module": module,
        "lineno": tb_lineno,
        "pre_context": pre_context,
        "context_line": context_line,
        "post_context": post_context,
    }  # type: Dict[str, Any]
    if with_locals:
        rv["vars"] = frame.f_locals
    return rv
def current_stacktrace(with_locals=True):
    # type: (bool) -> Any
    """Capture the current thread's stack as a protocol stacktrace dict.

    Frames are collected from innermost outwards and then reversed, so the
    result lists the oldest frame first. SDK-internal frames are skipped
    (this function hides itself via ``__tracebackhide__``).
    """
    __tracebackhide__ = True
    frames = []
    f = sys._getframe()  # type: Optional[FrameType]
    while f is not None:
        if not should_hide_frame(f):
            frames.append(serialize_frame(f, with_locals=with_locals))
        f = f.f_back
    frames.reverse()
    return {"frames": frames}
def get_errno(exc_value):
    # type: (BaseException) -> Optional[Any]
    """Best-effort extraction of an OS-level error number from an exception."""
    errno = getattr(exc_value, "errno", None)
    return errno
def single_exception_from_error_tuple(
    exc_type,  # type: Optional[type]
    exc_value,  # type: Optional[BaseException]
    tb,  # type: Optional[TracebackType]
    client_options=None,  # type: Optional[Dict[str, Any]]
    mechanism=None,  # type: Optional[Dict[str, Any]]
):
    # type: (...) -> Dict[str, Any]
    """Serialize one exception (with its traceback) into protocol format.

    When the exception carries an ``errno``, it is recorded into the
    mechanism's metadata (the passed-in ``mechanism`` dict is mutated).
    """
    if exc_value is not None:
        errno = get_errno(exc_value)
    else:
        errno = None
    if errno is not None:
        # setdefault all the way down so caller-provided values win.
        mechanism = mechanism or {"type": "generic"}
        mechanism.setdefault("meta", {}).setdefault("errno", {}).setdefault(
            "number", errno
        )
    if client_options is None:
        with_locals = True
    else:
        with_locals = client_options["with_locals"]
    frames = [
        serialize_frame(tb.tb_frame, tb_lineno=tb.tb_lineno, with_locals=with_locals)
        for tb in iter_stacks(tb)
    ]
    rv = {
        "module": get_type_module(exc_type),
        "type": get_type_name(exc_type),
        "value": safe_str(exc_value),
        "mechanism": mechanism,
    }
    # A missing stacktrace key is valid protocol; don't emit an empty one.
    if frames:
        rv["stacktrace"] = {"frames": frames}
    return rv
# Python 3 exceptions carry __cause__/__context__; Python 2 does not.
HAS_CHAINED_EXCEPTIONS = hasattr(Exception, "__suppress_context__")

if HAS_CHAINED_EXCEPTIONS:

    def walk_exception_chain(exc_info):
        # type: (ExcInfo) -> Iterator[ExcInfo]
        """Yield ``(type, value, tb)`` for an exception and its cause/context chain."""
        exc_type, exc_value, tb = exc_info

        # Keep strong references so ids cannot be recycled for new objects,
        # and track ids (not hashes) since exception values may be unhashable.
        visited = []
        visited_ids = set()  # type: Set[int]

        while (
            exc_type is not None
            and exc_value is not None
            and id(exc_value) not in visited_ids
        ):
            yield exc_type, exc_value, tb

            visited.append(exc_value)
            visited_ids.add(id(exc_value))

            # `raise X from Y` suppresses the implicit context in favor of
            # the explicit cause.
            if exc_value.__suppress_context__:
                next_exc = exc_value.__cause__
            else:
                next_exc = exc_value.__context__

            if next_exc is None:
                break

            exc_type = type(next_exc)
            exc_value = next_exc
            tb = getattr(next_exc, "__traceback__", None)

else:

    def walk_exception_chain(exc_info):
        # type: (ExcInfo) -> Iterator[ExcInfo]
        """Python 2 has no exception chaining: yield only the given exc_info."""
        yield exc_info
def exceptions_from_error_tuple(
    exc_info,  # type: ExcInfo
    client_options=None,  # type: Optional[Dict[str, Any]]
    mechanism=None,  # type: Optional[Dict[str, Any]]
):
    # type: (...) -> List[Dict[str, Any]]
    """Serialize an exception and its whole cause/context chain.

    The result is ordered oldest-first, as the Sentry protocol expects.
    """
    rv = [
        single_exception_from_error_tuple(
            exc_type, exc_value, tb, client_options, mechanism
        )
        for exc_type, exc_value, tb in walk_exception_chain(exc_info)
    ]
    rv.reverse()
    return rv
def to_string(value):
    # type: (str) -> str
    """Coerce to text; on undecodable bytes (py2) fall back to its repr.

    The ``[1:-1]`` slice strips the surrounding quotes from the repr.
    """
    try:
        return text_type(value)
    except UnicodeDecodeError:
        return repr(value)[1:-1]
def iter_event_stacktraces(event):
    # type: (Dict[str, Any]) -> Iterator[Dict[str, Any]]
    """Yield every stacktrace dict in an event: top level, threads, exceptions."""
    if "stacktrace" in event:
        yield event["stacktrace"]

    # Threads and exception values share the same {"values": [...]} layout.
    for container in ("threads", "exception"):
        if container in event:
            for entry in event[container].get("values") or ():
                if "stacktrace" in entry:
                    yield entry["stacktrace"]
def iter_event_frames(event):
    # type: (Dict[str, Any]) -> Iterator[Dict[str, Any]]
    """Yield every frame dict from every stacktrace in the event."""
    for stacktrace in iter_event_stacktraces(event):
        frames = stacktrace.get("frames") or ()
        for frame in frames:
            yield frame
def handle_in_app(event, in_app_exclude=None, in_app_include=None):
    # type: (Dict[str, Any], Optional[List[str]], Optional[List[str]]) -> Dict[str, Any]
    """Mark the frames of all stacktraces in ``event`` as in-app or not.

    Mutates the event in place and returns it for convenience.
    """
    for stacktrace in iter_event_stacktraces(event):
        frames = stacktrace.get("frames")
        handle_in_app_impl(
            frames,
            in_app_exclude=in_app_exclude,
            in_app_include=in_app_include,
        )
    return event
def handle_in_app_impl(frames, in_app_exclude, in_app_include):
    # type: (Any, Optional[List[str]], Optional[List[str]]) -> Optional[Any]
    """Annotate each frame dict with an "in_app" flag based on module lists.

    Frames that already carry an explicit "in_app" value are left untouched.
    If, after applying the include/exclude rules, no frame at all is in-app,
    every still-undecided frame is marked in-app so the UI has something
    useful to highlight.
    """
    if not frames:
        return None
    any_in_app = False
    for frame in frames:
        # Respect a pre-existing explicit decision.
        in_app = frame.get("in_app")
        if in_app is not None:
            if in_app:
                any_in_app = True
            continue
        module = frame.get("module")
        if not module:
            continue
        elif _module_in_set(module, in_app_include):
            # Includes take precedence over excludes.
            frame["in_app"] = True
            any_in_app = True
        elif _module_in_set(module, in_app_exclude):
            frame["in_app"] = False
    if not any_in_app:
        for frame in frames:
            if frame.get("in_app") is None:
                frame["in_app"] = True
    return frames
def exc_info_from_error(error):
# type: (Union[BaseException, ExcInfo]) -> ExcInfo
if isinstance(error, tuple) and len(error) == 3:
exc_type, exc_value, tb = error
elif isinstance(error, BaseException):
tb = getattr(error, "__traceback__", None)
if tb is not None:
exc_type = type(error)
exc_value = error
else:
exc_type, exc_value, tb = sys.exc_info()
if exc_value is not error:
tb = None
exc_value = error
exc_type = type(error)
else:
raise ValueError("Expected Exception object to report, got %s!" % type(error))
return exc_type, exc_value, tb
def event_from_exception(
    exc_info,  # type: Union[BaseException, ExcInfo]
    client_options=None,  # type: Optional[Dict[str, Any]]
    mechanism=None,  # type: Optional[Dict[str, Any]]
):
    # type: (...) -> Tuple[Dict[str, Any], Dict[str, Any]]
    """Build an ``(event, hint)`` pair from an exception or exc_info tuple.

    The hint carries the original exc_info so ``before_send`` hooks can
    inspect the live exception object.
    """
    exc_info = exc_info_from_error(exc_info)
    hint = event_hint_with_exc_info(exc_info)
    return (
        {
            "level": "error",
            "exception": {
                "values": exceptions_from_error_tuple(
                    exc_info, client_options, mechanism
                )
            },
        },
        hint,
    )
def _module_in_set(name, set):
# type: (str, Optional[List[str]]) -> bool
if not set:
return False
for item in set or ():
if item == name or name.startswith(item + "."):
return True
return False
def strip_string(value, max_length=None):
    # type: (str, Optional[int]) -> Union[AnnotatedValue, str]
    """Truncate ``value`` to ``max_length``, annotating when trimmed.

    Returns the string unchanged when it is empty or short enough; otherwise
    returns an AnnotatedValue whose metadata records the original length and
    the removed span (Sentry "rem" convention).
    """
    # TODO: read max_length from config
    if not value:
        return value
    if max_length is None:
        # This is intentionally not just the default such that one can patch `MAX_STRING_LENGTH` and affect `strip_string`.
        max_length = MAX_STRING_LENGTH
    length = len(value)
    if length > max_length:
        return AnnotatedValue(
            # Reserve three characters for the "..." ellipsis marker.
            value=value[: max_length - 3] + u"...",
            metadata={
                "len": length,
                "rem": [["!limit", "x", max_length - 3, max_length]],
            },
        )
    return value
def _is_contextvars_broken():
    # type: () -> bool
    """
    Returns whether gevent/eventlet have patched the stdlib in a way where thread locals are now more "correct" than contextvars.
    """
    try:
        import gevent  # type: ignore
        from gevent.monkey import is_object_patched  # type: ignore
        # Get the MAJOR and MINOR version numbers of Gevent
        version_tuple = tuple([int(part) for part in gevent.__version__.split(".")[:2]])
        if is_object_patched("threading", "local"):
            # Gevent 20.9.0 depends on Greenlet 0.4.17 which natively handles switching
            # context vars when greenlets are switched, so, Gevent 20.9.0+ is all fine.
            # Ref: https://github.com/gevent/gevent/blob/83c9e2ae5b0834b8f84233760aabe82c3ba065b4/src/gevent/monkey.py#L604-L609
            # Gevent 20.5, that doesn't depend on Greenlet 0.4.17 with native support
            # for contextvars, is able to patch both thread locals and contextvars, in
            # that case, check if contextvars are effectively patched.
            if (
                # Gevent 20.9.0+
                (sys.version_info >= (3, 7) and version_tuple >= (20, 9))
                # Gevent 20.5.0+ or Python < 3.7
                or (is_object_patched("contextvars", "ContextVar"))
            ):
                return False
            # Thread locals are patched but contextvars are not: contextvars
            # would leak across greenlets, so report them as broken.
            return True
    except ImportError:
        pass
    try:
        from eventlet.patcher import is_monkey_patched  # type: ignore
        # Eventlet has no contextvars-aware patching; any patched threading
        # implies contextvars cannot be trusted.
        if is_monkey_patched("thread"):
            return True
    except ImportError:
        pass
    return False
def _make_threadlocal_contextvars(local):
# type: (type) -> type
class ContextVar(object):
# Super-limited impl of ContextVar
def __init__(self, name):
# type: (str) -> None
self._name = name
self._local = local()
def get(self, default):
# type: (Any) -> Any
return getattr(self._local, "value", default)
def set(self, value):
# type: (Any) -> None
self._local.value = value
return ContextVar
def _get_contextvars():
    # type: () -> Tuple[bool, type]
    """
    Figure out the "right" contextvars installation to use. Returns a
    `contextvars.ContextVar`-like class with a limited API.
    See https://docs.sentry.io/platforms/python/contextvars/ for more information.

    The boolean in the returned tuple indicates whether a *real* contextvars
    implementation is in use (as opposed to the thread-local fallback).
    """
    if not _is_contextvars_broken():
        # aiocontextvars is a PyPI package that ensures that the contextvars
        # backport (also a PyPI package) works with asyncio under Python 3.6
        #
        # Import it if available.
        if sys.version_info < (3, 7):
            # `aiocontextvars` is absolutely required for functional
            # contextvars on Python 3.6.
            try:
                from aiocontextvars import ContextVar  # noqa
                return True, ContextVar
            except ImportError:
                pass
        else:
            # On Python 3.7 contextvars are functional.
            try:
                from contextvars import ContextVar
                return True, ContextVar
            except ImportError:
                pass
    # Fall back to basic thread-local usage.
    from threading import local
    return False, _make_threadlocal_contextvars(local)
# Resolved once at import time: whether a real contextvars implementation is
# in effect, and the ContextVar class the SDK should use.
HAS_REAL_CONTEXTVARS, ContextVar = _get_contextvars()
# User-facing hint shown when scope/context data might leak across
# asyncio/ASGI requests due to a broken or missing contextvars installation.
CONTEXTVARS_ERROR_MESSAGE = """
With asyncio/ASGI applications, the Sentry SDK requires a functional
installation of `contextvars` to avoid leaking scope/context data across
requests.
Please refer to https://docs.sentry.io/platforms/python/contextvars/ for more information.
"""
def transaction_from_function(func):
    # type: (Callable[..., Any]) -> Optional[str]
    """Best-effort dotted path ("module.Qualname") for a callable, or None."""
    # Python 2 (un)bound methods carry the defining class on `im_class`.
    try:
        return "%s.%s.%s" % (
            func.im_class.__module__,  # type: ignore
            func.im_class.__name__,  # type: ignore
            func.__name__,
        )
    except Exception:
        pass

    qualname = getattr(func, "__qualname__", None) or getattr(
        func, "__name__", None
    )  # type: Optional[str]
    if not qualname:
        # Anonymous / unnameable callable.
        return None

    # Covers Python 3 methods, plain functions and classes alike.
    try:
        return "%s.%s" % (func.__module__, qualname)
    except Exception:
        pass

    # E.g. a callable lacking __module__ — fall back to the bare name.
    return qualname
# Context-local flag used to temporarily suppress event capture (e.g. while
# the SDK itself is doing work that could recursively emit events).
disable_capture_event = ContextVar("disable_capture_event")
class ServerlessTimeoutWarning(Exception):
    """Raised when a serverless method is about to reach its timeout."""
    pass
class TimeoutThread(threading.Thread):
    """Thread that sleeps for ``waiting_time`` seconds and then raises
    ServerlessTimeoutWarning, unless cancelled first via ``stop()``.
    """

    def __init__(self, waiting_time, configured_timeout):
        # type: (float, int) -> None
        threading.Thread.__init__(self)
        self.waiting_time = waiting_time
        self.configured_timeout = configured_timeout
        self._stop_event = threading.Event()

    def stop(self):
        # type: () -> None
        """Cancel the pending timeout warning."""
        self._stop_event.set()

    def run(self):
        # type: () -> None
        """Wait, then raise the warning unless stop() was called in time."""
        self._stop_event.wait(self.waiting_time)
        if self._stop_event.is_set():
            # stop() was called: the function finished in time.
            return

        # Round the configured timeout up to whole seconds for the message.
        whole_seconds = int(self.configured_timeout)
        if whole_seconds < self.configured_timeout:
            whole_seconds += 1

        raise ServerlessTimeoutWarning(
            "WARNING : Function is expected to get timed out. Configured timeout duration = {} seconds.".format(
                whole_seconds
            )
        )
def to_base64(original):
    # type: (str) -> Optional[str]
    """Encode a text string as base64 (via UTF-8); None if encoding fails."""
    try:
        encoded_bytes = base64.b64encode(original.encode("UTF-8"))
        return encoded_bytes.decode("UTF-8")
    except Exception as err:
        logger.warning("Unable to encode {orig} to base64:".format(orig=original), err)
        return None
def from_base64(base64_string):
    # type: (str) -> Optional[str]
    """
    Convert a string from base64, via UTF-8. Returns None on invalid input.
    """
    utf8_string = None
    try:
        # BASE64_ALPHABET is a module-level regex rejecting non-base64 input
        # up front, since b64decode is lenient about some malformed strings.
        only_valid_chars = BASE64_ALPHABET.match(base64_string)
        assert only_valid_chars
        base64_bytes = base64_string.encode("UTF-8")
        utf8_bytes = base64.b64decode(base64_bytes)
        utf8_string = utf8_bytes.decode("UTF-8")
    except Exception as err:
        # Best-effort helper: log and return None rather than raising.
        logger.warning(
            "Unable to decode {b64} from base64:".format(b64=base64_string), err
        )
    return utf8_string
sentry-python-1.4.3/sentry_sdk/worker.py 0000664 0000000 0000000 00000010053 14125057761 0020441 0 ustar 00root root 0000000 0000000 import os
import threading
from time import sleep, time
from sentry_sdk._compat import check_thread_support
from sentry_sdk._queue import Queue, Full
from sentry_sdk.utils import logger
from sentry_sdk.consts import DEFAULT_QUEUE_SIZE
from sentry_sdk._types import MYPY
if MYPY:
from typing import Any
from typing import Optional
from typing import Callable
# Sentinel placed on the queue to tell the worker thread to exit.
_TERMINATOR = object()
class BackgroundWorker(object):
    """Daemon thread draining a bounded queue of callbacks (e.g. HTTP sends).

    The worker is restarted transparently after ``os.fork()`` (see the pid
    check in ``is_alive``) and is shut down either via ``kill()`` or by
    letting ``flush()`` drain the queue.
    """
    def __init__(self, queue_size=DEFAULT_QUEUE_SIZE):
        # type: (int) -> None
        check_thread_support()
        self._queue = Queue(queue_size)  # type: Queue
        self._lock = threading.Lock()
        self._thread = None  # type: Optional[threading.Thread]
        # pid the thread was started in; a mismatch means we are in a fork
        # child and the inherited thread object is dead.
        self._thread_for_pid = None  # type: Optional[int]
    @property
    def is_alive(self):
        # type: () -> bool
        """True if the worker thread is running in *this* process."""
        if self._thread_for_pid != os.getpid():
            return False
        if not self._thread:
            return False
        return self._thread.is_alive()
    def _ensure_thread(self):
        # type: () -> None
        # Lazily (re)start the thread; also covers the post-fork case.
        if not self.is_alive:
            self.start()
    def _timed_queue_join(self, timeout):
        # type: (float) -> bool
        """Wait up to ``timeout`` seconds for the queue to drain.

        Returns True if all tasks finished, False on timeout.
        """
        deadline = time() + timeout
        queue = self._queue
        queue.all_tasks_done.acquire()
        try:
            while queue.unfinished_tasks:
                delay = deadline - time()
                if delay <= 0:
                    return False
                queue.all_tasks_done.wait(timeout=delay)
            return True
        finally:
            queue.all_tasks_done.release()
    def start(self):
        # type: () -> None
        """Start the daemon worker thread if it is not already running."""
        with self._lock:
            if not self.is_alive:
                self._thread = threading.Thread(
                    target=self._target, name="raven-sentry.BackgroundWorker"
                )
                # Daemon thread: never blocks interpreter shutdown.
                self._thread.daemon = True
                self._thread.start()
                self._thread_for_pid = os.getpid()
    def kill(self):
        # type: () -> None
        """
        Kill worker thread. Returns immediately. Not useful for
        waiting on shutdown for events, use `flush` for that.
        """
        logger.debug("background worker got kill request")
        with self._lock:
            if self._thread:
                try:
                    self._queue.put_nowait(_TERMINATOR)
                except Full:
                    logger.debug("background worker queue full, kill failed")
                self._thread = None
                self._thread_for_pid = None
    def flush(self, timeout, callback=None):
        # type: (float, Optional[Any]) -> None
        """Block until the queue is drained or ``timeout`` seconds elapse."""
        logger.debug("background worker got flush request")
        with self._lock:
            if self.is_alive and timeout > 0.0:
                self._wait_flush(timeout, callback)
        logger.debug("background worker flushed")
    def _wait_flush(self, timeout, callback):
        # type: (float, Optional[Any]) -> None
        # First a short join; if items remain, notify via callback and then
        # wait out the remaining timeout budget.
        initial_timeout = min(0.1, timeout)
        if not self._timed_queue_join(initial_timeout):
            # NOTE(review): the +1 presumably accounts for the item currently
            # being processed (already removed from the queue) — confirm.
            pending = self._queue.qsize() + 1
            logger.debug("%d event(s) pending on flush", pending)
            if callback is not None:
                callback(pending, timeout)
            if not self._timed_queue_join(timeout - initial_timeout):
                pending = self._queue.qsize() + 1
                logger.error("flush timed out, dropped %s events", pending)
    def submit(self, callback):
        # type: (Callable[[], None]) -> bool
        """Enqueue a callback; returns False (drops it) if the queue is full."""
        self._ensure_thread()
        try:
            self._queue.put_nowait(callback)
            return True
        except Full:
            return False
    def _target(self):
        # type: () -> None
        # Worker loop: run callbacks until the terminator sentinel arrives.
        while True:
            callback = self._queue.get()
            try:
                if callback is _TERMINATOR:
                    break
                try:
                    callback()
                except Exception:
                    logger.error("Failed processing job", exc_info=True)
            finally:
                self._queue.task_done()
            # Yield to other threads between jobs.
            sleep(0)
sentry-python-1.4.3/setup.py 0000664 0000000 0000000 00000005166 14125057761 0016114 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python
"""
Sentry-Python - Sentry SDK for Python
=====================================
**Sentry-Python is an SDK for Sentry.** Check out `GitHub
`_ to find out more.
"""
import os
from setuptools import setup, find_packages
# Directory containing setup.py; used to resolve files shipped in the sdist.
here = os.path.abspath(os.path.dirname(__file__))
def get_file_text(file_name):
    # Read and return the full text of a file that sits next to setup.py.
    with open(os.path.join(here, file_name)) as in_file:
        return in_file.read()
setup(
    name="sentry-sdk",
    version="1.4.3",
    author="Sentry Team and Contributors",
    author_email="hello@sentry.io",
    url="https://github.com/getsentry/sentry-python",
    project_urls={
        "Documentation": "https://docs.sentry.io/platforms/python/",
        "Changelog": "https://github.com/getsentry/sentry-python/blob/master/CHANGELOG.md",
    },
    description="Python client for Sentry (https://sentry.io)",
    long_description=get_file_text("README.md"),
    long_description_content_type="text/markdown",
    packages=find_packages(exclude=("tests", "tests.*")),
    # PEP 561
    package_data={"sentry_sdk": ["py.typed"]},
    zip_safe=False,
    license="BSD",
    # Core runtime dependencies are intentionally minimal; all framework
    # integrations are optional extras below.
    install_requires=["urllib3>=1.10.0", "certifi"],
    extras_require={
        "flask": ["flask>=0.11", "blinker>=1.1"],
        "bottle": ["bottle>=0.12.13"],
        "falcon": ["falcon>=1.4"],
        "django": ["django>=1.8"],
        "sanic": ["sanic>=0.8"],
        "celery": ["celery>=3"],
        "beam": ["apache-beam>=2.12"],
        "rq": ["rq>=0.6"],
        "aiohttp": ["aiohttp>=3.5"],
        "tornado": ["tornado>=5"],
        "sqlalchemy": ["sqlalchemy>=1.2"],
        "pyspark": ["pyspark>=2.4.4"],
        "pure_eval": ["pure_eval", "executing", "asttokens"],
        "chalice": ["chalice>=1.16.0"],
        "httpx": ["httpx>=0.16.0"],
    },
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Environment :: Web Environment",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: BSD License",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Topic :: Software Development :: Libraries :: Python Modules",
    ],
    # Single wheel works for both py2 and py3.
    options={"bdist_wheel": {"universal": "1"}},
)
sentry-python-1.4.3/test-requirements.txt 0000664 0000000 0000000 00000000435 14125057761 0020635 0 ustar 00root root 0000000 0000000 pytest
pytest-forked==1.1.3
tox==3.7.0
Werkzeug
pytest-localserver==0.5.0
pytest-cov==2.8.1
jsonschema==3.2.0
pyrsistent==0.16.0 # TODO(py3): 0.17.0 requires python3, see https://github.com/tobgu/pyrsistent/issues/205
mock # for testing under python < 3.3
gevent
executing
asttokens
sentry-python-1.4.3/tests/ 0000775 0000000 0000000 00000000000 14125057761 0015534 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/__init__.py 0000664 0000000 0000000 00000000636 14125057761 0017652 0 ustar 00root root 0000000 0000000 import sys
import pytest
# This is used in _capture_internal_warnings. We need to run this at import
# time because that's where many deprecation warnings might get thrown.
#
# This lives in tests/__init__.py because apparently even tests/conftest.py
# gets loaded too late.
# Importing sentry_sdk before the recorder is installed would hide deprecation
# warnings emitted at import time; fail loudly if that already happened.
assert "sentry_sdk" not in sys.modules
# NOTE(review): pytest.warns(None) is deprecated in newer pytest and rejected
# by pytest 8 — confirm the pinned pytest version still supports it.
_warning_recorder_mgr = pytest.warns(None)
# Entered for the whole session; exited in tests/conftest.py's
# _capture_internal_warnings fixture.
_warning_recorder = _warning_recorder_mgr.__enter__()
sentry-python-1.4.3/tests/conftest.py 0000664 0000000 0000000 00000037164 14125057761 0017746 0 ustar 00root root 0000000 0000000 import os
import json
import pytest
import jsonschema
try:
import gevent
except ImportError:
gevent = None
try:
import eventlet
except ImportError:
eventlet = None
import sentry_sdk
from sentry_sdk._compat import reraise, string_types, iteritems
from sentry_sdk.transport import Transport
from sentry_sdk.envelope import Envelope
from sentry_sdk.utils import capture_internal_exceptions
from tests import _warning_recorder, _warning_recorder_mgr
# Event JSON schema from the getsentry/data-schemas checkout; when the
# checkout is missing the schema validation degrades to a no-op (None).
SENTRY_EVENT_SCHEMA = "./checkouts/data-schemas/relay/event.schema.json"
if not os.path.isfile(SENTRY_EVENT_SCHEMA):
    SENTRY_EVENT_SCHEMA = None
else:
    with open(SENTRY_EVENT_SCHEMA) as f:
        SENTRY_EVENT_SCHEMA = json.load(f)
try:
    import pytest_benchmark
except ImportError:
    # pytest-benchmark not installed: provide a no-op stand-in fixture that
    # simply calls the benchmarked function once.
    @pytest.fixture
    def benchmark():
        return lambda x: x()
else:
    # The plugin provides the real `benchmark` fixture; drop the probe name.
    del pytest_benchmark
@pytest.fixture(autouse=True)
def internal_exceptions(request, monkeypatch):
    """Fail tests when the SDK swallows an internal exception.

    Collected exceptions are re-raised in a finalizer, so this acts as a
    pass-through recorder. Opt out with the `tests_internal_exceptions`
    marker.
    """
    errors = []
    if "tests_internal_exceptions" in request.keywords:
        return
    def _capture_internal_exception(self, exc_info):
        errors.append(exc_info)
    @request.addfinalizer
    def _():
        # rerasise the errors so that this just acts as a pass-through (that
        # happens to keep track of the errors which pass through it)
        for e in errors:
            reraise(*e)
    monkeypatch.setattr(
        sentry_sdk.Hub, "_capture_internal_exception", _capture_internal_exception
    )
    return errors
@pytest.fixture(autouse=True, scope="session")
def _capture_internal_warnings():
    """Session-end check: no unexpected warnings from SDK code paths.

    The recorder itself is installed at import time in tests/__init__.py;
    this fixture closes it and filters known-noisy warnings.
    """
    yield
    _warning_recorder_mgr.__exit__(None, None, None)
    recorder = _warning_recorder
    for warning in recorder:
        try:
            if isinstance(warning.message, ResourceWarning):
                continue
        except NameError:
            # ResourceWarning does not exist on Python 2.
            pass
        # Only warnings attributed to SDK files are interesting.
        if "sentry_sdk" not in str(warning.filename) and "sentry-sdk" not in str(
            warning.filename
        ):
            continue
        # pytest-django
        if "getfuncargvalue" in str(warning.message):
            continue
        # Happens when re-initializing the SDK
        if "but it was only enabled on init()" in str(warning.message):
            continue
        # sanic's usage of aiohttp for test client
        if "verify_ssl is deprecated, use ssl=False instead" in str(warning.message):
            continue
        if "getargspec" in str(warning.message) and warning.filename.endswith(
            ("pyramid/config/util.py", "pyramid/config/views.py")
        ):
            continue
        if "isAlive() is deprecated" in str(
            warning.message
        ) and warning.filename.endswith("celery/utils/timer2.py"):
            continue
        if "collections.abc" in str(warning.message) and warning.filename.endswith(
            ("celery/canvas.py", "werkzeug/datastructures.py", "tornado/httputil.py")
        ):
            continue
        # Django 1.7 emits a (seemingly) false-positive warning for our test
        # app and suggests to use a middleware that does not exist in later
        # Django versions.
        if "SessionAuthenticationMiddleware" in str(warning.message):
            continue
        if "Something has already installed a non-asyncio" in str(warning.message):
            continue
        if "dns.hash" in str(warning.message) or "dns/namedict" in warning.filename:
            continue
        raise AssertionError(warning)
@pytest.fixture
def monkeypatch_test_transport(monkeypatch, validate_event_schema):
    """Replace a client's transport with one that validates outgoing data."""
    def check_event(event):
        def check_string_keys(map):
            # Event payloads must be JSON objects with string keys, recursively.
            for key, value in iteritems(map):
                assert isinstance(key, string_types)
                if isinstance(value, dict):
                    check_string_keys(value)
        with capture_internal_exceptions():
            check_string_keys(event)
            validate_event_schema(event)
    def check_envelope(envelope):
        with capture_internal_exceptions():
            # Assert error events are sent without envelope to server, for compat.
            # This does not apply if any item in the envelope is an attachment.
            if not any(x.type == "attachment" for x in envelope.items):
                assert not any(item.data_category == "error" for item in envelope.items)
                assert not any(item.get_event() is not None for item in envelope.items)
    def inner(client):
        monkeypatch.setattr(
            client, "transport", TestTransport(check_event, check_envelope)
        )
    return inner
@pytest.fixture
def validate_event_schema(tmpdir):
    """Validate an event against the Sentry event JSON schema (if checked out)."""
    def inner(event):
        # SENTRY_EVENT_SCHEMA is None when the schema checkout is absent.
        if SENTRY_EVENT_SCHEMA:
            jsonschema.validate(instance=event, schema=SENTRY_EVENT_SCHEMA)
    return inner
@pytest.fixture
def sentry_init(monkeypatch_test_transport, request):
    """Create and bind a Client, isolating each test inside ``Hub(None)``.

    Tests marked `forked` skip the isolation since they already run in a
    separate process.
    """
    def inner(*a, **kw):
        hub = sentry_sdk.Hub.current
        client = sentry_sdk.Client(*a, **kw)
        hub.bind_client(client)
        # Only install the validating test transport when the test did not
        # bring its own transport.
        if "transport" not in kw:
            monkeypatch_test_transport(sentry_sdk.Hub.current.client)
    if request.node.get_closest_marker("forked"):
        # Do not run isolation if the test is already running in
        # ultimate isolation (seems to be required for celery tests that
        # fork)
        yield inner
    else:
        with sentry_sdk.Hub(None):
            yield inner
class TestTransport(Transport):
    """Transport stub that forwards captures to test-provided callbacks."""
    def __init__(self, capture_event_callback, capture_envelope_callback):
        Transport.__init__(self)
        self.capture_event = capture_event_callback
        self.capture_envelope = capture_envelope_callback
        # No background worker/queue is needed for the stub.
        self._queue = None
@pytest.fixture
def capture_events(monkeypatch):
    """Fixture factory: record event payloads while passing them through.

    Events inside envelopes (item type "event"/"transaction") are unwrapped
    and recorded as plain events too.
    """
    def inner():
        events = []
        test_client = sentry_sdk.Hub.current.client
        old_capture_event = test_client.transport.capture_event
        old_capture_envelope = test_client.transport.capture_envelope
        def append_event(event):
            events.append(event)
            return old_capture_event(event)
        def append_envelope(envelope):
            for item in envelope:
                if item.headers.get("type") in ("event", "transaction"):
                    # Route the payload through capture_event so it is also
                    # recorded by append_event above.
                    test_client.transport.capture_event(item.payload.json)
            return old_capture_envelope(envelope)
        monkeypatch.setattr(test_client.transport, "capture_event", append_event)
        monkeypatch.setattr(test_client.transport, "capture_envelope", append_envelope)
        return events
    return inner
@pytest.fixture
def capture_envelopes(monkeypatch):
    """Fixture factory: record all envelopes (bare events get wrapped)."""
    def inner():
        envelopes = []
        test_client = sentry_sdk.Hub.current.client
        old_capture_event = test_client.transport.capture_event
        old_capture_envelope = test_client.transport.capture_envelope
        def append_event(event):
            # Normalize bare events into envelopes so tests see one shape.
            envelope = Envelope()
            envelope.add_event(event)
            envelopes.append(envelope)
            return old_capture_event(event)
        def append_envelope(envelope):
            envelopes.append(envelope)
            return old_capture_envelope(envelope)
        monkeypatch.setattr(test_client.transport, "capture_event", append_event)
        monkeypatch.setattr(test_client.transport, "capture_envelope", append_envelope)
        return envelopes
    return inner
@pytest.fixture
def capture_events_forksafe(monkeypatch, capture_events, request):
    """Like capture_events, but events survive os.fork() via an OS pipe.

    The writer side emits newline-delimited JSON; the reader side is the
    returned EventStreamReader. flush() writes a sentinel line instead of
    actually flushing.
    """
    def inner():
        capture_events()
        events_r, events_w = os.pipe()
        # Unbuffered binary wrappers so writes cross the fork immediately.
        events_r = os.fdopen(events_r, "rb", 0)
        events_w = os.fdopen(events_w, "wb", 0)
        test_client = sentry_sdk.Hub.current.client
        old_capture_event = test_client.transport.capture_event
        def append(event):
            events_w.write(json.dumps(event).encode("utf-8"))
            events_w.write(b"\n")
            return old_capture_event(event)
        def flush(timeout=None, callback=None):
            events_w.write(b"flush\n")
        monkeypatch.setattr(test_client.transport, "capture_event", append)
        monkeypatch.setattr(test_client, "flush", flush)
        return EventStreamReader(events_r)
    return inner
class EventStreamReader(object):
    """Reads newline-delimited JSON events written by capture_events_forksafe."""

    def __init__(self, file):
        self.file = file

    def read_event(self):
        """Read one line from the stream and decode it as a JSON event."""
        line = self.file.readline()
        return json.loads(line.decode("utf-8"))

    def read_flush(self):
        """Consume the sentinel line written by the patched flush()."""
        assert self.file.readline() == b"flush\n"
# scope=session ensures that fixture is run earlier
@pytest.fixture(
    scope="session",
    params=[None, "eventlet", "gevent"],
    ids=("threads", "eventlet", "greenlet"),
)
def maybe_monkeypatched_threading(request):
    """Parametrized fixture: run the suite plain, eventlet- or gevent-patched.

    Returns the active param (None for plain threads) and skips when the
    requested library is not installed or cannot patch this interpreter.
    """
    if request.param == "eventlet":
        if eventlet is None:
            pytest.skip("no eventlet installed")
        try:
            eventlet.monkey_patch()
        except AttributeError as e:
            if "'thread.RLock' object has no attribute" in str(e):
                # https://bitbucket.org/pypy/pypy/issues/2962/gevent-cannot-patch-rlock-under-pypy-27-7
                pytest.skip("https://github.com/eventlet/eventlet/issues/546")
            else:
                raise
    elif request.param == "gevent":
        if gevent is None:
            pytest.skip("no gevent installed")
        try:
            gevent.monkey.patch_all()
        except Exception as e:
            if "_RLock__owner" in str(e):
                pytest.skip("https://github.com/gevent/gevent/issues/1380")
            else:
                raise
    else:
        assert request.param is None
    return request.param
@pytest.fixture
def render_span_tree():
    """Render a transaction event's span tree as an indented text outline."""
    def inner(event):
        assert event["type"] == "transaction"
        # Index child spans by their parent so the tree can be walked top-down.
        by_parent = {}
        for span in event["spans"]:
            by_parent.setdefault(span["parent_span_id"], []).append(span)
        def render_span(span):
            yield "- op={}: description={}".format(
                json.dumps(span.get("op")), json.dumps(span.get("description"))
            )
            for subspan in by_parent.get(span["span_id"]) or ():
                for line in render_span(subspan):
                    yield "  {}".format(line)
        root_span = event["contexts"]["trace"]
        # Return a list instead of a multiline string because black will know better how to format that
        return "\n".join(render_span(root_span))
    return inner
@pytest.fixture(name="StringContaining")
def string_containing_matcher():
    """
    An object which matches any string containing the substring passed to the
    object at instantiation time.
    Useful for assert_called_with, assert_any_call, etc.
    Used like this:
    >>> f = mock.Mock()
    >>> f("dogs are great")
    >>> f.assert_any_call("dogs") # will raise AssertionError
    Traceback (most recent call last):
    ...
    AssertionError: mock('dogs') call not found
    >>> f.assert_any_call(StringContaining("dogs")) # no AssertionError
    """
    class StringContaining(object):
        """Equality matcher: equal to any string containing ``substring``."""
        def __init__(self, substring):
            self.substring = substring
            try:
                # the `unicode` type only exists in python 2, so if this blows up,
                # we must be in py3 and have the `bytes` type
                self.valid_types = (str, unicode)  # noqa
            except NameError:
                self.valid_types = (str, bytes)
        def __eq__(self, test_string):
            if not isinstance(test_string, self.valid_types):
                return False
            # this is safe even in py2 because as of 2.6, `bytes` exists in py2
            # as an alias for `str`
            if isinstance(test_string, bytes):
                test_string = test_string.decode()
            if len(self.substring) > len(test_string):
                return False
            return self.substring in test_string
        def __ne__(self, test_string):
            # py2 does not derive != from ==, so define it explicitly.
            return not self.__eq__(test_string)
    return StringContaining
def _safe_is_equal(x, y):
"""
Compares two values, preferring to use the first's __eq__ method if it
exists and is implemented.
Accounts for py2/py3 differences (like ints in py2 not having a __eq__
method), as well as the incomparability of certain types exposed by using
raw __eq__ () rather than ==.
"""
# Prefer using __eq__ directly to ensure that examples like
#
# maisey = Dog()
# maisey.name = "Maisey the Dog"
# maisey == ObjectDescribedBy(attrs={"name": StringContaining("Maisey")})
#
# evaluate to True (in other words, examples where the values in self.attrs
# might also have custom __eq__ methods; this makes sure those methods get
# used if possible)
try:
is_equal = x.__eq__(y)
except AttributeError:
is_equal = NotImplemented
# this can happen on its own, too (i.e. without an AttributeError being
# thrown), which is why this is separate from the except block above
if is_equal == NotImplemented:
# using == smoothes out weird variations exposed by raw __eq__
return x == y
return is_equal
@pytest.fixture(name="DictionaryContaining")
def dictionary_containing_matcher():
    """
    An object which matches any dictionary containing all key-value pairs from
    the dictionary passed to the object at instantiation time.
    Useful for assert_called_with, assert_any_call, etc.
    Used like this:
    >>> f = mock.Mock()
    >>> f({"dogs": "yes", "cats": "maybe"})
    >>> f.assert_any_call({"dogs": "yes"}) # will raise AssertionError
    Traceback (most recent call last):
    ...
    AssertionError: mock({'dogs': 'yes'}) call not found
    >>> f.assert_any_call(DictionaryContaining({"dogs": "yes"})) # no AssertionError
    """
    class DictionaryContaining(object):
        """Equality matcher: equal to any dict that is a superset of ``subdict``."""
        def __init__(self, subdict):
            self.subdict = subdict
        def __eq__(self, test_dict):
            if not isinstance(test_dict, dict):
                return False
            # A superset can never be smaller than the subset.
            if len(self.subdict) > len(test_dict):
                return False
            for key, value in self.subdict.items():
                try:
                    test_value = test_dict[key]
                except KeyError:  # missing key
                    return False
                # Values may themselves be matcher objects with custom __eq__.
                if not _safe_is_equal(value, test_value):
                    return False
            return True
        def __ne__(self, test_dict):
            # py2 does not derive != from ==, so define it explicitly.
            return not self.__eq__(test_dict)
    return DictionaryContaining
@pytest.fixture(name="ObjectDescribedBy")
def object_described_by_matcher():
    """
    An object which matches any other object with the given properties.

    Available properties currently are "type" (a type object) and "attrs" (a
    dictionary).

    Useful for assert_called_with, assert_any_call, etc.

    Used like this:

    >>> class Dog(object):
    ...     pass
    ...
    >>> maisey = Dog()
    >>> maisey.name = "Maisey"
    >>> maisey.age = 7
    >>> f = mock.Mock()
    >>> f(maisey)
    >>> f.assert_any_call(ObjectDescribedBy(type=Dog))  # no AssertionError
    >>> f.assert_any_call(ObjectDescribedBy(attrs={"name": "Maisey"}))  # no AssertionError
    """

    class ObjectDescribedBy(object):
        def __init__(self, type=None, attrs=None):
            # NOTE: `type` deliberately shadows the builtin so call sites read
            # naturally: ObjectDescribedBy(type=Dog).
            self.type = type
            self.attrs = attrs

        def __eq__(self, test_obj):
            # Type check uses isinstance, so subclasses of the expected type
            # also match.
            if self.type:
                if not isinstance(test_obj, self.type):
                    return False

            # Every expected attribute must exist on test_obj and compare
            # equal; expected values may themselves be matchers, hence
            # _safe_is_equal rather than a plain ==.
            if self.attrs:
                for attr_name, attr_value in self.attrs.items():
                    try:
                        test_value = getattr(test_obj, attr_name)
                    except AttributeError:  # missing attribute
                        return False
                    if not _safe_is_equal(attr_value, test_value):
                        return False

            return True

        def __ne__(self, test_obj):
            # py2 does not derive __ne__ from __eq__, so define it explicitly.
            return not self.__eq__(test_obj)

    return ObjectDescribedBy
sentry-python-1.4.3/tests/integrations/ 0000775 0000000 0000000 00000000000 14125057761 0020242 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/__init__.py 0000664 0000000 0000000 00000000000 14125057761 0022341 0 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/aiohttp/ 0000775 0000000 0000000 00000000000 14125057761 0021712 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/aiohttp/__init__.py 0000664 0000000 0000000 00000000070 14125057761 0024020 0 ustar 00root root 0000000 0000000 import pytest
aiohttp = pytest.importorskip("aiohttp")
sentry-python-1.4.3/tests/integrations/aiohttp/test_aiohttp.py 0000664 0000000 0000000 00000015405 14125057761 0025000 0 ustar 00root root 0000000 0000000 import asyncio
import json
from contextlib import suppress
import pytest
from aiohttp import web
from aiohttp.client import ServerDisconnectedError
from aiohttp.web_request import Request
from sentry_sdk.integrations.aiohttp import AioHttpIntegration
try:
from unittest import mock # python 3.3 and above
except ImportError:
import mock # python < 3.3
async def test_basic(sentry_init, aiohttp_client, loop, capture_events):
    """A handler that raises produces one error event with full request data."""
    sentry_init(integrations=[AioHttpIntegration()])

    async def hello(request):
        1 / 0

    app = web.Application()
    app.router.add_get("/", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    resp = await client.get("/")
    assert resp.status == 500

    (event,) = events

    assert (
        event["transaction"]
        == "tests.integrations.aiohttp.test_aiohttp.test_basic..hello"
    )

    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"

    request = event["request"]
    host = request["headers"]["Host"]

    assert request["env"] == {"REMOTE_ADDR": "127.0.0.1"}
    assert request["method"] == "GET"
    assert request["query_string"] == ""
    # GET request: no body should have been captured
    assert request.get("data") is None
    assert request["url"] == "http://{host}/".format(host=host)
    assert request["headers"] == {
        "Accept": "*/*",
        "Accept-Encoding": "gzip, deflate",
        "Host": host,
        "User-Agent": request["headers"]["User-Agent"],
    }
async def test_post_body_not_read(sentry_init, aiohttp_client, loop, capture_events):
    """If the handler never reads the body, the event records a placeholder."""
    from sentry_sdk.integrations.aiohttp import BODY_NOT_READ_MESSAGE

    sentry_init(integrations=[AioHttpIntegration()])

    body = {"some": "value"}

    async def hello(request):
        # Note: request body intentionally never read here.
        1 / 0

    app = web.Application()
    app.router.add_post("/", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    resp = await client.post("/", json=body)
    assert resp.status == 500

    (event,) = events

    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"

    request = event["request"]

    assert request["env"] == {"REMOTE_ADDR": "127.0.0.1"}
    assert request["method"] == "POST"
    assert request["data"] == BODY_NOT_READ_MESSAGE
async def test_post_body_read(sentry_init, aiohttp_client, loop, capture_events):
    """If the handler reads the JSON body, the event carries the serialized body."""
    sentry_init(integrations=[AioHttpIntegration()])

    body = {"some": "value"}

    async def hello(request):
        # Reading the body makes it available to the integration.
        await request.json()
        1 / 0

    app = web.Application()
    app.router.add_post("/", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    resp = await client.post("/", json=body)
    assert resp.status == 500

    (event,) = events

    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"

    request = event["request"]

    assert request["env"] == {"REMOTE_ADDR": "127.0.0.1"}
    assert request["method"] == "POST"
    assert request["data"] == json.dumps(body)
async def test_403_not_captured(sentry_init, aiohttp_client, loop, capture_events):
    """HTTP errors raised as aiohttp exceptions (e.g. 403) must not be reported."""
    sentry_init(integrations=[AioHttpIntegration()])

    async def hello(request):
        raise web.HTTPForbidden()

    app = web.Application()
    app.router.add_get("/", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    resp = await client.get("/")
    assert resp.status == 403

    assert not events
async def test_cancelled_error_not_captured(
    sentry_init, aiohttp_client, loop, capture_events
):
    """asyncio.CancelledError raised by a handler must not be reported."""
    sentry_init(integrations=[AioHttpIntegration()])

    async def hello(request):
        raise asyncio.CancelledError()

    app = web.Application()
    app.router.add_get("/", hello)

    events = capture_events()
    client = await aiohttp_client(app)

    with suppress(ServerDisconnectedError):
        # Intended `aiohttp` interaction: server will disconnect if it
        # encounters `asyncio.CancelledError`
        await client.get("/")

    assert not events
async def test_half_initialized(sentry_init, aiohttp_client, loop, capture_events):
    """Init with the integration, then re-init without it: nothing is captured."""
    sentry_init(integrations=[AioHttpIntegration()])
    # Second init without the integration — the patched server code must
    # detect that the integration is no longer active and do nothing.
    sentry_init()

    async def hello(request):
        return web.Response(text="hello")

    app = web.Application()
    app.router.add_get("/", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    resp = await client.get("/")
    assert resp.status == 200

    assert events == []
async def test_tracing(sentry_init, aiohttp_client, loop, capture_events):
    """With traces_sample_rate=1.0, a request produces a transaction event."""
    sentry_init(integrations=[AioHttpIntegration()], traces_sample_rate=1.0)

    async def hello(request):
        return web.Response(text="hello")

    app = web.Application()
    app.router.add_get("/", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    resp = await client.get("/")
    assert resp.status == 200

    (event,) = events

    assert event["type"] == "transaction"
    assert (
        event["transaction"]
        == "tests.integrations.aiohttp.test_aiohttp.test_tracing..hello"
    )
@pytest.mark.parametrize(
    "transaction_style,expected_transaction",
    [
        (
            "handler_name",
            "tests.integrations.aiohttp.test_aiohttp.test_transaction_style..hello",
        ),
        ("method_and_path_pattern", "GET /{var}"),
    ],
)
async def test_transaction_style(
    sentry_init, aiohttp_client, capture_events, transaction_style, expected_transaction
):
    """The transaction_style option controls how the transaction is named."""
    sentry_init(
        integrations=[AioHttpIntegration(transaction_style=transaction_style)],
        traces_sample_rate=1.0,
    )

    async def hello(request):
        return web.Response(text="hello")

    app = web.Application()
    app.router.add_get(r"/{var}", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    resp = await client.get("/1")
    assert resp.status == 200

    (event,) = events

    assert event["type"] == "transaction"
    assert event["transaction"] == expected_transaction
async def test_traces_sampler_gets_request_object_in_sampling_context(
    sentry_init,
    aiohttp_client,
    DictionaryContaining,  # noqa:N803
    ObjectDescribedBy,  # noqa:N803
):
    """The sampling context passed to traces_sampler includes the aiohttp Request."""
    traces_sampler = mock.Mock()
    sentry_init(
        integrations=[AioHttpIntegration()],
        traces_sampler=traces_sampler,
    )

    async def kangaroo_handler(request):
        return web.Response(text="dogs are great")

    app = web.Application()
    app.router.add_get("/tricks/kangaroo", kangaroo_handler)

    client = await aiohttp_client(app)
    await client.get("/tricks/kangaroo")

    # Match on the interesting parts of the sampling context only.
    traces_sampler.assert_any_call(
        DictionaryContaining(
            {
                "aiohttp_request": ObjectDescribedBy(
                    type=Request, attrs={"method": "GET", "path": "/tricks/kangaroo"}
                )
            }
        )
    )
sentry-python-1.4.3/tests/integrations/argv/ 0000775 0000000 0000000 00000000000 14125057761 0021201 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/argv/test_argv.py 0000664 0000000 0000000 00000000644 14125057761 0023555 0 ustar 00root root 0000000 0000000 import sys
from sentry_sdk import capture_message
from sentry_sdk.integrations.argv import ArgvIntegration
def test_basic(sentry_init, capture_events, monkeypatch):
    """ArgvIntegration attaches sys.argv to events as extra data."""
    sentry_init(integrations=[ArgvIntegration()])

    argv = ["foo", "bar", "baz"]
    monkeypatch.setattr(sys, "argv", argv)

    events = capture_events()
    capture_message("hi")
    (event,) = events
    assert event["extra"]["sys.argv"] == argv
sentry-python-1.4.3/tests/integrations/asgi/ 0000775 0000000 0000000 00000000000 14125057761 0021165 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/asgi/__init__.py 0000664 0000000 0000000 00000000060 14125057761 0023272 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("starlette")
sentry-python-1.4.3/tests/integrations/asgi/test_asgi.py 0000664 0000000 0000000 00000020526 14125057761 0023526 0 ustar 00root root 0000000 0000000 import sys
import pytest
from sentry_sdk import Hub, capture_message, last_event_id
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
from starlette.applications import Starlette
from starlette.responses import PlainTextResponse
from starlette.testclient import TestClient
from starlette.websockets import WebSocket
try:
from unittest import mock # python 3.3 and above
except ImportError:
import mock # python < 3.3
@pytest.fixture
def app():
    """Starlette app with one sync and one async route, wrapped in SentryAsgiMiddleware."""
    app = Starlette()

    @app.route("/sync-message")
    def hi(request):
        capture_message("hi", level="error")
        return PlainTextResponse("ok")

    @app.route("/async-message")
    async def hi2(request):
        capture_message("hi", level="error")
        return PlainTextResponse("ok")

    app.add_middleware(SentryAsgiMiddleware)

    return app
@pytest.mark.skipif(sys.version_info < (3, 7), reason="requires python3.7 or higher")
def test_sync_request_data(sentry_init, app, capture_events):
    """A sync route captures request data, and scope state does not leak afterwards."""
    sentry_init(send_default_pii=True)
    events = capture_events()

    client = TestClient(app)
    response = client.get("/sync-message?foo=bar", headers={"Foo": u"ä"})

    assert response.status_code == 200

    (event,) = events
    assert event["transaction"] == "tests.integrations.asgi.test_asgi.app..hi"
    assert event["request"]["env"] == {"REMOTE_ADDR": "testclient"}
    assert set(event["request"]["headers"]) == {
        "accept",
        "accept-encoding",
        "connection",
        "host",
        "user-agent",
        "foo",
    }
    assert event["request"]["query_string"] == "foo=bar"
    assert event["request"]["url"].endswith("/sync-message")
    assert event["request"]["method"] == "GET"

    # Assert that state is not leaked
    events.clear()
    capture_message("foo")
    (event,) = events

    assert "request" not in event
    assert "transaction" not in event
def test_async_request_data(sentry_init, app, capture_events):
    """An async route captures request data, and scope state does not leak afterwards."""
    sentry_init(send_default_pii=True)
    events = capture_events()

    client = TestClient(app)
    response = client.get("/async-message?foo=bar")

    assert response.status_code == 200

    (event,) = events
    assert event["transaction"] == "tests.integrations.asgi.test_asgi.app..hi2"
    assert event["request"]["env"] == {"REMOTE_ADDR": "testclient"}
    assert set(event["request"]["headers"]) == {
        "accept",
        "accept-encoding",
        "connection",
        "host",
        "user-agent",
    }
    assert event["request"]["query_string"] == "foo=bar"
    assert event["request"]["url"].endswith("/async-message")
    assert event["request"]["method"] == "GET"

    # Assert that state is not leaked
    events.clear()
    capture_message("foo")
    (event,) = events

    assert "request" not in event
    assert "transaction" not in event
def test_errors(sentry_init, app, capture_events):
    """An unhandled exception in a route yields an error event with a stacktrace."""
    sentry_init(send_default_pii=True)
    events = capture_events()

    @app.route("/error")
    def myerror(request):
        raise ValueError("oh no")

    client = TestClient(app, raise_server_exceptions=False)
    response = client.get("/error")

    assert response.status_code == 500

    (event,) = events
    assert (
        event["transaction"]
        == "tests.integrations.asgi.test_asgi.test_errors..myerror"
    )
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ValueError"
    assert exception["value"] == "oh no"
    assert any(
        frame["filename"].endswith("tests/integrations/asgi/test_asgi.py")
        for frame in exception["stacktrace"]["frames"]
    )
def test_websocket(sentry_init, capture_events, request):
    """Websocket requests capture both messages and errors with websocket request data."""
    sentry_init(debug=True, send_default_pii=True)

    # Bind client to main thread because context propagation for the websocket
    # client does not work.
    Hub.main.bind_client(Hub.current.client)
    request.addfinalizer(lambda: Hub.main.bind_client(None))

    events = capture_events()

    from starlette.testclient import TestClient

    def message():
        capture_message("hi")
        raise ValueError("oh no")

    async def app(scope, receive, send):
        assert scope["type"] == "websocket"
        websocket = WebSocket(scope, receive=receive, send=send)
        await websocket.accept()
        await websocket.send_text(message())
        await websocket.close()

    app = SentryAsgiMiddleware(app)

    client = TestClient(app)
    with client.websocket_connect("/") as websocket:
        with pytest.raises(ValueError):
            websocket.receive_text()

    msg_event, error_event = events

    assert msg_event["message"] == "hi"

    (exc,) = error_event["exception"]["values"]
    assert exc["type"] == "ValueError"
    assert exc["value"] == "oh no"

    # Both events were captured within the same request and so must carry
    # identical websocket request data.
    assert (
        msg_event["request"]
        == error_event["request"]
        == {
            "env": {"REMOTE_ADDR": "testclient"},
            "headers": {
                "accept": "*/*",
                "accept-encoding": "gzip, deflate",
                "connection": "upgrade",
                "host": "testserver",
                "sec-websocket-key": "testserver==",
                "sec-websocket-version": "13",
                "user-agent": "testclient",
            },
            "method": None,
            "query_string": None,
            "url": "ws://testserver/",
        }
    )
def test_starlette_last_event_id(app, sentry_init, capture_events, request):
    """last_event_id() inside an exception handler matches the captured event."""
    sentry_init(send_default_pii=True)
    events = capture_events()

    @app.route("/handlederror")
    def handlederror(request):
        raise ValueError("oh no")

    @app.exception_handler(500)
    def handler(*args, **kwargs):
        # Return the last event id to the client so the test can compare it.
        return PlainTextResponse(last_event_id(), status_code=500)

    client = TestClient(SentryAsgiMiddleware(app), raise_server_exceptions=False)
    response = client.get("/handlederror")
    assert response.status_code == 500

    (event,) = events
    assert response.content.strip().decode("ascii") == event["event_id"]
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ValueError"
    assert exception["value"] == "oh no"
def test_transaction(app, sentry_init, capture_events):
    """With tracing enabled, a request produces a transaction named after the handler."""
    sentry_init(traces_sample_rate=1.0)
    events = capture_events()

    @app.route("/tricks/kangaroo")
    def kangaroo_handler(request):
        return PlainTextResponse("dogs are great")

    client = TestClient(app)
    client.get("/tricks/kangaroo")

    event = events[0]
    assert event["type"] == "transaction"
    assert (
        event["transaction"]
        == "tests.integrations.asgi.test_asgi.test_transaction..kangaroo_handler"
    )
def test_traces_sampler_gets_scope_in_sampling_context(
    app, sentry_init, DictionaryContaining  # noqa: N803
):
    """The sampling context passed to traces_sampler includes the ASGI scope dict."""
    traces_sampler = mock.Mock()
    sentry_init(traces_sampler=traces_sampler)

    @app.route("/tricks/kangaroo")
    def kangaroo_handler(request):
        return PlainTextResponse("dogs are great")

    client = TestClient(app)
    client.get("/tricks/kangaroo")

    traces_sampler.assert_any_call(
        DictionaryContaining(
            {
                # starlette just uses a dictionary to hold the scope
                "asgi_scope": DictionaryContaining(
                    {"method": "GET", "path": "/tricks/kangaroo"}
                )
            }
        )
    )
def test_x_forwarded_for(sentry_init, app, capture_events):
    """The X-Forwarded-For header overrides the remote address in the event."""
    sentry_init(send_default_pii=True)
    events = capture_events()

    client = TestClient(app)
    response = client.get("/sync-message", headers={"X-Forwarded-For": "testproxy"})

    assert response.status_code == 200

    (event,) = events
    assert event["request"]["env"] == {"REMOTE_ADDR": "testproxy"}
def test_x_forwarded_for_multiple_entries(sentry_init, app, capture_events):
    """With a multi-hop X-Forwarded-For header, the first (client) entry is used."""
    sentry_init(send_default_pii=True)
    events = capture_events()

    client = TestClient(app)
    response = client.get(
        "/sync-message", headers={"X-Forwarded-For": "testproxy1,testproxy2,testproxy3"}
    )

    assert response.status_code == 200

    (event,) = events
    assert event["request"]["env"] == {"REMOTE_ADDR": "testproxy1"}
def test_x_real_ip(sentry_init, app, capture_events):
    """The X-Real-IP header overrides the remote address in the event."""
    sentry_init(send_default_pii=True)
    events = capture_events()

    client = TestClient(app)
    response = client.get("/sync-message", headers={"X-Real-IP": "1.2.3.4"})

    assert response.status_code == 200

    (event,) = events
    assert event["request"]["env"] == {"REMOTE_ADDR": "1.2.3.4"}
sentry-python-1.4.3/tests/integrations/aws_lambda/ 0000775 0000000 0000000 00000000000 14125057761 0022334 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/aws_lambda/client.py 0000664 0000000 0000000 00000017105 14125057761 0024170 0 ustar 00root root 0000000 0000000 import sys
import os
import shutil
import tempfile
import subprocess
import boto3
import uuid
import base64
def get_boto_client():
    """Return a boto3 Lambda client authenticated via the test AWS env credentials."""
    env = os.environ
    return boto3.client(
        "lambda",
        aws_access_key_id=env["SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID"],
        aws_secret_access_key=env["SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY"],
        region_name="us-east-1",
    )
def build_no_code_serverless_function_and_layer(
    client, tmpdir, fn_name, runtime, timeout, initial_handler
):
    """
    Util function that auto instruments the no code implementation of the python
    sdk by creating a layer containing the Python-sdk, and then creating a func
    that uses that layer
    """
    from scripts.build_awslambda_layer import (
        build_packaged_zip,
    )

    build_packaged_zip(dest_abs_path=tmpdir, dest_zip_filename="serverless-ball.zip")

    # Publish the packaged SDK as a Lambda layer.
    with open(os.path.join(tmpdir, "serverless-ball.zip"), "rb") as serverless_zip:
        response = client.publish_layer_version(
            LayerName="python-serverless-sdk-test",
            Description="Created as part of testsuite for getsentry/sentry-python",
            Content={"ZipFile": serverless_zip.read()},
        )

    # Create the function itself: the handler is the SDK's auto-instrumenting
    # entry point, which dispatches to SENTRY_INITIAL_HANDLER at runtime.
    with open(os.path.join(tmpdir, "ball.zip"), "rb") as zip:
        client.create_function(
            FunctionName=fn_name,
            Runtime=runtime,
            Timeout=timeout,
            Environment={
                "Variables": {
                    "SENTRY_INITIAL_HANDLER": initial_handler,
                    "SENTRY_DSN": "https://123abc@example.com/123",
                    "SENTRY_TRACES_SAMPLE_RATE": "1.0",
                }
            },
            Role=os.environ["SENTRY_PYTHON_TEST_AWS_IAM_ROLE"],
            Handler="sentry_sdk.integrations.init_serverless_sdk.sentry_lambda_handler",
            Layers=[response["LayerVersionArn"]],
            Code={"ZipFile": zip.read()},
            Description="Created as part of testsuite for getsentry/sentry-python",
        )
def run_lambda_function(
    client,
    runtime,
    code,
    payload,
    add_finalizer,
    syntax_check=True,
    timeout=30,
    layer=None,
    initial_handler=None,
    subprocess_kwargs=(),
):
    """
    Package `code` as a Lambda function, upload it, invoke it once with
    `payload` and return the invoke response.

    With `layer=None` the SDK is bundled into the deployment zip; otherwise a
    no-code layer-based deployment is built. A finalizer deleting the function
    is registered via `add_finalizer`.
    """
    subprocess_kwargs = dict(subprocess_kwargs)

    with tempfile.TemporaryDirectory() as tmpdir:
        if initial_handler:
            # If Initial handler value is provided i.e. it is not the default
            # `test_lambda.test_handler`, then create another dir level so that our path is
            # test_dir.test_lambda.test_handler
            test_dir_path = os.path.join(tmpdir, "test_dir")
            python_init_file = os.path.join(test_dir_path, "__init__.py")
            os.makedirs(test_dir_path)
            with open(python_init_file, "w"):
                # Create __init__ file to make it a python package
                pass

            test_lambda_py = os.path.join(tmpdir, "test_dir", "test_lambda.py")
        else:
            test_lambda_py = os.path.join(tmpdir, "test_lambda.py")

        with open(test_lambda_py, "w") as f:
            f.write(code)

        if syntax_check:
            # Check file for valid syntax first, and that the integration does not
            # crash when not running in Lambda (but rather a local deployment tool
            # such as chalice's)
            subprocess.check_call([sys.executable, test_lambda_py])

        fn_name = "test_function_{}".format(uuid.uuid4())

        if layer is None:
            # Bundle the SDK (and py2 deps) directly into the deployment zip.
            setup_cfg = os.path.join(tmpdir, "setup.cfg")
            with open(setup_cfg, "w") as f:
                f.write("[install]\nprefix=")

            subprocess.check_call(
                [sys.executable, "setup.py", "sdist", "-d", os.path.join(tmpdir, "..")],
                **subprocess_kwargs
            )

            subprocess.check_call(
                "pip install mock==3.0.0 funcsigs -t .",
                cwd=tmpdir,
                shell=True,
                **subprocess_kwargs
            )

            # https://docs.aws.amazon.com/lambda/latest/dg/lambda-python-how-to-create-deployment-package.html
            subprocess.check_call(
                "pip install ../*.tar.gz -t .",
                cwd=tmpdir,
                shell=True,
                **subprocess_kwargs
            )

            shutil.make_archive(os.path.join(tmpdir, "ball"), "zip", tmpdir)

            with open(os.path.join(tmpdir, "ball.zip"), "rb") as zip:
                client.create_function(
                    FunctionName=fn_name,
                    Runtime=runtime,
                    Timeout=timeout,
                    Role=os.environ["SENTRY_PYTHON_TEST_AWS_IAM_ROLE"],
                    Handler="test_lambda.test_handler",
                    Code={"ZipFile": zip.read()},
                    Description="Created as part of testsuite for getsentry/sentry-python",
                )
        else:
            # Layer-based deployment: zip only the test code; the SDK comes
            # from a published layer.
            subprocess.run(
                ["zip", "-q", "-x", "**/__pycache__/*", "-r", "ball.zip", "./"],
                cwd=tmpdir,
                check=True,
            )

            # Default initial handler
            if not initial_handler:
                initial_handler = "test_lambda.test_handler"

            build_no_code_serverless_function_and_layer(
                client, tmpdir, fn_name, runtime, timeout, initial_handler
            )

        @add_finalizer
        def clean_up():
            client.delete_function(FunctionName=fn_name)

            # this closes the web socket so we don't get a
            # ResourceWarning: unclosed ssl socket
            # warning on every test
            # based on https://github.com/boto/botocore/pull/1810
            # (if that's ever merged, this can just become client.close())
            session = client._endpoint.http_session
            managers = [session._manager] + list(session._proxy_managers.values())
            for manager in managers:
                manager.clear()

        response = client.invoke(
            FunctionName=fn_name,
            InvocationType="RequestResponse",
            LogType="Tail",
            Payload=payload,
        )

        assert 200 <= response["StatusCode"] < 300, response
        return response
_REPL_CODE = """
import os
def test_handler(event, context):
line = {line!r}
if line.startswith(">>> "):
exec(line[4:])
elif line.startswith("$ "):
os.system(line[2:])
else:
print("Start a line with $ or >>>")
return b""
"""
# Optional CLI entry point: only available when `click` is installed.
try:
    import click
except ImportError:
    pass
else:

    @click.command()
    @click.option(
        "--runtime", required=True, help="name of the runtime to use, eg python3.8"
    )
    @click.option("--verbose", is_flag=True, default=False)
    def repl(runtime, verbose):
        """
        Launch a "REPL" against AWS Lambda to inspect their runtime.
        """
        cleanup = []
        client = get_boto_client()

        print("Start a line with `$ ` to run shell commands, or `>>> ` to run Python")

        while True:
            line = input()

            # Each input line is deployed as a fresh Lambda function and its
            # log output echoed back; deployment noise is silenced unless
            # --verbose is given.
            response = run_lambda_function(
                client,
                runtime,
                _REPL_CODE.format(line=line),
                b"",
                cleanup.append,
                subprocess_kwargs={
                    "stdout": subprocess.DEVNULL,
                    "stderr": subprocess.DEVNULL,
                }
                if not verbose
                else {},
            )

            for line in base64.b64decode(response["LogResult"]).splitlines():
                print(line.decode("utf8"))

            # Tear down the just-created function before the next iteration.
            for f in cleanup:
                f()

            cleanup = []

    if __name__ == "__main__":
        repl()
sentry-python-1.4.3/tests/integrations/aws_lambda/test_aws.py 0000664 0000000 0000000 00000053453 14125057761 0024551 0 ustar 00root root 0000000 0000000 """
# AWS Lambda system tests
This testsuite uses boto3 to upload actual lambda functions to AWS, execute
them and assert some things about the externally observed behavior. What that
means for you is that those tests won't run without AWS access keys:
export SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID=..
export SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY=...
export SENTRY_PYTHON_TEST_AWS_IAM_ROLE="arn:aws:iam::920901907255:role/service-role/lambda"
If you need to debug a new runtime, use this REPL to figure things out:
pip3 install click
python3 tests/integrations/aws_lambda/client.py --runtime=python4.0
"""
import base64
import json
import os
import re
from textwrap import dedent
import pytest
boto3 = pytest.importorskip("boto3")
LAMBDA_PRELUDE = """
from __future__ import print_function
from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration, get_lambda_bootstrap
import sentry_sdk
import json
import time
from sentry_sdk.transport import HttpTransport
def event_processor(event):
# AWS Lambda truncates the log output to 4kb, which is small enough to miss
# parts of even a single error-event/transaction-envelope pair if considered
# in full, so only grab the data we need.
event_data = {}
event_data["contexts"] = {}
event_data["contexts"]["trace"] = event.get("contexts", {}).get("trace")
event_data["exception"] = event.get("exception")
event_data["extra"] = event.get("extra")
event_data["level"] = event.get("level")
event_data["request"] = event.get("request")
event_data["tags"] = event.get("tags")
event_data["transaction"] = event.get("transaction")
return event_data
def envelope_processor(envelope):
# AWS Lambda truncates the log output to 4kb, which is small enough to miss
# parts of even a single error-event/transaction-envelope pair if considered
# in full, so only grab the data we need.
(item,) = envelope.items
envelope_json = json.loads(item.get_bytes())
envelope_data = {}
envelope_data["contexts"] = {}
envelope_data["type"] = envelope_json["type"]
envelope_data["transaction"] = envelope_json["transaction"]
envelope_data["contexts"]["trace"] = envelope_json["contexts"]["trace"]
envelope_data["request"] = envelope_json["request"]
envelope_data["tags"] = envelope_json["tags"]
return envelope_data
class TestTransport(HttpTransport):
def _send_event(self, event):
event = event_processor(event)
# Writing a single string to stdout holds the GIL (seems like) and
# therefore cannot be interleaved with other threads. This is why we
# explicitly add a newline at the end even though `print` would provide
# us one.
print("\\nEVENT: {}\\n".format(json.dumps(event)))
def _send_envelope(self, envelope):
envelope = envelope_processor(envelope)
print("\\nENVELOPE: {}\\n".format(json.dumps(envelope)))
def init_sdk(timeout_warning=False, **extra_init_args):
sentry_sdk.init(
dsn="https://123abc@example.com/123",
transport=TestTransport,
integrations=[AwsLambdaIntegration(timeout_warning=timeout_warning)],
shutdown_timeout=10,
**extra_init_args
)
"""
@pytest.fixture
def lambda_client():
    """Boto3 Lambda client fixture; skips the test when AWS credentials are absent."""
    if "SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID" not in os.environ:
        pytest.skip("AWS environ vars not set")

    from tests.integrations.aws_lambda.client import get_boto_client

    return get_boto_client()
# Parametrized over every Lambda Python runtime the suite targets.
@pytest.fixture(params=["python3.6", "python3.7", "python3.8", "python2.7"])
def lambda_runtime(request):
    return request.param
@pytest.fixture
def run_lambda_function(request, lambda_client, lambda_runtime):
    """Deploy and invoke a Lambda, returning (envelopes, events, response).

    Events/envelopes are recovered by parsing the `EVENT: `/`ENVELOPE: `
    marker lines the deployed code prints to the CloudWatch log.
    """

    def inner(
        code, payload, timeout=30, syntax_check=True, layer=None, initial_handler=None
    ):
        from tests.integrations.aws_lambda.client import run_lambda_function

        response = run_lambda_function(
            client=lambda_client,
            runtime=lambda_runtime,
            code=code,
            payload=payload,
            add_finalizer=request.addfinalizer,
            timeout=timeout,
            syntax_check=syntax_check,
            layer=layer,
            initial_handler=initial_handler,
        )

        # for better debugging
        response["LogResult"] = base64.b64decode(response["LogResult"]).splitlines()
        response["Payload"] = json.loads(response["Payload"].read().decode("utf-8"))
        del response["ResponseMetadata"]

        events = []
        envelopes = []

        for line in response["LogResult"]:
            print("AWS:", line)
            if line.startswith(b"EVENT: "):
                line = line[len(b"EVENT: ") :]
                events.append(json.loads(line.decode("utf-8")))
            elif line.startswith(b"ENVELOPE: "):
                line = line[len(b"ENVELOPE: ") :]
                envelopes.append(json.loads(line.decode("utf-8")))
            else:
                continue

        return envelopes, events, response

    return inner
def test_basic(run_lambda_function):
    """A raising handler reports one error event with Lambda context and log metadata."""
    envelopes, events, response = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk()

        def event_processor(event):
            # Delay event output like this to test proper shutdown
            time.sleep(1)
            return event

        def test_handler(event, context):
            raise Exception("something went wrong")
        """
        ),
        b'{"foo": "bar"}',
    )

    assert response["FunctionError"] == "Unhandled"

    (event,) = events
    assert event["level"] == "error"
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "Exception"
    assert exception["value"] == "something went wrong"

    (frame1,) = exception["stacktrace"]["frames"]
    assert frame1["filename"] == "test_lambda.py"
    assert frame1["abs_path"] == "/var/task/test_lambda.py"
    assert frame1["function"] == "test_handler"

    assert frame1["in_app"] is True

    assert exception["mechanism"] == {"type": "aws_lambda", "handled": False}

    assert event["extra"]["lambda"]["function_name"].startswith("test_function_")

    logs_url = event["extra"]["cloudwatch logs"]["url"]
    assert logs_url.startswith("https://console.aws.amazon.com/cloudwatch/home?region=")
    assert not re.search("(=;|=$)", logs_url)
    assert event["extra"]["cloudwatch logs"]["log_group"].startswith(
        "/aws/lambda/test_function_"
    )
    log_stream_re = "^[0-9]{4}/[0-9]{2}/[0-9]{2}/\\[[^\\]]+][a-f0-9]+$"
    log_stream = event["extra"]["cloudwatch logs"]["log_stream"]
    assert re.match(log_stream_re, log_stream)
def test_initialization_order(run_lambda_function):
    """Zappa lazily imports our code, so by the time we monkeypatch the handler
    as seen by AWS already runs. At this point at least draining the queue
    should work."""
    envelopes, events, _response = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        def test_handler(event, context):
            init_sdk()
            sentry_sdk.capture_exception(Exception("something went wrong"))
        """
        ),
        b'{"foo": "bar"}',
    )

    (event,) = events
    assert event["level"] == "error"
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "Exception"
    assert exception["value"] == "something went wrong"
def test_request_data(run_lambda_function):
    """An API-Gateway-style payload is reflected as request data on the event."""
    envelopes, events, _response = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk()

        def test_handler(event, context):
            sentry_sdk.capture_message("hi")
            return "ok"
        """
        ),
        payload=b"""
        {
          "resource": "/asd",
          "path": "/asd",
          "httpMethod": "GET",
          "headers": {
            "Host": "iwsz2c7uwi.execute-api.us-east-1.amazonaws.com",
            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.13; rv:62.0) Gecko/20100101 Firefox/62.0",
            "X-Forwarded-Proto": "https"
          },
          "queryStringParameters": {
            "bonkers": "true"
          },
          "pathParameters": null,
          "stageVariables": null,
          "requestContext": {
            "identity": {
              "sourceIp": "213.47.147.207",
              "userArn": "42"
            }
          },
          "body": null,
          "isBase64Encoded": false
        }
        """,
    )

    (event,) = events
    assert event["request"] == {
        "headers": {
            "Host": "iwsz2c7uwi.execute-api.us-east-1.amazonaws.com",
            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.13; rv:62.0) Gecko/20100101 Firefox/62.0",
            "X-Forwarded-Proto": "https",
        },
        "method": "GET",
        "query_string": {"bonkers": "true"},
        "url": "https://iwsz2c7uwi.execute-api.us-east-1.amazonaws.com/asd",
    }
def test_init_error(run_lambda_function, lambda_runtime):
    """Errors raised at module import time (before the handler runs) are captured."""
    if lambda_runtime == "python2.7":
        pytest.skip("initialization error not supported on Python 2.7")

    envelopes, events, response = run_lambda_function(
        LAMBDA_PRELUDE
        + (
            "def event_processor(event):\n"
            '    return event["exception"]["values"][0]["value"]\n'
            "init_sdk()\n"
            "func()"
        ),
        b'{"foo": "bar"}',
        # The code intentionally references an undefined name, so skip the
        # local syntax/smoke check that would fail before deployment.
        syntax_check=False,
    )

    (event,) = events
    assert "name 'func' is not defined" in event
def test_timeout_error(run_lambda_function):
    """With timeout_warning=True, an imminent Lambda timeout produces a warning event."""
    envelopes, events, response = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk(timeout_warning=True)

        def test_handler(event, context):
            time.sleep(10)
            return 0
        """
        ),
        b'{"foo": "bar"}',
        timeout=3,
    )

    (event,) = events
    assert event["level"] == "error"
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ServerlessTimeoutWarning"
    # The reported duration can come out as either value depending on timing.
    assert exception["value"] in (
        "WARNING : Function is expected to get timed out. Configured timeout duration = 4 seconds.",
        "WARNING : Function is expected to get timed out. Configured timeout duration = 3 seconds.",
    )

    assert exception["mechanism"] == {"type": "threading", "handled": False}

    assert event["extra"]["lambda"]["function_name"].startswith("test_function_")

    logs_url = event["extra"]["cloudwatch logs"]["url"]
    assert logs_url.startswith("https://console.aws.amazon.com/cloudwatch/home?region=")
    assert not re.search("(=;|=$)", logs_url)
    assert event["extra"]["cloudwatch logs"]["log_group"].startswith(
        "/aws/lambda/test_function_"
    )
    log_stream_re = "^[0-9]{4}/[0-9]{2}/[0-9]{2}/\\[[^\\]]+][a-f0-9]+$"
    log_stream = event["extra"]["cloudwatch logs"]["log_stream"]
    assert re.match(log_stream_re, log_stream)
def test_performance_no_error(run_lambda_function):
    """With tracing enabled, a successful invocation produces exactly one
    transaction envelope describing the serverless function."""
    envelopes, events, response = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=1.0)

        def test_handler(event, context):
            return "test_string"
        """
        ),
        b'{"foo": "bar"}',
    )

    (envelope,) = envelopes
    assert envelope["type"] == "transaction"
    assert envelope["contexts"]["trace"]["op"] == "serverless.function"
    assert envelope["transaction"].startswith("test_function_")
    assert envelope["transaction"] in envelope["request"]["url"]
def test_performance_error(run_lambda_function):
    """With tracing enabled, a failing invocation produces both an error
    event and a transaction envelope for the same serverless function."""
    envelopes, events, response = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=1.0)

        def test_handler(event, context):
            raise Exception("something went wrong")
        """
        ),
        b'{"foo": "bar"}',
    )

    (event,) = events
    assert event["level"] == "error"
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "Exception"
    assert exception["value"] == "something went wrong"

    (envelope,) = envelopes
    assert envelope["type"] == "transaction"
    assert envelope["contexts"]["trace"]["op"] == "serverless.function"
    assert envelope["transaction"].startswith("test_function_")
    assert envelope["transaction"] in envelope["request"]["url"]
@pytest.mark.parametrize(
    "aws_event, has_request_data, batch_size",
    [
        # Scalar JSON payloads: valid events, but carry no HTTP request data.
        (b"1231", False, 1),
        (b"11.21", False, 1),
        (b'"Good dog!"', False, 1),
        (b"true", False, 1),
        # List payload without HTTP fields: treated as a batch of 4 records.
        (
            b"""
            [
                {"good dog": "Maisey"},
                {"good dog": "Charlie"},
                {"good dog": "Cory"},
                {"good dog": "Bodhi"}
            ]
            """,
            False,
            4,
        ),
        # List payload whose records look like API-Gateway requests: request
        # data is extracted from the first record; batch of 2.
        (
            b"""
            [
                {
                    "headers": {
                        "Host": "dogs.are.great",
                        "X-Forwarded-Proto": "http"
                    },
                    "httpMethod": "GET",
                    "path": "/tricks/kangaroo",
                    "queryStringParameters": {
                        "completed_successfully": "true",
                        "treat_provided": "true",
                        "treat_type": "cheese"
                    },
                    "dog": "Maisey"
                },
                {
                    "headers": {
                        "Host": "dogs.are.great",
                        "X-Forwarded-Proto": "http"
                    },
                    "httpMethod": "GET",
                    "path": "/tricks/kangaroo",
                    "queryStringParameters": {
                        "completed_successfully": "true",
                        "treat_provided": "true",
                        "treat_type": "cheese"
                    },
                    "dog": "Charlie"
                }
            ]
            """,
            True,
            2,
        ),
    ],
)
def test_non_dict_event(
    run_lambda_function,
    aws_event,
    has_request_data,
    batch_size,
    DictionaryContaining,  # noqa:N803
):
    """Non-dict JSON payloads (scalars and lists) must not break event
    extraction; list payloads additionally get ``batch_*`` tags."""
    envelopes, events, response = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=1.0)

        def test_handler(event, context):
            raise Exception("More treats, please!")
        """
        ),
        aws_event,
    )

    assert response["FunctionError"] == "Unhandled"

    error_event = events[0]
    assert error_event["level"] == "error"
    assert error_event["contexts"]["trace"]["op"] == "serverless.function"

    function_name = error_event["extra"]["lambda"]["function_name"]
    assert function_name.startswith("test_function_")
    assert error_event["transaction"] == function_name

    exception = error_event["exception"]["values"][0]
    assert exception["type"] == "Exception"
    assert exception["value"] == "More treats, please!"
    assert exception["mechanism"]["type"] == "aws_lambda"

    envelope = envelopes[0]
    assert envelope["type"] == "transaction"
    assert envelope["contexts"]["trace"] == DictionaryContaining(
        error_event["contexts"]["trace"]
    )
    assert envelope["contexts"]["trace"]["status"] == "internal_error"
    assert envelope["transaction"] == error_event["transaction"]
    assert envelope["request"]["url"] == error_event["request"]["url"]

    if has_request_data:
        request_data = {
            "headers": {"Host": "dogs.are.great", "X-Forwarded-Proto": "http"},
            "method": "GET",
            "url": "http://dogs.are.great/tricks/kangaroo",
            "query_string": {
                "completed_successfully": "true",
                "treat_provided": "true",
                "treat_type": "cheese",
            },
        }
    else:
        # No HTTP fields in the payload: SDK falls back to a synthetic URL.
        request_data = {"url": "awslambda:///{}".format(function_name)}

    assert error_event["request"] == request_data
    assert envelope["request"] == request_data

    if batch_size > 1:
        assert error_event["tags"]["batch_size"] == batch_size
        assert error_event["tags"]["batch_request"] is True
        assert envelope["tags"]["batch_size"] == batch_size
        assert envelope["tags"]["batch_request"] is True
def test_traces_sampler_gets_correct_values_in_sampling_context(
    run_lambda_function,
    DictionaryContaining,  # noqa:N803
    ObjectDescribedBy,  # noqa:N803
    StringContaining,  # noqa:N803
):
    """Assert that ``traces_sampler`` receives the AWS event and the real
    ``LambdaContext`` object in its sampling context.

    The assertions run *inside* the AWS runtime (the handler returns a flag)
    because ``LambdaContext`` only exists there and call-arg data is not
    JSON-serializable.
    """
    # TODO: This whole thing is a little hacky, specifically around the need to
    # get `conftest.py` code into the AWS runtime, which is why there's both
    # `inspect.getsource` and a copy of `_safe_is_equal` included directly in
    # the code below. Ideas which have been discussed to fix this:

    # - Include the test suite as a module installed in the package which is
    #   shot up to AWS
    # - In client.py, copy `conftest.py` (or wherever the necessary code lives)
    #   from the test suite into the main SDK directory so it gets included as
    #   "part of the SDK"

    # It's also worth noting why it's necessary to run the assertions in the AWS
    # runtime rather than asserting on side effects the way we do with events
    # and envelopes. The reasons are two-fold:

    # - We're testing against the `LambdaContext` class, which only exists in
    #   the AWS runtime

    # - If we were to transmit call args data they way we transmit event and
    #   envelope data (through JSON), we'd quickly run into the problem that all
    #   sorts of stuff isn't serializable by `json.dumps` out of the box, up to
    #   and including `datetime` objects (so anything with a timestamp is
    #   automatically out)

    # Perhaps these challenges can be solved in a cleaner and more systematic
    # way if we ever decide to refactor the entire AWS testing apparatus.

    import inspect

    envelopes, events, response = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(inspect.getsource(StringContaining))
        + dedent(inspect.getsource(DictionaryContaining))
        + dedent(inspect.getsource(ObjectDescribedBy))
        + dedent(
            """
            try:
                from unittest import mock  # python 3.3 and above
            except ImportError:
                import mock  # python < 3.3

            def _safe_is_equal(x, y):
                # copied from conftest.py - see docstring and comments there
                try:
                    is_equal = x.__eq__(y)
                except AttributeError:
                    is_equal = NotImplemented

                if is_equal == NotImplemented:
                    # using == smoothes out weird variations exposed by raw __eq__
                    return x == y

                return is_equal

            def test_handler(event, context):
                # this runs after the transaction has started, which means we
                # can make assertions about traces_sampler
                try:
                    traces_sampler.assert_any_call(
                        DictionaryContaining(
                            {
                                "aws_event": DictionaryContaining({
                                    "httpMethod": "GET",
                                    "path": "/sit/stay/rollover",
                                    "headers": {"Host": "dogs.are.great", "X-Forwarded-Proto": "http"},
                                }),
                                "aws_context": ObjectDescribedBy(
                                    type=get_lambda_bootstrap().LambdaContext,
                                    attrs={
                                        'function_name': StringContaining("test_function"),
                                        'function_version': '$LATEST',
                                    }
                                )
                            }
                        )
                    )
                except AssertionError:
                    # catch the error and return it because the error itself will
                    # get swallowed by the SDK as an "internal exception"
                    return {"AssertionError raised": True,}

                return {"AssertionError raised": False,}

            traces_sampler = mock.Mock(return_value=True)

            init_sdk(
                traces_sampler=traces_sampler,
            )
            """
        ),
        b'{"httpMethod": "GET", "path": "/sit/stay/rollover", "headers": {"Host": "dogs.are.great", "X-Forwarded-Proto": "http"}}',
    )

    assert response["Payload"]["AssertionError raised"] is False
def test_serverless_no_code_instrumentation(run_lambda_function):
    """
    Test that ensures that just by adding a lambda layer containing the
    python sdk, with no code changes sentry is able to capture errors
    """

    for initial_handler in [
        None,
        "test_dir/test_lambda.test_handler",
        "test_dir.test_lambda.test_handler",
    ]:
        print("Testing Initial Handler ", initial_handler)
        _, _, response = run_lambda_function(
            dedent(
                """
            import sentry_sdk

            def test_handler(event, context):
                current_client = sentry_sdk.Hub.current.client

                assert current_client is not None

                assert len(current_client.options['integrations']) == 1
                assert isinstance(current_client.options['integrations'][0],
                                  sentry_sdk.integrations.aws_lambda.AwsLambdaIntegration)

                raise Exception("something went wrong")
            """
            ),
            b'{"foo": "bar"}',
            layer=True,
            initial_handler=initial_handler,
        )
        assert response["FunctionError"] == "Unhandled"
        assert response["StatusCode"] == 200

        # The in-handler asserts about auto-installed integrations must not
        # be what failed — only the deliberate Exception.
        assert response["Payload"]["errorType"] != "AssertionError"

        assert response["Payload"]["errorType"] == "Exception"
        assert response["Payload"]["errorMessage"] == "something went wrong"

        # The layer's wrapper handler should appear in the execution log.
        assert "sentry_handler" in response["LogResult"][3].decode("utf-8")
sentry-python-1.4.3/tests/integrations/beam/ 0000775 0000000 0000000 00000000000 14125057761 0021146 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/beam/test_beam.py 0000664 0000000 0000000 00000012700 14125057761 0023463 0 ustar 00root root 0000000 0000000 import pytest
import inspect
pytest.importorskip("apache_beam")
import dill
from sentry_sdk.integrations.beam import (
BeamIntegration,
_wrap_task_call,
_wrap_inspect_call,
)
from apache_beam.typehints.trivial_inference import instance_to_type
from apache_beam.typehints.decorators import getcallargs_forhints
from apache_beam.transforms.core import DoFn, ParDo, _DoFnParam, CallableWrapperDoFn
from apache_beam.runners.common import DoFnInvoker, OutputProcessor, DoFnContext
from apache_beam.utils.windowed_value import WindowedValue
def foo():
    """Trivial zero-argument callable used as a wrapping target in the tests."""
    return True
def bar(x, y):
    """Two positional-argument callable; arguments are accepted but unused."""
    # print(x + y)
    return True
def baz(x, y=2):
    """Callable with a default argument, for signature-preservation checks."""
    # print(x + y)
    return True
class A:
    """Helper holding a callable in ``fn`` whose inspect call is wrapped by
    the Beam integration's ``_wrap_inspect_call``."""

    def __init__(self, fn):
        self.r = "We are in A"
        self.fn = fn
        self._inspect_fn = _wrap_inspect_call(self, "fn")

    def process(self):
        return self.fn()
class B(A, object):
    """Subclass of :class:`A` whose bound method ``fa`` becomes ``self.fn``.

    ``fa`` returns ``True`` when the guard condition holds and otherwise
    raises ``ZeroDivisionError`` (the ``return False`` is intentionally
    unreachable).
    """

    def fa(self, x, element=False, another_element=False):
        if x or (element and not another_element):
            # print(self.r)
            return True
        1 / 0
        return False

    def __init__(self):
        self.r = "We are in B"
        super(B, self).__init__(self.fa)
class SimpleFunc(DoFn):
    """Beam DoFn that raises ``ZeroDivisionError`` for truthy elements."""

    def process(self, x):
        if x:
            1 / 0
        return [True]
class PlaceHolderFunc(DoFn):
    """DoFn using Beam placeholder parameters (timestamp/window).

    Raises if the placeholders were not substituted by the runner, and
    raises ``ZeroDivisionError`` for truthy elements.
    """

    def process(self, x, timestamp=DoFn.TimestampParam, wx=DoFn.WindowParam):
        if isinstance(timestamp, _DoFnParam) or isinstance(wx, _DoFnParam):
            raise Exception("Bad instance")
        if x:
            1 / 0
        yield True
def fail(x):
    """Return ``[True]`` for falsy ``x``; raise ``ZeroDivisionError`` otherwise."""
    if not x:
        return [True]
    1 / 0
# Module-level instances exercised by the parametrized tests below.
test_parent = A(foo)
test_child = B()
test_simple = SimpleFunc()
test_place_holder = PlaceHolderFunc()
test_callable = CallableWrapperDoFn(fail)

# Cannot call simple functions or placeholder test.
@pytest.mark.parametrize(
    "obj,f,args,kwargs",
    [
        [test_parent, "fn", (), {}],
        [test_child, "fn", (False,), {"element": True}],
        [test_child, "fn", (True,), {}],
        [test_simple, "process", (False,), {}],
        [test_callable, "process", (False,), {}],
    ],
)
def test_monkey_patch_call(obj, f, args, kwargs):
    """Wrapping a bound method with ``_wrap_task_call`` must not change its
    return value."""
    func = getattr(obj, f)

    assert func(*args, **kwargs)
    assert _wrap_task_call(func)(*args, **kwargs)
@pytest.mark.parametrize("f", [foo, bar, baz, test_parent.fn, test_child.fn])
def test_monkey_patch_pickle(f):
    """Wrapped callables must remain picklable with dill (Beam serializes
    DoFns for distribution to workers)."""
    f_temp = _wrap_task_call(f)
    assert dill.pickles(f_temp), "{} is not pickling correctly!".format(f)

    # Pickle everything
    s1 = dill.dumps(f_temp)
    s2 = dill.loads(s1)
    dill.dumps(s2)
@pytest.mark.parametrize(
    "f,args,kwargs",
    [
        [foo, (), {}],
        [bar, (1, 5), {}],
        [baz, (1,), {}],
        [test_parent.fn, (), {}],
        [test_child.fn, (False,), {"element": True}],
        [test_child.fn, (True,), {}],
    ],
)
def test_monkey_patch_signature(f, args, kwargs):
    """Wrapping must preserve the callable's signature so Beam's type-hint
    machinery (``getcallargs_forhints``) keeps working."""
    arg_types = [instance_to_type(v) for v in args]
    kwargs_types = {k: instance_to_type(v) for (k, v) in kwargs.items()}
    f_temp = _wrap_task_call(f)
    try:
        getcallargs_forhints(f, *arg_types, **kwargs_types)
    except Exception:
        print("Failed on {} with parameters {}, {}".format(f, args, kwargs))
        raise
    try:
        getcallargs_forhints(f_temp, *arg_types, **kwargs_types)
    except Exception:
        print("Failed on {} with parameters {}, {}".format(f_temp, args, kwargs))
        raise
    try:
        expected_signature = inspect.signature(f)
        test_signature = inspect.signature(f_temp)
        assert (
            expected_signature == test_signature
        ), "Failed on {}, signature {} does not match {}".format(
            f, expected_signature, test_signature
        )
    except Exception:
        # expected to pass for py2.7
        pass
class _OutputProcessor(OutputProcessor):
    """Minimal output processor that just consumes DoFn results so the
    invoker can run without a full pipeline."""

    def process_outputs(self, windowed_input_element, results):
        print(windowed_input_element)
        try:
            for result in results:
                assert result
        except StopIteration:
            # Deliberately swallowed: an exhausted generator is fine here.
            print("In here")
@pytest.fixture
def init_beam(sentry_init):
    """Return a factory that initializes the SDK with only BeamIntegration
    and builds a DoFnInvoker for the given DoFn."""

    def inner(fn):
        sentry_init(default_integrations=False, integrations=[BeamIntegration()])
        # Little hack to avoid having to run the whole pipeline.
        pardo = ParDo(fn)
        signature = pardo._signature
        output_processor = _OutputProcessor()
        return DoFnInvoker.create_invoker(
            signature, output_processor, DoFnContext("test")
        )

    return inner
@pytest.mark.parametrize("fn", [test_simple, test_callable, test_place_holder])
def test_invoker_normal(init_beam, fn):
    """Invoking a DoFn with a non-failing element (False) completes cleanly."""
    invoker = init_beam(fn)

    print("Normal testing {} with {} invoker.".format(fn, invoker))

    windowed_value = WindowedValue(False, 0, [None])
    invoker.invoke_process(windowed_value)
@pytest.mark.parametrize("fn", [test_simple, test_callable, test_place_holder])
def test_invoker_exception(init_beam, capture_events, capture_exceptions, fn):
    """A DoFn raising ``ZeroDivisionError`` must be captured with the ``beam``
    mechanism on the resulting event."""
    invoker = init_beam(fn)
    events = capture_events()

    print("Exception testing {} with {} invoker.".format(fn, invoker))
    # Window value will always have one value for the process to run.
    windowed_value = WindowedValue(True, 0, [None])
    try:
        invoker.invoke_process(windowed_value)
    except Exception:
        pass

    (event,) = events
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    assert exception["mechanism"]["type"] == "beam"
sentry-python-1.4.3/tests/integrations/boto3/ 0000775 0000000 0000000 00000000000 14125057761 0021270 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/boto3/__init__.py 0000664 0000000 0000000 00000000346 14125057761 0023404 0 ustar 00root root 0000000 0000000 import pytest
import os
pytest.importorskip("boto3")
xml_fixture_path = os.path.dirname(os.path.abspath(__file__))
def read_fixture(name):
    """Return the raw bytes of fixture file ``name`` located next to this module."""
    fixture_file = os.path.join(xml_fixture_path, name)
    with open(fixture_file, "rb") as fixture:
        return fixture.read()
sentry-python-1.4.3/tests/integrations/boto3/aws_mock.py 0000664 0000000 0000000 00000001563 14125057761 0023452 0 ustar 00root root 0000000 0000000 from io import BytesIO
from botocore.awsrequest import AWSResponse
class Body(BytesIO):
    """In-memory response body mimicking botocore's streaming interface."""

    def stream(self, **kwargs):
        """Yield successive chunks of the buffer until it is exhausted."""
        while True:
            chunk = self.read()
            if not chunk:
                return
            yield chunk
class MockResponse(object):
    """Context manager that intercepts botocore's ``before-send`` event and
    answers every request with a canned ``AWSResponse`` — no network I/O."""

    def __init__(self, client, status_code, headers, body):
        self._client = client
        self._status_code = status_code
        self._headers = headers
        self._body = body

    def __enter__(self):
        # Returning a response from a "before-send" handler short-circuits
        # botocore's real HTTP send.
        self._client.meta.events.register("before-send", self)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self._client.meta.events.unregister("before-send", self)

    def __call__(self, request, **kwargs):
        return AWSResponse(
            request.url,
            self._status_code,
            self._headers,
            Body(self._body),
        )
sentry-python-1.4.3/tests/integrations/boto3/s3_list.xml 0000664 0000000 0000000 00000001545 14125057761 0023377 0 ustar 00root root 0000000 0000000
marshalls-furious-bucket1000urlfalsefoo.txt2020-10-24T00:13:39.000Z"a895ba674b4abd01b5d67cfd7074b827"2064537bef397f7e536914d1ff1bbdb105ed90bcfd06269456bf4a06c6e2e54564daf7STANDARDbar.txt2020-10-02T15:15:20.000Z"a895ba674b4abd01b5d67cfd7074b827"2064537bef397f7e536914d1ff1bbdb105ed90bcfd06269456bf4a06c6e2e54564daf7STANDARD
sentry-python-1.4.3/tests/integrations/boto3/test_s3.py 0000664 0000000 0000000 00000005404 14125057761 0023231 0 ustar 00root root 0000000 0000000 from sentry_sdk import Hub
from sentry_sdk.integrations.boto3 import Boto3Integration
from tests.integrations.boto3.aws_mock import MockResponse
from tests.integrations.boto3 import read_fixture
import boto3
# Dummy credentials: every request is intercepted by MockResponse before
# any real AWS call could be made.
session = boto3.Session(
    aws_access_key_id="-",
    aws_secret_access_key="-",
)
def test_basic(sentry_init, capture_events, get_client):
    """A ListObjects call inside a transaction produces one ``aws.request``
    span with the service.operation as its description."""
    sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()])
    events = capture_events()

    s3 = session.resource("s3")
    with Hub.current.start_transaction() as transaction, MockResponse(
        s3.meta.client, 200, {}, read_fixture("s3_list.xml")
    ):
        bucket = s3.Bucket("bucket")
        items = [obj for obj in bucket.objects.all()]
        assert len(items) == 2
        assert items[0].key == "foo.txt"
        assert items[1].key == "bar.txt"
        transaction.finish()

    (event,) = events
    assert event["type"] == "transaction"
    assert len(event["spans"]) == 1
    (span,) = event["spans"]
    assert span["op"] == "aws.request"
    assert span["description"] == "aws.s3.ListObjects"
def test_streaming(sentry_init, capture_events, get_client):
    """Reading a GetObject body in chunks produces a child
    ``aws.request.stream`` span parented to the ``aws.request`` span."""
    sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()])
    events = capture_events()

    s3 = session.resource("s3")
    with Hub.current.start_transaction() as transaction, MockResponse(
        s3.meta.client, 200, {}, b"hello"
    ):
        obj = s3.Bucket("bucket").Object("foo.pdf")
        body = obj.get()["Body"]
        assert body.read(1) == b"h"
        assert body.read(2) == b"el"
        assert body.read(3) == b"lo"
        assert body.read(1) == b""
        transaction.finish()

    (event,) = events
    assert event["type"] == "transaction"
    assert len(event["spans"]) == 2
    span1 = event["spans"][0]
    assert span1["op"] == "aws.request"
    assert span1["description"] == "aws.s3.GetObject"
    span2 = event["spans"][1]
    assert span2["op"] == "aws.request.stream"
    assert span2["description"] == "aws.s3.GetObject"
    assert span2["parent_span_id"] == span1["span_id"]
def test_streaming_close(sentry_init, capture_events, get_client):
    """Closing a partially-read body must still finish the stream span
    (two spans total, no leak)."""
    sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()])
    events = capture_events()

    s3 = session.resource("s3")
    with Hub.current.start_transaction() as transaction, MockResponse(
        s3.meta.client, 200, {}, b"hello"
    ):
        obj = s3.Bucket("bucket").Object("foo.pdf")
        body = obj.get()["Body"]
        assert body.read(1) == b"h"
        body.close()  # close partially-read stream
        transaction.finish()

    (event,) = events
    assert event["type"] == "transaction"
    assert len(event["spans"]) == 2
    span1 = event["spans"][0]
    assert span1["op"] == "aws.request"
    span2 = event["spans"][1]
    assert span2["op"] == "aws.request.stream"
sentry-python-1.4.3/tests/integrations/bottle/ 0000775 0000000 0000000 00000000000 14125057761 0021533 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/bottle/test_bottle.py 0000664 0000000 0000000 00000025305 14125057761 0024442 0 ustar 00root root 0000000 0000000 import json
import pytest
import logging
pytest.importorskip("bottle")
from io import BytesIO
from bottle import Bottle, debug as set_debug, abort, redirect
from sentry_sdk import capture_message
from sentry_sdk.integrations.logging import LoggingIntegration
from werkzeug.test import Client
import sentry_sdk.integrations.bottle as bottle_sentry
@pytest.fixture(scope="function")
def app(sentry_init):
    """Fresh Bottle app per test with two routes: one plain, one named
    (``name="hi"``) — the latter exercises endpoint-style transaction names."""
    app = Bottle()

    @app.route("/message")
    def hi():
        capture_message("hi")
        return "ok"

    @app.route("/message-named-route", name="hi")
    def named_hi():
        capture_message("hi")
        return "ok"

    yield app
@pytest.fixture
def get_client(app):
    """Factory fixture: each call returns a fresh Werkzeug test client for ``app``."""
    return lambda: Client(app)
def test_has_context(sentry_init, app, capture_events, get_client):
    """Captured messages carry request context (URL) but no body data for a GET."""
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])
    events = capture_events()

    client = get_client()
    response = client.get("/message")
    assert response[1] == "200 OK"

    (event,) = events
    assert event["message"] == "hi"
    assert "data" not in event["request"]
    assert event["request"]["url"] == "http://localhost/message"
@pytest.mark.parametrize(
    "url,transaction_style,expected_transaction",
    [
        ("/message", "endpoint", "hi"),
        ("/message", "url", "/message"),
        ("/message-named-route", "endpoint", "hi"),
    ],
)
def test_transaction_style(
    sentry_init,
    app,
    capture_events,
    transaction_style,
    expected_transaction,
    url,
    get_client,
):
    """``transaction_style`` selects whether the event's transaction is the
    route's endpoint name or its URL rule."""
    sentry_init(
        integrations=[
            bottle_sentry.BottleIntegration(transaction_style=transaction_style)
        ]
    )
    events = capture_events()

    client = get_client()
    # BUG FIX: this previously requested the hard-coded "/message" and
    # ignored the parametrized ``url``, so the named-route case was never
    # actually exercised.
    response = client.get(url)
    assert response[1] == "200 OK"

    (event,) = events
    assert event["transaction"].endswith(expected_transaction)
@pytest.mark.parametrize("debug", (True, False), ids=["debug", "nodebug"])
@pytest.mark.parametrize("catchall", (True, False), ids=["catchall", "nocatchall"])
def test_errors(
    sentry_init, capture_exceptions, capture_events, app, debug, catchall, get_client
):
    """Unhandled exceptions are captured with the ``bottle`` mechanism in
    every combination of Bottle's debug/catchall modes."""
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])

    app.catchall = catchall
    set_debug(mode=debug)

    exceptions = capture_exceptions()
    events = capture_events()

    @app.route("/")
    def index():
        1 / 0

    client = get_client()
    try:
        client.get("/")
    except ZeroDivisionError:
        pass

    (exc,) = exceptions
    assert isinstance(exc, ZeroDivisionError)

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "bottle"
    assert event["exception"]["values"][0]["mechanism"]["handled"] is False
def test_large_json_request(sentry_init, capture_events, app, get_client):
    """Oversized JSON bodies are captured but truncated, with trimming
    metadata recorded under ``_meta``."""
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])

    data = {"foo": {"bar": "a" * 2000}}

    @app.route("/", method="POST")
    def index():
        import bottle

        assert bottle.request.json == data
        assert bottle.request.body.read() == json.dumps(data).encode("ascii")
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = get_client()
    # NOTE(review): removed a stray GET against this POST-only route — it
    # returned 405, captured nothing, and its response was immediately
    # overwritten by the POST below.
    response = client.post("/", content_type="application/json", data=json.dumps(data))
    assert response[1] == "200 OK"

    (event,) = events
    # 512 is the per-string trim limit; the remainder is recorded in _meta.
    assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
    }
    assert len(event["request"]["data"]["foo"]["bar"]) == 512
@pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
def test_empty_json_request(sentry_init, capture_events, app, data, get_client):
    """Empty JSON bodies ({} and []) are captured verbatim, not dropped."""
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])

    @app.route("/", method="POST")
    def index():
        import bottle

        assert bottle.request.json == data
        assert bottle.request.body.read() == json.dumps(data).encode("ascii")
        # assert not bottle.request.forms
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = get_client()
    response = client.post("/", content_type="application/json", data=json.dumps(data))
    assert response[1] == "200 OK"

    (event,) = events
    assert event["request"]["data"] == data
def test_medium_formdata_request(sentry_init, capture_events, app, get_client):
    """Large form fields are truncated to 512 chars with trimming metadata."""
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])

    data = {"foo": "a" * 2000}

    @app.route("/", method="POST")
    def index():
        import bottle

        assert bottle.request.forms["foo"] == data["foo"]
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = get_client()
    response = client.post("/", data=data)
    assert response[1] == "200 OK"

    (event,) = events
    assert event["_meta"]["request"]["data"]["foo"] == {
        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
    }
    assert len(event["request"]["data"]["foo"]) == 512
@pytest.mark.parametrize("input_char", [u"a", b"a"])
def test_too_large_raw_request(
    sentry_init, input_char, capture_events, app, get_client
):
    """With ``request_bodies="small"``, raw bodies above the limit are
    dropped entirely and flagged with ``!config`` in ``_meta``."""
    sentry_init(
        integrations=[bottle_sentry.BottleIntegration()], request_bodies="small"
    )

    data = input_char * 2000

    @app.route("/", method="POST")
    def index():
        import bottle

        if isinstance(data, bytes):
            assert bottle.request.body.read() == data
        else:
            assert bottle.request.body.read() == data.encode("ascii")
        assert not bottle.request.json
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = get_client()
    response = client.post("/", data=data)
    assert response[1] == "200 OK"

    (event,) = events
    assert event["_meta"]["request"]["data"] == {
        "": {"len": 2000, "rem": [["!config", "x", 0, 2000]]}
    }
    assert not event["request"]["data"]
def test_files_and_form(sentry_init, capture_events, app, get_client):
    """Multipart requests: form fields are trimmed, uploaded file contents
    are stripped (``!raw``) and never included in the event."""
    sentry_init(
        integrations=[bottle_sentry.BottleIntegration()], request_bodies="always"
    )

    data = {"foo": "a" * 2000, "file": (BytesIO(b"hello"), "hello.txt")}

    @app.route("/", method="POST")
    def index():
        import bottle

        assert list(bottle.request.forms) == ["foo"]
        assert list(bottle.request.files) == ["file"]
        assert not bottle.request.json
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = get_client()
    response = client.post("/", data=data)
    assert response[1] == "200 OK"

    (event,) = events
    assert event["_meta"]["request"]["data"]["foo"] == {
        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
    }
    assert len(event["request"]["data"]["foo"]) == 512

    assert event["_meta"]["request"]["data"]["file"] == {
        "": {
            "len": -1,
            "rem": [["!raw", "x", 0, -1]],
        }  # bottle default content-length is -1
    }
    assert not event["request"]["data"]["file"]
@pytest.mark.parametrize(
    "integrations",
    [
        [bottle_sentry.BottleIntegration()],
        [bottle_sentry.BottleIntegration(), LoggingIntegration(event_level="ERROR")],
    ],
)
def test_errors_not_reported_twice(
    sentry_init, integrations, capture_events, app, get_client
):
    """An exception that is both logged and re-raised must yield exactly one
    event, even with the logging integration enabled."""
    sentry_init(integrations=integrations)

    app.catchall = False

    logger = logging.getLogger("bottle.app")

    @app.route("/")
    def index():
        try:
            1 / 0
        except Exception as e:
            logger.exception(e)
            raise e

    events = capture_events()

    client = get_client()
    with pytest.raises(ZeroDivisionError):
        client.get("/")

    assert len(events) == 1
def test_logging(sentry_init, capture_events, app, get_client):
    # ensure that Bottle's logger magic doesn't break ours
    sentry_init(
        integrations=[
            bottle_sentry.BottleIntegration(),
            LoggingIntegration(event_level="ERROR"),
        ]
    )

    @app.route("/")
    def index():
        # NOTE(review): relies on the app object exposing ``logger`` —
        # confirm this attribute exists on the fixture's Bottle app.
        app.logger.error("hi")
        return "ok"

    events = capture_events()

    client = get_client()
    client.get("/")

    (event,) = events
    assert event["level"] == "error"
def test_mount(app, capture_exceptions, capture_events, sentry_init, get_client):
    """Exceptions raised by a WSGI app mounted on a sub-path are still
    captured with the ``bottle`` mechanism."""
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])

    app.catchall = False

    def crashing_app(environ, start_response):
        1 / 0

    app.mount("/wsgi/", crashing_app)

    client = Client(app)

    exceptions = capture_exceptions()
    events = capture_events()

    with pytest.raises(ZeroDivisionError) as exc:
        client.get("/wsgi/")

    (error,) = exceptions

    assert error is exc.value

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"] == {
        "type": "bottle",
        "handled": False,
    }
def test_500(sentry_init, capture_events, app, get_client):
    """A custom 500 handler still runs and can capture its own message; two
    events total are expected (the crash and the handler's message)."""
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])

    set_debug(False)
    app.catchall = True

    @app.route("/")
    def index():
        1 / 0

    @app.error(500)
    def error_handler(err):
        capture_message("error_msg")
        return "My error"

    events = capture_events()

    client = get_client()
    response = client.get("/")
    assert response[1] == "500 Internal Server Error"

    _, event = events

    assert event["message"] == "error_msg"
def test_error_in_errorhandler(sentry_init, capture_events, app, get_client):
    """When the 500 handler itself crashes, both the original error and the
    handler's error must be captured as separate events."""
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])

    set_debug(False)
    app.catchall = True

    @app.route("/")
    def index():
        raise ValueError()

    @app.error(500)
    def error_handler(err):
        1 / 0

    events = capture_events()

    client = get_client()

    with pytest.raises(ZeroDivisionError):
        client.get("/")

    event1, event2 = events

    (exception,) = event1["exception"]["values"]
    assert exception["type"] == "ValueError"

    exception = event2["exception"]["values"][0]
    assert exception["type"] == "ZeroDivisionError"
def test_bad_request_not_captured(sentry_init, capture_events, app, get_client):
    """Deliberate HTTP errors (``abort(400)``) must not create events."""
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])
    events = capture_events()

    @app.route("/")
    def index():
        abort(400, "bad request in")

    client = get_client()

    client.get("/")

    assert not events
def test_no_exception_on_redirect(sentry_init, capture_events, app, get_client):
    """Bottle implements redirects by raising HTTPResponse — that must not
    be reported as an error."""
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])
    events = capture_events()

    @app.route("/")
    def index():
        redirect("/here")

    @app.route("/here")
    def here():
        return "here"

    client = get_client()

    client.get("/")

    assert not events
sentry-python-1.4.3/tests/integrations/celery/ 0000775 0000000 0000000 00000000000 14125057761 0021525 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/celery/test_celery.py 0000664 0000000 0000000 00000030160 14125057761 0024421 0 ustar 00root root 0000000 0000000 import threading
import pytest
pytest.importorskip("celery")
from sentry_sdk import Hub, configure_scope, start_transaction
from sentry_sdk.integrations.celery import CeleryIntegration
from sentry_sdk._compat import text_type
from celery import Celery, VERSION
from celery.bin import worker
try:
from unittest import mock # python 3.3 and above
except ImportError:
import mock # python < 3.3
@pytest.fixture
def connect_signal(request):
    """Connect a handler to a Celery signal, disconnecting it on teardown."""

    def inner(signal, f):
        signal.connect(f)
        request.addfinalizer(lambda: signal.disconnect(f))

    return inner
@pytest.fixture
def init_celery(sentry_init, request):
    """Factory fixture building a Celery app with CeleryIntegration installed.

    ``backend="always_eager"`` runs tasks inline; ``backend="redis"`` spins up
    a real worker thread against a local Redis (Celery 4+ only).
    """

    def inner(propagate_traces=True, backend="always_eager", **kwargs):
        sentry_init(
            integrations=[CeleryIntegration(propagate_traces=propagate_traces)],
            **kwargs
        )
        celery = Celery(__name__)

        if backend == "always_eager":
            # Config key changed between Celery 3 and 4.
            if VERSION < (4,):
                celery.conf.CELERY_ALWAYS_EAGER = True
            else:
                celery.conf.task_always_eager = True
        elif backend == "redis":
            # broken on celery 3
            if VERSION < (4,):
                pytest.skip("Redis backend broken for some reason")

            # this backend requires capture_events_forksafe
            celery.conf.worker_max_tasks_per_child = 1
            celery.conf.worker_concurrency = 1
            celery.conf.broker_url = "redis://127.0.0.1:6379"
            celery.conf.result_backend = "redis://127.0.0.1:6379"
            celery.conf.task_always_eager = False

            Hub.main.bind_client(Hub.current.client)
            request.addfinalizer(lambda: Hub.main.bind_client(None))

            # Once we drop celery 3 we can use the celery_worker fixture
            if VERSION < (5,):
                worker_fn = worker.worker(app=celery).run
            else:
                from celery.bin.base import CLIContext

                worker_fn = lambda: worker.worker(
                    obj=CLIContext(app=celery, no_color=True, workdir=".", quiet=False),
                    args=[],
                )

            worker_thread = threading.Thread(target=worker_fn)
            worker_thread.daemon = True
            worker_thread.start()
        else:
            raise ValueError(backend)

        return celery

    return inner
@pytest.fixture
def celery(init_celery):
    """Default Celery app: eager execution with the integration installed."""
    return init_celery()
@pytest.fixture(
    params=[
        lambda task, x, y: (task.delay(x, y), {"args": [x, y], "kwargs": {}}),
        lambda task, x, y: (task.apply_async((x, y)), {"args": [x, y], "kwargs": {}}),
        lambda task, x, y: (
            task.apply_async(args=(x, y)),
            {"args": [x, y], "kwargs": {}},
        ),
        lambda task, x, y: (
            task.apply_async(kwargs=dict(x=x, y=y)),
            {"args": [], "kwargs": {"x": x, "y": y}},
        ),
    ]
)
def celery_invocation(request):
    """
    Invokes a task in multiple ways Celery allows you to (testing our apply_async monkeypatch).
    Currently limited to a task signature of the form foo(x, y)
    """
    return request.param
def test_simple(capture_events, celery, celery_invocation):
    """A failing task yields one error event linked to the surrounding
    transaction, tagged with the task id and job metadata."""
    events = capture_events()

    @celery.task(name="dummy_task")
    def dummy_task(x, y):
        foo = 42  # noqa
        return x / y

    with start_transaction() as transaction:
        celery_invocation(dummy_task, 1, 2)
        _, expected_context = celery_invocation(dummy_task, 1, 0)

    (event,) = events

    assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id
    assert event["contexts"]["trace"]["span_id"] != transaction.span_id
    assert event["transaction"] == "dummy_task"
    assert "celery_task_id" in event["tags"]
    assert event["extra"]["celery-job"] == dict(
        task_name="dummy_task", **expected_context
    )

    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    assert exception["mechanism"]["type"] == "celery"
    assert exception["stacktrace"]["frames"][0]["vars"]["foo"] == "42"
@pytest.mark.parametrize("task_fails", [True, False], ids=["error", "success"])
def test_transaction_events(capture_events, init_celery, celery_invocation, task_fails):
    """Task execution creates its own transaction sharing the submitter's
    trace id; the submission side records a ``celery.submit`` span."""
    celery = init_celery(traces_sample_rate=1.0)

    @celery.task(name="dummy_task")
    def dummy_task(x, y):
        return x / y

    # XXX: For some reason the first call does not get instrumented properly.
    celery_invocation(dummy_task, 1, 1)

    events = capture_events()

    with start_transaction(name="submission") as transaction:
        celery_invocation(dummy_task, 1, 0 if task_fails else 1)

    if task_fails:
        error_event = events.pop(0)
        assert error_event["contexts"]["trace"]["trace_id"] == transaction.trace_id
        assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"

    execution_event, submission_event = events

    assert execution_event["transaction"] == "dummy_task"
    assert submission_event["transaction"] == "submission"

    assert execution_event["type"] == submission_event["type"] == "transaction"
    assert execution_event["contexts"]["trace"]["trace_id"] == transaction.trace_id
    assert submission_event["contexts"]["trace"]["trace_id"] == transaction.trace_id

    if task_fails:
        assert execution_event["contexts"]["trace"]["status"] == "internal_error"
    else:
        assert execution_event["contexts"]["trace"]["status"] == "ok"

    assert execution_event["spans"] == []
    assert submission_event["spans"] == [
        {
            u"description": u"dummy_task",
            u"op": "celery.submit",
            u"parent_span_id": submission_event["contexts"]["trace"]["span_id"],
            u"same_process_as_parent": True,
            u"span_id": submission_event["spans"][0]["span_id"],
            u"start_timestamp": submission_event["spans"][0]["start_timestamp"],
            u"timestamp": submission_event["spans"][0]["timestamp"],
            u"trace_id": text_type(transaction.trace_id),
        }
    ]
def test_no_stackoverflows(celery):
"""We used to have a bug in the Celery integration where its monkeypatching
was repeated for every task invocation, leading to stackoverflows.
See https://github.com/getsentry/sentry-python/issues/265
"""
results = []
@celery.task(name="dummy_task")
def dummy_task():
with configure_scope() as scope:
scope.set_tag("foo", "bar")
results.append(42)
for _ in range(10000):
dummy_task.delay()
assert results == [42] * 10000
with configure_scope() as scope:
assert not scope._tags
def test_simple_no_propagation(capture_events, init_celery):
celery = init_celery(propagate_traces=False)
events = capture_events()
@celery.task(name="dummy_task")
def dummy_task():
1 / 0
with start_transaction() as transaction:
dummy_task.delay()
(event,) = events
assert event["contexts"]["trace"]["trace_id"] != transaction.trace_id
assert event["transaction"] == "dummy_task"
(exception,) = event["exception"]["values"]
assert exception["type"] == "ZeroDivisionError"
def test_ignore_expected(capture_events, celery):
events = capture_events()
@celery.task(name="dummy_task", throws=(ZeroDivisionError,))
def dummy_task(x, y):
return x / y
dummy_task.delay(1, 2)
dummy_task.delay(1, 0)
assert not events
def test_broken_prerun(init_celery, connect_signal):
from celery.signals import task_prerun
stack_lengths = []
def crash(*args, **kwargs):
# scope should exist in prerun
stack_lengths.append(len(Hub.current._stack))
1 / 0
# Order here is important to reproduce the bug: In Celery 3, a crashing
# prerun would prevent other preruns from running.
connect_signal(task_prerun, crash)
celery = init_celery()
assert len(Hub.current._stack) == 1
@celery.task(name="dummy_task")
def dummy_task(x, y):
stack_lengths.append(len(Hub.current._stack))
return x / y
if VERSION >= (4,):
dummy_task.delay(2, 2)
else:
with pytest.raises(ZeroDivisionError):
dummy_task.delay(2, 2)
assert len(Hub.current._stack) == 1
if VERSION < (4,):
assert stack_lengths == [2]
else:
assert stack_lengths == [2, 2]
@pytest.mark.xfail(
(4, 2, 0) <= VERSION < (4, 4, 3),
strict=True,
reason="https://github.com/celery/celery/issues/4661",
)
def test_retry(celery, capture_events):
events = capture_events()
failures = [True, True, False]
runs = []
@celery.task(name="dummy_task", bind=True)
def dummy_task(self):
runs.append(1)
try:
if failures.pop(0):
1 / 0
except Exception as exc:
self.retry(max_retries=2, exc=exc)
dummy_task.delay()
assert len(runs) == 3
assert not events
failures = [True, True, True]
runs = []
dummy_task.delay()
assert len(runs) == 3
(event,) = events
exceptions = event["exception"]["values"]
for e in exceptions:
assert e["type"] == "ZeroDivisionError"
@pytest.mark.forked
def test_redis_backend_trace_propagation(init_celery, capture_events_forksafe, tmpdir):
celery = init_celery(traces_sample_rate=1.0, backend="redis", debug=True)
events = capture_events_forksafe()
runs = []
@celery.task(name="dummy_task", bind=True)
def dummy_task(self):
runs.append(1)
1 / 0
with start_transaction(name="submit_celery"):
# Curious: Cannot use delay() here or py2.7-celery-4.2 crashes
res = dummy_task.apply_async()
with pytest.raises(Exception):
# Celery 4.1 raises a gibberish exception
res.wait()
# if this is nonempty, the worker never really forked
assert not runs
submit_transaction = events.read_event()
assert submit_transaction["type"] == "transaction"
assert submit_transaction["transaction"] == "submit_celery"
(span,) = submit_transaction["spans"]
assert span["op"] == "celery.submit"
assert span["description"] == "dummy_task"
event = events.read_event()
(exception,) = event["exception"]["values"]
assert exception["type"] == "ZeroDivisionError"
transaction = events.read_event()
assert (
transaction["contexts"]["trace"]["trace_id"]
== event["contexts"]["trace"]["trace_id"]
== submit_transaction["contexts"]["trace"]["trace_id"]
)
events.read_flush()
# if this is nonempty, the worker never really forked
assert not runs
@pytest.mark.forked
@pytest.mark.parametrize("newrelic_order", ["sentry_first", "sentry_last"])
def test_newrelic_interference(init_celery, newrelic_order, celery_invocation):
def instrument_newrelic():
import celery.app.trace as celery_mod
from newrelic.hooks.application_celery import instrument_celery_execute_trace
assert hasattr(celery_mod, "build_tracer")
instrument_celery_execute_trace(celery_mod)
if newrelic_order == "sentry_first":
celery = init_celery()
instrument_newrelic()
elif newrelic_order == "sentry_last":
instrument_newrelic()
celery = init_celery()
else:
raise ValueError(newrelic_order)
@celery.task(name="dummy_task", bind=True)
def dummy_task(self, x, y):
return x / y
assert dummy_task.apply(kwargs={"x": 1, "y": 1}).wait() == 1
assert celery_invocation(dummy_task, 1, 1)[0].wait() == 1
def test_traces_sampler_gets_task_info_in_sampling_context(
init_celery, celery_invocation, DictionaryContaining # noqa:N803
):
traces_sampler = mock.Mock()
celery = init_celery(traces_sampler=traces_sampler)
@celery.task(name="dog_walk")
def walk_dogs(x, y):
dogs, route = x
num_loops = y
return dogs, route, num_loops
_, args_kwargs = celery_invocation(
walk_dogs, [["Maisey", "Charlie", "Bodhi", "Cory"], "Dog park round trip"], 1
)
traces_sampler.assert_any_call(
# depending on the iteration of celery_invocation, the data might be
# passed as args or as kwargs, so make this generic
DictionaryContaining({"celery_job": dict(task="dog_walk", **args_kwargs)})
)
sentry-python-1.4.3/tests/integrations/chalice/ 0000775 0000000 0000000 00000000000 14125057761 0021632 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/chalice/__init__.py 0000664 0000000 0000000 00000000056 14125057761 0023744 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("chalice")
sentry-python-1.4.3/tests/integrations/chalice/test_chalice.py 0000664 0000000 0000000 00000005727 14125057761 0024646 0 ustar 00root root 0000000 0000000 import pytest
import time
from chalice import Chalice, BadRequestError
from chalice.local import LambdaContext, LocalGateway
from sentry_sdk.integrations.chalice import ChaliceIntegration
from pytest_chalice.handlers import RequestHandler
def _generate_lambda_context(self):
# Monkeypatch of the function _generate_lambda_context
# from the class LocalGateway
# for mock the timeout
# type: () -> LambdaContext
if self._config.lambda_timeout is None:
timeout = 10 * 1000
else:
timeout = self._config.lambda_timeout * 1000
return LambdaContext(
function_name=self._config.function_name,
memory_size=self._config.lambda_memory_size,
max_runtime_ms=timeout,
)
@pytest.fixture
def app(sentry_init):
sentry_init(integrations=[ChaliceIntegration()])
app = Chalice(app_name="sentry_chalice")
@app.route("/boom")
def boom():
raise Exception("boom goes the dynamite!")
@app.route("/context")
def has_request():
raise Exception("boom goes the dynamite!")
@app.route("/badrequest")
def badrequest():
raise BadRequestError("bad-request")
LocalGateway._generate_lambda_context = _generate_lambda_context
return app
@pytest.fixture
def lambda_context_args():
return ["lambda_name", 256]
def test_exception_boom(app, client: RequestHandler) -> None:
response = client.get("/boom")
assert response.status_code == 500
assert response.json == dict(
[
("Code", "InternalServerError"),
("Message", "An internal server error occurred."),
]
)
def test_has_request(app, capture_events, client: RequestHandler):
events = capture_events()
response = client.get("/context")
assert response.status_code == 500
(event,) = events
assert event["level"] == "error"
(exception,) = event["exception"]["values"]
assert exception["type"] == "Exception"
def test_scheduled_event(app, lambda_context_args):
@app.schedule("rate(1 minutes)")
def every_hour(event):
raise Exception("schedule event!")
context = LambdaContext(
*lambda_context_args, max_runtime_ms=10000, time_source=time
)
lambda_event = {
"version": "0",
"account": "120987654312",
"region": "us-west-1",
"detail": {},
"detail-type": "Scheduled Event",
"source": "aws.events",
"time": "1970-01-01T00:00:00Z",
"id": "event-id",
"resources": ["arn:aws:events:us-west-1:120987654312:rule/my-schedule"],
}
with pytest.raises(Exception) as exc_info:
every_hour(lambda_event, context=context)
assert str(exc_info.value) == "schedule event!"
def test_bad_reques(client: RequestHandler) -> None:
response = client.get("/badrequest")
assert response.status_code == 400
assert response.json == dict(
[
("Code", "BadRequestError"),
("Message", "BadRequestError: bad-request"),
]
)
sentry-python-1.4.3/tests/integrations/conftest.py 0000664 0000000 0000000 00000001060 14125057761 0022436 0 ustar 00root root 0000000 0000000 import pytest
import sentry_sdk
@pytest.fixture
def capture_exceptions(monkeypatch):
def inner():
errors = set()
old_capture_event = sentry_sdk.Hub.capture_event
def capture_event(self, event, hint=None):
if hint:
if "exc_info" in hint:
error = hint["exc_info"][1]
errors.add(error)
return old_capture_event(self, event, hint=hint)
monkeypatch.setattr(sentry_sdk.Hub, "capture_event", capture_event)
return errors
return inner
sentry-python-1.4.3/tests/integrations/django/ 0000775 0000000 0000000 00000000000 14125057761 0021504 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/django/__init__.py 0000664 0000000 0000000 00000000066 14125057761 0023617 0 ustar 00root root 0000000 0000000 import pytest
django = pytest.importorskip("django")
sentry-python-1.4.3/tests/integrations/django/asgi/ 0000775 0000000 0000000 00000000000 14125057761 0022427 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/django/asgi/__init__.py 0000664 0000000 0000000 00000000057 14125057761 0024542 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("channels")
sentry-python-1.4.3/tests/integrations/django/asgi/test_asgi.py 0000664 0000000 0000000 00000013162 14125057761 0024766 0 ustar 00root root 0000000 0000000 import django
import pytest
from channels.testing import HttpCommunicator
from sentry_sdk import capture_message
from sentry_sdk.integrations.django import DjangoIntegration
from tests.integrations.django.myapp.asgi import channels_application
APPS = [channels_application]
if django.VERSION >= (3, 0):
from tests.integrations.django.myapp.asgi import asgi_application
APPS += [asgi_application]
@pytest.mark.parametrize("application", APPS)
@pytest.mark.asyncio
async def test_basic(sentry_init, capture_events, application):
sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
events = capture_events()
comm = HttpCommunicator(application, "GET", "/view-exc?test=query")
response = await comm.get_response()
assert response["status"] == 500
(event,) = events
(exception,) = event["exception"]["values"]
assert exception["type"] == "ZeroDivisionError"
# Test that the ASGI middleware got set up correctly. Right now this needs
# to be installed manually (see myapp/asgi.py)
assert event["transaction"] == "/view-exc"
assert event["request"] == {
"cookies": {},
"headers": {},
"method": "GET",
"query_string": "test=query",
"url": "/view-exc",
}
capture_message("hi")
event = events[-1]
assert "request" not in event
@pytest.mark.parametrize("application", APPS)
@pytest.mark.asyncio
@pytest.mark.skipif(
django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_async_views(sentry_init, capture_events, application):
sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
events = capture_events()
comm = HttpCommunicator(application, "GET", "/async_message")
response = await comm.get_response()
assert response["status"] == 200
(event,) = events
assert event["transaction"] == "/async_message"
assert event["request"] == {
"cookies": {},
"headers": {},
"method": "GET",
"query_string": None,
"url": "/async_message",
}
@pytest.mark.asyncio
@pytest.mark.skipif(
django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_async_views_concurrent_execution(sentry_init, capture_events, settings):
import asyncio
import time
settings.MIDDLEWARE = []
asgi_application.load_middleware(is_async=True)
sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
comm = HttpCommunicator(asgi_application, "GET", "/my_async_view")
comm2 = HttpCommunicator(asgi_application, "GET", "/my_async_view")
loop = asyncio.get_event_loop()
start = time.time()
r1 = loop.create_task(comm.get_response(timeout=5))
r2 = loop.create_task(comm2.get_response(timeout=5))
(resp1, resp2), _ = await asyncio.wait({r1, r2})
end = time.time()
assert resp1.result()["status"] == 200
assert resp2.result()["status"] == 200
assert end - start < 1.5
@pytest.mark.asyncio
@pytest.mark.skipif(
django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_async_middleware_that_is_function_concurrent_execution(
sentry_init, capture_events, settings
):
import asyncio
import time
settings.MIDDLEWARE = [
"tests.integrations.django.myapp.middleware.simple_middleware"
]
asgi_application.load_middleware(is_async=True)
sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
comm = HttpCommunicator(asgi_application, "GET", "/my_async_view")
comm2 = HttpCommunicator(asgi_application, "GET", "/my_async_view")
loop = asyncio.get_event_loop()
start = time.time()
r1 = loop.create_task(comm.get_response(timeout=5))
r2 = loop.create_task(comm2.get_response(timeout=5))
(resp1, resp2), _ = await asyncio.wait({r1, r2})
end = time.time()
assert resp1.result()["status"] == 200
assert resp2.result()["status"] == 200
assert end - start < 1.5
@pytest.mark.asyncio
@pytest.mark.skipif(
django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_async_middleware_spans(
sentry_init, render_span_tree, capture_events, settings
):
settings.MIDDLEWARE = [
"django.contrib.sessions.middleware.SessionMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"tests.integrations.django.myapp.settings.TestMiddleware",
]
asgi_application.load_middleware(is_async=True)
sentry_init(
integrations=[DjangoIntegration(middleware_spans=True)],
traces_sample_rate=1.0,
_experiments={"record_sql_params": True},
)
events = capture_events()
comm = HttpCommunicator(asgi_application, "GET", "/async_message")
response = await comm.get_response()
assert response["status"] == 200
await comm.wait()
message, transaction = events
assert (
render_span_tree(transaction)
== """\
- op="http.server": description=null
- op="django.middleware": description="django.contrib.sessions.middleware.SessionMiddleware.__acall__"
- op="django.middleware": description="django.contrib.auth.middleware.AuthenticationMiddleware.__acall__"
- op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.__acall__"
- op="django.middleware": description="tests.integrations.django.myapp.settings.TestMiddleware.__acall__"
- op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
- op="django.view": description="async_message\""""
)
sentry-python-1.4.3/tests/integrations/django/myapp/ 0000775 0000000 0000000 00000000000 14125057761 0022632 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/django/myapp/__init__.py 0000664 0000000 0000000 00000000000 14125057761 0024731 0 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/django/myapp/asgi.py 0000664 0000000 0000000 00000000747 14125057761 0024137 0 ustar 00root root 0000000 0000000 """
ASGI entrypoint. Configures Django and then runs the application
defined in the ASGI_APPLICATION setting.
"""
import os
import django
from channels.routing import get_default_application
os.environ.setdefault(
"DJANGO_SETTINGS_MODULE", "tests.integrations.django.myapp.settings"
)
django.setup()
channels_application = get_default_application()
if django.VERSION >= (3, 0):
from django.core.asgi import get_asgi_application
asgi_application = get_asgi_application()
sentry-python-1.4.3/tests/integrations/django/myapp/manage.py 0000664 0000000 0000000 00000000434 14125057761 0024435 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault(
"DJANGO_SETTINGS_MODULE", "tests.integrations.django.myapp.settings"
)
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
sentry-python-1.4.3/tests/integrations/django/myapp/management/ 0000775 0000000 0000000 00000000000 14125057761 0024746 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/django/myapp/management/__init__.py 0000664 0000000 0000000 00000000000 14125057761 0027045 0 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/django/myapp/management/commands/ 0000775 0000000 0000000 00000000000 14125057761 0026547 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/django/myapp/management/commands/__init__.py 0000664 0000000 0000000 00000000000 14125057761 0030646 0 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/django/myapp/management/commands/mycrash.py 0000664 0000000 0000000 00000000273 14125057761 0030571 0 ustar 00root root 0000000 0000000 from django.core.management.base import BaseCommand
class Command(BaseCommand):
def add_arguments(self, parser):
pass
def handle(self, *args, **options):
1 / 0
sentry-python-1.4.3/tests/integrations/django/myapp/middleware.py 0000664 0000000 0000000 00000000705 14125057761 0025323 0 ustar 00root root 0000000 0000000 import asyncio
from django.utils.decorators import sync_and_async_middleware
@sync_and_async_middleware
def simple_middleware(get_response):
if asyncio.iscoroutinefunction(get_response):
async def middleware(request):
response = await get_response(request)
return response
else:
def middleware(request):
response = get_response(request)
return response
return middleware
sentry-python-1.4.3/tests/integrations/django/myapp/routing.py 0000664 0000000 0000000 00000000427 14125057761 0024676 0 ustar 00root root 0000000 0000000 import channels
from channels.http import AsgiHandler
from channels.routing import ProtocolTypeRouter
if channels.__version__ < "3.0.0":
channels_handler = AsgiHandler
else:
channels_handler = AsgiHandler()
application = ProtocolTypeRouter({"http": channels_handler})
sentry-python-1.4.3/tests/integrations/django/myapp/settings.py 0000664 0000000 0000000 00000011331 14125057761 0025043 0 ustar 00root root 0000000 0000000 """
Django settings for myapp project.
Generated by 'django-admin startproject' using Django 2.0.7.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
# We shouldn't access settings while setting up integrations. Initialize SDK
# here to provoke any errors that might occur.
import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration
sentry_sdk.init(integrations=[DjangoIntegration()])
import os
try:
# Django >= 1.10
from django.utils.deprecation import MiddlewareMixin
except ImportError:
# Not required for Django <= 1.9, see:
# https://docs.djangoproject.com/en/1.10/topics/http/middleware/#upgrading-pre-django-1-10-style-middleware
MiddlewareMixin = object
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = "u95e#xr$t3!vdux)fj11!*q*^w^^r#kiyrvt3kjui-t_k%m3op"
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ["localhost"]
# Application definition
INSTALLED_APPS = [
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"tests.integrations.django.myapp",
]
class TestMiddleware(MiddlewareMixin):
def process_request(self, request):
# https://github.com/getsentry/sentry-python/issues/837 -- We should
# not touch the resolver_match because apparently people rely on it.
if request.resolver_match:
assert not getattr(request.resolver_match.callback, "__wrapped__", None)
if "middleware-exc" in request.path:
1 / 0
def process_response(self, request, response):
return response
def TestFunctionMiddleware(get_response): # noqa: N802
def middleware(request):
return get_response(request)
return middleware
MIDDLEWARE_CLASSES = [
"django.contrib.sessions.middleware.SessionMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"tests.integrations.django.myapp.settings.TestMiddleware",
]
if MiddlewareMixin is not object:
MIDDLEWARE = MIDDLEWARE_CLASSES + [
"tests.integrations.django.myapp.settings.TestFunctionMiddleware"
]
ROOT_URLCONF = "tests.integrations.django.myapp.urls"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"debug": True,
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
}
]
WSGI_APPLICATION = "tests.integrations.django.myapp.wsgi.application"
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}}
try:
import psycopg2 # noqa
DATABASES["postgres"] = {
"ENGINE": "django.db.backends.postgresql_psycopg2",
"NAME": os.environ["SENTRY_PYTHON_TEST_POSTGRES_NAME"],
"USER": os.environ["SENTRY_PYTHON_TEST_POSTGRES_USER"],
"PASSWORD": os.environ["SENTRY_PYTHON_TEST_POSTGRES_PASSWORD"],
"HOST": "localhost",
"PORT": 5432,
}
except (ImportError, KeyError):
pass
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"
},
{"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"},
{"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"},
{"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
USE_L10N = True
USE_TZ = False
TEMPLATE_DEBUG = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = "/static/"
# django-channels specific
ASGI_APPLICATION = "tests.integrations.django.myapp.routing.application"
sentry-python-1.4.3/tests/integrations/django/myapp/templates/ 0000775 0000000 0000000 00000000000 14125057761 0024630 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/django/myapp/templates/error.html 0000664 0000000 0000000 00000000113 14125057761 0026642 0 ustar 00root root 0000000 0000000 1
2
3
4
5
6
7
8
9
{% invalid template tag %}
11
12
13
14
15
16
17
18
19
20
sentry-python-1.4.3/tests/integrations/django/myapp/templates/user_name.html 0000664 0000000 0000000 00000000043 14125057761 0027471 0 ustar 00root root 0000000 0000000 {{ request.user }}: {{ user_age }}
sentry-python-1.4.3/tests/integrations/django/myapp/urls.py 0000664 0000000 0000000 00000005715 14125057761 0024201 0 ustar 00root root 0000000 0000000 """myapp URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from __future__ import absolute_import
try:
from django.urls import path
except ImportError:
from django.conf.urls import url
def path(path, *args, **kwargs):
return url("^{}$".format(path), *args, **kwargs)
from . import views
urlpatterns = [
path("view-exc", views.view_exc, name="view_exc"),
path(
"read-body-and-view-exc",
views.read_body_and_view_exc,
name="read_body_and_view_exc",
),
path("middleware-exc", views.message, name="middleware_exc"),
path("message", views.message, name="message"),
path("mylogin", views.mylogin, name="mylogin"),
path("classbased", views.ClassBasedView.as_view(), name="classbased"),
path("sentryclass", views.SentryClassBasedView(), name="sentryclass"),
path(
"sentryclass-csrf",
views.SentryClassBasedViewWithCsrf(),
name="sentryclass_csrf",
),
path("post-echo", views.post_echo, name="post_echo"),
path("template-exc", views.template_exc, name="template_exc"),
path("template-test", views.template_test, name="template_test"),
path("template-test2", views.template_test2, name="template_test2"),
path(
"permission-denied-exc",
views.permission_denied_exc,
name="permission_denied_exc",
),
path(
"csrf-hello-not-exempt",
views.csrf_hello_not_exempt,
name="csrf_hello_not_exempt",
),
]
# async views
if views.async_message is not None:
urlpatterns.append(path("async_message", views.async_message, name="async_message"))
if views.my_async_view is not None:
urlpatterns.append(path("my_async_view", views.my_async_view, name="my_async_view"))
# rest framework
try:
urlpatterns.append(
path("rest-framework-exc", views.rest_framework_exc, name="rest_framework_exc")
)
urlpatterns.append(
path(
"rest-framework-read-body-and-exc",
views.rest_framework_read_body_and_exc,
name="rest_framework_read_body_and_exc",
)
)
urlpatterns.append(path("rest-hello", views.rest_hello, name="rest_hello"))
urlpatterns.append(
path(
"rest-permission-denied-exc",
views.rest_permission_denied_exc,
name="rest_permission_denied_exc",
)
)
except AttributeError:
pass
handler500 = views.handler500
handler404 = views.handler404
sentry-python-1.4.3/tests/integrations/django/myapp/views.py 0000664 0000000 0000000 00000007145 14125057761 0024350 0 ustar 00root root 0000000 0000000 from django import VERSION
from django.contrib.auth import login
from django.contrib.auth.models import User
from django.core.exceptions import PermissionDenied
from django.http import HttpResponse, HttpResponseNotFound, HttpResponseServerError
from django.shortcuts import render
from django.template.response import TemplateResponse
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import ListView
try:
from rest_framework.decorators import api_view
@api_view(["POST"])
def rest_framework_exc(request):
1 / 0
@api_view(["POST"])
def rest_framework_read_body_and_exc(request):
request.data
1 / 0
@api_view(["GET"])
def rest_hello(request):
return HttpResponse("ok")
@api_view(["GET"])
def rest_permission_denied_exc(request):
raise PermissionDenied("bye")
except ImportError:
pass
import sentry_sdk
@csrf_exempt
def view_exc(request):
1 / 0
# This is a "class based view" as previously found in the sentry codebase. The
# interesting property of this one is that csrf_exempt, as a class attribute,
# is not in __dict__, so regular use of functools.wraps will not forward the
# attribute.
class SentryClassBasedView(object):
csrf_exempt = True
def __call__(self, request):
return HttpResponse("ok")
class SentryClassBasedViewWithCsrf(object):
def __call__(self, request):
return HttpResponse("ok")
@csrf_exempt
def read_body_and_view_exc(request):
request.read()
1 / 0
@csrf_exempt
def message(request):
sentry_sdk.capture_message("hi")
return HttpResponse("ok")
@csrf_exempt
def mylogin(request):
user = User.objects.create_user("john", "lennon@thebeatles.com", "johnpassword")
user.backend = "django.contrib.auth.backends.ModelBackend"
login(request, user)
return HttpResponse("ok")
@csrf_exempt
def handler500(request):
return HttpResponseServerError("Sentry error: %s" % sentry_sdk.last_event_id())
class ClassBasedView(ListView):
model = None
@method_decorator(csrf_exempt)
def dispatch(self, request, *args, **kwargs):
return super(ClassBasedView, self).dispatch(request, *args, **kwargs)
def head(self, *args, **kwargs):
sentry_sdk.capture_message("hi")
return HttpResponse("")
def post(self, *args, **kwargs):
return HttpResponse("ok")
@csrf_exempt
def post_echo(request):
sentry_sdk.capture_message("hi")
return HttpResponse(request.body)
@csrf_exempt
def handler404(*args, **kwargs):
sentry_sdk.capture_message("not found", level="error")
return HttpResponseNotFound("404")
@csrf_exempt
def template_exc(request, *args, **kwargs):
return render(request, "error.html")
@csrf_exempt
def template_test(request, *args, **kwargs):
return render(request, "user_name.html", {"user_age": 20})
@csrf_exempt
def template_test2(request, *args, **kwargs):
return TemplateResponse(
request, ("user_name.html", "another_template.html"), {"user_age": 25}
)
@csrf_exempt
def permission_denied_exc(*args, **kwargs):
raise PermissionDenied("bye")
def csrf_hello_not_exempt(*args, **kwargs):
return HttpResponse("ok")
if VERSION >= (3, 1):
# Use exec to produce valid Python 2
exec(
"""async def async_message(request):
sentry_sdk.capture_message("hi")
return HttpResponse("ok")"""
)
exec(
"""async def my_async_view(request):
import asyncio
await asyncio.sleep(1)
return HttpResponse('Hello World')"""
)
else:
async_message = None
my_async_view = None
sentry-python-1.4.3/tests/integrations/django/myapp/wsgi.py 0000664 0000000 0000000 00000000643 14125057761 0024160 0 ustar 00root root 0000000 0000000 """
WSGI config for myapp project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault(
"DJANGO_SETTINGS_MODULE", "tests.integrations.django.myapp.settings"
)
application = get_wsgi_application()
sentry-python-1.4.3/tests/integrations/django/test_basic.py 0000664 0000000 0000000 00000052660 14125057761 0024207 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
import pytest
import pytest_django
import json
from werkzeug.test import Client
from django import VERSION as DJANGO_VERSION
from django.contrib.auth.models import User
from django.core.management import execute_from_command_line
from django.db.utils import OperationalError, ProgrammingError, DataError
from sentry_sdk.integrations.executing import ExecutingIntegration
try:
from django.urls import reverse
except ImportError:
from django.core.urlresolvers import reverse
from sentry_sdk import capture_message, capture_exception, configure_scope
from sentry_sdk.integrations.django import DjangoIntegration
from tests.integrations.django.myapp.wsgi import application
# Hack to prevent from experimental feature introduced in version `4.3.0` in `pytest-django` that
# requires explicit database allow from failing the test
pytest_mark_django_db_decorator = pytest.mark.django_db
try:
    pytest_version = tuple(map(int, pytest_django.__version__.split(".")))
    if pytest_version > (4, 2, 0):
        pytest_mark_django_db_decorator = pytest.mark.django_db(databases="__all__")
except ValueError:
    # Non-numeric version component (e.g. a ".dev" pre-release) broke int().
    if "dev" in pytest_django.__version__:
        pytest_mark_django_db_decorator = pytest.mark.django_db(databases="__all__")
except AttributeError:
    # Very old pytest-django without __version__: keep the plain marker.
    pass
@pytest.fixture
def client():
    """Werkzeug test client wrapping the Django WSGI application."""
    test_client = Client(application)
    return test_client
def test_view_exceptions(sentry_init, client, capture_exceptions, capture_events):
    """An exception raised inside a view is captured with the 'django' mechanism."""
    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
    exceptions = capture_exceptions()
    events = capture_events()
    client.get(reverse("view_exc"))
    (error,) = exceptions
    assert isinstance(error, ZeroDivisionError)
    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "django"
def test_ensures_x_forwarded_header_is_honored_in_sdk_when_enabled_in_django(
    sentry_init, client, capture_exceptions, capture_events, settings
):
    """
    Test that ensures if django settings.USE_X_FORWARDED_HOST is set to True
    then the SDK builds the request url from the `HTTP_X_FORWARDED_HOST` header.
    """
    settings.USE_X_FORWARDED_HOST = True
    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
    exceptions = capture_exceptions()
    events = capture_events()
    client.get(reverse("view_exc"), headers={"X_FORWARDED_HOST": "example.com"})
    (error,) = exceptions
    assert isinstance(error, ZeroDivisionError)
    (event,) = events
    assert event["request"]["url"] == "http://example.com/view-exc"


def test_ensures_x_forwarded_header_is_not_honored_when_unenabled_in_django(
    sentry_init, client, capture_exceptions, capture_events
):
    """
    Test that ensures if django settings.USE_X_FORWARDED_HOST is left disabled
    then the SDK builds the request url from the `HTTP_HOST` header instead,
    ignoring the forwarded-host header.
    """
    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
    exceptions = capture_exceptions()
    events = capture_events()
    client.get(reverse("view_exc"), headers={"X_FORWARDED_HOST": "example.com"})
    (error,) = exceptions
    assert isinstance(error, ZeroDivisionError)
    (event,) = events
    assert event["request"]["url"] == "http://localhost/view-exc"
def test_middleware_exceptions(sentry_init, client, capture_exceptions):
    """Exceptions raised inside Django middleware are captured."""
    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
    exceptions = capture_exceptions()
    client.get(reverse("middleware_exc"))
    (error,) = exceptions
    assert isinstance(error, ZeroDivisionError)


def test_request_captured(sentry_init, client, capture_events):
    """The event produced by a view carries the full WSGI request context."""
    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
    events = capture_events()
    content, status, headers = client.get(reverse("message"))
    assert b"".join(content) == b"ok"
    (event,) = events
    assert event["transaction"] == "/message"
    assert event["request"] == {
        "cookies": {},
        "env": {"SERVER_NAME": "localhost", "SERVER_PORT": "80"},
        "headers": {"Host": "localhost"},
        "method": "GET",
        "query_string": "",
        "url": "http://localhost/message",
    }
def test_transaction_with_class_view(sentry_init, client, capture_events):
    """transaction_style='function_name' names the transaction after the dotted view path."""
    sentry_init(
        integrations=[DjangoIntegration(transaction_style="function_name")],
        send_default_pii=True,
    )
    events = capture_events()
    content, status, headers = client.head(reverse("classbased"))
    assert status.lower() == "200 ok"
    (event,) = events
    assert (
        event["transaction"] == "tests.integrations.django.myapp.views.ClassBasedView"
    )
    assert event["message"] == "hi"
@pytest.mark.forked
@pytest.mark.django_db
def test_user_captured(sentry_init, client, capture_events):
    """With send_default_pii, the logged-in Django user is attached to events."""
    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
    events = capture_events()
    content, status, headers = client.get(reverse("mylogin"))
    assert b"".join(content) == b"ok"
    # Logging in alone must not produce an event.
    assert not events
    content, status, headers = client.get(reverse("message"))
    assert b"".join(content) == b"ok"
    (event,) = events
    assert event["user"] == {
        "email": "lennon@thebeatles.com",
        "username": "john",
        "id": "1",
    }
@pytest.mark.forked
@pytest.mark.django_db
def test_queryset_repr(sentry_init, capture_events):
sentry_init(integrations=[DjangoIntegration()])
events = capture_events()
User.objects.create_user("john", "lennon@thebeatles.com", "johnpassword")
try:
my_queryset = User.objects.all() # noqa
1 / 0
except Exception:
capture_exception()
(event,) = events
(exception,) = event["exception"]["values"]
assert exception["type"] == "ZeroDivisionError"
(frame,) = exception["stacktrace"]["frames"]
assert frame["vars"]["my_queryset"].startswith(
"= (1, 7):
views_tests.append(
(
reverse("template_test"),
'- op="django.template.render": description="user_name.html"',
),
)
for url, expected_line in views_tests:
events = capture_events()
_content, status, _headers = client.get(url)
transaction = events[0]
assert expected_line in render_span_tree(transaction)
def test_middleware_spans(sentry_init, client, capture_events, render_span_tree):
sentry_init(
integrations=[DjangoIntegration()],
traces_sample_rate=1.0,
_experiments={"record_sql_params": True},
)
events = capture_events()
_content, status, _headers = client.get(reverse("message"))
message, transaction = events
assert message["message"] == "hi"
if DJANGO_VERSION >= (1, 10):
assert (
render_span_tree(transaction)
== """\
- op="http.server": description=null
- op="django.middleware": description="django.contrib.sessions.middleware.SessionMiddleware.__call__"
- op="django.middleware": description="django.contrib.auth.middleware.AuthenticationMiddleware.__call__"
- op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.__call__"
- op="django.middleware": description="tests.integrations.django.myapp.settings.TestMiddleware.__call__"
- op="django.middleware": description="tests.integrations.django.myapp.settings.TestFunctionMiddleware.__call__"
- op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
- op="django.view": description="message"\
"""
)
else:
assert (
render_span_tree(transaction)
== """\
- op="http.server": description=null
- op="django.middleware": description="django.contrib.sessions.middleware.SessionMiddleware.process_request"
- op="django.middleware": description="django.contrib.auth.middleware.AuthenticationMiddleware.process_request"
- op="django.middleware": description="tests.integrations.django.myapp.settings.TestMiddleware.process_request"
- op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
- op="django.view": description="message"
- op="django.middleware": description="tests.integrations.django.myapp.settings.TestMiddleware.process_response"
- op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.process_response"
- op="django.middleware": description="django.contrib.sessions.middleware.SessionMiddleware.process_response"\
"""
)
def test_middleware_spans_disabled(sentry_init, client, capture_events):
    """With middleware_spans=False the transaction contains no spans at all."""
    sentry_init(
        integrations=[DjangoIntegration(middleware_spans=False)], traces_sample_rate=1.0
    )
    events = capture_events()
    _content, status, _headers = client.get(reverse("message"))
    message, transaction = events
    assert message["message"] == "hi"
    assert not transaction["spans"]


def test_csrf(sentry_init, client):
    """
    Assert that CSRF view decorator works even with the view wrapped in our own
    callable.
    """
    sentry_init(integrations=[DjangoIntegration()])
    # Views without csrf_exempt must still be rejected...
    content, status, _headers = client.post(reverse("csrf_hello_not_exempt"))
    assert status.lower() == "403 forbidden"
    content, status, _headers = client.post(reverse("sentryclass_csrf"))
    assert status.lower() == "403 forbidden"
    # ...while exempted function-based and class-based views accept the POST.
    content, status, _headers = client.post(reverse("sentryclass"))
    assert status.lower() == "200 ok"
    assert b"".join(content) == b"ok"
    content, status, _headers = client.post(reverse("classbased"))
    assert status.lower() == "200 ok"
    assert b"".join(content) == b"ok"
    content, status, _headers = client.post(reverse("message"))
    assert status.lower() == "200 ok"
    assert b"".join(content) == b"ok"
sentry-python-1.4.3/tests/integrations/django/test_transactions.py 0000664 0000000 0000000 00000004041 14125057761 0025624 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
import pytest
import django
if django.VERSION >= (2, 0):
# TODO: once we stop supporting django < 2, use the real name of this
# function (re_path)
from django.urls import re_path as url
from django.conf.urls import include
else:
from django.conf.urls import url, include
# NOTE: the regex patterns below must use *named* groups — RavenResolver
# replaces each named group with a "{name}" placeholder when normalizing a
# request path into a transaction name (the tests below assert exactly that).
# The angle brackets of the group names were lost in a previous extraction
# and are restored here.
if django.VERSION < (1, 9):
    # Pre-1.9 include() tuples are (patterns, app_namespace, instance_namespace).
    included_url_conf = (url(r"^foo/bar/(?P<param>[\w]+)", lambda x: ""),), "", ""
else:
    included_url_conf = ((url(r"^foo/bar/(?P<param>[\w]+)", lambda x: ""),), "")
from sentry_sdk.integrations.django.transactions import RavenResolver
example_url_conf = (
    url(r"^api/(?P<project_id>[\w_-]+)/store/$", lambda x: ""),
    url(r"^api/(?P<version>(v1|v2))/author/$", lambda x: ""),
    url(r"^report/", lambda x: ""),
    url(r"^example/", include(included_url_conf)),
)
def test_legacy_resolver_no_match():
    """Paths that match no URL pattern are returned unchanged."""
    resolver = RavenResolver()
    result = resolver.resolve("/foo/bar", example_url_conf)
    assert result == "/foo/bar"


def test_legacy_resolver_complex_match():
    """A named regex group is replaced by its {name} placeholder."""
    resolver = RavenResolver()
    result = resolver.resolve("/api/1234/store/", example_url_conf)
    assert result == "/api/{project_id}/store/"


def test_legacy_resolver_complex_either_match():
    """Alternation inside a named group resolves to the same placeholder."""
    resolver = RavenResolver()
    result = resolver.resolve("/api/v1/author/", example_url_conf)
    assert result == "/api/{version}/author/"
    result = resolver.resolve("/api/v2/author/", example_url_conf)
    assert result == "/api/{version}/author/"


def test_legacy_resolver_included_match():
    """Patterns nested via include() are resolved through the prefix."""
    resolver = RavenResolver()
    result = resolver.resolve("/example/foo/bar/baz", example_url_conf)
    assert result == "/example/foo/bar/{param}"
@pytest.mark.skipif(django.VERSION < (2, 0), reason="Requires Django > 2.0")
def test_legacy_resolver_newstyle_django20_urlconf():
    """RavenResolver also understands Django 2.0+ path() converters."""
    from django.urls import path

    # The converter must be named <project_id> so the resolver can emit the
    # "{project_id}" placeholder asserted below (the name was lost to
    # angle-bracket stripping in a previous extraction and is restored here).
    url_conf = (path("api/v2/<project_id>/store/", lambda x: ""),)
    resolver = RavenResolver()
    result = resolver.resolve("/api/v2/1234/store/", url_conf)
    assert result == "/api/v2/{project_id}/store/"
sentry-python-1.4.3/tests/integrations/excepthook/ 0000775 0000000 0000000 00000000000 14125057761 0022413 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/excepthook/test_excepthook.py 0000664 0000000 0000000 00000003305 14125057761 0026176 0 ustar 00root root 0000000 0000000 import pytest
import sys
import subprocess
from textwrap import dedent
def test_excepthook(tmpdir):
    """An unhandled exception at interpreter exit fires the excepthook integration.

    Runs a throwaway script in a subprocess (excepthook only triggers on real
    interpreter shutdown) with the HTTP transport monkeypatched to print, and
    asserts the event — including frame locals — reached the transport.
    """
    # NOTE(review): the embedded script's exact indentation was lost in
    # extraction; dedent() strips the common leading whitespace either way.
    app = tmpdir.join("app.py")
    app.write(
        dedent(
            """
from sentry_sdk import init, transport
def send_event(self, event):
    print("capture event was called")
    print(event)
transport.HttpTransport._send_event = send_event
init("http://foobar@localhost/123")
frame_value = "LOL"
1/0
"""
        )
    )
    with pytest.raises(subprocess.CalledProcessError) as excinfo:
        subprocess.check_output([sys.executable, str(app)], stderr=subprocess.STDOUT)
    output = excinfo.value.output
    print(output)
    assert b"ZeroDivisionError" in output
    assert b"LOL" in output
    assert b"capture event was called" in output
def test_always_value_excepthook(tmpdir):
    """With always_run=True the hook fires even in interactive mode (sys.ps1 set)."""
    # NOTE(review): the embedded script's exact indentation was lost in
    # extraction; dedent() strips the common leading whitespace either way.
    app = tmpdir.join("app.py")
    app.write(
        dedent(
            """
import sys
from sentry_sdk import init, transport
from sentry_sdk.integrations.excepthook import ExcepthookIntegration
def send_event(self, event):
    print("capture event was called")
    print(event)
transport.HttpTransport._send_event = send_event
sys.ps1 = "always_value_test"
init("http://foobar@localhost/123",
    integrations=[ExcepthookIntegration(always_run=True)]
)
frame_value = "LOL"
1/0
"""
        )
    )
    with pytest.raises(subprocess.CalledProcessError) as excinfo:
        subprocess.check_output([sys.executable, str(app)], stderr=subprocess.STDOUT)
    output = excinfo.value.output
    print(output)
    assert b"ZeroDivisionError" in output
    assert b"LOL" in output
    assert b"capture event was called" in output
sentry-python-1.4.3/tests/integrations/falcon/ 0000775 0000000 0000000 00000000000 14125057761 0021504 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/falcon/test_falcon.py 0000664 0000000 0000000 00000017533 14125057761 0024370 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
import logging
import pytest
pytest.importorskip("falcon")
import falcon
import falcon.testing
import sentry_sdk
from sentry_sdk.integrations.falcon import FalconIntegration
from sentry_sdk.integrations.logging import LoggingIntegration
@pytest.fixture
def make_app(sentry_init):
    """Factory fixture: builds a minimal Falcon app with a /message route."""
    def inner():
        class MessageResource:
            def on_get(self, req, resp):
                # Reports a message so tests can assert on the captured event.
                sentry_sdk.capture_message("hi")
                resp.media = "hi"

        app = falcon.API()
        app.add_route("/message", MessageResource())
        return app

    return inner


@pytest.fixture
def make_client(make_app):
    """Factory fixture: wraps the app from make_app in a Falcon test client."""
    def inner():
        app = make_app()
        return falcon.testing.TestClient(app)

    return inner
def test_has_context(sentry_init, capture_events, make_client):
    """Events include request context and the URI-template-based transaction."""
    sentry_init(integrations=[FalconIntegration()])
    events = capture_events()
    client = make_client()
    response = client.simulate_get("/message")
    assert response.status == falcon.HTTP_200
    (event,) = events
    assert event["transaction"] == "/message"  # Falcon URI template
    assert "data" not in event["request"]
    assert event["request"]["url"] == "http://falconframework.org/message"


@pytest.mark.parametrize(
    "transaction_style,expected_transaction",
    [("uri_template", "/message"), ("path", "/message")],
)
def test_transaction_style(
    sentry_init, make_client, capture_events, transaction_style, expected_transaction
):
    """Both transaction_style options yield the expected transaction name."""
    integration = FalconIntegration(transaction_style=transaction_style)
    sentry_init(integrations=[integration])
    events = capture_events()
    client = make_client()
    response = client.simulate_get("/message")
    assert response.status == falcon.HTTP_200
    (event,) = events
    assert event["transaction"] == expected_transaction
def test_errors(sentry_init, capture_exceptions, capture_events):
sentry_init(integrations=[FalconIntegration()], debug=True)
class ZeroDivisionErrorResource:
def on_get(self, req, resp):
1 / 0
app = falcon.API()
app.add_route("/", ZeroDivisionErrorResource())
exceptions = capture_exceptions()
events = capture_events()
client = falcon.testing.TestClient(app)
try:
client.simulate_get("/")
except ZeroDivisionError:
pass
(exc,) = exceptions
assert isinstance(exc, ZeroDivisionError)
(event,) = events
assert event["exception"]["values"][0]["mechanism"]["type"] == "falcon"
def test_falcon_large_json_request(sentry_init, capture_events):
sentry_init(integrations=[FalconIntegration()])
data = {"foo": {"bar": "a" * 2000}}
class Resource:
def on_post(self, req, resp):
assert req.media == data
sentry_sdk.capture_message("hi")
resp.media = "ok"
app = falcon.API()
app.add_route("/", Resource())
events = capture_events()
client = falcon.testing.TestClient(app)
response = client.simulate_post("/", json=data)
assert response.status == falcon.HTTP_200
(event,) = events
assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
"": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
}
assert len(event["request"]["data"]["foo"]["bar"]) == 512
@pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
def test_falcon_empty_json_request(sentry_init, capture_events, data):
sentry_init(integrations=[FalconIntegration()])
class Resource:
def on_post(self, req, resp):
assert req.media == data
sentry_sdk.capture_message("hi")
resp.media = "ok"
app = falcon.API()
app.add_route("/", Resource())
events = capture_events()
client = falcon.testing.TestClient(app)
response = client.simulate_post("/", json=data)
assert response.status == falcon.HTTP_200
(event,) = events
assert event["request"]["data"] == data
def test_falcon_raw_data_request(sentry_init, capture_events):
sentry_init(integrations=[FalconIntegration()])
class Resource:
def on_post(self, req, resp):
sentry_sdk.capture_message("hi")
resp.media = "ok"
app = falcon.API()
app.add_route("/", Resource())
events = capture_events()
client = falcon.testing.TestClient(app)
response = client.simulate_post("/", body="hi")
assert response.status == falcon.HTTP_200
(event,) = events
assert event["request"]["headers"]["Content-Length"] == "2"
assert event["request"]["data"] == ""
def test_logging(sentry_init, capture_events):
sentry_init(
integrations=[FalconIntegration(), LoggingIntegration(event_level="ERROR")]
)
logger = logging.getLogger()
app = falcon.API()
class Resource:
def on_get(self, req, resp):
logger.error("hi")
resp.media = "ok"
app.add_route("/", Resource())
events = capture_events()
client = falcon.testing.TestClient(app)
client.simulate_get("/")
(event,) = events
assert event["level"] == "error"
def test_500(sentry_init, capture_events):
    """A custom error handler can capture the exception and use last_event_id()."""
    sentry_init(integrations=[FalconIntegration()])
    app = falcon.API()

    class Resource:
        def on_get(self, req, resp):
            1 / 0

    app.add_route("/", Resource())

    def http500_handler(ex, req, resp, params):
        sentry_sdk.capture_exception(ex)
        resp.media = {"message": "Sentry error: %s" % sentry_sdk.last_event_id()}

    app.add_error_handler(Exception, http500_handler)
    events = capture_events()
    client = falcon.testing.TestClient(app)
    response = client.simulate_get("/")
    (event,) = events
    # The response must reference the same event id that was captured.
    assert response.json == {"message": "Sentry error: %s" % event["event_id"]}


def test_error_in_errorhandler(sentry_init, capture_events):
    """If the error handler itself raises, that exception is also captured,
    with the original exception's locals visible in the stacktrace."""
    sentry_init(integrations=[FalconIntegration()])
    app = falcon.API()

    class Resource:
        def on_get(self, req, resp):
            raise ValueError()

    app.add_route("/", Resource())

    def http500_handler(ex, req, resp, params):
        1 / 0

    app.add_error_handler(Exception, http500_handler)
    events = capture_events()
    client = falcon.testing.TestClient(app)
    with pytest.raises(ZeroDivisionError):
        client.simulate_get("/")
    (event,) = events
    last_ex_values = event["exception"]["values"][-1]
    assert last_ex_values["type"] == "ZeroDivisionError"
    assert last_ex_values["stacktrace"]["frames"][-1]["vars"]["ex"] == "ValueError()"
def test_bad_request_not_captured(sentry_init, capture_events):
sentry_init(integrations=[FalconIntegration()])
events = capture_events()
app = falcon.API()
class Resource:
def on_get(self, req, resp):
raise falcon.HTTPBadRequest()
app.add_route("/", Resource())
client = falcon.testing.TestClient(app)
client.simulate_get("/")
assert not events
def test_does_not_leak_scope(sentry_init, capture_events):
sentry_init(integrations=[FalconIntegration()])
events = capture_events()
with sentry_sdk.configure_scope() as scope:
scope.set_tag("request_data", False)
app = falcon.API()
class Resource:
def on_get(self, req, resp):
with sentry_sdk.configure_scope() as scope:
scope.set_tag("request_data", True)
def generator():
for row in range(1000):
with sentry_sdk.configure_scope() as scope:
assert scope._tags["request_data"]
yield (str(row) + "\n").encode()
resp.stream = generator()
app.add_route("/", Resource())
client = falcon.testing.TestClient(app)
response = client.simulate_get("/")
expected_response = "".join(str(row) + "\n" for row in range(1000))
assert response.text == expected_response
assert not events
with sentry_sdk.configure_scope() as scope:
assert not scope._tags["request_data"]
sentry-python-1.4.3/tests/integrations/flask/ 0000775 0000000 0000000 00000000000 14125057761 0021342 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/flask/test_flask.py 0000664 0000000 0000000 00000046537 14125057761 0024072 0 ustar 00root root 0000000 0000000 import json
import pytest
import logging
from io import BytesIO
flask = pytest.importorskip("flask")
from flask import Flask, Response, request, abort, stream_with_context
from flask.views import View
from flask_login import LoginManager, login_user
from sentry_sdk import (
set_tag,
configure_scope,
capture_message,
capture_exception,
last_event_id,
Hub,
)
from sentry_sdk.integrations.logging import LoggingIntegration
import sentry_sdk.integrations.flask as flask_sentry
login_manager = LoginManager()
@pytest.fixture
def app():
    """Minimal Flask app with flask-login wired up and a /message route."""
    app = Flask(__name__)
    app.config["TESTING"] = True
    app.secret_key = "haha"
    login_manager.init_app(app)

    @app.route("/message")
    def hi():
        capture_message("hi")
        return "ok"

    return app


@pytest.fixture(params=("auto", "manual"))
def integration_enabled_params(request):
    """Parametrizes tests over both ways of enabling the Flask integration."""
    if request.param == "auto":
        return {"auto_enabling_integrations": True}
    elif request.param == "manual":
        return {"integrations": [flask_sentry.FlaskIntegration()]}
    else:
        raise ValueError(request.param)
def test_has_context(sentry_init, app, capture_events):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
events = capture_events()
client = app.test_client()
response = client.get("/message")
assert response.status_code == 200
(event,) = events
assert event["transaction"] == "hi"
assert "data" not in event["request"]
assert event["request"]["url"] == "http://localhost/message"
@pytest.mark.parametrize(
"transaction_style,expected_transaction", [("endpoint", "hi"), ("url", "/message")]
)
def test_transaction_style(
sentry_init, app, capture_events, transaction_style, expected_transaction
):
sentry_init(
integrations=[
flask_sentry.FlaskIntegration(transaction_style=transaction_style)
]
)
events = capture_events()
client = app.test_client()
response = client.get("/message")
assert response.status_code == 200
(event,) = events
assert event["transaction"] == expected_transaction
@pytest.mark.parametrize("debug", (True, False))
@pytest.mark.parametrize("testing", (True, False))
def test_errors(
sentry_init,
capture_exceptions,
capture_events,
app,
debug,
testing,
integration_enabled_params,
):
sentry_init(debug=True, **integration_enabled_params)
app.debug = debug
app.testing = testing
@app.route("/")
def index():
1 / 0
exceptions = capture_exceptions()
events = capture_events()
client = app.test_client()
try:
client.get("/")
except ZeroDivisionError:
pass
(exc,) = exceptions
assert isinstance(exc, ZeroDivisionError)
(event,) = events
assert event["exception"]["values"][0]["mechanism"]["type"] == "flask"
def test_flask_login_not_installed(
sentry_init, app, capture_events, monkeypatch, integration_enabled_params
):
sentry_init(**integration_enabled_params)
monkeypatch.setattr(flask_sentry, "flask_login", None)
events = capture_events()
client = app.test_client()
client.get("/message")
(event,) = events
assert event.get("user", {}).get("id") is None
def test_flask_login_not_configured(
sentry_init, app, capture_events, monkeypatch, integration_enabled_params
):
sentry_init(**integration_enabled_params)
assert flask_sentry.flask_login
events = capture_events()
client = app.test_client()
client.get("/message")
(event,) = events
assert event.get("user", {}).get("id") is None
def test_flask_login_partially_configured(
sentry_init, app, capture_events, monkeypatch, integration_enabled_params
):
sentry_init(**integration_enabled_params)
events = capture_events()
login_manager = LoginManager()
login_manager.init_app(app)
client = app.test_client()
client.get("/message")
(event,) = events
assert event.get("user", {}).get("id") is None
@pytest.mark.parametrize("send_default_pii", [True, False])
@pytest.mark.parametrize("user_id", [None, "42", 3])
def test_flask_login_configured(
send_default_pii,
sentry_init,
app,
user_id,
capture_events,
monkeypatch,
integration_enabled_params,
):
sentry_init(send_default_pii=send_default_pii, **integration_enabled_params)
class User(object):
is_authenticated = is_active = True
is_anonymous = user_id is not None
def get_id(self):
return str(user_id)
@login_manager.user_loader
def load_user(user_id):
if user_id is not None:
return User()
@app.route("/login")
def login():
if user_id is not None:
login_user(User())
return "ok"
events = capture_events()
client = app.test_client()
assert client.get("/login").status_code == 200
assert not events
assert client.get("/message").status_code == 200
(event,) = events
if user_id is None or not send_default_pii:
assert event.get("user", {}).get("id") is None
else:
assert event["user"]["id"] == str(user_id)
def test_flask_large_json_request(sentry_init, capture_events, app):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
data = {"foo": {"bar": "a" * 2000}}
@app.route("/", methods=["POST"])
def index():
assert request.get_json() == data
assert request.get_data() == json.dumps(data).encode("ascii")
assert not request.form
capture_message("hi")
return "ok"
events = capture_events()
client = app.test_client()
response = client.post("/", content_type="application/json", data=json.dumps(data))
assert response.status_code == 200
(event,) = events
assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
"": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
}
assert len(event["request"]["data"]["foo"]["bar"]) == 512
def test_flask_session_tracking(sentry_init, capture_envelopes, app):
sentry_init(
integrations=[flask_sentry.FlaskIntegration()],
release="demo-release",
)
@app.route("/")
def index():
with configure_scope() as scope:
scope.set_user({"ip_address": "1.2.3.4", "id": "42"})
try:
raise ValueError("stuff")
except Exception:
logging.exception("stuff happened")
1 / 0
envelopes = capture_envelopes()
with app.test_client() as client:
try:
client.get("/", headers={"User-Agent": "blafasel/1.0"})
except ZeroDivisionError:
pass
Hub.current.client.flush()
(first_event, error_event, session) = envelopes
first_event = first_event.get_event()
error_event = error_event.get_event()
session = session.items[0].payload.json
aggregates = session["aggregates"]
assert first_event["exception"]["values"][0]["type"] == "ValueError"
assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
assert len(aggregates) == 1
assert aggregates[0]["crashed"] == 1
assert aggregates[0]["started"]
assert session["attrs"]["release"] == "demo-release"
@pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
def test_flask_empty_json_request(sentry_init, capture_events, app, data):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
@app.route("/", methods=["POST"])
def index():
assert request.get_json() == data
assert request.get_data() == json.dumps(data).encode("ascii")
assert not request.form
capture_message("hi")
return "ok"
events = capture_events()
client = app.test_client()
response = client.post("/", content_type="application/json", data=json.dumps(data))
assert response.status_code == 200
(event,) = events
assert event["request"]["data"] == data
def test_flask_medium_formdata_request(sentry_init, capture_events, app):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
data = {"foo": "a" * 2000}
@app.route("/", methods=["POST"])
def index():
assert request.form["foo"] == data["foo"]
assert not request.get_data()
assert not request.get_json()
capture_message("hi")
return "ok"
events = capture_events()
client = app.test_client()
response = client.post("/", data=data)
assert response.status_code == 200
(event,) = events
assert event["_meta"]["request"]["data"]["foo"] == {
"": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
}
assert len(event["request"]["data"]["foo"]) == 512
def test_flask_formdata_request_appear_transaction_body(
sentry_init, capture_events, app
):
"""
Test that ensures that transaction request data contains body, even if no exception was raised
"""
sentry_init(integrations=[flask_sentry.FlaskIntegration()], traces_sample_rate=1.0)
data = {"username": "sentry-user", "age": "26"}
@app.route("/", methods=["POST"])
def index():
assert request.form["username"] == data["username"]
assert request.form["age"] == data["age"]
assert not request.get_data()
assert not request.get_json()
set_tag("view", "yes")
capture_message("hi")
return "ok"
events = capture_events()
client = app.test_client()
response = client.post("/", data=data)
assert response.status_code == 200
event, transaction_event = events
assert "request" in transaction_event
assert "data" in transaction_event["request"]
assert transaction_event["request"]["data"] == data
@pytest.mark.parametrize("input_char", [u"a", b"a"])
def test_flask_too_large_raw_request(sentry_init, input_char, capture_events, app):
sentry_init(integrations=[flask_sentry.FlaskIntegration()], request_bodies="small")
data = input_char * 2000
@app.route("/", methods=["POST"])
def index():
assert not request.form
if isinstance(data, bytes):
assert request.get_data() == data
else:
assert request.get_data() == data.encode("ascii")
assert not request.get_json()
capture_message("hi")
return "ok"
events = capture_events()
client = app.test_client()
response = client.post("/", data=data)
assert response.status_code == 200
(event,) = events
assert event["_meta"]["request"]["data"] == {
"": {"len": 2000, "rem": [["!config", "x", 0, 2000]]}
}
assert not event["request"]["data"]
def test_flask_files_and_form(sentry_init, capture_events, app):
sentry_init(integrations=[flask_sentry.FlaskIntegration()], request_bodies="always")
data = {"foo": "a" * 2000, "file": (BytesIO(b"hello"), "hello.txt")}
@app.route("/", methods=["POST"])
def index():
assert list(request.form) == ["foo"]
assert list(request.files) == ["file"]
assert not request.get_json()
capture_message("hi")
return "ok"
events = capture_events()
client = app.test_client()
response = client.post("/", data=data)
assert response.status_code == 200
(event,) = events
assert event["_meta"]["request"]["data"]["foo"] == {
"": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
}
assert len(event["request"]["data"]["foo"]) == 512
assert event["_meta"]["request"]["data"]["file"] == {
"": {"len": 0, "rem": [["!raw", "x", 0, 0]]}
}
assert not event["request"]["data"]["file"]
@pytest.mark.parametrize(
"integrations",
[
[flask_sentry.FlaskIntegration()],
[flask_sentry.FlaskIntegration(), LoggingIntegration(event_level="ERROR")],
],
)
def test_errors_not_reported_twice(sentry_init, integrations, capture_events, app):
sentry_init(integrations=integrations)
@app.route("/")
def index():
try:
1 / 0
except Exception as e:
app.logger.exception(e)
raise e
events = capture_events()
client = app.test_client()
with pytest.raises(ZeroDivisionError):
client.get("/")
assert len(events) == 1
def test_logging(sentry_init, capture_events, app):
# ensure that Flask's logger magic doesn't break ours
sentry_init(
integrations=[
flask_sentry.FlaskIntegration(),
LoggingIntegration(event_level="ERROR"),
]
)
@app.route("/")
def index():
app.logger.error("hi")
return "ok"
events = capture_events()
client = app.test_client()
client.get("/")
(event,) = events
assert event["level"] == "error"
def test_no_errors_without_request(app, sentry_init):
    """capture_exception outside a request context must not crash the integration."""
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
    with app.app_context():
        capture_exception(ValueError())


def test_cli_commands_raise(app):
    """Flask CLI commands propagate exceptions instead of swallowing them."""
    if not hasattr(app, "cli"):
        pytest.skip("Too old flask version")
    from flask.cli import ScriptInfo

    @app.cli.command()
    def foo():
        1 / 0

    with pytest.raises(ZeroDivisionError):
        app.cli.main(
            args=["foo"], prog_name="myapp", obj=ScriptInfo(create_app=lambda _: app)
        )
def test_wsgi_level_error_is_caught(
app, capture_exceptions, capture_events, sentry_init
):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
def wsgi_app(environ, start_response):
1 / 0
app.wsgi_app = wsgi_app
client = app.test_client()
exceptions = capture_exceptions()
events = capture_events()
with pytest.raises(ZeroDivisionError) as exc:
client.get("/")
(error,) = exceptions
assert error is exc.value
(event,) = events
assert event["exception"]["values"][0]["mechanism"]["type"] == "wsgi"
def test_500(sentry_init, capture_events, app):
    # last_event_id() must be usable inside a 500 error handler, so apps can
    # show the Sentry event id to the user.
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])

    # Disable debug/testing so Flask renders the 500 handler instead of
    # re-raising the exception to the test client.
    app.debug = False
    app.testing = False

    @app.route("/")
    def index():
        1 / 0

    @app.errorhandler(500)
    def error_handler(err):
        return "Sentry error: %s" % last_event_id()

    events = capture_events()

    client = app.test_client()
    response = client.get("/")

    (event,) = events
    # The id rendered by the handler matches the id of the captured event.
    assert response.data.decode("utf-8") == "Sentry error: %s" % event["event_id"]
def test_error_in_errorhandler(sentry_init, capture_events, app):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
app.debug = False
app.testing = False
@app.route("/")
def index():
raise ValueError()
@app.errorhandler(500)
def error_handler(err):
1 / 0
events = capture_events()
client = app.test_client()
with pytest.raises(ZeroDivisionError):
client.get("/")
event1, event2 = events
(exception,) = event1["exception"]["values"]
assert exception["type"] == "ValueError"
exception = event2["exception"]["values"][-1]
assert exception["type"] == "ZeroDivisionError"
def test_bad_request_not_captured(sentry_init, capture_events, app):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
events = capture_events()
@app.route("/")
def index():
abort(400)
client = app.test_client()
client.get("/")
assert not events
def test_does_not_leak_scope(sentry_init, capture_events, app):
    # Scope changes made while handling a request (including while a
    # streaming response is being generated) must not leak into the
    # surrounding scope once the request is done.
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
    events = capture_events()

    with configure_scope() as scope:
        scope.set_tag("request_data", False)

    @app.route("/")
    def index():
        with configure_scope() as scope:
            scope.set_tag("request_data", True)

        def generate():
            for row in range(1000):
                # The request-scoped tag must still be visible while streaming.
                with configure_scope() as scope:
                    assert scope._tags["request_data"]

                yield str(row) + "\n"

        # stream_with_context keeps the request context alive for generate().
        return Response(stream_with_context(generate()), mimetype="text/csv")

    client = app.test_client()
    response = client.get("/")
    assert response.data.decode() == "".join(str(row) + "\n" for row in range(1000))
    assert not events

    # Back outside the request: the tag set inside must not have leaked.
    with configure_scope() as scope:
        assert not scope._tags["request_data"]
def test_scoped_test_client(sentry_init, app):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
@app.route("/")
def index():
return "ok"
with app.test_client() as client:
response = client.get("/")
assert response.status_code == 200
@pytest.mark.parametrize("exc_cls", [ZeroDivisionError, Exception])
def test_errorhandler_for_exception_swallows_exception(
sentry_init, app, capture_events, exc_cls
):
# In contrast to error handlers for a status code, error
# handlers for exceptions can swallow the exception (this is
# just how the Flask signal works)
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
events = capture_events()
@app.route("/")
def index():
1 / 0
@app.errorhandler(exc_cls)
def zerodivision(e):
return "ok"
with app.test_client() as client:
response = client.get("/")
assert response.status_code == 200
assert not events
def test_tracing_success(sentry_init, capture_events, app):
    # With tracing enabled, a request produces both the message event and a
    # transaction event, and tags set in before_request / the view land on
    # both.
    sentry_init(traces_sample_rate=1.0, integrations=[flask_sentry.FlaskIntegration()])

    @app.before_request
    def _():
        set_tag("before_request", "yes")

    @app.route("/message_tx")
    def hi_tx():
        set_tag("view", "yes")
        capture_message("hi")
        return "ok"

    events = capture_events()

    with app.test_client() as client:
        response = client.get("/message_tx")
        assert response.status_code == 200

    message_event, transaction_event = events

    assert transaction_event["type"] == "transaction"
    # The transaction is named after the view function (endpoint name).
    assert transaction_event["transaction"] == "hi_tx"
    assert transaction_event["contexts"]["trace"]["status"] == "ok"
    assert transaction_event["tags"]["view"] == "yes"
    assert transaction_event["tags"]["before_request"] == "yes"

    assert message_event["message"] == "hi"
    assert message_event["transaction"] == "hi_tx"
    assert message_event["tags"]["view"] == "yes"
    assert message_event["tags"]["before_request"] == "yes"
def test_tracing_error(sentry_init, capture_events, app):
sentry_init(traces_sample_rate=1.0, integrations=[flask_sentry.FlaskIntegration()])
events = capture_events()
@app.route("/error")
def error():
1 / 0
with pytest.raises(ZeroDivisionError):
with app.test_client() as client:
response = client.get("/error")
assert response.status_code == 500
error_event, transaction_event = events
assert transaction_event["type"] == "transaction"
assert transaction_event["transaction"] == "error"
assert transaction_event["contexts"]["trace"]["status"] == "internal_error"
assert error_event["transaction"] == "error"
(exception,) = error_event["exception"]["values"]
assert exception["type"] == "ZeroDivisionError"
def test_class_based_views(sentry_init, app, capture_events):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
events = capture_events()
@app.route("/")
class HelloClass(View):
def dispatch_request(self):
capture_message("hi")
return "ok"
app.add_url_rule("/hello-class/", view_func=HelloClass.as_view("hello_class"))
with app.test_client() as client:
response = client.get("/hello-class/")
assert response.status_code == 200
(event,) = events
assert event["message"] == "hi"
assert event["transaction"] == "hello_class"
sentry-python-1.4.3/tests/integrations/gcp/ 0000775 0000000 0000000 00000000000 14125057761 0021013 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/gcp/test_gcp.py 0000664 0000000 0000000 00000026110 14125057761 0023175 0 ustar 00root root 0000000 0000000 """
# GCP Cloud Functions unit tests
"""
import json
from textwrap import dedent
import tempfile
import sys
import subprocess
import pytest
import os.path
import os
pytestmark = pytest.mark.skipif(
not hasattr(tempfile, "TemporaryDirectory"), reason="need Python 3.2+"
)
FUNCTIONS_PRELUDE = """
from unittest.mock import Mock
import __main__ as gcp_functions
import os
# Initializing all the necessary environment variables
os.environ["FUNCTION_TIMEOUT_SEC"] = "3"
os.environ["FUNCTION_NAME"] = "Google Cloud function"
os.environ["ENTRY_POINT"] = "cloud_function"
os.environ["FUNCTION_IDENTITY"] = "func_ID"
os.environ["FUNCTION_REGION"] = "us-central1"
os.environ["GCP_PROJECT"] = "serverless_project"
def log_return_value(func):
def inner(*args, **kwargs):
rv = func(*args, **kwargs)
print("\\nRETURN VALUE: {}\\n".format(json.dumps(rv)))
return rv
return inner
gcp_functions.worker_v1 = Mock()
gcp_functions.worker_v1.FunctionHandler = Mock()
gcp_functions.worker_v1.FunctionHandler.invoke_user_function = log_return_value(cloud_function)
import sentry_sdk
from sentry_sdk.integrations.gcp import GcpIntegration
import json
import time
from sentry_sdk.transport import HttpTransport
def event_processor(event):
# Adding delay which would allow us to capture events.
time.sleep(1)
return event
def envelope_processor(envelope):
(item,) = envelope.items
return item.get_bytes()
class TestTransport(HttpTransport):
def _send_event(self, event):
event = event_processor(event)
# Writing a single string to stdout holds the GIL (seems like) and
# therefore cannot be interleaved with other threads. This is why we
# explicitly add a newline at the end even though `print` would provide
# us one.
print("\\nEVENT: {}\\n".format(json.dumps(event)))
def _send_envelope(self, envelope):
envelope = envelope_processor(envelope)
print("\\nENVELOPE: {}\\n".format(envelope.decode(\"utf-8\")))
def init_sdk(timeout_warning=False, **extra_init_args):
sentry_sdk.init(
dsn="https://123abc@example.com/123",
transport=TestTransport,
integrations=[GcpIntegration(timeout_warning=timeout_warning)],
shutdown_timeout=10,
**extra_init_args
)
"""
@pytest.fixture
def run_cloud_function():
    """Return a helper that executes ``code`` as a simulated GCP Cloud Function.

    The helper writes the code to a temp project, packages it with
    ``setup.py sdist``, pip-installs it into the temp dir, runs ``main.py``
    in a subprocess, and scrapes the EVENT/ENVELOPE/RETURN VALUE marker
    lines that the code prints to stdout (see FUNCTIONS_PRELUDE).
    Returns ``(envelope, event, return_value)``.
    """

    def inner(code, subprocess_kwargs=()):
        event = []
        envelope = []
        return_value = None

        # STEP : Create a zip of cloud function

        subprocess_kwargs = dict(subprocess_kwargs)

        with tempfile.TemporaryDirectory() as tmpdir:
            main_py = os.path.join(tmpdir, "main.py")
            with open(main_py, "w") as f:
                f.write(code)

            setup_cfg = os.path.join(tmpdir, "setup.cfg")

            with open(setup_cfg, "w") as f:
                # Neutralize any globally-configured --prefix so pip installs
                # into the target dir.
                f.write("[install]\nprefix=")

            subprocess.check_call(
                [sys.executable, "setup.py", "sdist", "-d", os.path.join(tmpdir, "..")],
                **subprocess_kwargs
            )

            subprocess.check_call(
                "pip install ../*.tar.gz -t .",
                cwd=tmpdir,
                shell=True,
                **subprocess_kwargs
            )

            # Run the packaged function and capture everything it prints.
            stream = os.popen("python {}/main.py".format(tmpdir))
            stream_data = stream.read()

            stream.close()

            # Parse the marker lines emitted by the test transport/prelude.
            for line in stream_data.splitlines():
                print("GCP:", line)
                if line.startswith("EVENT: "):
                    line = line[len("EVENT: ") :]
                    event = json.loads(line)
                elif line.startswith("ENVELOPE: "):
                    line = line[len("ENVELOPE: ") :]
                    envelope = json.loads(line)
                elif line.startswith("RETURN VALUE: "):
                    line = line[len("RETURN VALUE: ") :]
                    return_value = json.loads(line)
                else:
                    continue

        return envelope, event, return_value

    return inner
def test_handled_exception(run_cloud_function):
envelope, event, return_value = run_cloud_function(
dedent(
"""
functionhandler = None
event = {}
def cloud_function(functionhandler, event):
raise Exception("something went wrong")
"""
)
+ FUNCTIONS_PRELUDE
+ dedent(
"""
init_sdk(timeout_warning=False)
gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
"""
)
)
assert event["level"] == "error"
(exception,) = event["exception"]["values"]
assert exception["type"] == "Exception"
assert exception["value"] == "something went wrong"
assert exception["mechanism"] == {"type": "gcp", "handled": False}
def test_unhandled_exception(run_cloud_function):
envelope, event, return_value = run_cloud_function(
dedent(
"""
functionhandler = None
event = {}
def cloud_function(functionhandler, event):
x = 3/0
return "3"
"""
)
+ FUNCTIONS_PRELUDE
+ dedent(
"""
init_sdk(timeout_warning=False)
gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
"""
)
)
assert event["level"] == "error"
(exception,) = event["exception"]["values"]
assert exception["type"] == "ZeroDivisionError"
assert exception["value"] == "division by zero"
assert exception["mechanism"] == {"type": "gcp", "handled": False}
def test_timeout_error(run_cloud_function):
envelope, event, return_value = run_cloud_function(
dedent(
"""
functionhandler = None
event = {}
def cloud_function(functionhandler, event):
time.sleep(10)
return "3"
"""
)
+ FUNCTIONS_PRELUDE
+ dedent(
"""
init_sdk(timeout_warning=True)
gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
"""
)
)
assert event["level"] == "error"
(exception,) = event["exception"]["values"]
assert exception["type"] == "ServerlessTimeoutWarning"
assert (
exception["value"]
== "WARNING : Function is expected to get timed out. Configured timeout duration = 3 seconds."
)
assert exception["mechanism"] == {"type": "threading", "handled": False}
def test_performance_no_error(run_cloud_function):
envelope, event, return_value = run_cloud_function(
dedent(
"""
functionhandler = None
event = {}
def cloud_function(functionhandler, event):
return "test_string"
"""
)
+ FUNCTIONS_PRELUDE
+ dedent(
"""
init_sdk(traces_sample_rate=1.0)
gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
"""
)
)
assert envelope["type"] == "transaction"
assert envelope["contexts"]["trace"]["op"] == "serverless.function"
assert envelope["transaction"].startswith("Google Cloud function")
assert envelope["transaction"] in envelope["request"]["url"]
def test_performance_error(run_cloud_function):
envelope, event, return_value = run_cloud_function(
dedent(
"""
functionhandler = None
event = {}
def cloud_function(functionhandler, event):
raise Exception("something went wrong")
"""
)
+ FUNCTIONS_PRELUDE
+ dedent(
"""
init_sdk(traces_sample_rate=1.0)
gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
"""
)
)
assert envelope["type"] == "transaction"
assert envelope["contexts"]["trace"]["op"] == "serverless.function"
assert envelope["transaction"].startswith("Google Cloud function")
assert envelope["transaction"] in envelope["request"]["url"]
assert event["level"] == "error"
(exception,) = event["exception"]["values"]
assert exception["type"] == "Exception"
assert exception["value"] == "something went wrong"
assert exception["mechanism"] == {"type": "gcp", "handled": False}
def test_traces_sampler_gets_correct_values_in_sampling_context(
run_cloud_function, DictionaryContaining # noqa:N803
):
# TODO: There are some decent sized hacks below. For more context, see the
# long comment in the test of the same name in the AWS integration. The
# situations there and here aren't identical, but they're similar enough
# that solving one would probably solve both.
import inspect
envelopes, events, return_value = run_cloud_function(
dedent(
"""
functionhandler = None
event = {
"type": "chase",
"chasers": ["Maisey", "Charlie"],
"num_squirrels": 2,
}
def cloud_function(functionhandler, event):
# this runs after the transaction has started, which means we
# can make assertions about traces_sampler
try:
traces_sampler.assert_any_call(
DictionaryContaining({
"gcp_env": DictionaryContaining({
"function_name": "chase_into_tree",
"function_region": "dogpark",
"function_project": "SquirrelChasing",
}),
"gcp_event": {
"type": "chase",
"chasers": ["Maisey", "Charlie"],
"num_squirrels": 2,
},
})
)
except AssertionError:
# catch the error and return it because the error itself will
# get swallowed by the SDK as an "internal exception"
return {"AssertionError raised": True,}
return {"AssertionError raised": False,}
"""
)
+ FUNCTIONS_PRELUDE
+ dedent(inspect.getsource(DictionaryContaining))
+ dedent(
"""
os.environ["FUNCTION_NAME"] = "chase_into_tree"
os.environ["FUNCTION_REGION"] = "dogpark"
os.environ["GCP_PROJECT"] = "SquirrelChasing"
def _safe_is_equal(x, y):
# copied from conftest.py - see docstring and comments there
try:
is_equal = x.__eq__(y)
except AttributeError:
is_equal = NotImplemented
if is_equal == NotImplemented:
return x == y
return is_equal
traces_sampler = Mock(return_value=True)
init_sdk(
traces_sampler=traces_sampler,
)
gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
"""
)
)
assert return_value["AssertionError raised"] is False
sentry-python-1.4.3/tests/integrations/httpx/ 0000775 0000000 0000000 00000000000 14125057761 0021411 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/httpx/__init__.py 0000664 0000000 0000000 00000000054 14125057761 0023521 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("httpx")
sentry-python-1.4.3/tests/integrations/httpx/test_httpx.py 0000664 0000000 0000000 00000004563 14125057761 0024201 0 ustar 00root root 0000000 0000000 import asyncio
import httpx
from sentry_sdk import capture_message, start_transaction
from sentry_sdk.integrations.httpx import HttpxIntegration
def test_crumb_capture_and_hint(sentry_init, capture_events):
    # Outgoing httpx requests (sync and async client alike) must leave an
    # "http" breadcrumb, and before_breadcrumb must be able to enrich it.
    def before_breadcrumb(crumb, hint):
        crumb["data"]["extra"] = "foo"
        return crumb

    sentry_init(integrations=[HttpxIntegration()], before_breadcrumb=before_breadcrumb)
    clients = (httpx.Client(), httpx.AsyncClient())
    for i, c in enumerate(clients):
        with start_transaction():
            events = capture_events()
            url = "https://httpbin.org/status/200"
            # AsyncClient.get is a coroutine function; drive it to completion.
            if not asyncio.iscoroutinefunction(c.get):
                response = c.get(url)
            else:
                response = asyncio.get_event_loop().run_until_complete(c.get(url))
            assert response.status_code == 200
            capture_message("Testing!")
            (event,) = events
            # send request twice so we need get breadcrumb by index
            crumb = event["breadcrumbs"]["values"][i]
            assert crumb["type"] == "http"
            assert crumb["category"] == "httplib"
            assert crumb["data"] == {
                "url": url,
                "method": "GET",
                "status_code": 200,
                "reason": "OK",
                "extra": "foo",
            }
def test_outgoing_trace_headers(sentry_init):
sentry_init(traces_sample_rate=1.0, integrations=[HttpxIntegration()])
clients = (httpx.Client(), httpx.AsyncClient())
for i, c in enumerate(clients):
with start_transaction(
name="/interactions/other-dogs/new-dog",
op="greeting.sniff",
# make trace_id difference between transactions
trace_id=f"012345678901234567890123456789{i}",
) as transaction:
url = "https://httpbin.org/status/200"
if not asyncio.iscoroutinefunction(c.get):
response = c.get(url)
else:
response = asyncio.get_event_loop().run_until_complete(c.get(url))
request_span = transaction._span_recorder.spans[-1]
assert response.request.headers[
"sentry-trace"
] == "{trace_id}-{parent_span_id}-{sampled}".format(
trace_id=transaction.trace_id,
parent_span_id=request_span.span_id,
sampled=1,
)
sentry-python-1.4.3/tests/integrations/logging/ 0000775 0000000 0000000 00000000000 14125057761 0021670 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/logging/test_logging.py 0000664 0000000 0000000 00000010642 14125057761 0024732 0 ustar 00root root 0000000 0000000 import sys
import pytest
import logging
from sentry_sdk.integrations.logging import LoggingIntegration, ignore_logger
other_logger = logging.getLogger("testfoo")
logger = logging.getLogger(__name__)
@pytest.fixture(autouse=True)
def reset_level():
other_logger.setLevel(logging.DEBUG)
logger.setLevel(logging.DEBUG)
@pytest.mark.parametrize("logger", [logger, other_logger])
def test_logging_works_with_many_loggers(sentry_init, capture_events, logger):
sentry_init(integrations=[LoggingIntegration(event_level="ERROR")])
events = capture_events()
logger.info("bread")
logger.critical("LOL")
(event,) = events
assert event["level"] == "fatal"
assert not event["logentry"]["params"]
assert event["logentry"]["message"] == "LOL"
assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"]["values"])
@pytest.mark.parametrize("integrations", [None, [], [LoggingIntegration()]])
@pytest.mark.parametrize(
"kwargs", [{"exc_info": None}, {}, {"exc_info": 0}, {"exc_info": False}]
)
def test_logging_defaults(integrations, sentry_init, capture_events, kwargs):
sentry_init(integrations=integrations)
events = capture_events()
logger.info("bread")
logger.critical("LOL", **kwargs)
(event,) = events
assert event["level"] == "fatal"
assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"]["values"])
assert not any(
crumb["message"] == "LOL" for crumb in event["breadcrumbs"]["values"]
)
assert "threads" not in event
def test_logging_extra_data(sentry_init, capture_events):
sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
events = capture_events()
logger.info("bread", extra=dict(foo=42))
logger.critical("lol", extra=dict(bar=69))
(event,) = events
assert event["level"] == "fatal"
assert event["extra"] == {"bar": 69}
assert any(
crumb["message"] == "bread" and crumb["data"] == {"foo": 42}
for crumb in event["breadcrumbs"]["values"]
)
def test_logging_extra_data_integer_keys(sentry_init, capture_events):
sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
events = capture_events()
logger.critical("integer in extra keys", extra={1: 1})
(event,) = events
assert event["extra"] == {"1": 1}
@pytest.mark.xfail(sys.version_info[:2] == (3, 4), reason="buggy logging module")
def test_logging_stack(sentry_init, capture_events):
sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
events = capture_events()
logger.error("first", exc_info=True)
logger.error("second")
(
event_with,
event_without,
) = events
assert event_with["level"] == "error"
assert event_with["threads"]["values"][0]["stacktrace"]["frames"]
assert event_without["level"] == "error"
assert "threads" not in event_without
def test_logging_level(sentry_init, capture_events):
    """Records below the logger's effective level must not produce events."""
    sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
    captured = capture_events()

    # At WARNING level an error record is emitted and captured.
    logger.setLevel(logging.WARNING)
    logger.error("hi")

    (error_event,) = captured
    assert error_event["level"] == "error"
    assert error_event["logentry"]["message"] == "hi"

    captured.clear()

    # At ERROR level a warning record is filtered out by logging itself.
    logger.setLevel(logging.ERROR)
    logger.warning("hi")
    assert not captured
def test_logging_filters(sentry_init, capture_events):
sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
events = capture_events()
should_log = False
class MyFilter(logging.Filter):
def filter(self, record):
return should_log
logger.addFilter(MyFilter())
logger.error("hi")
assert not events
should_log = True
logger.error("hi")
(event,) = events
assert event["logentry"]["message"] == "hi"
def test_ignore_logger(sentry_init, capture_events):
    """A logger registered via ignore_logger must produce no events."""
    sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
    captured = capture_events()

    ignore_logger("testfoo")
    other_logger.error("hi")

    assert len(captured) == 0
def test_ignore_logger_wildcard(sentry_init, capture_events):
sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
events = capture_events()
ignore_logger("testfoo.*")
nested_logger = logging.getLogger("testfoo.submodule")
logger.error("hi")
nested_logger.error("bye")
(event,) = events
assert event["logentry"]["message"] == "hi"
sentry-python-1.4.3/tests/integrations/modules/ 0000775 0000000 0000000 00000000000 14125057761 0021712 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/modules/test_modules.py 0000664 0000000 0000000 00000000557 14125057761 0025002 0 ustar 00root root 0000000 0000000 import sentry_sdk
from sentry_sdk.integrations.modules import ModulesIntegration
def test_basic(sentry_init, capture_events):
    # The modules integration attaches the installed-package list to events.
    sentry_init(integrations=[ModulesIntegration()])
    events = capture_events()

    sentry_sdk.capture_exception(ValueError())

    (event,) = events
    # Both the SDK itself and the test runner must show up in "modules".
    assert "sentry-sdk" in event["modules"]
    assert "pytest" in event["modules"]
sentry-python-1.4.3/tests/integrations/pure_eval/ 0000775 0000000 0000000 00000000000 14125057761 0022224 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/pure_eval/__init__.py 0000664 0000000 0000000 00000000074 14125057761 0024336 0 ustar 00root root 0000000 0000000 import pytest
pure_eval = pytest.importorskip("pure_eval")
sentry-python-1.4.3/tests/integrations/pure_eval/test_pure_eval.py 0000664 0000000 0000000 00000004565 14125057761 0025631 0 ustar 00root root 0000000 0000000 import sys
from types import SimpleNamespace
import pytest
from sentry_sdk import capture_exception, serializer
from sentry_sdk.integrations.pure_eval import PureEvalIntegration
@pytest.mark.parametrize("integrations", [[], [PureEvalIntegration()]])
def test_with_locals_enabled(sentry_init, capture_events, integrations):
sentry_init(with_locals=True, integrations=integrations)
events = capture_events()
def foo():
namespace = SimpleNamespace()
q = 1
w = 2
e = 3
r = 4
t = 5
y = 6
u = 7
i = 8
o = 9
p = 10
a = 11
s = 12
str((q, w, e, r, t, y, u, i, o, p, a, s)) # use variables for linter
namespace.d = {1: 2}
print(namespace.d[1] / 0)
# Appearances of variables after the main statement don't affect order
print(q)
print(s)
print(events)
try:
foo()
except Exception:
capture_exception()
(event,) = events
assert all(
frame["vars"]
for frame in event["exception"]["values"][0]["stacktrace"]["frames"]
)
frame_vars = event["exception"]["values"][0]["stacktrace"]["frames"][-1]["vars"]
if integrations:
# Values closest to the exception line appear first
# Test this order if possible given the Python version and dict order
expected_keys = [
"namespace",
"namespace.d",
"namespace.d[1]",
"s",
"a",
"p",
"o",
"i",
"u",
"y",
]
if sys.version_info[:2] == (3, 5):
assert frame_vars.keys() == set(expected_keys)
else:
assert list(frame_vars.keys()) == expected_keys
assert frame_vars["namespace.d"] == {"1": "2"}
assert frame_vars["namespace.d[1]"] == "2"
else:
# Without pure_eval, the variables are unpredictable.
# In later versions, those at the top appear first and are thus included
assert frame_vars.keys() <= {
"namespace",
"q",
"w",
"e",
"r",
"t",
"y",
"u",
"i",
"o",
"p",
"a",
"s",
"events",
}
assert len(frame_vars) == serializer.MAX_DATABAG_BREADTH
sentry-python-1.4.3/tests/integrations/pyramid/ 0000775 0000000 0000000 00000000000 14125057761 0021707 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/pyramid/__init__.py 0000664 0000000 0000000 00000000070 14125057761 0024015 0 ustar 00root root 0000000 0000000 import pytest
pyramid = pytest.importorskip("pyramid")
sentry-python-1.4.3/tests/integrations/pyramid/test_pyramid.py 0000664 0000000 0000000 00000022102 14125057761 0024762 0 ustar 00root root 0000000 0000000 import json
import logging
import pkg_resources
import pytest
from io import BytesIO
import pyramid.testing
from pyramid.authorization import ACLAuthorizationPolicy
from pyramid.response import Response
from sentry_sdk import capture_message, add_breadcrumb
from sentry_sdk.integrations.pyramid import PyramidIntegration
from werkzeug.test import Client
PYRAMID_VERSION = tuple(
map(int, pkg_resources.get_distribution("pyramid").version.split("."))
)
def hi(request):
capture_message("hi")
return Response("hi")
@pytest.fixture
def pyramid_config():
config = pyramid.testing.setUp()
try:
config.add_route("hi", "/message")
config.add_view(hi, route_name="hi")
yield config
finally:
pyramid.testing.tearDown()
@pytest.fixture
def route(pyramid_config):
    """Decorator factory: ``@route(url)`` registers the decorated function
    as a Pyramid view for ``url``, using the function name as route name."""

    def inner(url):
        def wrapper(f):
            pyramid_config.add_route(f.__name__, url)
            pyramid_config.add_view(f, route_name=f.__name__)
            return f

        return wrapper

    return inner
@pytest.fixture
def get_client(pyramid_config):
def inner():
return Client(pyramid_config.make_wsgi_app())
return inner
def test_view_exceptions(
get_client, route, sentry_init, capture_events, capture_exceptions
):
sentry_init(integrations=[PyramidIntegration()])
events = capture_events()
exceptions = capture_exceptions()
add_breadcrumb({"message": "hi"})
@route("/errors")
def errors(request):
add_breadcrumb({"message": "hi2"})
1 / 0
client = get_client()
with pytest.raises(ZeroDivisionError):
client.get("/errors")
(error,) = exceptions
assert isinstance(error, ZeroDivisionError)
(event,) = events
(breadcrumb,) = event["breadcrumbs"]["values"]
assert breadcrumb["message"] == "hi2"
assert event["exception"]["values"][0]["mechanism"]["type"] == "pyramid"
def test_has_context(route, get_client, sentry_init, capture_events):
sentry_init(integrations=[PyramidIntegration()])
events = capture_events()
@route("/message/{msg}")
def hi2(request):
capture_message(request.matchdict["msg"])
return Response("hi")
client = get_client()
client.get("/message/yoo")
(event,) = events
assert event["message"] == "yoo"
assert event["request"] == {
"env": {"SERVER_NAME": "localhost", "SERVER_PORT": "80"},
"headers": {"Host": "localhost"},
"method": "GET",
"query_string": "",
"url": "http://localhost/message/yoo",
}
assert event["transaction"] == "hi2"
@pytest.mark.parametrize(
"transaction_style,expected_transaction",
[("route_name", "hi"), ("route_pattern", "/message")],
)
def test_transaction_style(
sentry_init, get_client, capture_events, transaction_style, expected_transaction
):
sentry_init(integrations=[PyramidIntegration(transaction_style=transaction_style)])
events = capture_events()
client = get_client()
client.get("/message")
(event,) = events
assert event["transaction"] == expected_transaction
def test_large_json_request(sentry_init, capture_events, route, get_client):
sentry_init(integrations=[PyramidIntegration()])
data = {"foo": {"bar": "a" * 2000}}
@route("/")
def index(request):
assert request.json == data
assert request.text == json.dumps(data)
assert not request.POST
capture_message("hi")
return Response("ok")
events = capture_events()
client = get_client()
client.post("/", content_type="application/json", data=json.dumps(data))
(event,) = events
assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
"": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
}
assert len(event["request"]["data"]["foo"]["bar"]) == 512
@pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
def test_flask_empty_json_request(sentry_init, capture_events, route, get_client, data):
sentry_init(integrations=[PyramidIntegration()])
@route("/")
def index(request):
assert request.json == data
assert request.text == json.dumps(data)
assert not request.POST
capture_message("hi")
return Response("ok")
events = capture_events()
client = get_client()
response = client.post("/", content_type="application/json", data=json.dumps(data))
assert response[1] == "200 OK"
(event,) = events
assert event["request"]["data"] == data
def test_files_and_form(sentry_init, capture_events, route, get_client):
sentry_init(integrations=[PyramidIntegration()], request_bodies="always")
data = {"foo": "a" * 2000, "file": (BytesIO(b"hello"), "hello.txt")}
@route("/")
def index(request):
capture_message("hi")
return Response("ok")
events = capture_events()
client = get_client()
client.post("/", data=data)
(event,) = events
assert event["_meta"]["request"]["data"]["foo"] == {
"": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
}
assert len(event["request"]["data"]["foo"]) == 512
assert event["_meta"]["request"]["data"]["file"] == {
"": {"len": 0, "rem": [["!raw", "x", 0, 0]]}
}
assert not event["request"]["data"]["file"]
def test_bad_request_not_captured(
sentry_init, pyramid_config, capture_events, route, get_client
):
import pyramid.httpexceptions as exc
sentry_init(integrations=[PyramidIntegration()])
events = capture_events()
@route("/")
def index(request):
raise exc.HTTPBadRequest()
def errorhandler(exc, request):
return Response("bad request")
pyramid_config.add_view(errorhandler, context=exc.HTTPBadRequest)
client = get_client()
client.get("/")
assert not events
def test_errorhandler_ok(
sentry_init, pyramid_config, capture_exceptions, route, get_client
):
sentry_init(integrations=[PyramidIntegration()])
errors = capture_exceptions()
@route("/")
def index(request):
raise Exception()
def errorhandler(exc, request):
return Response("bad request")
pyramid_config.add_view(errorhandler, context=Exception)
client = get_client()
client.get("/")
assert not errors
@pytest.mark.skipif(
PYRAMID_VERSION < (1, 9),
reason="We don't have the right hooks in older Pyramid versions",
)
def test_errorhandler_500(
sentry_init, pyramid_config, capture_exceptions, route, get_client
):
sentry_init(integrations=[PyramidIntegration()])
errors = capture_exceptions()
@route("/")
def index(request):
1 / 0
def errorhandler(exc, request):
return Response("bad request", status=500)
pyramid_config.add_view(errorhandler, context=Exception)
client = get_client()
app_iter, status, headers = client.get("/")
assert b"".join(app_iter) == b"bad request"
assert status.lower() == "500 internal server error"
(error,) = errors
assert isinstance(error, ZeroDivisionError)
def test_error_in_errorhandler(
sentry_init, pyramid_config, capture_events, route, get_client
):
sentry_init(integrations=[PyramidIntegration()])
@route("/")
def index(request):
raise ValueError()
def error_handler(err, request):
1 / 0
pyramid_config.add_view(error_handler, context=ValueError)
events = capture_events()
client = get_client()
with pytest.raises(ZeroDivisionError):
client.get("/")
(event,) = events
exception = event["exception"]["values"][-1]
assert exception["type"] == "ZeroDivisionError"
def test_error_in_authenticated_userid(
sentry_init, pyramid_config, capture_events, route, get_client
):
from sentry_sdk.integrations.logging import LoggingIntegration
sentry_init(
send_default_pii=True,
integrations=[
PyramidIntegration(),
LoggingIntegration(event_level=logging.ERROR),
],
)
logger = logging.getLogger("test_pyramid")
class AuthenticationPolicy(object):
def authenticated_userid(self, request):
logger.error("failed to identify user")
pyramid_config.set_authorization_policy(ACLAuthorizationPolicy())
pyramid_config.set_authentication_policy(AuthenticationPolicy())
events = capture_events()
client = get_client()
client.get("/message")
assert len(events) == 1
def tween_factory(handler, registry):
    # Pyramid tween used by test_tween_ok: converts exceptions raised while
    # handling the "index" route into a plain 400 response, swallowing the
    # error so Sentry should not see it.
    def tween(request):
        try:
            response = handler(request)
        except Exception:
            mroute = request.matched_route
            if mroute and mroute.name in ("index",):
                return Response("bad request", status_code=400)
        # NOTE(review): if the handler raised for any route other than
        # "index", `response` is unbound here and this line raises
        # UnboundLocalError — presumably acceptable for this test-only
        # helper; confirm before reusing it elsewhere.
        return response

    return tween
def test_tween_ok(sentry_init, pyramid_config, capture_exceptions, route, get_client):
    """A tween that converts the route's exception into a 400 response has
    "handled" it, so the integration must not capture anything."""
    sentry_init(integrations=[PyramidIntegration()])
    errors = capture_exceptions()

    @route("/")
    def index(request):
        raise Exception()

    pyramid_config.add_tween(
        "tests.integrations.pyramid.test_pyramid.tween_factory",
        under=pyramid.tweens.INGRESS,
    )

    client = get_client()
    client.get("/")

    assert not errors
sentry-python-1.4.3/tests/integrations/redis/ 0000775 0000000 0000000 00000000000 14125057761 0021350 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/redis/__init__.py 0000664 0000000 0000000 00000000054 14125057761 0023460 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("redis")
sentry-python-1.4.3/tests/integrations/redis/test_redis.py 0000664 0000000 0000000 00000001212 14125057761 0024063 0 ustar 00root root 0000000 0000000 from sentry_sdk import capture_message
from sentry_sdk.integrations.redis import RedisIntegration
from fakeredis import FakeStrictRedis
def test_basic(sentry_init, capture_events):
    """A redis GET leaves a "redis"-type breadcrumb (command, key, message)
    on the next captured event."""
    sentry_init(integrations=[RedisIntegration()])
    events = capture_events()

    connection = FakeStrictRedis()

    connection.get("foobar")
    capture_message("hi")

    (event,) = events
    (crumb,) = event["breadcrumbs"]["values"]

    assert crumb == {
        "category": "redis",
        "message": "GET 'foobar'",
        "data": {"redis.key": "foobar", "redis.command": "GET"},
        # timestamp is nondeterministic; compare it to itself so only the
        # other fields are pinned
        "timestamp": crumb["timestamp"],
        "type": "redis",
    }
sentry-python-1.4.3/tests/integrations/rediscluster/ 0000775 0000000 0000000 00000000000 14125057761 0022752 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/rediscluster/__init__.py 0000664 0000000 0000000 00000000063 14125057761 0025062 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("rediscluster")
sentry-python-1.4.3/tests/integrations/rediscluster/test_rediscluster.py 0000664 0000000 0000000 00000002103 14125057761 0027067 0 ustar 00root root 0000000 0000000 import pytest
from sentry_sdk import capture_message
from sentry_sdk.integrations.redis import RedisIntegration
import rediscluster
# Client classes to exercise; StrictRedisCluster only exists on some
# rediscluster releases, so it is appended conditionally.
rediscluster_classes = [rediscluster.RedisCluster]

if hasattr(rediscluster, "StrictRedisCluster"):
    rediscluster_classes.append(rediscluster.StrictRedisCluster)
@pytest.fixture(scope="module", autouse=True)
def monkeypatch_rediscluster_classes():
    """Stub execute_command on every rediscluster client class so tests never
    talk to a real cluster.

    NOTE(review): this assigns the class attribute directly instead of using
    monkeypatch, so the stub is never undone and persists for the rest of the
    test session — confirm that is intended.
    """
    for cls in rediscluster_classes:
        cls.execute_command = lambda *_, **__: None
@pytest.mark.parametrize("rediscluster_cls", rediscluster_classes)
def test_rediscluster_basic(rediscluster_cls, sentry_init, capture_events):
    """Same breadcrumb contract as the plain-redis test, exercised for each
    available rediscluster client class."""
    sentry_init(integrations=[RedisIntegration()])
    events = capture_events()

    rc = rediscluster_cls(connection_pool=True)
    rc.get("foobar")
    capture_message("hi")

    (event,) = events
    (crumb,) = event["breadcrumbs"]["values"]

    assert crumb == {
        "category": "redis",
        "message": "GET 'foobar'",
        "data": {"redis.key": "foobar", "redis.command": "GET"},
        # timestamp is nondeterministic; compare to itself
        "timestamp": crumb["timestamp"],
        "type": "redis",
    }
sentry-python-1.4.3/tests/integrations/requests/ 0000775 0000000 0000000 00000000000 14125057761 0022115 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/requests/test_requests.py 0000664 0000000 0000000 00000001327 14125057761 0025404 0 ustar 00root root 0000000 0000000 import pytest
requests = pytest.importorskip("requests")
from sentry_sdk import capture_message
from sentry_sdk.integrations.stdlib import StdlibIntegration
def test_crumb_capture(sentry_init, capture_events):
    """A `requests` call (instrumented via the stdlib httplib patch) leaves an
    http breadcrumb with url/method/status/reason.

    NOTE(review): hits the real network (httpbin.org); flaky offline.
    """
    sentry_init(integrations=[StdlibIntegration()])
    events = capture_events()

    response = requests.get("https://httpbin.org/status/418")
    capture_message("Testing!")

    (event,) = events
    (crumb,) = event["breadcrumbs"]["values"]
    assert crumb["type"] == "http"
    assert crumb["category"] == "httplib"
    assert crumb["data"] == {
        "url": "https://httpbin.org/status/418",
        "method": "GET",
        "status_code": response.status_code,
        "reason": response.reason,
    }
sentry-python-1.4.3/tests/integrations/rq/ 0000775 0000000 0000000 00000000000 14125057761 0020664 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/rq/__init__.py 0000664 0000000 0000000 00000000056 14125057761 0022776 0 ustar 00root root 0000000 0000000 import pytest
rq = pytest.importorskip("rq")
sentry-python-1.4.3/tests/integrations/rq/test_rq.py 0000664 0000000 0000000 00000014227 14125057761 0022725 0 ustar 00root root 0000000 0000000 import pytest
from fakeredis import FakeStrictRedis
from sentry_sdk.integrations.rq import RqIntegration
import rq
try:
from unittest import mock # python 3.3 and above
except ImportError:
import mock # python < 3.3
@pytest.fixture(autouse=True)
def _patch_rq_get_server_version(monkeypatch):
    """
    Patch up RQ 1.5 to work with fakeredis.

    https://github.com/jamesls/fakeredis/issues/273

    RQ >= 1.5 asks the redis server for its version; fakeredis cannot answer,
    so pretend the server is redis 4.0.0.
    """
    # distutils is deprecated on modern Pythons (PEP 632); kept here,
    # presumably for py2 compatibility of this test module.
    from distutils.version import StrictVersion

    if tuple(map(int, rq.VERSION.split("."))) >= (1, 5):
        for k in (
            "rq.job.Job.get_redis_server_version",
            "rq.worker.Worker.get_redis_server_version",
        ):
            monkeypatch.setattr(k, lambda _: StrictVersion("4.0.0"))
def crashing_job(foo):
    # Deliberately raises ZeroDivisionError. `foo` must stay a parameter/local
    # because test_basic asserts it appears in the captured stacktrace frame
    # vars (frames[-1]["vars"]["foo"] == "42").
    1 / 0
def chew_up_shoes(dog, human, shoes):
    """Always raise a generic Exception whose message interpolates all three
    arguments; used by the rq tests to check error capture."""
    complaint = "{}!! Why did you eat {}'s {}??".format(dog, human, shoes)
    raise Exception(complaint)
def do_trick(dog, trick):
    """Return the praise string for a successful trick; used by the rq tests
    as a job that completes without error."""
    return "%s, can you %s? Good dog!" % (dog, trick)
def test_basic(sentry_init, capture_events):
    """A crashing rq job yields one event with mechanism "rq", the job kwarg
    visible in the crash frame's vars, and job metadata under extra["rq-job"]."""
    sentry_init(integrations=[RqIntegration()])
    events = capture_events()

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.SimpleWorker([queue], connection=queue.connection)

    queue.enqueue(crashing_job, foo=42)
    worker.work(burst=True)

    (event,) = events

    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    assert exception["mechanism"]["type"] == "rq"
    assert exception["stacktrace"]["frames"][-1]["vars"]["foo"] == "42"

    assert event["transaction"] == "tests.integrations.rq.test_rq.crashing_job"
    assert event["extra"]["rq-job"] == {
        "args": [],
        "description": "tests.integrations.rq.test_rq.crashing_job(foo=42)",
        "func": "tests.integrations.rq.test_rq.crashing_job",
        # job_id is nondeterministic; compare it to itself
        "job_id": event["extra"]["rq-job"]["job_id"],
        "kwargs": {"foo": 42},
    }
def test_transport_shutdown(sentry_init, capture_events_forksafe):
    """rq.Worker (unlike SimpleWorker) forks per job; the event must be
    flushed out of the child process before it exits."""
    sentry_init(integrations=[RqIntegration()])

    events = capture_events_forksafe()

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.Worker([queue], connection=queue.connection)

    queue.enqueue(crashing_job, foo=42)
    worker.work(burst=True)

    event = events.read_event()
    events.read_flush()

    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
def test_transaction_with_error(
    sentry_init, capture_events, DictionaryContaining  # noqa:N803
):
    """With tracing enabled, a failing job emits an error event plus a
    transaction envelope that share the same trace context and rq-job extra."""
    sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0)
    events = capture_events()

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.SimpleWorker([queue], connection=queue.connection)
    queue.enqueue(chew_up_shoes, "Charlie", "Katie", shoes="flip-flops")
    worker.work(burst=True)

    error_event, envelope = events

    assert error_event["transaction"] == "tests.integrations.rq.test_rq.chew_up_shoes"
    assert error_event["contexts"]["trace"]["op"] == "rq.task"
    assert error_event["exception"]["values"][0]["type"] == "Exception"
    assert (
        error_event["exception"]["values"][0]["value"]
        == "Charlie!! Why did you eat Katie's flip-flops??"
    )

    assert envelope["type"] == "transaction"
    assert envelope["contexts"]["trace"] == error_event["contexts"]["trace"]
    assert envelope["transaction"] == error_event["transaction"]
    assert envelope["extra"]["rq-job"] == DictionaryContaining(
        {
            "args": ["Charlie", "Katie"],
            "kwargs": {"shoes": "flip-flops"},
            "func": "tests.integrations.rq.test_rq.chew_up_shoes",
            "description": "tests.integrations.rq.test_rq.chew_up_shoes('Charlie', 'Katie', shoes='flip-flops')",
        }
    )
def test_transaction_no_error(
    sentry_init, capture_events, DictionaryContaining  # noqa:N803
):
    """A successful job still emits a transaction envelope with op "rq.task"
    and the rq-job metadata."""
    sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0)
    events = capture_events()

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.SimpleWorker([queue], connection=queue.connection)
    queue.enqueue(do_trick, "Maisey", trick="kangaroo")
    worker.work(burst=True)

    envelope = events[0]

    assert envelope["type"] == "transaction"
    assert envelope["contexts"]["trace"]["op"] == "rq.task"
    assert envelope["transaction"] == "tests.integrations.rq.test_rq.do_trick"
    assert envelope["extra"]["rq-job"] == DictionaryContaining(
        {
            "args": ["Maisey"],
            "kwargs": {"trick": "kangaroo"},
            "func": "tests.integrations.rq.test_rq.do_trick",
            "description": "tests.integrations.rq.test_rq.do_trick('Maisey', trick='kangaroo')",
        }
    )
def test_traces_sampler_gets_correct_values_in_sampling_context(
    sentry_init, DictionaryContaining, ObjectDescribedBy  # noqa:N803
):
    """traces_sampler must receive the rq.job.Job instance — with its args,
    kwargs, func name and result — under "rq_job" in the sampling context."""
    traces_sampler = mock.Mock(return_value=True)
    sentry_init(integrations=[RqIntegration()], traces_sampler=traces_sampler)

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.SimpleWorker([queue], connection=queue.connection)
    queue.enqueue(do_trick, "Bodhi", trick="roll over")
    worker.work(burst=True)

    traces_sampler.assert_any_call(
        DictionaryContaining(
            {
                "rq_job": ObjectDescribedBy(
                    type=rq.job.Job,
                    attrs={
                        "description": "tests.integrations.rq.test_rq.do_trick('Bodhi', trick='roll over')",
                        "result": "Bodhi, can you roll over? Good dog!",
                        "func_name": "tests.integrations.rq.test_rq.do_trick",
                        "args": ("Bodhi",),
                        "kwargs": {"trick": "roll over"},
                    },
                ),
            }
        )
    )
@pytest.mark.skipif(
    # Compare numerically: the previous string-list comparison
    # (`rq.__version__.split(".") < ["1", "5"]`) is lexicographic, so e.g.
    # "1.10" -> ["1", "10"] < ["1", "5"] is True and the test was wrongly
    # skipped on rq >= 1.10. Mirrors the int-tuple parsing used by the
    # _patch_rq_get_server_version fixture above.
    tuple(int(part) for part in rq.__version__.split(".")[:2]) < (1, 5),
    reason="At least rq-1.5 required",
)
def test_job_with_retries(sentry_init, capture_events):
    """A job enqueued with rq.Retry must still produce exactly one error
    event when it crashes (no duplicate per retry attempt)."""
    sentry_init(integrations=[RqIntegration()])
    events = capture_events()

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.SimpleWorker([queue], connection=queue.connection)

    queue.enqueue(crashing_job, foo=42, retry=rq.Retry(max=1))
    worker.work(burst=True)

    assert len(events) == 1
sentry-python-1.4.3/tests/integrations/sanic/ 0000775 0000000 0000000 00000000000 14125057761 0021337 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/sanic/__init__.py 0000664 0000000 0000000 00000000064 14125057761 0023450 0 ustar 00root root 0000000 0000000 import pytest
sanic = pytest.importorskip("sanic")
sentry-python-1.4.3/tests/integrations/sanic/test_sanic.py 0000664 0000000 0000000 00000014757 14125057761 0024063 0 ustar 00root root 0000000 0000000 import sys
import random
import asyncio
import pytest
from sentry_sdk import capture_message, configure_scope
from sentry_sdk.integrations.sanic import SanicIntegration
from sanic import Sanic, request, response, __version__ as SANIC_VERSION_RAW
from sanic.response import HTTPResponse
from sanic.exceptions import abort
SANIC_VERSION = tuple(map(int, SANIC_VERSION_RAW.split(".")))
@pytest.fixture
def app():
    """A minimal Sanic app with a /message route that captures the message
    "hi" and returns "ok"."""
    if SANIC_VERSION >= (20, 12):
        # Build (20.12.0) adds a feature where the instance is stored in an internal class
        # registry for later retrieval, and so add register=False to disable that
        app = Sanic(__name__, register=False)
    else:
        app = Sanic(__name__)

    @app.route("/message")
    def hi(request):
        capture_message("hi")
        return response.text("ok")

    return app
def test_request_data(sentry_init, app, capture_events):
    """The Sanic integration attaches request data (headers, query string,
    url, method, handler name as transaction) to the event, and clears that
    state once the request is over."""
    sentry_init(integrations=[SanicIntegration()])
    events = capture_events()

    request, response = app.test_client.get("/message?foo=bar")
    assert response.status == 200

    (event,) = events
    assert event["transaction"] == "hi"
    assert event["request"]["env"] == {"REMOTE_ADDR": ""}
    assert set(event["request"]["headers"]) >= {
        "accept",
        "accept-encoding",
        "host",
        "user-agent",
    }
    assert event["request"]["query_string"] == "foo=bar"
    assert event["request"]["url"].endswith("/message")
    assert event["request"]["method"] == "GET"

    # Assert that state is not leaked
    events.clear()
    capture_message("foo")
    (event,) = events

    assert "request" not in event
    assert "transaction" not in event
def test_errors(sentry_init, app, capture_events):
    """An unhandled exception in a route handler is captured with the handler
    name as transaction and a stacktrace pointing into this file."""
    sentry_init(integrations=[SanicIntegration()])
    events = capture_events()

    @app.route("/error")
    def myerror(request):
        raise ValueError("oh no")

    request, response = app.test_client.get("/error")
    assert response.status == 500

    (event,) = events
    assert event["transaction"] == "myerror"
    (exception,) = event["exception"]["values"]

    assert exception["type"] == "ValueError"
    assert exception["value"] == "oh no"
    assert any(
        frame["filename"].endswith("test_sanic.py")
        for frame in exception["stacktrace"]["frames"]
    )
def test_bad_request_not_captured(sentry_init, app, capture_events):
    """4xx aborts are client errors and must not be reported to Sentry."""
    sentry_init(integrations=[SanicIntegration()])
    events = capture_events()

    @app.route("/")
    def index(request):
        abort(400)

    request, response = app.test_client.get("/")
    assert response.status == 400

    assert not events
def test_error_in_errorhandler(sentry_init, app, capture_events):
    """Both the original route error (ValueError) and the error raised by the
    exception handler itself (ZeroDivisionError) are captured — two events."""
    sentry_init(integrations=[SanicIntegration()])
    events = capture_events()

    @app.route("/error")
    def myerror(request):
        raise ValueError("oh no")

    @app.exception(ValueError)
    def myhandler(request, exception):
        1 / 0

    request, response = app.test_client.get("/error")
    assert response.status == 500

    event1, event2 = events

    (exception,) = event1["exception"]["values"]
    assert exception["type"] == "ValueError"
    assert any(
        frame["filename"].endswith("test_sanic.py")
        for frame in exception["stacktrace"]["frames"]
    )

    exception = event2["exception"]["values"][-1]
    assert exception["type"] == "ZeroDivisionError"
    assert any(
        frame["filename"].endswith("test_sanic.py")
        for frame in exception["stacktrace"]["frames"]
    )
def test_concurrency(sentry_init, app):
    """
    Make sure we instrument Sanic in a way where request data does not leak
    between request handlers. This test also implicitly tests our concept of
    how async code should be instrumented, so if it breaks it likely has
    ramifications for other async integrations and async usercode.

    We directly call the request handler instead of using Sanic's test client
    because that's the only way we could reproduce leakage with such a low
    amount of concurrent tasks.
    """
    sentry_init(integrations=[SanicIntegration()])

    # The route must declare the <i> path parameter: the handler below takes
    # (request, i) and each task requests /context-check/{i}. Without it the
    # route never matches and the handler never runs.
    @app.route("/context-check/<i>")
    async def context_check(request, i):
        with configure_scope() as scope:
            scope.set_tag("i", i)

        # Yield to the event loop so other tasks interleave; any scope leakage
        # between tasks would surface in the assertion below.
        await asyncio.sleep(random.random())

        with configure_scope() as scope:
            assert scope._tags["i"] == i

        return response.text("ok")

    async def task(i):
        responses = []

        kwargs = {
            "url_bytes": "http://localhost/context-check/{i}".format(i=i).encode(
                "ascii"
            ),
            "headers": {},
            "version": "1.1",
            "method": "GET",
            "transport": None,
        }

        if SANIC_VERSION >= (19,):
            kwargs["app"] = app

        if SANIC_VERSION >= (21, 3):
            try:
                app.router.reset()
                app.router.finalize()
            except AttributeError:
                ...

            # Sanic >= 21.3 streams the body and responds via the request's
            # stream object, so fake the minimum surface it needs.
            class MockAsyncStreamer:
                def __init__(self, request_body):
                    self.request_body = request_body
                    self.iter = iter(self.request_body)
                    self.response = b"success"

                def respond(self, response):
                    responses.append(response)
                    patched_response = HTTPResponse()
                    patched_response.send = lambda end_stream: asyncio.sleep(0.001)
                    return patched_response

                def __aiter__(self):
                    return self

                async def __anext__(self):
                    try:
                        return next(self.iter)
                    except StopIteration:
                        raise StopAsyncIteration

            patched_request = request.Request(**kwargs)
            patched_request.stream = MockAsyncStreamer([b"hello", b"foo"])

            await app.handle_request(
                patched_request,
            )
        else:
            await app.handle_request(
                request.Request(**kwargs),
                write_callback=responses.append,
                stream_callback=responses.append,
            )

        (r,) = responses
        assert r.status == 200

    async def runner():
        await asyncio.gather(*(task(i) for i in range(1000)))

    if sys.version_info < (3, 7):
        # asyncio.run() only exists on 3.7+
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        loop.run_until_complete(runner())
    else:
        asyncio.run(runner())

    # After all requests are done, no tag state may remain on the scope.
    with configure_scope() as scope:
        assert not scope._tags
sentry-python-1.4.3/tests/integrations/serverless/ 0000775 0000000 0000000 00000000000 14125057761 0022437 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/serverless/test_serverless.py 0000664 0000000 0000000 00000002022 14125057761 0026241 0 ustar 00root root 0000000 0000000 import pytest
from sentry_sdk.integrations.serverless import serverless_function
def test_basic(sentry_init, capture_exceptions, monkeypatch):
    """serverless_function captures the exception, re-raises it, and flushes
    the transport by default."""
    sentry_init()
    exceptions = capture_exceptions()

    flush_calls = []

    @serverless_function
    def foo():
        # Patch flush from *inside* the wrapped function so the wrapper's
        # flush-on-exit hits our recorder.
        monkeypatch.setattr(
            "sentry_sdk.Hub.current.flush", lambda: flush_calls.append(1)
        )
        1 / 0

    with pytest.raises(ZeroDivisionError):
        foo()

    (exception,) = exceptions
    assert isinstance(exception, ZeroDivisionError)

    assert flush_calls == [1]
def test_flush_disabled(sentry_init, capture_exceptions, monkeypatch):
    """With flush=False the wrapper still captures and re-raises, but must
    not call Hub.flush."""
    sentry_init()
    exceptions = capture_exceptions()

    flush_calls = []

    monkeypatch.setattr("sentry_sdk.Hub.current.flush", lambda: flush_calls.append(1))

    @serverless_function(flush=False)
    def foo():
        1 / 0

    with pytest.raises(ZeroDivisionError):
        foo()

    (exception,) = exceptions
    assert isinstance(exception, ZeroDivisionError)
    assert flush_calls == []
sentry-python-1.4.3/tests/integrations/spark/ 0000775 0000000 0000000 00000000000 14125057761 0021362 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/spark/test_spark.py 0000664 0000000 0000000 00000015003 14125057761 0024112 0 ustar 00root root 0000000 0000000 import pytest
import sys
from sentry_sdk.integrations.spark.spark_driver import (
_set_app_properties,
_start_sentry_listener,
SentryListener,
)
from sentry_sdk.integrations.spark.spark_worker import SparkWorkerIntegration
pytest.importorskip("pyspark")
pytest.importorskip("py4j")
from pyspark import SparkContext
from py4j.protocol import Py4JJavaError
################
# DRIVER TESTS #
################
def test_set_app_properties():
    """_set_app_properties copies the app name and application id into the
    Spark context's local properties."""
    spark_context = SparkContext(appName="Testing123")
    _set_app_properties()

    assert spark_context.getLocalProperty("sentry_app_name") == "Testing123"
    # applicationId generated by sparkContext init
    assert (
        spark_context.getLocalProperty("sentry_application_id")
        == spark_context.applicationId
    )
def test_start_sentry_listener():
    """_start_sentry_listener must start the py4j callback server on the
    context's gateway (None before, set after)."""
    spark_context = SparkContext.getOrCreate()

    gateway = spark_context._gateway
    assert gateway._callback_server is None

    _start_sentry_listener(spark_context)

    assert gateway._callback_server is not None
@pytest.fixture
def sentry_listener(monkeypatch):
    """Yield (listener, mock_hub): a SentryListener whose hub is replaced by
    a recorder storing the args/kwargs of the last add_breadcrumb call."""

    class MockHub:
        def __init__(self):
            self.args = []
            self.kwargs = {}

        def add_breadcrumb(self, *args, **kwargs):
            # Only the most recent call is kept — sufficient for these tests.
            self.args = args
            self.kwargs = kwargs

    listener = SentryListener()
    mock_hub = MockHub()

    monkeypatch.setattr(listener, "hub", mock_hub)

    return listener, mock_hub
def test_sentry_listener_on_job_start(sentry_listener):
    """onJobStart leaves an info-level breadcrumb mentioning the job id."""
    listener, mock_hub = sentry_listener

    class MockJobStart:
        def jobId(self):  # noqa: N802
            return "sample-job-id-start"

    mock_job_start = MockJobStart()
    listener.onJobStart(mock_job_start)

    assert mock_hub.kwargs["level"] == "info"
    assert "sample-job-id-start" in mock_hub.kwargs["message"]
@pytest.mark.parametrize(
    "job_result, level", [("JobSucceeded", "info"), ("JobFailed", "warning")]
)
def test_sentry_listener_on_job_end(sentry_listener, job_result, level):
    """onJobEnd's breadcrumb level depends on the job result: info on
    success, warning on failure."""
    listener, mock_hub = sentry_listener

    class MockJobResult:
        def toString(self):  # noqa: N802
            return job_result

    class MockJobEnd:
        def jobId(self):  # noqa: N802
            return "sample-job-id-end"

        def jobResult(self):  # noqa: N802
            result = MockJobResult()
            return result

    mock_job_end = MockJobEnd()
    listener.onJobEnd(mock_job_end)

    assert mock_hub.kwargs["level"] == level
    assert mock_hub.kwargs["data"]["result"] == job_result
    assert "sample-job-id-end" in mock_hub.kwargs["message"]
def test_sentry_listener_on_stage_submitted(sentry_listener):
    """onStageSubmitted records stage id, attempt id and stage name in the
    breadcrumb data."""
    listener, mock_hub = sentry_listener

    class StageInfo:
        def stageId(self):  # noqa: N802
            return "sample-stage-id-submit"

        def name(self):
            return "run-job"

        def attemptId(self):  # noqa: N802
            return 14

    class MockStageSubmitted:
        def stageInfo(self):  # noqa: N802
            stageinf = StageInfo()
            return stageinf

    mock_stage_submitted = MockStageSubmitted()
    listener.onStageSubmitted(mock_stage_submitted)

    assert mock_hub.kwargs["level"] == "info"
    assert "sample-stage-id-submit" in mock_hub.kwargs["message"]
    assert mock_hub.kwargs["data"]["attemptId"] == 14
    assert mock_hub.kwargs["data"]["name"] == "run-job"
@pytest.fixture
def get_mock_stage_completed():
    """Factory for a fake py4j StageCompleted event.

    failure_reason=True  -> failureReason().get() returns "failure-reason"
    failure_reason=False -> it raises Py4JJavaError (how py4j reports the
    absent Scala Option on a successful stage).
    """

    def _inner(failure_reason):
        class JavaException:
            def __init__(self):
                # py4j errors reference a target object id
                self._target_id = "id"

        class FailureReason:
            def get(self):
                if failure_reason:
                    return "failure-reason"
                else:
                    raise Py4JJavaError("msg", JavaException())

        class StageInfo:
            def stageId(self):  # noqa: N802
                return "sample-stage-id-submit"

            def name(self):
                return "run-job"

            def attemptId(self):  # noqa: N802
                return 14

            def failureReason(self):  # noqa: N802
                return FailureReason()

        class MockStageCompleted:
            def stageInfo(self):  # noqa: N802
                return StageInfo()

        return MockStageCompleted()

    return _inner
def test_sentry_listener_on_stage_completed_success(
    sentry_listener, get_mock_stage_completed
):
    """A successful stage produces an info crumb without a failure reason."""
    listener, mock_hub = sentry_listener

    mock_stage_completed = get_mock_stage_completed(failure_reason=False)
    listener.onStageCompleted(mock_stage_completed)

    assert mock_hub.kwargs["level"] == "info"
    assert "sample-stage-id-submit" in mock_hub.kwargs["message"]
    assert mock_hub.kwargs["data"]["attemptId"] == 14
    assert mock_hub.kwargs["data"]["name"] == "run-job"
    assert "reason" not in mock_hub.kwargs["data"]
def test_sentry_listener_on_stage_completed_failure(
    sentry_listener, get_mock_stage_completed
):
    """A failed stage produces a warning crumb carrying the failure reason."""
    listener, mock_hub = sentry_listener

    mock_stage_completed = get_mock_stage_completed(failure_reason=True)
    listener.onStageCompleted(mock_stage_completed)

    assert mock_hub.kwargs["level"] == "warning"
    assert "sample-stage-id-submit" in mock_hub.kwargs["message"]
    assert mock_hub.kwargs["data"]["attemptId"] == 14
    assert mock_hub.kwargs["data"]["name"] == "run-job"
    assert mock_hub.kwargs["data"]["reason"] == "failure-reason"
################
# WORKER TESTS #
################
def test_spark_worker(monkeypatch, sentry_init, capture_events, capture_exceptions):
    """The worker integration reports the original ZeroDivisionError (not the
    SystemExit the worker wraps it in) and tags the event with the Spark task
    context fields."""
    import pyspark.worker as original_worker
    import pyspark.daemon as original_daemon

    from pyspark.taskcontext import TaskContext

    task_context = TaskContext._getOrCreate()

    def mock_main():
        # Populate the task context the integration reads its tags from.
        task_context._stageId = 0
        task_context._attemptNumber = 1
        task_context._partitionId = 2
        task_context._taskAttemptId = 3

        try:
            raise ZeroDivisionError
        except ZeroDivisionError:
            # Mimic a crashing worker process exiting nonzero.
            sys.exit(-1)

    monkeypatch.setattr(original_worker, "main", mock_main)

    sentry_init(integrations=[SparkWorkerIntegration()])

    events = capture_events()
    exceptions = capture_exceptions()

    original_daemon.worker_main()

    # SystemExit called, but not recorded as part of event
    assert type(exceptions.pop()) == SystemExit
    assert len(events[0]["exception"]["values"]) == 1
    assert events[0]["exception"]["values"][0]["type"] == "ZeroDivisionError"

    assert events[0]["tags"] == {
        "stageId": "0",
        "attemptNumber": "1",
        "partitionId": "2",
        "taskAttemptId": "3",
    }
sentry-python-1.4.3/tests/integrations/sqlalchemy/ 0000775 0000000 0000000 00000000000 14125057761 0022404 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/sqlalchemy/__init__.py 0000664 0000000 0000000 00000000061 14125057761 0024512 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("sqlalchemy")
sentry-python-1.4.3/tests/integrations/sqlalchemy/test_sqlalchemy.py 0000664 0000000 0000000 00000017235 14125057761 0026167 0 ustar 00root root 0000000 0000000 import sys
import pytest
from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker
from sentry_sdk import capture_message, start_transaction, configure_scope
from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
from sentry_sdk.utils import json_dumps, MAX_STRING_LENGTH
from sentry_sdk.serializer import MAX_EVENT_BYTES
def test_orm_queries(sentry_init, capture_events):
    """ORM INSERT/SELECT statements show up as "query" breadcrumbs with the
    bound parameters and paramstyle recorded."""
    sentry_init(
        integrations=[SqlalchemyIntegration()], _experiments={"record_sql_params": True}
    )
    events = capture_events()

    Base = declarative_base()  # noqa: N806

    class Person(Base):
        __tablename__ = "person"
        id = Column(Integer, primary_key=True)
        name = Column(String(250), nullable=False)

    class Address(Base):
        __tablename__ = "address"
        id = Column(Integer, primary_key=True)
        street_name = Column(String(250))
        street_number = Column(String(250))
        post_code = Column(String(250), nullable=False)
        person_id = Column(Integer, ForeignKey("person.id"))
        person = relationship(Person)

    engine = create_engine("sqlite:///:memory:")
    Base.metadata.create_all(engine)

    Session = sessionmaker(bind=engine)  # noqa: N806
    session = Session()

    bob = Person(name="Bob")
    session.add(bob)

    # Forces a flush (INSERT) followed by the SELECT.
    assert session.query(Person).first() == bob

    capture_message("hi")

    (event,) = events

    # Timestamps are nondeterministic; strip before comparing.
    for crumb in event["breadcrumbs"]["values"]:
        del crumb["timestamp"]

    assert event["breadcrumbs"]["values"][-2:] == [
        {
            "category": "query",
            "data": {"db.params": ["Bob"], "db.paramstyle": "qmark"},
            "message": "INSERT INTO person (name) VALUES (?)",
            "type": "default",
        },
        {
            "category": "query",
            "data": {"db.params": [1, 0], "db.paramstyle": "qmark"},
            "message": "SELECT person.id AS person_id, person.name AS person_name \n"
            "FROM person\n"
            " LIMIT ? OFFSET ?",
            "type": "default",
        },
    ]
@pytest.mark.skipif(
    sys.version_info < (3,), reason="This sqla usage seems to be broken on Py2"
)
def test_transactions(sentry_init, capture_events, render_span_tree):
    """Nested SAVEPOINT / ROLLBACK activity is recorded as ordered db spans
    under the transaction.

    NOTE(review): the leading whitespace inside the expected tree string was
    reconstructed (2-space child indent) — confirm against render_span_tree's
    actual output format.
    """
    sentry_init(
        integrations=[SqlalchemyIntegration()],
        _experiments={"record_sql_params": True},
        traces_sample_rate=1.0,
    )
    events = capture_events()

    Base = declarative_base()  # noqa: N806

    class Person(Base):
        __tablename__ = "person"
        id = Column(Integer, primary_key=True)
        name = Column(String(250), nullable=False)

    class Address(Base):
        __tablename__ = "address"
        id = Column(Integer, primary_key=True)
        street_name = Column(String(250))
        street_number = Column(String(250))
        post_code = Column(String(250), nullable=False)
        person_id = Column(Integer, ForeignKey("person.id"))
        person = relationship(Person)

    engine = create_engine("sqlite:///:memory:")
    Base.metadata.create_all(engine)

    Session = sessionmaker(bind=engine)  # noqa: N806
    session = Session()

    with start_transaction(name="test_transaction", sampled=True):
        with session.begin_nested():
            session.query(Person).first()

        # Two duplicate-PK inserts, each rolled back inside its savepoint.
        for _ in range(2):
            with pytest.raises(IntegrityError):
                with session.begin_nested():
                    session.add(Person(id=1, name="bob"))
                    session.add(Person(id=1, name="bob"))

        with session.begin_nested():
            session.query(Person).first()

    (event,) = events

    assert (
        render_span_tree(event)
        == """\
- op=null: description=null
  - op="db": description="SAVEPOINT sa_savepoint_1"
  - op="db": description="SELECT person.id AS person_id, person.name AS person_name \\nFROM person\\n LIMIT ? OFFSET ?"
  - op="db": description="RELEASE SAVEPOINT sa_savepoint_1"
  - op="db": description="SAVEPOINT sa_savepoint_2"
  - op="db": description="INSERT INTO person (id, name) VALUES (?, ?)"
  - op="db": description="ROLLBACK TO SAVEPOINT sa_savepoint_2"
  - op="db": description="SAVEPOINT sa_savepoint_3"
  - op="db": description="INSERT INTO person (id, name) VALUES (?, ?)"
  - op="db": description="ROLLBACK TO SAVEPOINT sa_savepoint_3"
  - op="db": description="SAVEPOINT sa_savepoint_4"
  - op="db": description="SELECT person.id AS person_id, person.name AS person_name \\nFROM person\\n LIMIT ? OFFSET ?"
  - op="db": description="RELEASE SAVEPOINT sa_savepoint_4"\
"""
    )
def test_long_sql_query_preserved(sentry_init, capture_events):
    """With smart_transaction_trimming on, a single long query description is
    preserved untruncated (trimming only kicks in on oversized events)."""
    sentry_init(
        traces_sample_rate=1,
        integrations=[SqlalchemyIntegration()],
        _experiments={"smart_transaction_trimming": True},
    )
    events = capture_events()

    engine = create_engine("sqlite:///:memory:")
    with start_transaction(name="test"):
        with engine.connect() as con:
            con.execute(" UNION ".join("SELECT {}".format(i) for i in range(100)))

    (event,) = events
    description = event["spans"][0]["description"]
    assert description.startswith("SELECT 0 UNION SELECT 1")
    assert description.endswith("SELECT 98 UNION SELECT 99")
def test_too_large_event_truncated(sentry_init, capture_events):
    """Oversized transactions get spans dropped and span descriptions
    truncated so the serialized event stays near MAX_EVENT_BYTES, while
    non-span fields are truncated the usual way."""
    sentry_init(
        traces_sample_rate=1,
        integrations=[SqlalchemyIntegration()],
        _experiments={"smart_transaction_trimming": True},
    )
    events = capture_events()

    long_str = "x" * (MAX_STRING_LENGTH + 10)

    with configure_scope() as scope:

        @scope.add_event_processor
        def processor(event, hint):
            # Inflate the event with an over-long message on top of the spans.
            event["message"] = long_str
            return event

    engine = create_engine("sqlite:///:memory:")
    with start_transaction(name="test"):
        with engine.connect() as con:
            for _ in range(2000):
                con.execute(" UNION ".join("SELECT {}".format(i) for i in range(100)))

    (event,) = events

    # Because of attached metadata in the "_meta" key, we may send out a little
    # bit more than MAX_EVENT_BYTES.
    max_bytes = 1.2 * MAX_EVENT_BYTES
    assert len(json_dumps(event)) < max_bytes

    # Some spans are discarded.
    assert len(event["spans"]) == 1000

    # Some spans have their descriptions truncated. Because the test always
    # generates the same amount of descriptions and truncation is deterministic,
    # the number here should never change across test runs.
    #
    # Which exact span descriptions are truncated depends on the span durations
    # of each SQL query and is non-deterministic.
    assert len(event["_meta"]["spans"]) == 537

    for i, span in enumerate(event["spans"]):
        description = span["description"]

        assert description.startswith("SELECT ")
        if str(i) in event["_meta"]["spans"]:
            # Description must have been truncated
            assert len(description) == 10
            assert description.endswith("...")
        else:
            # Description was not truncated, check for original length
            assert len(description) == 1583
            assert description.endswith("SELECT 98 UNION SELECT 99")

    # Smoke check the meta info for one of the spans.
    assert next(iter(event["_meta"]["spans"].values())) == {
        "description": {"": {"len": 1583, "rem": [["!limit", "x", 7, 10]]}}
    }

    # Smoke check that truncation of other fields has not changed.
    assert len(event["message"]) == MAX_STRING_LENGTH

    # The _meta for other truncated fields should be there as well.
    assert event["_meta"]["message"] == {
        "": {"len": 522, "rem": [["!limit", "x", 509, 512]]}
    }
sentry-python-1.4.3/tests/integrations/stdlib/ 0000775 0000000 0000000 00000000000 14125057761 0021523 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/stdlib/test_httplib.py 0000664 0000000 0000000 00000010161 14125057761 0024601 0 ustar 00root root 0000000 0000000 import platform
import sys
import pytest
try:
# py3
from urllib.request import urlopen
except ImportError:
# py2
from urllib import urlopen
try:
# py2
from httplib import HTTPSConnection
except ImportError:
# py3
from http.client import HTTPSConnection
try:
from unittest import mock # python 3.3 and above
except ImportError:
import mock # python < 3.3
from sentry_sdk import capture_message, start_transaction
from sentry_sdk.integrations.stdlib import StdlibIntegration
def test_crumb_capture(sentry_init, capture_events):
    """urlopen leaves an http breadcrumb with url/method/status/reason.

    NOTE(review): talks to the real httpbin.org; flaky without network.
    """
    sentry_init(integrations=[StdlibIntegration()])
    events = capture_events()

    url = "https://httpbin.org/status/200"
    response = urlopen(url)

    assert response.getcode() == 200
    capture_message("Testing!")

    (event,) = events
    (crumb,) = event["breadcrumbs"]["values"]

    assert crumb["type"] == "http"
    assert crumb["category"] == "httplib"
    assert crumb["data"] == {
        "url": url,
        "method": "GET",
        "status_code": 200,
        "reason": "OK",
    }
def test_crumb_capture_hint(sentry_init, capture_events):
    """before_breadcrumb may mutate the crumb data; also checks (on CPython)
    that the integration keeps no hidden references to the response object."""

    def before_breadcrumb(crumb, hint):
        crumb["data"]["extra"] = "foo"
        return crumb

    sentry_init(integrations=[StdlibIntegration()], before_breadcrumb=before_breadcrumb)
    events = capture_events()

    url = "https://httpbin.org/status/200"
    response = urlopen(url)

    assert response.getcode() == 200
    capture_message("Testing!")

    (event,) = events
    (crumb,) = event["breadcrumbs"]["values"]
    assert crumb["type"] == "http"
    assert crumb["category"] == "httplib"
    assert crumb["data"] == {
        "url": url,
        "method": "GET",
        "status_code": 200,
        "reason": "OK",
        "extra": "foo",
    }

    if platform.python_implementation() != "PyPy":
        # Exactly two refs: the local `response` plus getrefcount's argument.
        assert sys.getrefcount(response) == 2
def test_httplib_misuse(sentry_init, capture_events):
    """HTTPConnection.getresponse must be called after every call to
    HTTPConnection.request. However, if somebody does not abide by
    this contract, we still should handle this gracefully and not
    send mixed breadcrumbs.

    Test whether our breadcrumbs are coherent when somebody uses HTTPConnection
    wrongly.
    """
    sentry_init()
    events = capture_events()

    conn = HTTPSConnection("httpbin.org", 443)
    conn.request("GET", "/anything/foo")

    with pytest.raises(Exception):
        # This raises an exception, because we didn't call `getresponse` for
        # the previous request yet.
        #
        # This call should not affect our breadcrumb.
        conn.request("POST", "/anything/bar")

    response = conn.getresponse()
    assert response._method == "GET"

    capture_message("Testing!")

    (event,) = events
    (crumb,) = event["breadcrumbs"]["values"]

    assert crumb["type"] == "http"
    assert crumb["category"] == "httplib"
    # Only the completed GET may appear; the failed POST must not leak into
    # the breadcrumb data.
    assert crumb["data"] == {
        "url": "https://httpbin.org/anything/foo",
        "method": "GET",
        "status_code": 200,
        "reason": "OK",
    }
def test_outgoing_trace_headers(
    sentry_init, monkeypatch, StringContaining  # noqa: N803
):
    # HTTPSConnection.send is passed a string containing (among other things)
    # the headers on the request. Mock it so we can check the headers, and also
    # so it doesn't try to actually talk to the internet.
    mock_send = mock.Mock()
    monkeypatch.setattr(HTTPSConnection, "send", mock_send)

    sentry_init(traces_sample_rate=1.0)

    with start_transaction(
        name="/interactions/other-dogs/new-dog",
        op="greeting.sniff",
        trace_id="12312012123120121231201212312012",
    ) as transaction:
        HTTPSConnection("www.squirrelchasers.com").request("GET", "/top-chasers")

        # The outgoing request's span is the most recently recorded one.
        # NOTE(review): reaches into the private _span_recorder — confirm if
        # the SDK ever exposes a public accessor.
        request_span = transaction._span_recorder.spans[-1]

        # The sentry-trace header must name this transaction's trace id and
        # the request span as parent, with the sampled flag set.
        expected_sentry_trace = (
            "sentry-trace: {trace_id}-{parent_span_id}-{sampled}".format(
                trace_id=transaction.trace_id,
                parent_span_id=request_span.span_id,
                sampled=1,
            )
        )

        mock_send.assert_called_with(StringContaining(expected_sentry_trace))
sentry-python-1.4.3/tests/integrations/stdlib/test_subprocess.py 0000664 0000000 0000000 00000011603 14125057761 0025325 0 ustar 00root root 0000000 0000000 import os
import platform
import subprocess
import sys
import pytest
from sentry_sdk import capture_message, start_transaction
from sentry_sdk._compat import PY2
from sentry_sdk.integrations.stdlib import StdlibIntegration
if PY2:
from collections import Mapping
else:
from collections.abc import Mapping
class ImmutableDict(Mapping):
    """A read-only Mapping view over an existing dict-like object.

    Implements only the abstract Mapping hooks; all other mapping methods
    (get, items, ...) come from the Mapping mixin.
    """

    def __init__(self, inner):
        # Keep a reference to the wrapped mapping; no copy is made.
        self.inner = inner

    def __getitem__(self, key):
        # Delegate lookups straight to the wrapped mapping.
        return self.inner[key]

    def __iter__(self):
        # Yield the wrapped mapping's keys.
        for key in self.inner:
            yield key

    def __len__(self):
        return len(self.inner)
@pytest.mark.parametrize("positional_args", [True, False])
@pytest.mark.parametrize(
    "iterator",
    [
        pytest.param(
            True,
            marks=pytest.mark.skipif(
                platform.python_implementation() == "PyPy",
                reason="https://bitbucket.org/pypy/pypy/issues/3050/subprocesspopen-only-accepts-sequences",
            ),
        ),
        False,
    ],
    ids=("as_iterator", "as_list"),
)
@pytest.mark.parametrize("env_mapping", [None, os.environ, ImmutableDict(os.environ)])
@pytest.mark.parametrize("with_cwd", [True, False])
def test_subprocess_basic(
    sentry_init,
    capture_events,
    monkeypatch,
    positional_args,
    iterator,
    env_mapping,
    with_cwd,
):
    # End-to-end check of the subprocess integration across every Popen
    # calling convention: breadcrumbs, spans, and traceparent propagation
    # into the child process environment.
    monkeypatch.setenv("FOO", "bar")
    old_environ = dict(os.environ)

    sentry_init(integrations=[StdlibIntegration()], traces_sample_rate=1.0)
    events = capture_events()

    with start_transaction(name="foo") as transaction:
        # The child asserts the patched env survives and prints the
        # traceparent headers injected by the integration.
        args = [
            sys.executable,
            "-c",
            "import os; "
            "import sentry_sdk; "
            "from sentry_sdk.integrations.stdlib import get_subprocess_traceparent_headers; "
            "sentry_sdk.init(); "
            "assert os.environ['FOO'] == 'bar'; "
            "print(dict(get_subprocess_traceparent_headers()))",
        ]
        if iterator:
            args = iter(args)

        if positional_args:
            # Exercise the fully-positional Popen signature.
            a = (
                args,
                0,  # bufsize
                None,  # executable
                None,  # stdin
                subprocess.PIPE,  # stdout
                None,  # stderr
                None,  # preexec_fn
                False,  # close_fds
                False,  # shell
                os.getcwd() if with_cwd else None,  # cwd
            )

            if env_mapping is not None:
                a += (env_mapping,)

            popen = subprocess.Popen(*a)

        else:
            kw = {"args": args, "stdout": subprocess.PIPE}

            if with_cwd:
                kw["cwd"] = os.getcwd()

            if env_mapping is not None:
                kw["env"] = env_mapping

            popen = subprocess.Popen(**kw)

        output, unused_err = popen.communicate()
        retcode = popen.poll()
        assert not retcode

    # The integration must not permanently mutate os.environ.
    assert os.environ == old_environ

    # The child printed the traceparent headers; they must carry our trace id.
    assert transaction.trace_id in str(output)

    capture_message("hi")

    (
        transaction_event,
        message_event,
    ) = events

    assert message_event["message"] == "hi"

    data = {"subprocess.cwd": os.getcwd()} if with_cwd else {}

    (crumb,) = message_event["breadcrumbs"]["values"]
    assert crumb == {
        "category": "subprocess",
        "data": data,
        "message": crumb["message"],
        "timestamp": crumb["timestamp"],
        "type": "subprocess",
    }

    if not iterator:
        # With an iterator Popen cannot see the argv, so only check the
        # crumb message when args were passed as a list.
        assert crumb["message"].startswith(sys.executable + " ")

    assert transaction_event["type"] == "transaction"

    (
        subprocess_init_span,
        subprocess_communicate_span,
        subprocess_wait_span,
    ) = transaction_event["spans"]

    assert (
        subprocess_init_span["op"],
        subprocess_communicate_span["op"],
        subprocess_wait_span["op"],
    ) == ("subprocess", "subprocess.communicate", "subprocess.wait")

    # span hierarchy
    assert (
        subprocess_wait_span["parent_span_id"] == subprocess_communicate_span["span_id"]
    )
    assert (
        subprocess_communicate_span["parent_span_id"]
        == subprocess_init_span["parent_span_id"]
        == transaction_event["contexts"]["trace"]["span_id"]
    )

    # common data
    assert (
        subprocess_init_span["tags"]["subprocess.pid"]
        == subprocess_wait_span["tags"]["subprocess.pid"]
        == subprocess_communicate_span["tags"]["subprocess.pid"]
    )

    # data of init span
    assert subprocess_init_span.get("data", {}) == data
    if iterator:
        assert "iterator" in subprocess_init_span["description"]
        assert subprocess_init_span["description"].startswith("<")
    else:
        assert sys.executable + " -c" in subprocess_init_span["description"]
def test_subprocess_invalid_args(sentry_init):
    """The patched Popen must surface TypeError for non-iterable args exactly
    like the unpatched stdlib version does."""
    sentry_init(integrations=[StdlibIntegration()])

    with pytest.raises(TypeError) as exc_info:
        subprocess.Popen(1)

    message = str(exc_info.value)
    assert "'int' object is not iterable" in message
sentry-python-1.4.3/tests/integrations/test_gnu_backtrace.py 0000664 0000000 0000000 00000025201 14125057761 0024443 0 ustar 00root root 0000000 0000000 import pytest
from sentry_sdk import capture_exception
from sentry_sdk.integrations.gnu_backtrace import GnuBacktraceIntegration
LINES = r"""
0. clickhouse-server(StackTrace::StackTrace()+0x16) [0x99d31a6]
1. clickhouse-server(DB::Exception::Exception(std::__cxx11::basic_string, std::allocator > const&, int)+0x22) [0x372c822]
10. clickhouse-server(DB::ActionsVisitor::visit(std::shared_ptr const&)+0x1a12) [0x6ae45d2]
10. clickhouse-server(DB::InterpreterSelectQuery::executeImpl(DB::InterpreterSelectQuery::Pipeline&, std::shared_ptr const&, bool)+0x11af) [0x75c68ff]
10. clickhouse-server(ThreadPoolImpl::worker(std::_List_iterator)+0x1ab) [0x6f90c1b]
11. clickhouse-server() [0xae06ddf]
11. clickhouse-server(DB::ExpressionAnalyzer::getRootActions(std::shared_ptr const&, bool, std::shared_ptr&, bool)+0xdb) [0x6a0a63b]
11. clickhouse-server(DB::InterpreterSelectQuery::InterpreterSelectQuery(std::shared_ptr const&, DB::Context const&, std::shared_ptr const&, std::shared_ptr const&, std::vector, std::allocator >, std::allocator, std::allocator > > > const&, DB::QueryProcessingStage::Enum, unsigned long, bool)+0x5e6) [0x75c7516]
12. /lib/x86_64-linux-gnu/libpthread.so.0(+0x8184) [0x7f3bbc568184]
12. clickhouse-server(DB::ExpressionAnalyzer::getConstActions()+0xc9) [0x6a0b059]
12. clickhouse-server(DB::InterpreterSelectQuery::InterpreterSelectQuery(std::shared_ptr const&, DB::Context const&, std::vector, std::allocator >, std::allocator, std::allocator > > > const&, DB::QueryProcessingStage::Enum, unsigned long, bool)+0x56) [0x75c8276]
13. /lib/x86_64-linux-gnu/libc.so.6(clone+0x6d) [0x7f3bbbb8303d]
13. clickhouse-server(DB::InterpreterSelectWithUnionQuery::InterpreterSelectWithUnionQuery(std::shared_ptr const&, DB::Context const&, std::vector, std::allocator >, std::allocator, std::allocator > > > const&, DB::QueryProcessingStage::Enum, unsigned long, bool)+0x7e7) [0x75d4067]
13. clickhouse-server(DB::evaluateConstantExpression(std::shared_ptr const&, DB::Context const&)+0x3ed) [0x656bfdd]
14. clickhouse-server(DB::InterpreterFactory::get(std::shared_ptr&, DB::Context&, DB::QueryProcessingStage::Enum)+0x3a8) [0x75b0298]
14. clickhouse-server(DB::makeExplicitSet(DB::ASTFunction const*, DB::Block const&, bool, DB::Context const&, DB::SizeLimits const&, std::unordered_map, DB::PreparedSetKey::Hash, std::equal_to, std::allocator > > >&)+0x382) [0x6adf692]
15. clickhouse-server() [0x7664c79]
15. clickhouse-server(DB::ActionsVisitor::makeSet(DB::ASTFunction const*, DB::Block const&)+0x2a7) [0x6ae2227]
16. clickhouse-server(DB::ActionsVisitor::visit(std::shared_ptr const&)+0x1973) [0x6ae4533]
16. clickhouse-server(DB::executeQuery(std::__cxx11::basic_string, std::allocator > const&, DB::Context&, bool, DB::QueryProcessingStage::Enum)+0x8a) [0x76669fa]
17. clickhouse-server(DB::ActionsVisitor::visit(std::shared_ptr const&)+0x1324) [0x6ae3ee4]
17. clickhouse-server(DB::TCPHandler::runImpl()+0x4b9) [0x30973c9]
18. clickhouse-server(DB::ExpressionAnalyzer::getRootActions(std::shared_ptr const&, bool, std::shared_ptr&, bool)+0xdb) [0x6a0a63b]
18. clickhouse-server(DB::TCPHandler::run()+0x2b) [0x30985ab]
19. clickhouse-server(DB::ExpressionAnalyzer::appendGroupBy(DB::ExpressionActionsChain&, bool)+0x100) [0x6a0b4f0]
19. clickhouse-server(Poco::Net::TCPServerConnection::start()+0xf) [0x9b53e4f]
2. clickhouse-server(DB::FunctionTuple::getReturnTypeImpl(std::vector, std::allocator > > const&) const+0x122) [0x3a2a0f2]
2. clickhouse-server(DB::readException(DB::Exception&, DB::ReadBuffer&, std::__cxx11::basic_string, std::allocator > const&)+0x21f) [0x6fb253f]
2. clickhouse-server(void DB::readDateTimeTextFallback(long&, DB::ReadBuffer&, DateLUTImpl const&)+0x318) [0x99ffed8]
20. clickhouse-server(DB::InterpreterSelectQuery::analyzeExpressions(DB::QueryProcessingStage::Enum, bool)+0x364) [0x6437fa4]
20. clickhouse-server(Poco::Net::TCPServerDispatcher::run()+0x16a) [0x9b5422a]
21. clickhouse-server(DB::InterpreterSelectQuery::executeImpl(DB::InterpreterSelectQuery::Pipeline&, std::shared_ptr const&, bool)+0x36d) [0x643c28d]
21. clickhouse-server(Poco::PooledThread::run()+0x77) [0x9c70f37]
22. clickhouse-server(DB::InterpreterSelectQuery::executeWithMultipleStreams()+0x50) [0x643ecd0]
22. clickhouse-server(Poco::ThreadImpl::runnableEntry(void*)+0x38) [0x9c6caa8]
23. clickhouse-server() [0xa3c68cf]
23. clickhouse-server(DB::InterpreterSelectWithUnionQuery::executeWithMultipleStreams()+0x6c) [0x644805c]
24. /lib/x86_64-linux-gnu/libpthread.so.0(+0x8184) [0x7fe839d2d184]
24. clickhouse-server(DB::InterpreterSelectWithUnionQuery::execute()+0x38) [0x6448658]
25. /lib/x86_64-linux-gnu/libc.so.6(clone+0x6d) [0x7fe83934803d]
25. clickhouse-server() [0x65744ef]
26. clickhouse-server(DB::executeQuery(std::__cxx11::basic_string, std::allocator > const&, DB::Context&, bool, DB::QueryProcessingStage::Enum, bool)+0x81) [0x6576141]
27. clickhouse-server(DB::TCPHandler::runImpl()+0x752) [0x3739f82]
28. clickhouse-server(DB::TCPHandler::run()+0x2b) [0x373a5cb]
29. clickhouse-server(Poco::Net::TCPServerConnection::start()+0xf) [0x708e63f]
3. clickhouse-server(DB::Connection::receiveException()+0x81) [0x67d3ad1]
3. clickhouse-server(DB::DefaultFunctionBuilder::getReturnTypeImpl(std::vector > const&) const+0x223) [0x38ac3b3]
3. clickhouse-server(DB::FunctionComparison::executeDateOrDateTimeOrEnumOrUUIDWithConstString(DB::Block&, unsigned long, DB::IColumn const*, DB::IColumn const*, std::shared_ptr const&, std::shared_ptr const&, bool, unsigned long)+0xbb3) [0x411dee3]
30. clickhouse-server(Poco::Net::TCPServerDispatcher::run()+0xe9) [0x708ed79]
31. clickhouse-server(Poco::PooledThread::run()+0x81) [0x7142011]
4. clickhouse-server(DB::Connection::receivePacket()+0x767) [0x67d9cd7]
4. clickhouse-server(DB::FunctionBuilderImpl::getReturnTypeWithoutLowCardinality(std::vector > const&) const+0x75) [0x6869635]
4. clickhouse-server(DB::FunctionComparison::executeImpl(DB::Block&, std::vector > const&, unsigned long, unsigned long)+0x576) [0x41ab006]
5. clickhouse-server(DB::FunctionBuilderImpl::getReturnType(std::vector > const&) const+0x350) [0x6869f10]
5. clickhouse-server(DB::MultiplexedConnections::receivePacket()+0x7e) [0x67e7ede]
5. clickhouse-server(DB::PreparedFunctionImpl::execute(DB::Block&, std::vector > const&, unsigned long, unsigned long)+0x3e2) [0x7933492]
6. clickhouse-server(DB::ExpressionAction::execute(DB::Block&, std::unordered_map, std::allocator >, unsigned long, std::hash, std::allocator > >, std::equal_to, std::allocator > >, std::allocator, std::allocator > const, unsigned long> > >&) const+0x61a) [0x7ae093a]
6. clickhouse-server(DB::FunctionBuilderImpl::build(std::vector > const&) const+0x3c) [0x38accfc]
6. clickhouse-server(DB::RemoteBlockInputStream::readImpl()+0x87) [0x631da97]
7. clickhouse-server(DB::ExpressionActions::addImpl(DB::ExpressionAction, std::vector, std::allocator >, std::allocator, std::allocator > > >&)+0x552) [0x6a00052]
7. clickhouse-server(DB::ExpressionActions::execute(DB::Block&) const+0xe6) [0x7ae1e06]
7. clickhouse-server(DB::IBlockInputStream::read()+0x178) [0x63075e8]
8. clickhouse-server(DB::ExpressionActions::add(DB::ExpressionAction const&, std::vector, std::allocator >, std::allocator, std::allocator > > >&)+0x42) [0x6a00422]
8. clickhouse-server(DB::FilterBlockInputStream::FilterBlockInputStream(std::shared_ptr const&, std::shared_ptr const&, std::__cxx11::basic_string, std::allocator > const&, bool)+0x711) [0x79970d1]
8. clickhouse-server(DB::ParallelInputsProcessor::thread(std::shared_ptr, unsigned long)+0x2f1) [0x64467c1]
9. clickhouse-server() [0x75bd5a3]
9. clickhouse-server(DB::ScopeStack::addAction(DB::ExpressionAction const&)+0xd2) [0x6ae04d2]
9. clickhouse-server(ThreadFromGlobalPool::ThreadFromGlobalPool::process()::{lambda()#1}>(DB::ParallelInputsProcessor::process()::{lambda()#1}&&)::{lambda()#1}::operator()() const+0x6d) [0x644722d]
"""
@pytest.mark.parametrize("input", LINES.strip().splitlines())
def test_basic(sentry_init, capture_events, input):
    # Each sample GNU backtrace line must be stripped out of the exception
    # value and converted into a synthetic stacktrace frame.
    sentry_init(integrations=[GnuBacktraceIntegration()])
    events = capture_events()

    try:
        raise ValueError(input)
    except ValueError:
        capture_exception()

    (event,) = events
    (exception,) = event["exception"]["values"]

    # The backtrace line is consumed entirely, leaving an empty value.
    assert (
        exception["value"]
        == ""
    )
    # Exactly one synthetic frame is appended after the original raise site.
    (frame,) = exception["stacktrace"]["frames"][1:]

    if frame.get("function") is None:
        # Lines without a symbol name: bare addresses or libpthread/libc.
        assert "clickhouse-server()" in input or "pthread" in input
    else:
        # Parsed function names must be clean symbols without parentheses.
        assert ")" not in frame["function"] and "(" not in frame["function"]
        assert frame["function"] in input
sentry-python-1.4.3/tests/integrations/threading/ 0000775 0000000 0000000 00000000000 14125057761 0022207 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/threading/test_threading.py 0000664 0000000 0000000 00000005366 14125057761 0025577 0 ustar 00root root 0000000 0000000 import gc
from threading import Thread
import pytest
from sentry_sdk import configure_scope, capture_message
from sentry_sdk.integrations.threading import ThreadingIntegration
@pytest.mark.forked
@pytest.mark.parametrize("integrations", [[ThreadingIntegration()], []])
def test_handles_exceptions(sentry_init, capture_events, integrations):
    """With ThreadingIntegration a crash in a Thread is reported as an
    unhandled threading error; without it nothing is captured."""
    sentry_init(default_integrations=False, integrations=integrations)
    events = capture_events()

    def crash():
        1 / 0

    worker = Thread(target=crash)
    worker.start()
    worker.join()

    if not integrations:
        assert not events
    else:
        (event,) = events
        (exception,) = event["exception"]["values"]
        assert exception["type"] == "ZeroDivisionError"
        assert exception["mechanism"] == {"type": "threading", "handled": False}
@pytest.mark.forked
@pytest.mark.parametrize("propagate_hub", (True, False))
def test_propagates_hub(sentry_init, capture_events, propagate_hub):
    # With propagate_hub=True, scope data (tags) set in a parent thread must
    # be visible on events captured in child threads; with False it must not.
    sentry_init(
        default_integrations=False,
        integrations=[ThreadingIntegration(propagate_hub=propagate_hub)],
    )
    events = capture_events()

    def stage1():
        with configure_scope() as scope:
            scope.set_tag("stage1", "true")

        # Spawn a nested thread so propagation is tested across two hops.
        t = Thread(target=stage2)
        t.start()
        t.join()

    def stage2():
        1 / 0

    t = Thread(target=stage1)
    t.start()
    t.join()

    (event,) = events

    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    assert exception["mechanism"] == {"type": "threading", "handled": False}

    if propagate_hub:
        assert event["tags"]["stage1"] == "true"
    else:
        assert "stage1" not in event.get("tags", {})
def test_circular_references(sentry_init, request):
    # The Thread patching must not create reference cycles: after the thread
    # object is deleted, a full gc pass should have nothing to collect.
    sentry_init(default_integrations=False, integrations=[ThreadingIntegration()])

    gc.collect()
    gc.disable()
    # Re-enable the collector even if the assertion below fails.
    request.addfinalizer(gc.enable)

    class MyThread(Thread):
        def run(self):
            pass

    t = MyThread()
    t.start()
    t.join()
    del t

    # gc.collect() returns the number of unreachable objects found; any
    # nonzero count means the integration introduced a cycle.
    assert not gc.collect()
@pytest.mark.forked
def test_double_patching(sentry_init, capture_events):
    # Re-initializing must not double-wrap Thread.run: each crashing thread
    # should produce exactly one event, not two.
    sentry_init(default_integrations=False, integrations=[ThreadingIntegration()])
    events = capture_events()

    # XXX: Workaround for race condition in the py library's magic import
    # system (py is a dependency of pytest)
    capture_message("hi")
    del events[:]

    class MyThread(Thread):
        def run(self):
            1 / 0

    ts = []
    for _ in range(10):
        t = MyThread()
        t.start()
        ts.append(t)

    for t in ts:
        t.join()

    # One event per thread — duplicates would indicate double patching.
    assert len(events) == 10
    for event in events:
        (exception,) = event["exception"]["values"]
        assert exception["type"] == "ZeroDivisionError"
sentry-python-1.4.3/tests/integrations/tornado/ 0000775 0000000 0000000 00000000000 14125057761 0021710 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/tornado/__init__.py 0000664 0000000 0000000 00000000070 14125057761 0024016 0 ustar 00root root 0000000 0000000 import pytest
tornado = pytest.importorskip("tornado")
sentry-python-1.4.3/tests/integrations/tornado/test_tornado.py 0000664 0000000 0000000 00000020105 14125057761 0024765 0 ustar 00root root 0000000 0000000 import json
import pytest
from sentry_sdk import configure_scope, start_transaction
from sentry_sdk.integrations.tornado import TornadoIntegration
from tornado.web import RequestHandler, Application, HTTPError
from tornado.testing import AsyncHTTPTestCase
@pytest.fixture
def tornado_testcase(request):
    # Take the unittest class provided by tornado and manually call its setUp
    # and tearDown.
    #
    # The pytest plugins for tornado seem too complicated to use, as they for
    # some reason assume I want to write my tests in async code.
    def inner(app):
        class TestBogus(AsyncHTTPTestCase):
            def get_app(self):
                return app

            def bogustest(self):
                # We need to pass a valid test method name to the ctor, so this
                # is the method. It does nothing.
                pass

        self = TestBogus("bogustest")
        self.setUp()
        # Tear the HTTP server down when the pytest test finishes.
        request.addfinalizer(self.tearDown)
        # The returned testcase doubles as a synchronous HTTP client via
        # its fetch() method.
        return self

    return inner
class CrashingHandler(RequestHandler):
    # Handler that tags the current scope and then raises ZeroDivisionError,
    # used to exercise error capture on both GET and POST.
    def get(self):
        with configure_scope() as scope:
            scope.set_tag("foo", "42")
        1 / 0

    def post(self):
        with configure_scope() as scope:
            scope.set_tag("foo", "43")
        1 / 0
class HelloHandler(RequestHandler):
    # Async handler that tags the scope and finishes successfully (200),
    # used as the non-crashing counterpart of CrashingHandler.
    async def get(self):
        with configure_scope() as scope:
            scope.set_tag("foo", "42")

        return b"hello"

    async def post(self):
        with configure_scope() as scope:
            scope.set_tag("foo", "43")

        return b"hello"
def test_basic(tornado_testcase, sentry_init, capture_events):
    # A crashing tornado handler must yield one event with full request
    # data (cookies included, since send_default_pii=True).
    sentry_init(integrations=[TornadoIntegration()], send_default_pii=True)
    events = capture_events()
    client = tornado_testcase(Application([(r"/hi", CrashingHandler)]))

    response = client.fetch(
        "/hi?foo=bar", headers={"Cookie": "name=value; name2=value2; name3=value3"}
    )
    assert response.code == 500

    (event,) = events
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    assert exception["mechanism"]["type"] == "tornado"

    request = event["request"]
    host = request["headers"]["Host"]
    # Spreading **request["headers"] tolerates extra headers added by the
    # tornado test server while still pinning the ones we care about.
    assert event["request"] == {
        "env": {"REMOTE_ADDR": "127.0.0.1"},
        "headers": {
            "Accept-Encoding": "gzip",
            "Connection": "close",
            "Cookie": "name=value; name2=value2; name3=value3",
            **request["headers"],
        },
        "cookies": {"name": "value", "name2": "value2", "name3": "value3"},
        "method": "GET",
        "query_string": "foo=bar",
        "url": "http://{host}/hi".format(host=host),
    }

    assert event["tags"] == {"foo": "42"}
    assert (
        event["transaction"]
        == "tests.integrations.tornado.test_tornado.CrashingHandler.get"
    )

    with configure_scope() as scope:
        # Handler-scoped tags must not leak into the surrounding scope.
        assert not scope._tags
@pytest.mark.parametrize(
    "handler,code",
    [
        (CrashingHandler, 500),
        (HelloHandler, 200),
    ],
)
def test_transactions(tornado_testcase, sentry_init, capture_events, handler, code):
    # A client-side transaction's trace headers must be continued by the
    # tornado server transaction (and by the error event, if any).
    sentry_init(integrations=[TornadoIntegration()], traces_sample_rate=1.0, debug=True)
    events = capture_events()
    client = tornado_testcase(Application([(r"/hi", handler)]))

    # Finish a client transaction first so its headers can be forwarded.
    with start_transaction(name="client") as span:
        pass

    response = client.fetch(
        "/hi", method="POST", body=b"heyoo", headers=dict(span.iter_headers())
    )
    assert response.code == code

    # Event order: client transaction, then (optionally) the server error,
    # then the server transaction.
    if code == 200:
        client_tx, server_tx = events
        server_error = None
    else:
        client_tx, server_error, server_tx = events

    assert client_tx["type"] == "transaction"
    assert client_tx["transaction"] == "client"

    if server_error is not None:
        assert server_error["exception"]["values"][0]["type"] == "ZeroDivisionError"
        assert (
            server_error["transaction"]
            == "tests.integrations.tornado.test_tornado.CrashingHandler.post"
        )

    if code == 200:
        assert (
            server_tx["transaction"]
            == "tests.integrations.tornado.test_tornado.HelloHandler.post"
        )
    else:
        assert (
            server_tx["transaction"]
            == "tests.integrations.tornado.test_tornado.CrashingHandler.post"
        )

    assert server_tx["type"] == "transaction"

    request = server_tx["request"]
    host = request["headers"]["Host"]
    assert server_tx["request"] == {
        "env": {"REMOTE_ADDR": "127.0.0.1"},
        "headers": {
            "Accept-Encoding": "gzip",
            "Connection": "close",
            **request["headers"],
        },
        "method": "POST",
        "query_string": "",
        "data": {"heyoo": [""]},
        "url": "http://{host}/hi".format(host=host),
    }

    # Client and server events must all share a single trace id.
    assert (
        client_tx["contexts"]["trace"]["trace_id"]
        == server_tx["contexts"]["trace"]["trace_id"]
    )

    if server_error is not None:
        assert (
            server_error["contexts"]["trace"]["trace_id"]
            == server_tx["contexts"]["trace"]["trace_id"]
        )
def test_400_not_logged(tornado_testcase, sentry_init, capture_events):
    """HTTPErrors with a 4xx status are expected flow control and must not
    be reported to Sentry."""
    sentry_init(integrations=[TornadoIntegration()])
    events = capture_events()

    class BadRequestHandler(RequestHandler):
        def get(self):
            raise HTTPError(400, "Oops")

    client = tornado_testcase(Application([(r"/", BadRequestHandler)]))
    assert client.fetch("/").code == 400
    assert not events
def test_user_auth(tornado_testcase, sentry_init, capture_events):
    # With send_default_pii=True the event gains a user context driven by
    # tornado's get_current_user(); without a current user no user context
    # must be attached.
    sentry_init(integrations=[TornadoIntegration()], send_default_pii=True)
    events = capture_events()

    class UserHandler(RequestHandler):
        def get(self):
            1 / 0

        def get_current_user(self):
            return 42

    class NoUserHandler(RequestHandler):
        def get(self):
            1 / 0

    client = tornado_testcase(
        Application([(r"/auth", UserHandler), (r"/noauth", NoUserHandler)])
    )

    # has user
    response = client.fetch("/auth")
    assert response.code == 500

    (event,) = events
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    assert event["user"] == {"is_authenticated": True}

    events.clear()

    # has no user
    response = client.fetch("/noauth")
    assert response.code == 500

    (event,) = events
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    assert "user" not in event
def test_formdata(tornado_testcase, sentry_init, capture_events):
    # urlencoded form bodies must be parsed into request["data"] on the
    # event (the handler echoes tornado's body_arguments via the exception).
    sentry_init(integrations=[TornadoIntegration()], send_default_pii=True)
    events = capture_events()

    class FormdataHandler(RequestHandler):
        def post(self):
            raise ValueError(json.dumps(sorted(self.request.body_arguments)))

    client = tornado_testcase(Application([(r"/form", FormdataHandler)]))
    response = client.fetch(
        "/form?queryarg=1",
        method="POST",
        headers={"Content-Type": "application/x-www-form-urlencoded"},
        body=b"field1=value1&field2=value2",
    )

    assert response.code == 500
    (event,) = events
    (exception,) = event["exception"]["values"]
    # tornado saw both form fields ...
    assert exception["value"] == '["field1", "field2"]'
    # ... and the SDK recorded them as request data (values are lists).
    assert event["request"]["data"] == {"field1": ["value1"], "field2": ["value2"]}
def test_json(tornado_testcase, sentry_init, capture_events):
    # JSON bodies are not form arguments (so body_arguments is empty), but
    # the SDK must still decode them into request["data"].
    sentry_init(integrations=[TornadoIntegration()], send_default_pii=True)
    events = capture_events()

    class FormdataHandler(RequestHandler):
        def post(self):
            raise ValueError(json.dumps(sorted(self.request.body_arguments)))

    client = tornado_testcase(Application([(r"/form", FormdataHandler)]))
    response = client.fetch(
        "/form?queryarg=1",
        method="POST",
        headers={"Content-Type": "application/json"},
        body=b"""
        {"foo": {"bar": 42}}
        """,
    )

    assert response.code == 500
    (event,) = events
    (exception,) = event["exception"]["values"]
    # tornado's form parser saw no arguments for a JSON body.
    assert exception["value"] == "[]"
    assert event
    assert event["request"]["data"] == {"foo": {"bar": 42}}
sentry-python-1.4.3/tests/integrations/trytond/ 0000775 0000000 0000000 00000000000 14125057761 0021745 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/trytond/test_trytond.py 0000664 0000000 0000000 00000007040 14125057761 0025062 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("trytond")
import json
import unittest.mock
import trytond
from trytond.exceptions import TrytonException as TrytondBaseException
from trytond.exceptions import UserError as TrytondUserError
from trytond.exceptions import UserWarning as TrytondUserWarning
from trytond.exceptions import LoginException
from trytond.wsgi import app as trytond_app
from werkzeug.test import Client
from sentry_sdk import last_event_id
from sentry_sdk.integrations.trytond import TrytondWSGIIntegration
@pytest.fixture(scope="function")
def app(sentry_init):
    # Depends on sentry_init so the SDK is (re)configurable per test before
    # the shared trytond WSGI app singleton is handed out.
    yield trytond_app
@pytest.fixture
def get_client(app):
    """Fixture: factory that builds werkzeug test clients bound to the
    trytond WSGI app."""

    def factory():
        return Client(app)

    return factory
@pytest.mark.parametrize(
    "exception", [Exception("foo"), type("FooException", (Exception,), {})("bar")]
)
def test_exceptions_captured(
    sentry_init, app, capture_exceptions, get_client, exception
):
    # Arbitrary exceptions raised from a trytond route must reach Sentry
    # as the very same exception object.
    sentry_init(integrations=[TrytondWSGIIntegration()])

    exceptions = capture_exceptions()

    # Stash the parametrized exception on a mock sentinel so the route
    # closure can retrieve it.
    unittest.mock.sentinel.exception = exception

    @app.route("/exception")
    def _(request):
        raise unittest.mock.sentinel.exception

    client = get_client()
    _ = client.get("/exception")

    (e,) = exceptions
    assert e is exception
@pytest.mark.parametrize(
    "exception",
    [
        TrytondUserError("title"),
        TrytondUserWarning("title", "details"),
        LoginException("title", "details"),
    ],
)
def test_trytonderrors_not_captured(
    sentry_init, app, capture_exceptions, get_client, exception
):
    # Trytond's own user-facing exceptions are expected flow control and
    # must not be reported to Sentry.
    sentry_init(integrations=[TrytondWSGIIntegration()])

    exceptions = capture_exceptions()

    unittest.mock.sentinel.exception = exception

    @app.route("/usererror")
    def _(request):
        raise unittest.mock.sentinel.exception

    client = get_client()
    _ = client.get("/usererror")

    assert not exceptions
@pytest.mark.skipif(
    # Compare numeric (major, minor) tuples.  The previous check,
    # `trytond.__version__.split(".") < ["5", "4"]`, compared *strings*
    # lexicographically, so e.g. Trytond 10.x ("10" < "5") would have been
    # wrongly skipped.
    tuple(int(part) for part in trytond.__version__.split(".")[:2]) < (5, 4),
    reason="At least Trytond-5.4 required",
)
def test_rpc_error_page(sentry_init, app, capture_events, get_client):
    """Test that, after initializing the Trytond-SentrySDK integration
    a custom error handler can be registered to the Trytond WSGI app so as to
    inform the event identifiers to the Tryton RPC client"""

    sentry_init(integrations=[TrytondWSGIIntegration()])
    events = capture_events()

    @app.route("/rpcerror", methods=["POST"])
    def _(request):
        raise Exception("foo")

    @app.error_handler
    def _(app, request, e):
        # Let Trytond's own exceptions pass through unchanged; wrap anything
        # else in a UserError carrying the Sentry event id so the RPC client
        # can display it to the user.
        if isinstance(e, TrytondBaseException):
            return
        else:
            event_id = last_event_id()
            data = TrytondUserError(str(event_id), str(e))
            return app.make_response(request, data)

    client = get_client()

    # This would look like a natural Tryton RPC call
    _data = dict(
        id=42,  # request sequence
        method="class.method",  # rpc call
        params=[
            [1234],  # ids
            ["bar", "baz"],  # values
            dict(  # context
                client="12345678-9abc-def0-1234-56789abc",
                groups=[1],
                language="ca",
                language_direction="ltr",
            ),
        ],
    )
    response = client.post(
        "/rpcerror", content_type="application/json", data=json.dumps(_data)
    )

    (event,) = events
    (content, status, headers) = response
    data = json.loads(next(content))
    assert status == "200 OK"
    assert headers.get("Content-Type") == "application/json"
    # The RPC error payload must echo the captured event's id back to the
    # Tryton client alongside the original message.
    assert data == dict(id=42, error=["UserError", [event["event_id"], "foo", None]])
sentry-python-1.4.3/tests/integrations/wsgi/ 0000775 0000000 0000000 00000000000 14125057761 0021213 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/integrations/wsgi/test_wsgi.py 0000664 0000000 0000000 00000015161 14125057761 0023601 0 ustar 00root root 0000000 0000000 from werkzeug.test import Client
import pytest
import sentry_sdk
from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
try:
from unittest import mock # python 3.3 and above
except ImportError:
import mock # python < 3.3
@pytest.fixture
def crashing_app():
    """Fixture: a minimal WSGI callable that always raises
    ZeroDivisionError before producing any response."""

    def broken_app(environ, start_response):
        # Deliberate unconditional crash.
        1 / 0

    return broken_app
class IterableApp(object):
    """WSGI app whose call returns a pre-supplied iterable response body,
    ignoring the environ and start_response arguments entirely."""

    def __init__(self, iterable):
        # The iterable handed back verbatim on every call.
        self.iterable = iterable

    def __call__(self, environ, start_response):
        return self.iterable
class ExitingIterable(object):
    """Iterator whose every step raises the exception produced by the
    supplied zero-argument factory (used to simulate apps that bail out
    mid-response)."""

    def __init__(self, exc_func):
        # Called lazily so each next() raises a fresh exception instance.
        self._exc_func = exc_func

    def __iter__(self):
        return self

    def __next__(self):
        exc = self._exc_func()
        raise exc

    def next(self):
        # Python 2 iteration protocol; dispatch through the class so
        # subclass overrides of __next__ are honored.
        return type(self).__next__(self)
def test_basic(sentry_init, crashing_app, capture_events):
    # A crash inside the wrapped WSGI app must re-raise to the server while
    # also producing one event with the reconstructed request data.
    sentry_init(send_default_pii=True)
    app = SentryWsgiMiddleware(crashing_app)
    client = Client(app)

    events = capture_events()

    with pytest.raises(ZeroDivisionError):
        client.get("/")

    (event,) = events

    assert event["transaction"] == "generic WSGI request"
    assert event["request"] == {
        "env": {"SERVER_NAME": "localhost", "SERVER_PORT": "80"},
        "headers": {"Host": "localhost"},
        "method": "GET",
        "query_string": "",
        "url": "http://localhost/",
    }
@pytest.mark.parametrize("zero_code", [0, None])
def test_systemexit_zero_is_ignored(sentry_init, capture_events, zero_code):
    """SystemExit(0) / SystemExit(None) signal a clean shutdown and must not
    be reported as errors.

    NOTE(review): this was previously decorated with
    ``@pytest.fixture(params=...)``, which registers the function as a
    fixture instead of a test — pytest silently never ran it.
    ``@pytest.mark.parametrize`` restores it as a parametrized test.
    """
    sentry_init(send_default_pii=True)
    iterable = ExitingIterable(lambda: SystemExit(zero_code))
    app = SentryWsgiMiddleware(IterableApp(iterable))
    client = Client(app)
    events = capture_events()

    with pytest.raises(SystemExit):
        client.get("/")

    assert len(events) == 0
@pytest.mark.parametrize("nonzero_code", ["", "foo", 1, 2])
def test_systemexit_nonzero_is_captured(sentry_init, capture_events, nonzero_code):
    """SystemExit with a non-zero / non-None code is a real failure and must
    be captured at error level.

    NOTE(review): this was previously decorated with
    ``@pytest.fixture(params=...)``, which registers the function as a
    fixture instead of a test — pytest silently never ran it.
    ``@pytest.mark.parametrize`` restores it as a parametrized test.
    """
    sentry_init(send_default_pii=True)
    iterable = ExitingIterable(lambda: SystemExit(nonzero_code))
    app = SentryWsgiMiddleware(IterableApp(iterable))
    client = Client(app)
    events = capture_events()

    with pytest.raises(SystemExit):
        client.get("/")

    (event,) = events
    assert "exception" in event
    exc = event["exception"]["values"][-1]
    assert exc["type"] == "SystemExit"
    assert exc["value"] == nonzero_code
    assert event["level"] == "error"
def test_keyboard_interrupt_is_captured(sentry_init, capture_events):
    # Unlike a clean SystemExit, KeyboardInterrupt is always captured as an
    # error-level event and then re-raised.
    sentry_init(send_default_pii=True)
    iterable = ExitingIterable(lambda: KeyboardInterrupt())
    app = SentryWsgiMiddleware(IterableApp(iterable))
    client = Client(app)
    events = capture_events()

    with pytest.raises(KeyboardInterrupt):
        client.get("/")

    (event,) = events

    assert "exception" in event
    exc = event["exception"]["values"][-1]
    assert exc["type"] == "KeyboardInterrupt"
    assert exc["value"] == ""
    assert event["level"] == "error"
def test_transaction_with_error(
    sentry_init, crashing_app, capture_events, DictionaryContaining  # noqa:N803
):
    # An exception inside a traced request must produce both an error event
    # and a transaction envelope sharing the same trace context.
    def dogpark(environ, start_response):
        raise Exception("Fetch aborted. The ball was not returned.")

    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
    app = SentryWsgiMiddleware(dogpark)
    client = Client(app)
    events = capture_events()

    with pytest.raises(Exception):
        client.get("http://dogs.are.great/sit/stay/rollover/")

    error_event, envelope = events

    assert error_event["transaction"] == "generic WSGI request"
    assert error_event["contexts"]["trace"]["op"] == "http.server"
    assert error_event["exception"]["values"][0]["type"] == "Exception"
    assert (
        error_event["exception"]["values"][0]["value"]
        == "Fetch aborted. The ball was not returned."
    )

    assert envelope["type"] == "transaction"

    # event trace context is a subset of envelope trace context
    assert envelope["contexts"]["trace"] == DictionaryContaining(
        error_event["contexts"]["trace"]
    )
    assert envelope["contexts"]["trace"]["status"] == "internal_error"
    assert envelope["transaction"] == error_event["transaction"]
    assert envelope["request"] == error_event["request"]
def test_transaction_no_error(
    sentry_init, capture_events, DictionaryContaining  # noqa:N803
):
    """A successful traced WSGI request emits exactly one transaction
    envelope with the expected name, op, and request data."""

    def dogpark(environ, start_response):
        start_response("200 OK", [])
        return ["Go get the ball! Good dog!"]

    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
    client = Client(SentryWsgiMiddleware(dogpark))
    events = capture_events()

    client.get("/dogs/are/great/")

    envelope = events[0]
    assert envelope["type"] == "transaction"
    assert envelope["transaction"] == "generic WSGI request"
    assert envelope["contexts"]["trace"]["op"] == "http.server"
    assert envelope["request"] == DictionaryContaining(
        {"method": "GET", "url": "http://localhost/dogs/are/great/"}
    )
def test_traces_sampler_gets_correct_values_in_sampling_context(
    sentry_init, DictionaryContaining, ObjectDescribedBy  # noqa:N803
):
    """The WSGI integration passes the request's environ dict to the
    traces_sampler callback inside the sampling context."""

    def wsgi_app(environ, start_response):
        start_response("200 OK", [])
        return ["Go get the ball! Good dog!"]

    traces_sampler = mock.Mock(return_value=True)
    sentry_init(send_default_pii=True, traces_sampler=traces_sampler)
    app = SentryWsgiMiddleware(wsgi_app)
    client = Client(app)

    client.get("/dogs/are/great/")

    # The sampler must have seen (at least) the path and method of the
    # request in a "wsgi_environ" entry of the sampling context.
    traces_sampler.assert_any_call(
        DictionaryContaining(
            {
                "wsgi_environ": DictionaryContaining(
                    {
                        "PATH_INFO": "/dogs/are/great/",
                        "REQUEST_METHOD": "GET",
                    },
                ),
            }
        )
    )
def test_session_mode_defaults_to_request_mode_in_wsgi_handler(
    capture_envelopes, sentry_init
):
    """
    Even though the default ``session_mode`` for auto_session_tracking is
    ``application``, it flips to ``request`` inside the WSGI handler, so
    sessions arrive pre-aggregated rather than as individual updates.
    """

    def wsgi_app(environ, start_response):
        start_response("200 OK", [])
        return ["Go get the ball! Good dog!"]

    traces_sampler = mock.Mock(return_value=True)
    sentry_init(send_default_pii=True, traces_sampler=traces_sampler)
    app = SentryWsgiMiddleware(wsgi_app)

    envelopes = capture_envelopes()

    client = Client(app)
    client.get("/dogs/are/great/")

    sentry_sdk.flush()

    # Second envelope carries the session data for the request.
    sess = envelopes[1]
    assert len(sess.items) == 1
    sess_event = sess.items[0].payload.json

    # Request-mode sessions are reported as aggregates, not single sessions.
    aggregates = sess_event["aggregates"]
    assert len(aggregates) == 1
    assert aggregates[0]["exited"] == 1
sentry-python-1.4.3/tests/test_basics.py 0000664 0000000 0000000 00000022767 14125057761 0020427 0 ustar 00root root 0000000 0000000 import os
import logging
import pytest
from sentry_sdk import (
Client,
push_scope,
configure_scope,
capture_event,
capture_exception,
capture_message,
add_breadcrumb,
last_event_id,
Hub,
)
from sentry_sdk.integrations import _AUTO_ENABLING_INTEGRATIONS
from sentry_sdk.integrations.logging import LoggingIntegration
def test_processors(sentry_init, capture_events):
    """An error processor added via the scope can mutate the event payload
    for matching exception types before capture."""
    sentry_init()
    events = capture_events()

    with configure_scope() as scope:

        def error_processor(event, exc_info):
            # Append to the exception message to prove we ran.
            event["exception"]["values"][0]["value"] += " whatever"
            return event

        scope.add_error_processor(error_processor, ValueError)

    try:
        raise ValueError("aha!")
    except Exception:
        capture_exception()

    (event,) = events
    assert event["exception"]["values"][0]["value"] == "aha! whatever"
def test_auto_enabling_integrations_catches_import_error(sentry_init, caplog):
    """Missing optional integrations must be logged at debug level instead
    of crashing init when auto_enabling_integrations is on."""
    caplog.set_level(logging.DEBUG)

    sentry_init(auto_enabling_integrations=True, debug=True)

    # In this bare test environment none of the auto-enabling integrations'
    # frameworks are installed, so each must produce a debug log record.
    for import_string in _AUTO_ENABLING_INTEGRATIONS:
        assert any(
            record.message.startswith(
                "Did not import default integration {}:".format(import_string)
            )
            for record in caplog.records
        )
def test_event_id(sentry_init, capture_events):
    """capture_exception returns a 32-hex-char event id that is tracked by
    last_event_id(); transactions do not overwrite the last error id."""
    sentry_init()
    events = capture_events()

    try:
        raise ValueError("aha!")
    except Exception:
        event_id = capture_exception()

    int(event_id, 16)  # must be valid hexadecimal
    assert len(event_id) == 32

    (event,) = events
    assert event["event_id"] == event_id
    assert last_event_id() == event_id
    assert Hub.current.last_event_id() == event_id

    new_event_id = Hub.current.capture_event({"type": "transaction"})
    assert new_event_id is not None
    assert new_event_id != event_id
    # Transactions are not "errors" -> last_event_id stays put.
    assert Hub.current.last_event_id() == event_id
def test_option_callback(sentry_init, capture_events):
    """before_send / before_breadcrumb may enrich their payload or drop it
    entirely by returning None."""
    drop_events = False
    drop_breadcrumbs = False

    def before_send(event, hint):
        assert isinstance(hint["exc_info"][1], ValueError)
        if not drop_events:
            event["extra"] = {"foo": "bar"}
            return event
        # Returning None (implicitly) drops the event.

    def before_breadcrumb(crumb, hint):
        assert hint == {"foo": 42}
        if not drop_breadcrumbs:
            crumb["data"] = {"foo": "bar"}
            return crumb
        # Returning None (implicitly) drops the breadcrumb.

    sentry_init(before_send=before_send, before_breadcrumb=before_breadcrumb)
    events = capture_events()

    def do_this():
        add_breadcrumb(message="Hello", hint={"foo": 42})
        try:
            raise ValueError("aha!")
        except Exception:
            capture_exception()

    do_this()
    drop_breadcrumbs = True
    do_this()
    drop_events = True
    do_this()  # this one is dropped by before_send

    normal, no_crumbs = events

    assert normal["exception"]["values"][0]["type"] == "ValueError"
    (crumb,) = normal["breadcrumbs"]["values"]
    assert "timestamp" in crumb
    assert crumb["message"] == "Hello"
    assert crumb["data"] == {"foo": "bar"}
    assert crumb["type"] == "default"
def test_breadcrumb_arguments(sentry_init, capture_events):
    """add_breadcrumb accepts the crumb positionally, as a keyword, or as
    loose kwargs; the hint defaults to an empty dict when omitted."""
    assert_hint = {"bar": 42}

    def before_breadcrumb(crumb, hint):
        assert crumb["foo"] == 42
        assert hint == assert_hint

    sentry_init(before_breadcrumb=before_breadcrumb)

    # All four call shapes must produce the same crumb/hint pair.
    add_breadcrumb(foo=42, hint=dict(bar=42))
    add_breadcrumb(dict(foo=42), dict(bar=42))
    add_breadcrumb(dict(foo=42), hint=dict(bar=42))
    add_breadcrumb(crumb=dict(foo=42), hint=dict(bar=42))

    # With no hint given, the callback should see an empty hint.
    assert_hint.clear()
    add_breadcrumb(foo=42)
    add_breadcrumb(crumb=dict(foo=42))
def test_push_scope(sentry_init, capture_events):
    """Scope state (here: level) set inside push_scope() applies to events
    captured within that scope."""
    sentry_init()
    events = capture_events()

    with push_scope() as scope:
        scope.level = "warning"
        try:
            1 / 0
        except Exception as e:
            capture_exception(e)

    (event,) = events
    assert event["level"] == "warning"
    assert "exception" in event
def test_push_scope_null_client(sentry_init, capture_events):
    """With the client unbound, captures inside push_scope() are no-ops."""
    sentry_init()
    events = capture_events()

    Hub.current.bind_client(None)

    with push_scope() as scope:
        scope.level = "warning"
        try:
            1 / 0
        except Exception as e:
            capture_exception(e)

    assert len(events) == 0
@pytest.mark.parametrize("null_client", (True, False))
def test_push_scope_callback(sentry_init, null_client, capture_events):
    """push_scope used as a decorator runs the callback with a fresh scope
    and pops it afterwards, with or without a bound client."""
    sentry_init()

    if null_client:
        Hub.current.bind_client(None)

    outer_scope = Hub.current.scope

    calls = []

    @push_scope
    def _(scope):
        assert scope is Hub.current.scope
        assert scope is not outer_scope
        calls.append(1)

    # push_scope always needs to execute the callback regardless of
    # client state, because that actually runs usercode in it, not
    # just scope config code
    assert calls == [1]

    # Assert scope gets popped correctly
    assert Hub.current.scope is outer_scope
def test_breadcrumbs(sentry_init, capture_events):
    """Only the newest max_breadcrumbs crumbs are kept, and scope.clear()
    discards all accumulated crumbs."""
    sentry_init(max_breadcrumbs=10)
    events = capture_events()

    for i in range(20):
        add_breadcrumb(
            category="auth", message="Authenticated user %s" % i, level="info"
        )

    capture_exception(ValueError())
    (event,) = events

    # Of 20 crumbs only the last 10 survive, oldest first.
    assert len(event["breadcrumbs"]["values"]) == 10
    assert "user 10" in event["breadcrumbs"]["values"][0]["message"]
    assert "user 19" in event["breadcrumbs"]["values"][-1]["message"]

    del events[:]

    for i in range(2):
        add_breadcrumb(
            category="auth", message="Authenticated user %s" % i, level="info"
        )
    with configure_scope() as scope:
        scope.clear()

    capture_exception(ValueError())
    (event,) = events
    assert len(event["breadcrumbs"]["values"]) == 0
def test_attachments(sentry_init, capture_envelopes):
    """Byte and file-path attachments added to the scope travel alongside
    the event in the same envelope with correct headers and payloads."""
    sentry_init()
    envelopes = capture_envelopes()

    # Strip a trailing "c" so a .pyc path resolves back to the .py source.
    this_file = os.path.abspath(__file__.rstrip("c"))

    with configure_scope() as scope:
        scope.add_attachment(bytes=b"Hello World!", filename="message.txt")
        scope.add_attachment(path=this_file)

    capture_exception(ValueError())

    (envelope,) = envelopes
    # One event item plus the two attachment items.
    assert len(envelope.items) == 3
    assert envelope.get_event()["exception"] is not None

    attachments = [x for x in envelope.items if x.type == "attachment"]
    (message, pyfile) = attachments

    assert message.headers["filename"] == "message.txt"
    assert message.headers["type"] == "attachment"
    assert message.headers["content_type"] == "text/plain"
    assert message.payload.bytes == message.payload.get_bytes() == b"Hello World!"

    assert pyfile.headers["filename"] == os.path.basename(this_file)
    assert pyfile.headers["type"] == "attachment"
    assert pyfile.headers["content_type"].startswith("text/")
    # Path attachments are read lazily, not stored in memory.
    assert pyfile.payload.bytes is None
    with open(this_file, "rb") as f:
        assert pyfile.payload.get_bytes() == f.read()
def test_integration_scoping(sentry_init, capture_events):
    """Integrations belong to the client that enabled them; re-initializing
    without the logging integration stops event capture from log records."""
    logger = logging.getLogger("test_basics")

    # This client uses the logging integration
    logging_integration = LoggingIntegration(event_level=logging.WARNING)
    sentry_init(default_integrations=False, integrations=[logging_integration])
    events = capture_events()

    logger.warning("This is a warning")
    assert len(events) == 1

    # This client does not
    sentry_init(default_integrations=False)
    events = capture_events()

    logger.warning("This is not a warning")
    assert not events
def test_client_initialized_within_scope(sentry_init, caplog):
    """Binding a new client inside a pushed scope triggers a warning log."""
    caplog.set_level(logging.WARNING)

    sentry_init(debug=True)

    with push_scope():
        Hub.current.bind_client(Client())

    (record,) = (x for x in caplog.records if x.levelname == "WARNING")

    assert record.msg.startswith("init() called inside of pushed scope.")
def test_scope_leaks_cleaned_up(sentry_init, caplog):
    """A scope pushed but never popped inside a with-block is cleaned up on
    exit and reported via a warning log."""
    caplog.set_level(logging.WARNING)

    sentry_init(debug=True)

    old_stack = list(Hub.current._stack)

    with push_scope():
        push_scope()  # deliberately leaked

    assert Hub.current._stack == old_stack

    (record,) = (x for x in caplog.records if x.levelname == "WARNING")
    assert record.message.startswith("Leaked 1 scopes:")
def test_scope_popped_too_soon(sentry_init, caplog):
    """Popping a scope manually inside push_scope() is detected, the stack
    is restored, and an error is logged."""
    caplog.set_level(logging.ERROR)

    sentry_init(debug=True)

    old_stack = list(Hub.current._stack)

    with push_scope():
        Hub.current.pop_scope_unsafe()  # pops the scope push_scope() owns

    assert Hub.current._stack == old_stack

    (record,) = (x for x in caplog.records if x.levelname == "ERROR")
    assert record.message == ("Scope popped too soon. Popped 1 scopes too many.")
def test_scope_event_processor_order(sentry_init, capture_events):
    """Scope event processors run outermost-first, and before_send runs
    last: message becomes "hi" + "foo" + "bar" + "baz"."""

    def before_send(event, hint):
        event["message"] += "baz"
        return event

    sentry_init(debug=True, before_send=before_send)
    events = capture_events()

    with push_scope() as scope:

        @scope.add_event_processor
        def foo(event, hint):
            event["message"] += "foo"
            return event

        with push_scope() as scope:

            @scope.add_event_processor
            def bar(event, hint):
                event["message"] += "bar"
                return event

            capture_message("hi")

    (event,) = events
    assert event["message"] == "hifoobarbaz"
def test_capture_event_with_scope_kwargs(sentry_init, capture_events):
    """capture_event forwards scope kwargs such as level and extras onto
    the captured event."""
    sentry_init(debug=True)
    events = capture_events()

    capture_event({}, level="info", extras={"foo": "bar"})

    (event,) = events
    assert event["level"] == "info"
    assert event["extra"]["foo"] == "bar"
sentry-python-1.4.3/tests/test_client.py 0000664 0000000 0000000 00000057625 14125057761 0020442 0 ustar 00root root 0000000 0000000 # coding: utf-8
import os
import json
import pytest
import subprocess
import sys
import time
from textwrap import dedent
from sentry_sdk import (
Hub,
Client,
add_breadcrumb,
configure_scope,
capture_message,
capture_exception,
capture_event,
start_transaction,
set_tag,
)
from sentry_sdk.integrations.executing import ExecutingIntegration
from sentry_sdk.transport import Transport
from sentry_sdk._compat import reraise, text_type, PY2
from sentry_sdk.utils import HAS_CHAINED_EXCEPTIONS
from sentry_sdk.serializer import MAX_DATABAG_BREADTH
from sentry_sdk.consts import DEFAULT_MAX_BREADCRUMBS
if PY2:
# Importing ABCs from collections is deprecated, and will stop working in 3.8
# https://github.com/python/cpython/blob/master/Lib/collections/__init__.py#L49
from collections import Mapping
else:
# New in 3.3
# https://docs.python.org/3/library/collections.abc.html
from collections.abc import Mapping
class EventCaptured(Exception):
    """Raised by the test transport instead of sending; carries the event
    payload in ``args`` so tests can assert on capture."""

    pass
class _TestTransport(Transport):
    """Transport stub that raises instead of sending over the network."""

    def capture_event(self, event):
        # Surface the would-be-sent event to the test via the exception.
        raise EventCaptured(event)
def test_transport_option(monkeypatch):
    """A transport's own DSN does not override the client DSN resolution
    (explicit argument / SENTRY_DSN env var)."""
    if "SENTRY_DSN" in os.environ:
        monkeypatch.delenv("SENTRY_DSN")

    dsn = "https://foo@sentry.io/123"
    dsn2 = "https://bar@sentry.io/124"
    assert str(Client(dsn=dsn).dsn) == dsn
    assert Client().dsn is None

    monkeypatch.setenv("SENTRY_DSN", dsn)
    transport = Transport({"dsn": dsn2})
    assert text_type(transport.parsed_dsn) == dsn2
    # The env DSN wins over the transport's parsed DSN.
    assert str(Client(transport=transport).dsn) == dsn
@pytest.mark.parametrize(
"testcase",
[
{
"dsn": "http://foo@sentry.io/123",
"env_http_proxy": None,
"env_https_proxy": None,
"arg_http_proxy": "http://localhost/123",
"arg_https_proxy": None,
"expected_proxy_scheme": "http",
},
{
"dsn": "https://foo@sentry.io/123",
"env_http_proxy": None,
"env_https_proxy": None,
"arg_http_proxy": "https://localhost/123",
"arg_https_proxy": None,
"expected_proxy_scheme": "https",
},
{
"dsn": "http://foo@sentry.io/123",
"env_http_proxy": None,
"env_https_proxy": None,
"arg_http_proxy": "http://localhost/123",
"arg_https_proxy": "https://localhost/123",
"expected_proxy_scheme": "http",
},
{
"dsn": "https://foo@sentry.io/123",
"env_http_proxy": None,
"env_https_proxy": None,
"arg_http_proxy": "http://localhost/123",
"arg_https_proxy": "https://localhost/123",
"expected_proxy_scheme": "https",
},
{
"dsn": "https://foo@sentry.io/123",
"env_http_proxy": None,
"env_https_proxy": None,
"arg_http_proxy": "http://localhost/123",
"arg_https_proxy": None,
"expected_proxy_scheme": "http",
},
{
"dsn": "http://foo@sentry.io/123",
"env_http_proxy": None,
"env_https_proxy": None,
"arg_http_proxy": None,
"arg_https_proxy": None,
"expected_proxy_scheme": None,
},
{
"dsn": "http://foo@sentry.io/123",
"env_http_proxy": "http://localhost/123",
"env_https_proxy": None,
"arg_http_proxy": None,
"arg_https_proxy": None,
"expected_proxy_scheme": "http",
},
{
"dsn": "https://foo@sentry.io/123",
"env_http_proxy": None,
"env_https_proxy": "https://localhost/123",
"arg_http_proxy": None,
"arg_https_proxy": None,
"expected_proxy_scheme": "https",
},
{
"dsn": "https://foo@sentry.io/123",
"env_http_proxy": "http://localhost/123",
"env_https_proxy": None,
"arg_http_proxy": None,
"arg_https_proxy": None,
"expected_proxy_scheme": "http",
},
{
"dsn": "https://foo@sentry.io/123",
"env_http_proxy": "http://localhost/123",
"env_https_proxy": "https://localhost/123",
"arg_http_proxy": "",
"arg_https_proxy": "",
"expected_proxy_scheme": None,
},
{
"dsn": "https://foo@sentry.io/123",
"env_http_proxy": "http://localhost/123",
"env_https_proxy": "https://localhost/123",
"arg_http_proxy": None,
"arg_https_proxy": None,
"expected_proxy_scheme": "https",
},
{
"dsn": "https://foo@sentry.io/123",
"env_http_proxy": "http://localhost/123",
"env_https_proxy": None,
"arg_http_proxy": None,
"arg_https_proxy": None,
"expected_proxy_scheme": "http",
},
{
"dsn": "https://foo@sentry.io/123",
"env_http_proxy": "http://localhost/123",
"env_https_proxy": "https://localhost/123",
"arg_http_proxy": None,
"arg_https_proxy": "",
"expected_proxy_scheme": "http",
},
{
"dsn": "https://foo@sentry.io/123",
"env_http_proxy": "http://localhost/123",
"env_https_proxy": "https://localhost/123",
"arg_http_proxy": "",
"arg_https_proxy": None,
"expected_proxy_scheme": "https",
},
{
"dsn": "https://foo@sentry.io/123",
"env_http_proxy": None,
"env_https_proxy": "https://localhost/123",
"arg_http_proxy": None,
"arg_https_proxy": "",
"expected_proxy_scheme": None,
},
{
"dsn": "http://foo@sentry.io/123",
"env_http_proxy": "http://localhost/123",
"env_https_proxy": "https://localhost/123",
"arg_http_proxy": None,
"arg_https_proxy": None,
"expected_proxy_scheme": "http",
},
# NO_PROXY testcases
{
"dsn": "http://foo@sentry.io/123",
"env_http_proxy": "http://localhost/123",
"env_https_proxy": None,
"env_no_proxy": "sentry.io,example.com",
"arg_http_proxy": None,
"arg_https_proxy": None,
"expected_proxy_scheme": None,
},
{
"dsn": "https://foo@sentry.io/123",
"env_http_proxy": None,
"env_https_proxy": "https://localhost/123",
"env_no_proxy": "example.com,sentry.io",
"arg_http_proxy": None,
"arg_https_proxy": None,
"expected_proxy_scheme": None,
},
{
"dsn": "http://foo@sentry.io/123",
"env_http_proxy": None,
"env_https_proxy": None,
"env_no_proxy": "sentry.io,example.com",
"arg_http_proxy": "http://localhost/123",
"arg_https_proxy": None,
"expected_proxy_scheme": "http",
},
{
"dsn": "https://foo@sentry.io/123",
"env_http_proxy": None,
"env_https_proxy": None,
"env_no_proxy": "sentry.io,example.com",
"arg_http_proxy": None,
"arg_https_proxy": "https://localhost/123",
"expected_proxy_scheme": "https",
},
],
)
def test_proxy(monkeypatch, testcase):
if testcase["env_http_proxy"] is not None:
monkeypatch.setenv("HTTP_PROXY", testcase["env_http_proxy"])
if testcase["env_https_proxy"] is not None:
monkeypatch.setenv("HTTPS_PROXY", testcase["env_https_proxy"])
if testcase.get("env_no_proxy") is not None:
monkeypatch.setenv("NO_PROXY", testcase["env_no_proxy"])
kwargs = {}
if testcase["arg_http_proxy"] is not None:
kwargs["http_proxy"] = testcase["arg_http_proxy"]
if testcase["arg_https_proxy"] is not None:
kwargs["https_proxy"] = testcase["arg_https_proxy"]
client = Client(testcase["dsn"], **kwargs)
if testcase["expected_proxy_scheme"] is None:
assert client.transport._pool.proxy is None
else:
assert client.transport._pool.proxy.scheme == testcase["expected_proxy_scheme"]
def test_simple_transport(sentry_init):
    """A bare callable works as a transport and receives raw event dicts."""
    collected = []
    sentry_init(transport=collected.append)
    capture_message("Hello World!")
    assert collected[0]["message"] == "Hello World!"
def test_ignore_errors(sentry_init, capture_events):
    """ignore_errors suppresses the listed exception types and their
    subclasses; other exceptions still reach the transport."""

    class MyDivisionError(ZeroDivisionError):
        pass

    def raise_it(exc_info):
        reraise(*exc_info)

    sentry_init(ignore_errors=[ZeroDivisionError], transport=_TestTransport())
    # Make internal capture failures visible instead of swallowed.
    Hub.current._capture_internal_exception = raise_it

    def e(exc):
        try:
            raise exc
        except Exception:
            capture_exception()

    # Ignored (exact type and subclass) -> no transport call, no raise.
    e(ZeroDivisionError())
    e(MyDivisionError())
    # Not ignored -> _TestTransport raises EventCaptured.
    pytest.raises(EventCaptured, lambda: e(ValueError()))
def test_with_locals_enabled(sentry_init, capture_events):
sentry_init(with_locals=True)
events = capture_events()
try:
1 / 0
except Exception:
capture_exception()
(event,) = events
assert all(
frame["vars"]
for frame in event["exception"]["values"][0]["stacktrace"]["frames"]
)
def test_with_locals_disabled(sentry_init, capture_events):
sentry_init(with_locals=False)
events = capture_events()
try:
1 / 0
except Exception:
capture_exception()
(event,) = events
assert all(
"vars" not in frame
for frame in event["exception"]["values"][0]["stacktrace"]["frames"]
)
@pytest.mark.parametrize("integrations", [[], [ExecutingIntegration()]])
def test_function_names(sentry_init, capture_events, integrations):
sentry_init(integrations=integrations)
events = capture_events()
def foo():
try:
bar()
except Exception:
capture_exception()
def bar():
1 / 0
foo()
(event,) = events
(thread,) = event["exception"]["values"]
functions = [x["function"] for x in thread["stacktrace"]["frames"]]
if integrations:
assert functions == [
"test_function_names..foo",
"test_function_names..bar",
]
else:
assert functions == ["foo", "bar"]
def test_attach_stacktrace_enabled(sentry_init, capture_events):
sentry_init(attach_stacktrace=True)
events = capture_events()
def foo():
bar()
def bar():
capture_message("HI")
foo()
(event,) = events
(thread,) = event["threads"]["values"]
functions = [x["function"] for x in thread["stacktrace"]["frames"]]
assert functions[-2:] == ["foo", "bar"]
def test_attach_stacktrace_enabled_no_locals(sentry_init, capture_events):
sentry_init(attach_stacktrace=True, with_locals=False)
events = capture_events()
def foo():
bar()
def bar():
capture_message("HI")
foo()
(event,) = events
(thread,) = event["threads"]["values"]
local_vars = [x.get("vars") for x in thread["stacktrace"]["frames"]]
assert local_vars[-2:] == [None, None]
def test_attach_stacktrace_in_app(sentry_init, capture_events):
sentry_init(attach_stacktrace=True, in_app_exclude=["_pytest"])
events = capture_events()
capture_message("hi")
(event,) = events
(thread,) = event["threads"]["values"]
frames = thread["stacktrace"]["frames"]
pytest_frames = [f for f in frames if f["module"].startswith("_pytest")]
assert pytest_frames
assert all(f["in_app"] is False for f in pytest_frames)
assert any(f["in_app"] for f in frames)
def test_attach_stacktrace_disabled(sentry_init, capture_events):
sentry_init(attach_stacktrace=False)
events = capture_events()
capture_message("HI")
(event,) = events
assert "threads" not in event
def test_capture_event_works(sentry_init):
sentry_init(transport=_TestTransport())
pytest.raises(EventCaptured, lambda: capture_event({}))
pytest.raises(EventCaptured, lambda: capture_event({}))
@pytest.mark.parametrize("num_messages", [10, 20])
def test_atexit(tmpdir, monkeypatch, num_messages):
app = tmpdir.join("app.py")
app.write(
dedent(
"""
import time
from sentry_sdk import init, transport, capture_message
def send_event(self, event):
time.sleep(0.1)
print(event["message"])
transport.HttpTransport._send_event = send_event
init("http://foobar@localhost/123", shutdown_timeout={num_messages})
for _ in range({num_messages}):
capture_message("HI")
""".format(
num_messages=num_messages
)
)
)
start = time.time()
output = subprocess.check_output([sys.executable, str(app)])
end = time.time()
# Each message takes at least 0.1 seconds to process
assert int(end - start) >= num_messages / 10
assert output.count(b"HI") == num_messages
def test_configure_scope_available(sentry_init, request, monkeypatch):
# Test that scope is configured if client is configured
sentry_init()
with configure_scope() as scope:
assert scope is Hub.current.scope
scope.set_tag("foo", "bar")
calls = []
def callback(scope):
calls.append(scope)
scope.set_tag("foo", "bar")
assert configure_scope(callback) is None
assert len(calls) == 1
assert calls[0] is Hub.current.scope
@pytest.mark.tests_internal_exceptions
def test_client_debug_option_enabled(sentry_init, caplog):
sentry_init(debug=True)
Hub.current._capture_internal_exception((ValueError, ValueError("OK"), None))
assert "OK" in caplog.text
@pytest.mark.tests_internal_exceptions
@pytest.mark.parametrize("with_client", (True, False))
def test_client_debug_option_disabled(with_client, sentry_init, caplog):
if with_client:
sentry_init()
Hub.current._capture_internal_exception((ValueError, ValueError("OK"), None))
assert "OK" not in caplog.text
def test_scope_initialized_before_client(sentry_init, capture_events):
"""
This is a consequence of how configure_scope() works. We must
make `configure_scope()` a noop if no client is configured. Even
if the user later configures a client: We don't know that.
"""
with configure_scope() as scope:
scope.set_tag("foo", 42)
sentry_init()
events = capture_events()
capture_message("hi")
(event,) = events
assert "tags" not in event
def test_weird_chars(sentry_init, capture_events):
sentry_init()
events = capture_events()
capture_message(u"föö".encode("latin1"))
(event,) = events
assert json.loads(json.dumps(event)) == event
def test_nan(sentry_init, capture_events):
sentry_init()
events = capture_events()
try:
# should_repr_strings=False
set_tag("mynan", float("nan"))
# should_repr_strings=True
nan = float("nan") # noqa
1 / 0
except Exception:
capture_exception()
(event,) = events
frames = event["exception"]["values"][0]["stacktrace"]["frames"]
(frame,) = frames
assert frame["vars"]["nan"] == "nan"
assert event["tags"]["mynan"] == "nan"
def test_cyclic_frame_vars(sentry_init, capture_events):
sentry_init()
events = capture_events()
try:
a = {}
a["a"] = a
1 / 0
except Exception:
capture_exception()
(event,) = events
assert event["exception"]["values"][0]["stacktrace"]["frames"][0]["vars"]["a"] == {
"a": ""
}
def test_cyclic_data(sentry_init, capture_events):
sentry_init()
events = capture_events()
with configure_scope() as scope:
data = {}
data["is_cyclic"] = data
other_data = ""
data["not_cyclic"] = other_data
data["not_cyclic2"] = other_data
scope.set_extra("foo", data)
capture_message("hi")
(event,) = events
data = event["extra"]["foo"]
assert data == {"not_cyclic2": "", "not_cyclic": "", "is_cyclic": ""}
def test_databag_depth_stripping(sentry_init, capture_events, benchmark):
sentry_init()
events = capture_events()
value = ["a"]
for _ in range(100000):
value = [value]
@benchmark
def inner():
del events[:]
try:
a = value # noqa
1 / 0
except Exception:
capture_exception()
(event,) = events
assert len(json.dumps(event)) < 10000
def test_databag_string_stripping(sentry_init, capture_events, benchmark):
sentry_init()
events = capture_events()
@benchmark
def inner():
del events[:]
try:
a = "A" * 1000000 # noqa
1 / 0
except Exception:
capture_exception()
(event,) = events
assert len(json.dumps(event)) < 10000
def test_databag_breadth_stripping(sentry_init, capture_events, benchmark):
sentry_init()
events = capture_events()
@benchmark
def inner():
del events[:]
try:
a = ["a"] * 1000000 # noqa
1 / 0
except Exception:
capture_exception()
(event,) = events
assert (
len(event["exception"]["values"][0]["stacktrace"]["frames"][0]["vars"]["a"])
== MAX_DATABAG_BREADTH
)
assert len(json.dumps(event)) < 10000
@pytest.mark.skipif(not HAS_CHAINED_EXCEPTIONS, reason="Only works on 3.3+")
def test_chained_exceptions(sentry_init, capture_events):
    """Implicitly chained exceptions are reported cause-first, matching the
    ordering used by other Sentry SDKs (including Raven-Python)."""
    sentry_init()
    events = capture_events()

    try:
        try:
            raise ValueError()
        except Exception:
            1 / 0
    except Exception:
        capture_exception()

    (event,) = events

    e1, e2 = event["exception"]["values"]

    # This is the order all other SDKs send chained exceptions in. Including
    # Raven-Python.
    assert e1["type"] == "ValueError"
    assert e2["type"] == "ZeroDivisionError"
@pytest.mark.tests_internal_exceptions
def test_broken_mapping(sentry_init, capture_events):
sentry_init()
events = capture_events()
class C(Mapping):
def broken(self, *args, **kwargs):
raise Exception("broken")
__getitem__ = broken
__setitem__ = broken
__delitem__ = broken
__iter__ = broken
__len__ = broken
def __repr__(self):
return "broken"
try:
a = C() # noqa
1 / 0
except Exception:
capture_exception()
(event,) = events
assert (
event["exception"]["values"][0]["stacktrace"]["frames"][0]["vars"]["a"]
== ""
)
def test_mapping_sends_exception(sentry_init, capture_events):
sentry_init()
events = capture_events()
class C(Mapping):
def __iter__(self):
try:
1 / 0
except ZeroDivisionError:
capture_exception()
yield "hi"
def __len__(self):
"""List length"""
return 1
def __getitem__(self, ii):
"""Get a list item"""
if ii == "hi":
return "hi"
raise KeyError()
try:
a = C() # noqa
1 / 0
except Exception:
capture_exception()
(event,) = events
assert event["exception"]["values"][0]["stacktrace"]["frames"][0]["vars"]["a"] == {
"hi": "'hi'"
}
def test_object_sends_exception(sentry_init, capture_events):
sentry_init()
events = capture_events()
class C(object):
def __repr__(self):
try:
1 / 0
except ZeroDivisionError:
capture_exception()
return "hi, i am a repr"
try:
a = C() # noqa
1 / 0
except Exception:
capture_exception()
(event,) = events
assert (
event["exception"]["values"][0]["stacktrace"]["frames"][0]["vars"]["a"]
== "hi, i am a repr"
)
def test_errno_errors(sentry_init, capture_events):
sentry_init()
events = capture_events()
class Foo(Exception):
errno = 69
capture_exception(Foo())
(event,) = events
(exception,) = event["exception"]["values"]
assert exception["mechanism"]["meta"]["errno"]["number"] == 69
def test_non_string_variables(sentry_init, capture_events):
"""There is some extremely terrible code in the wild that
inserts non-strings as variable names into `locals()`."""
sentry_init()
events = capture_events()
try:
locals()[42] = True
1 / 0
except ZeroDivisionError:
capture_exception()
(event,) = events
(exception,) = event["exception"]["values"]
assert exception["type"] == "ZeroDivisionError"
(frame,) = exception["stacktrace"]["frames"]
assert frame["vars"]["42"] == "True"
def test_dict_changed_during_iteration(sentry_init, capture_events):
"""
Some versions of Bottle modify the WSGI environment inside of this __repr__
impl: https://github.com/bottlepy/bottle/blob/0.12.16/bottle.py#L1386
See https://github.com/getsentry/sentry-python/pull/298 for discussion
"""
sentry_init(send_default_pii=True)
events = capture_events()
class TooSmartClass(object):
def __init__(self, environ):
self.environ = environ
def __repr__(self):
if "my_representation" in self.environ:
return self.environ["my_representation"]
self.environ["my_representation"] = ""
return self.environ["my_representation"]
try:
environ = {}
environ["a"] = TooSmartClass(environ)
1 / 0
except ZeroDivisionError:
capture_exception()
(event,) = events
(exception,) = event["exception"]["values"]
(frame,) = exception["stacktrace"]["frames"]
assert frame["vars"]["environ"] == {"a": ""}
@pytest.mark.parametrize(
"dsn",
[
"http://894b7d594095440f8dfea9b300e6f572@localhost:8000/2",
u"http://894b7d594095440f8dfea9b300e6f572@localhost:8000/2",
],
)
def test_init_string_types(dsn, sentry_init):
# Allow unicode strings on Python 3 and both on Python 2 (due to
# unicode_literals)
#
# Supporting bytes on Python 3 is not really wrong but probably would be
# extra code
sentry_init(dsn)
assert (
Hub.current.client.dsn
== "http://894b7d594095440f8dfea9b300e6f572@localhost:8000/2"
)
def test_envelope_types():
    """
    Tests for calling the right transport method (capture_event vs
    capture_envelope) from the SDK client for different data types.
    """
    envelopes = []
    events = []

    class CustomTransport(Transport):
        def capture_envelope(self, envelope):
            envelopes.append(envelope)

        def capture_event(self, event):
            events.append(event)

    with Hub(Client(traces_sample_rate=1.0, transport=CustomTransport())):
        event_id = capture_message("hello")

        # Assert error events get passed in via capture_event
        assert not envelopes
        event = events.pop()

        assert event["event_id"] == event_id
        assert "type" not in event

        with start_transaction(name="foo"):
            pass

        # Assert transactions get passed in via capture_envelope
        assert not events
        envelope = envelopes.pop()

        (item,) = envelope.items
        assert item.data_category == "transaction"
        assert item.headers.get("type") == "transaction"

    assert not envelopes
    assert not events
@pytest.mark.parametrize(
    "sdk_options, expected_breadcrumbs",
    [({}, DEFAULT_MAX_BREADCRUMBS), ({"max_breadcrumbs": 50}, 50)],
)
def test_max_breadcrumbs_option(
    sentry_init, capture_events, sdk_options, expected_breadcrumbs
):
    """The max_breadcrumbs option caps retained crumbs; the default cap is
    DEFAULT_MAX_BREADCRUMBS."""
    sentry_init(sdk_options)
    events = capture_events()

    for _ in range(1231):
        add_breadcrumb({"type": "sourdough"})
    capture_message("dogs are great")

    assert len(events[0]["breadcrumbs"]["values"]) == expected_breadcrumbs
sentry-python-1.4.3/tests/test_conftest.py 0000664 0000000 0000000 00000006534 14125057761 0021002 0 ustar 00root root 0000000 0000000 import pytest
@pytest.mark.parametrize(
    "test_string, expected_result",
    [
        # type matches
        ("dogs are great!", True),  # full containment - beginning
        ("go, dogs, go!", True),  # full containment - middle
        ("I like dogs", True),  # full containment - end
        ("dogs", True),  # equality
        ("", False),  # reverse containment
        ("dog", False),  # reverse containment
        ("good dog!", False),  # partial overlap
        ("cats", False),  # no overlap
        # type mismatches
        (1231, False),
        (11.21, False),
        ([], False),
        ({}, False),
        (True, False),
    ],
)
def test_string_containing(
    test_string, expected_result, StringContaining  # noqa: N803
):
    """StringContaining("dogs") equals exactly the strings containing that
    substring; non-strings never match."""
    assert (test_string == StringContaining("dogs")) is expected_result
@pytest.mark.parametrize(
    "test_dict, expected_result",
    [
        # type matches
        ({"dogs": "yes", "cats": "maybe", "spiders": "nope"}, True),  # full containment
        ({"dogs": "yes", "cats": "maybe"}, True),  # equality
        ({}, False),  # reverse containment
        ({"dogs": "yes"}, False),  # reverse containment
        ({"dogs": "yes", "birds": "only outside"}, False),  # partial overlap
        ({"coyotes": "from afar"}, False),  # no overlap
        # type mismatches
        ('{"dogs": "yes", "cats": "maybe"}', False),
        (1231, False),
        (11.21, False),
        ([], False),
        (True, False),
    ],
)
def test_dictionary_containing(
    test_dict, expected_result, DictionaryContaining  # noqa: N803
):
    """DictionaryContaining(subset) equals exactly the dicts that include
    every key/value of the subset; non-dicts never match."""
    assert (
        test_dict == DictionaryContaining({"dogs": "yes", "cats": "maybe"})
    ) is expected_result
class Animal(object):  # noqa: B903
    """Simple value holder used as a fixture for ObjectDescribedBy tests."""

    def __init__(self, name=None, age=None, description=None):
        self.name = name
        self.age = age
        self.description = description


class Dog(Animal):
    """Animal subtype used to exercise exact-type matching."""

    pass


class Cat(Animal):
    """Second Animal subtype, attribute-compatible with Dog."""

    pass
@pytest.mark.parametrize(
    "test_obj, type_and_attrs_result, type_only_result, attrs_only_result",
    [
        # type matches
        (Dog("Maisey", 7, "silly"), True, True, True),  # full attr containment
        (Dog("Maisey", 7), True, True, True),  # type and attr equality
        (Dog(), False, True, False),  # reverse attr containment
        (Dog("Maisey"), False, True, False),  # reverse attr containment
        (Dog("Charlie", 7, "goofy"), False, True, False),  # partial attr overlap
        (Dog("Bodhi", 6, "floppy"), False, True, False),  # no attr overlap
        # type mismatches
        (Cat("Maisey", 7), False, False, True),  # attr equality
        (Cat("Piper", 1, "doglike"), False, False, False),
        ("Good girl, Maisey", False, False, False),
        ({"name": "Maisey", "age": 7}, False, False, False),
        (1231, False, False, False),
        (11.21, False, False, False),
        ([], False, False, False),
        (True, False, False, False),
    ],
)
def test_object_described_by(
    test_obj,
    type_and_attrs_result,
    type_only_result,
    attrs_only_result,
    ObjectDescribedBy,  # noqa: N803
):
    # ObjectDescribedBy is a conftest-provided matcher fixture. Each assert
    # checks one matching mode: by type and attrs together, by type alone,
    # and by attrs alone.
    assert (
        test_obj == ObjectDescribedBy(type=Dog, attrs={"name": "Maisey", "age": 7})
    ) is type_and_attrs_result
    assert (test_obj == ObjectDescribedBy(type=Dog)) is type_only_result
    assert (
        test_obj == ObjectDescribedBy(attrs={"name": "Maisey", "age": 7})
    ) is attrs_only_result
sentry-python-1.4.3/tests/test_envelope.py 0000664 0000000 0000000 00000010424 14125057761 0020763 0 ustar 00root root 0000000 0000000 from sentry_sdk.envelope import Envelope
from sentry_sdk.session import Session
from sentry_sdk import capture_event
from sentry_sdk.tracing_utils import compute_tracestate_value
import sentry_sdk.client
import pytest
try:
from unittest import mock # python 3.3 and above
except ImportError:
import mock # python < 3.3
def generate_transaction_item():
    """Return a complete transaction event payload (a plain dict) with a
    precomputed tracestate value in its trace context, for use as test input."""
    return {
        "event_id": "15210411201320122115110420122013",
        "type": "transaction",
        "transaction": "/interactions/other-dogs/new-dog",
        "start_timestamp": 1353568872.11122131,
        "timestamp": 1356942672.09040815,
        "contexts": {
            "trace": {
                "trace_id": "12312012123120121231201212312012",
                "span_id": "0415201309082013",
                "parent_span_id": None,
                "description": "",
                "op": "greeting.sniff",
                # Tracestate is computed the same way the SDK itself would
                # compute it, so header assertions can compare against it.
                "tracestate": compute_tracestate_value(
                    {
                        "trace_id": "12312012123120121231201212312012",
                        "environment": "dogpark",
                        "release": "off.leash.park",
                        "public_key": "dogsarebadatkeepingsecrets",
                        "user": {"id": 12312013, "segment": "bigs"},
                        "transaction": "/interactions/other-dogs/new-dog",
                    }
                ),
            }
        },
        "spans": [
            {
                "description": "",
                "op": "greeting.sniff",
                "parent_span_id": None,
                "span_id": "0415201309082013",
                "start_timestamp": 1353568872.11122131,
                "timestamp": 1356942672.09040815,
                "trace_id": "12312012123120121231201212312012",
            }
        ],
    }
def test_add_and_get_basic_event():
    """An event added to an envelope must round-trip through get_event()."""
    payload = {"message": "Hello, World!"}
    envelope = Envelope()
    envelope.add_event(payload)
    assert envelope.get_event() == {"message": "Hello, World!"}
def test_add_and_get_transaction_event():
    """get_transaction_event() must return the first transaction added."""
    envelope = Envelope()
    first_item = generate_transaction_item()
    first_item.update({"event_id": "a" * 32})
    envelope.add_transaction(first_item)
    # Adding a second transaction to one envelope is not really legal, but do
    # it anyway to prove that only the first one is ever fetched back.
    envelope.add_transaction(generate_transaction_item())
    assert envelope.get_transaction_event() == first_item
def test_add_and_get_session():
    """A session added to an envelope must serialize back to the same JSON.

    The previous version asserted inside a ``for item in envelope`` loop
    filtered by ``item.type == "session"``; if no session item was present at
    all, the loop body never ran and the test passed vacuously. Collect the
    matching items and assert there is at least one before comparing.
    """
    envelope = Envelope()
    expected = Session()
    envelope.add_session(expected)
    session_items = [item for item in envelope if item.type == "session"]
    assert session_items, "envelope should contain at least one session item"
    for item in session_items:
        assert item.payload.json == expected.to_json()
# TODO (kmclb) remove this parameterization once tracestate is a real feature
@pytest.mark.parametrize("tracestate_enabled", [True, False])
def test_envelope_headers(
    sentry_init, capture_envelopes, monkeypatch, tracestate_enabled
):
    """Envelope headers always carry event_id and sent_at, and carry the
    unpacked tracestate data under "trace" only when the feature is enabled."""
    # Pin the timestamp so the sent_at assertion is deterministic.
    monkeypatch.setattr(
        sentry_sdk.client,
        "format_timestamp",
        lambda x: "2012-11-21T12:31:12.415908Z",
    )
    # Force the tracestate feature flag on or off for this run.
    monkeypatch.setattr(
        sentry_sdk.client,
        "has_tracestate_enabled",
        mock.Mock(return_value=tracestate_enabled),
    )
    sentry_init(
        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
    )
    envelopes = capture_envelopes()
    capture_event(generate_transaction_item())
    assert len(envelopes) == 1
    if tracestate_enabled:
        assert envelopes[0].headers == {
            "event_id": "15210411201320122115110420122013",
            "sent_at": "2012-11-21T12:31:12.415908Z",
            "trace": {
                "trace_id": "12312012123120121231201212312012",
                "environment": "dogpark",
                "release": "off.leash.park",
                "public_key": "dogsarebadatkeepingsecrets",
                "user": {"id": 12312013, "segment": "bigs"},
                "transaction": "/interactions/other-dogs/new-dog",
            },
        }
    else:
        assert envelopes[0].headers == {
            "event_id": "15210411201320122115110420122013",
            "sent_at": "2012-11-21T12:31:12.415908Z",
        }
sentry-python-1.4.3/tests/test_scope.py 0000664 0000000 0000000 00000002771 14125057761 0020265 0 ustar 00root root 0000000 0000000 import copy
from sentry_sdk import capture_exception
from sentry_sdk.scope import Scope
def test_copying():
    """copy.copy() of a Scope must copy tags but share the fingerprint object."""
    original = Scope()
    original.fingerprint = {}
    original.set_tag("foo", "bar")
    duplicate = copy.copy(original)
    assert "foo" in duplicate._tags
    # A tag set after copying must not leak into the copy.
    original.set_tag("bam", "baz")
    assert "bam" in original._tags
    assert "bam" not in duplicate._tags
    # The fingerprint object itself is shared, not deep-copied.
    assert original._fingerprint is duplicate._fingerprint
def test_merging(sentry_init, capture_events):
    """A scope passed to capture_exception must be merged into the event."""
    sentry_init()
    local_scope = Scope()
    local_scope.set_user({"id": "42"})
    events = capture_events()
    capture_exception(NameError(), scope=local_scope)
    (event,) = events
    assert event["user"] == {"id": "42"}
def test_common_args():
    """update_from_kwargs must set scope data and update_from_scope must merge
    it into another scope without mutating the source."""
    source = Scope()
    source.update_from_kwargs(
        user={"id": 23},
        level="warning",
        extras={"k": "v"},
        contexts={"os": {"name": "Blafasel"}},
        tags={"x": "y"},
        fingerprint=["foo"],
    )
    target = Scope()
    target.set_extra("foo", "bar")
    target.set_tag("a", "b")
    target.set_context("device", {"a": "b"})
    target.update_from_scope(source)
    # The source scope is left unchanged by the merge.
    assert source._user == {"id": 23}
    assert source._level == "warning"
    assert source._extras == {"k": "v"}
    assert source._contexts == {"os": {"name": "Blafasel"}}
    assert source._tags == {"x": "y"}
    assert source._fingerprint == ["foo"]
    # Scalar-ish fields are carried over to the target...
    assert source._user == target._user
    assert source._level == target._level
    assert source._fingerprint == target._fingerprint
    # ...while dict-valued fields are merged with the target's existing data.
    assert target._extras == {"k": "v", "foo": "bar"}
    assert target._tags == {"a": "b", "x": "y"}
    assert target._contexts == {"os": {"name": "Blafasel"}, "device": {"a": "b"}}
sentry-python-1.4.3/tests/test_serializer.py 0000664 0000000 0000000 00000003565 14125057761 0021327 0 ustar 00root root 0000000 0000000 import sys
import pytest
from sentry_sdk.serializer import serialize
try:
    from hypothesis import given
    import hypothesis.strategies as st
except ImportError:
    # hypothesis is an optional test dependency; when it is not installed the
    # property-based tests below are simply not defined (and thus skipped).
    pass
else:

    def test_bytes_serialization_decode_many(message_normalizer):
        # Property: any non-empty byte string is decoded as UTF-8 with
        # replacement characters when should_repr_strings is off.
        @given(binary=st.binary(min_size=1))
        def inner(binary):
            result = message_normalizer(binary, should_repr_strings=False)
            assert result == binary.decode("utf-8", "replace")

        inner()

    def test_bytes_serialization_repr_many(message_normalizer):
        # Property: with should_repr_strings on, bytes serialize to repr().
        @given(binary=st.binary(min_size=1))
        def inner(binary):
            result = message_normalizer(binary, should_repr_strings=True)
            assert result == repr(binary)

        inner()
@pytest.fixture
def message_normalizer(validate_event_schema):
    """Fixture: run a logentry message through serialize() and return the
    normalized message, validating the whole event against the schema."""

    def normalize(message, **kwargs):
        serialized = serialize({"logentry": {"message": message}}, **kwargs)
        validate_event_schema(serialized)
        return serialized["logentry"]["message"]

    return normalize
@pytest.fixture
def extra_normalizer(validate_event_schema):
    """Fixture: run a value through serialize() as event "extra" data and
    return the normalized value, validating the event against the schema."""

    def normalize(message, **kwargs):
        serialized = serialize({"extra": {"foo": message}}, **kwargs)
        validate_event_schema(serialized)
        return serialized["extra"]["foo"]

    return normalize
def test_bytes_serialization_decode(message_normalizer):
    """Invalid UTF-8 bytes decode with U+FFFD; valid sequences decode normally."""
    raw = b"abc123\x80\xf0\x9f\x8d\x95"
    decoded = message_normalizer(raw, should_repr_strings=False)
    assert decoded == u"abc123\ufffd\U0001f355"
@pytest.mark.xfail(sys.version_info < (3,), reason="Known safe_repr bugs in Py2.7")
def test_bytes_serialization_repr(message_normalizer):
    # With should_repr_strings=True, raw bytes serialize to their repr().
    binary = b"abc123\x80\xf0\x9f\x8d\x95"
    result = message_normalizer(binary, should_repr_strings=True)
    assert result == r"b'abc123\x80\xf0\x9f\x8d\x95'"
def test_serialize_sets(extra_normalizer):
    """Sets are serialized as lists."""
    assert extra_normalizer({1, 2, 3}) == [1, 2, 3]
sentry-python-1.4.3/tests/test_sessions.py 0000664 0000000 0000000 00000006267 14125057761 0021026 0 ustar 00root root 0000000 0000000 import sentry_sdk
from sentry_sdk import Hub
from sentry_sdk.sessions import auto_session_tracking
def sorted_aggregates(item):
    """Sort the session payload's "aggregates" buckets in place by
    (started, did) and return the list, so tests compare deterministically.

    Buckets without a "did" key sort as if it were the empty string.
    """
    buckets = item["aggregates"]
    buckets.sort(key=lambda bucket: (bucket["started"], bucket.get("did", "")))
    return buckets
def test_basic(sentry_init, capture_envelopes):
    """An errored session is sent with release/environment attrs, the user's
    id as "did", status "exited", and an error count of 1."""
    sentry_init(release="fun-release", environment="not-fun-env")
    captured = capture_envelopes()
    current_hub = Hub.current
    current_hub.start_session()
    try:
        with current_hub.configure_scope() as scope:
            scope.set_user({"id": "42"})
            raise Exception("all is wrong")
    except Exception:
        current_hub.capture_exception()
    current_hub.end_session()
    current_hub.flush()
    # One envelope for the error event, one for the session.
    assert len(captured) == 2
    assert captured[0].get_event() is not None
    session_envelope = captured[1]
    assert len(session_envelope.items) == 1
    session_payload = session_envelope.items[0].payload.json
    assert session_payload["attrs"] == {
        "release": "fun-release",
        "environment": "not-fun-env",
    }
    assert session_payload["did"] == "42"
    assert session_payload["init"]
    assert session_payload["status"] == "exited"
    assert session_payload["errors"] == 1
def test_aggregates(sentry_init, capture_envelopes):
    """Request-mode sessions are aggregated into a single counts bucket rather
    than sent as one session payload per request."""
    sentry_init(
        release="fun-release",
        environment="not-fun-env",
    )
    envelopes = capture_envelopes()
    hub = Hub.current
    # First request-mode session: ends errored.
    with auto_session_tracking(session_mode="request"):
        with sentry_sdk.push_scope():
            try:
                with sentry_sdk.configure_scope() as scope:
                    scope.set_user({"id": "42"})
                    raise Exception("all is wrong")
            except Exception:
                sentry_sdk.capture_exception()
    # Second request-mode session: ends cleanly.
    with auto_session_tracking(session_mode="request"):
        pass
    # Third request-mode session, started and ended by hand.
    hub.start_session(session_mode="request")
    hub.end_session()
    sentry_sdk.flush()
    # One envelope for the error event, one for the aggregated sessions.
    assert len(envelopes) == 2
    assert envelopes[0].get_event() is not None
    sess = envelopes[1]
    assert len(sess.items) == 1
    sess_event = sess.items[0].payload.json
    assert sess_event["attrs"] == {
        "release": "fun-release",
        "environment": "not-fun-env",
    }
    aggregates = sorted_aggregates(sess_event)
    assert len(aggregates) == 1
    # Three sessions total: two clean exits plus one errored.
    assert aggregates[0]["exited"] == 2
    assert aggregates[0]["errored"] == 1
def test_aggregates_explicitly_disabled_session_tracking_request_mode(
    sentry_init, capture_envelopes
):
    """With auto_session_tracking=False, the auto_session_tracking context
    managers do not record sessions; only the manually started one appears."""
    sentry_init(
        release="fun-release", environment="not-fun-env", auto_session_tracking=False
    )
    envelopes = capture_envelopes()
    hub = Hub.current
    # These two request-mode blocks should be ignored entirely.
    with auto_session_tracking(session_mode="request"):
        with sentry_sdk.push_scope():
            try:
                raise Exception("all is wrong")
            except Exception:
                sentry_sdk.capture_exception()
    with auto_session_tracking(session_mode="request"):
        pass
    # Only this explicitly started/ended session should be counted.
    hub.start_session(session_mode="request")
    hub.end_session()
    sentry_sdk.flush()
    sess = envelopes[1]
    assert len(sess.items) == 1
    sess_event = sess.items[0].payload.json
    aggregates = sorted_aggregates(sess_event)
    # A single clean exit and no errored count at all.
    assert len(aggregates) == 1
    assert aggregates[0]["exited"] == 1
    assert "errored" not in aggregates[0]
sentry-python-1.4.3/tests/test_transport.py 0000664 0000000 0000000 00000024541 14125057761 0021207 0 ustar 00root root 0000000 0000000 # coding: utf-8
import logging
import pickle
import gzip
import io
from datetime import datetime, timedelta
import pytest
from collections import namedtuple
from werkzeug.wrappers import Request, Response
from pytest_localserver.http import WSGIServer
from sentry_sdk import Hub, Client, add_breadcrumb, capture_message, Scope
from sentry_sdk.transport import _parse_rate_limits
from sentry_sdk.envelope import Envelope, parse_json
from sentry_sdk.integrations.logging import LoggingIntegration
CapturedData = namedtuple("CapturedData", ["path", "event", "envelope"])
class CapturingServer(WSGIServer):
    """Local WSGI server that records every request the SDK transport sends,
    decoding gzipped payloads into CapturedData tuples, and responds with a
    configurable status code and headers."""

    def __init__(self, host="127.0.0.1", port=0, ssl_context=None):
        # Serve *this* object as the WSGI application (see __call__ below).
        WSGIServer.__init__(self, host, port, self, ssl_context=ssl_context)
        self.code = 204  # status code to answer with
        self.headers = {}  # extra response headers (e.g. rate-limit headers)
        self.captured = []  # CapturedData entries, in arrival order

    def respond_with(self, code=200, headers=None):
        # Configure the response used for all subsequent requests.
        self.code = code
        if headers:
            self.headers = headers

    def clear_captured(self):
        # Reset recorded requests in place (keeps the same list object).
        del self.captured[:]

    def __call__(self, environ, start_response):
        """
        This is the WSGI application.
        """
        request = Request(environ)
        event = envelope = None
        # All transport payloads are gzipped. A JSON mimetype indicates a
        # plain event (store endpoint); anything else is an envelope.
        if request.mimetype == "application/json":
            event = parse_json(gzip.GzipFile(fileobj=io.BytesIO(request.data)).read())
        else:
            envelope = Envelope.deserialize_from(
                gzip.GzipFile(fileobj=io.BytesIO(request.data))
            )
        self.captured.append(
            CapturedData(path=request.path, event=event, envelope=envelope)
        )
        response = Response(status=self.code)
        response.headers.extend(self.headers)
        return response(environ, start_response)
@pytest.fixture
def capturing_server(request):
    """Fixture: start a CapturingServer and stop it at test teardown."""
    instance = CapturingServer()
    instance.start()
    request.addfinalizer(instance.stop)
    return instance
@pytest.fixture
def make_client(request, capturing_server):
    """Fixture: a factory building a Client whose DSN points at the local
    capturing server (project id 132)."""

    def factory(**kwargs):
        # Rewrite the server URL into DSN form: http://foobar@<host:port>/132
        dsn = "http://foobar@{}/132".format(capturing_server.url[len("http://") :])
        return Client(dsn, **kwargs)

    return factory
@pytest.mark.forked
@pytest.mark.parametrize("debug", (True, False))
@pytest.mark.parametrize("client_flush_method", ["close", "flush"])
@pytest.mark.parametrize("use_pickle", (True, False))
def test_transport_works(
    capturing_server,
    request,
    capsys,
    caplog,
    debug,
    make_client,
    client_flush_method,
    use_pickle,
    maybe_monkeypatched_threading,
):
    """End-to-end smoke test: events reach the server whether flushed or
    closed, the client survives a pickle round-trip, nothing is printed to
    stdout/stderr, and "Sending event" debug records appear iff debug is on."""
    caplog.set_level(logging.DEBUG)
    client = make_client(debug=debug)
    if use_pickle:
        # Clients must survive pickling (e.g. for multiprocessing use).
        client = pickle.loads(pickle.dumps(client))
    Hub.current.bind_client(client)
    request.addfinalizer(lambda: Hub.current.bind_client(None))
    add_breadcrumb(level="info", message="i like bread", timestamp=datetime.utcnow())
    capture_message("löl")
    # Either close() or flush() must get the event out.
    getattr(client, client_flush_method)()
    out, err = capsys.readouterr()
    assert not err and not out
    assert capturing_server.captured
    assert any("Sending event" in record.msg for record in caplog.records) == debug
def test_transport_infinite_loop(capturing_server, request, make_client):
    """The SDK's own debug logging must not feed back through the logging
    integration and recursively create more events."""
    client = make_client(
        debug=True,
        # Make sure we cannot create events from our own logging
        integrations=[LoggingIntegration(event_level=logging.DEBUG)],
    )
    with Hub(client):
        capture_message("hi")
        client.flush()
    # Exactly one event: the explicit capture_message, nothing recursive.
    assert len(capturing_server.captured) == 1
# Fixed "current time" so retry-after datetimes are deterministic.
NOW = datetime(2014, 6, 2)


@pytest.mark.parametrize(
    "input,expected",
    [
        # Invalid rate limits
        ("", {}),
        ("invalid", {}),
        (",,,", {}),
        (
            "42::organization, invalid, 4711:foobar;transaction;security:project",
            {
                None: NOW + timedelta(seconds=42),
                "transaction": NOW + timedelta(seconds=4711),
                "security": NOW + timedelta(seconds=4711),
                # Unknown data categories
                "foobar": NOW + timedelta(seconds=4711),
            },
        ),
        (
            "4711:foobar;;transaction:organization",
            {
                "transaction": NOW + timedelta(seconds=4711),
                # Unknown data categories
                "foobar": NOW + timedelta(seconds=4711),
                "": NOW + timedelta(seconds=4711),
            },
        ),
    ],
)
def test_parse_rate_limits(input, expected):
    # X-Sentry-Rate-Limits headers parse into {category: retry-after datetime};
    # the None key means the limit applies across all categories.
    assert dict(_parse_rate_limits(input, now=NOW)) == expected
def test_simple_rate_limits(capturing_server, capsys, caplog, make_client):
    """A 429 with Retry-After must back off all categories (stored under None)."""
    sdk_client = make_client()
    capturing_server.respond_with(code=429, headers={"Retry-After": "4"})
    sdk_client.capture_event({"type": "transaction"})
    sdk_client.flush()
    assert len(capturing_server.captured) == 1
    assert capturing_server.captured[0].path == "/api/132/envelope/"
    capturing_server.clear_captured()
    # The catch-all backoff lives under the None key...
    assert set(sdk_client.transport._disabled_until) == {None}
    # ...and while it is active nothing at all gets sent.
    sdk_client.capture_event({"type": "transaction"})
    sdk_client.capture_event({"type": "event"})
    sdk_client.flush()
    assert not capturing_server.captured
@pytest.mark.parametrize("response_code", [200, 429])
def test_data_category_limits(
    capturing_server, capsys, caplog, response_code, make_client, monkeypatch
):
    """A category-scoped rate limit must drop only that category and record a
    ratelimit_backoff outcome for each dropped event."""
    client = make_client(send_client_reports=False)
    captured_outcomes = []

    def record_lost_event(reason, data_category=None, item=None):
        # Stand-in for the transport's record_lost_event: remember the
        # (reason, category) pair for later assertions.
        if data_category is None:
            data_category = item.data_category
        return captured_outcomes.append((reason, data_category))

    monkeypatch.setattr(client.transport, "record_lost_event", record_lost_event)
    capturing_server.respond_with(
        code=response_code,
        headers={"X-Sentry-Rate-Limits": "4711:transaction:organization"},
    )
    client.capture_event({"type": "transaction"})
    client.flush()
    assert len(capturing_server.captured) == 1
    assert capturing_server.captured[0].path == "/api/132/envelope/"
    capturing_server.clear_captured()
    # Only the "transaction" category is backed off...
    assert set(client.transport._disabled_until) == set(["transaction"])
    client.capture_event({"type": "transaction"})
    client.capture_event({"type": "transaction"})
    client.flush()
    assert not capturing_server.captured
    # ...so plain events still go through (to the store endpoint).
    client.capture_event({"type": "event"})
    client.flush()
    assert len(capturing_server.captured) == 1
    assert capturing_server.captured[0].path == "/api/132/store/"
    assert captured_outcomes == [
        ("ratelimit_backoff", "transaction"),
        ("ratelimit_backoff", "transaction"),
    ]
@pytest.mark.parametrize("response_code", [200, 429])
def test_data_category_limits_reporting(
    capturing_server, capsys, caplog, response_code, make_client, monkeypatch
):
    """Events dropped due to rate limits must later be reported to the server
    as client reports with ratelimit_backoff outcomes (attachments counted by
    byte quantity, not item count)."""
    client = make_client(send_client_reports=True)
    capturing_server.respond_with(
        code=response_code,
        headers={
            "X-Sentry-Rate-Limits": "4711:transaction:organization, 4711:attachment:organization"
        },
    )
    outcomes_enabled = False
    real_fetch = client.transport._fetch_pending_client_report

    def intercepting_fetch(*args, **kwargs):
        # Suppress client reports until the test explicitly enables them
        # (reads outcomes_enabled from the enclosing function's scope).
        if outcomes_enabled:
            return real_fetch(*args, **kwargs)

    monkeypatch.setattr(
        client.transport, "_fetch_pending_client_report", intercepting_fetch
    )
    # get rid of threading making things hard to track
    monkeypatch.setattr(client.transport._worker, "submit", lambda x: x() or True)
    client.capture_event({"type": "transaction"})
    client.flush()
    assert len(capturing_server.captured) == 1
    assert capturing_server.captured[0].path == "/api/132/envelope/"
    capturing_server.clear_captured()
    assert set(client.transport._disabled_until) == set(["attachment", "transaction"])
    # These two transactions are dropped by the backoff and only counted.
    client.capture_event({"type": "transaction"})
    client.capture_event({"type": "transaction"})
    capturing_server.clear_captured()
    # flush out the events but don't flush the client reports
    client.flush()
    client.transport._last_client_report_sent = 0
    outcomes_enabled = True
    scope = Scope()
    scope.add_attachment(bytes=b"Hello World", filename="hello.txt")
    client.capture_event({"type": "error"}, scope=scope)
    client.flush()
    # this goes out with an extra envelope because it's flushed after the last item
    # that is normally in the queue. This is quite funny in a way because it means
    # that the envelope that caused its own over quota report (an error with an
    # attachment) will include its outcome since it's pending.
    assert len(capturing_server.captured) == 1
    envelope = capturing_server.captured[0].envelope
    assert envelope.items[0].type == "event"
    assert envelope.items[1].type == "client_report"
    report = parse_json(envelope.items[1].get_bytes())
    # Attachment quantity is 11 = len(b"Hello World") bytes.
    assert sorted(report["discarded_events"], key=lambda x: x["quantity"]) == [
        {"category": "transaction", "reason": "ratelimit_backoff", "quantity": 2},
        {"category": "attachment", "reason": "ratelimit_backoff", "quantity": 11},
    ]
    capturing_server.clear_captured()
    # here we sent a normal event
    client.capture_event({"type": "transaction"})
    client.capture_event({"type": "error", "release": "foo"})
    client.flush()
    assert len(capturing_server.captured) == 2
    event = capturing_server.captured[0].event
    assert event["type"] == "error"
    assert event["release"] == "foo"
    envelope = capturing_server.captured[1].envelope
    assert envelope.items[0].type == "client_report"
    report = parse_json(envelope.items[0].get_bytes())
    assert report["discarded_events"] == [
        {"category": "transaction", "reason": "ratelimit_backoff", "quantity": 1},
    ]
@pytest.mark.parametrize("response_code", [200, 429])
def test_complex_limits_without_data_category(
    capturing_server, capsys, caplog, response_code, make_client
):
    """A rate limit entry with no category list must disable every category."""
    sdk_client = make_client()
    capturing_server.respond_with(
        code=response_code,
        headers={"X-Sentry-Rate-Limits": "4711::organization"},
    )
    sdk_client.capture_event({"type": "transaction"})
    sdk_client.flush()
    assert len(capturing_server.captured) == 1
    assert capturing_server.captured[0].path == "/api/132/envelope/"
    capturing_server.clear_captured()
    # The catch-all backoff is stored under the None key...
    assert set(sdk_client.transport._disabled_until) == {None}
    # ...so while it is active, nothing of any type is sent.
    for event_type in ("transaction", "transaction", "event"):
        sdk_client.capture_event({"type": event_type})
    sdk_client.flush()
    assert len(capturing_server.captured) == 0
sentry-python-1.4.3/tests/tracing/ 0000775 0000000 0000000 00000000000 14125057761 0017163 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/tracing/test_deprecated.py 0000664 0000000 0000000 00000001077 14125057761 0022701 0 ustar 00root root 0000000 0000000 from sentry_sdk import start_span
from sentry_sdk.tracing import Span
def test_start_span_to_start_transaction(sentry_init, capture_events):
    """Legacy start_span(transaction=...) spellings still create transactions."""
    # XXX: this only exists for backwards compatibility with code before
    # Transaction / start_transaction were introduced.
    sentry_init(traces_sample_rate=1.0)
    events = capture_events()
    # Both legacy forms: the keyword argument and a pre-built Span.
    with start_span(transaction="/1/"):
        pass
    with start_span(Span(transaction="/2/")):
        pass
    assert len(events) == 2
    assert [event["transaction"] for event in events] == ["/1/", "/2/"]
sentry-python-1.4.3/tests/tracing/test_http_headers.py 0000664 0000000 0000000 00000025520 14125057761 0023252 0 ustar 00root root 0000000 0000000 import json
import pytest
import sentry_sdk
from sentry_sdk.tracing import Transaction, Span
from sentry_sdk.tracing_utils import (
compute_tracestate_value,
extract_sentrytrace_data,
extract_tracestate_data,
reinflate_tracestate,
)
from sentry_sdk.utils import from_base64, to_base64
try:
from unittest import mock # python 3.3 and above
except ImportError:
import mock # python < 3.3
def test_tracestate_computation(sentry_init):
    """The lazily computed tracestate must base64-decode back to the trace,
    environment, release, public_key, user and transaction data."""
    sentry_init(
        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
        environment="dogpark",
        release="off.leash.park",
    )
    sentry_sdk.set_user({"id": 12312013, "segment": "bigs"})
    transaction = Transaction(
        name="/interactions/other-dogs/new-dog",
        op="greeting.sniff",
        trace_id="12312012123120121231201212312012",
    )
    # force lazy computation to create a value
    transaction.to_tracestate()
    computed_value = transaction._sentry_tracestate.replace("sentry=", "")
    # we have to decode and reinflate the data because we can't guarantee that
    # the order of the entries in the jsonified dict will be the same here as
    # when the tracestate is computed
    reinflated_trace_data = json.loads(from_base64(computed_value))
    assert reinflated_trace_data == {
        "trace_id": "12312012123120121231201212312012",
        "environment": "dogpark",
        "release": "off.leash.park",
        "public_key": "dogsarebadatkeepingsecrets",
        "user": {"id": 12312013, "segment": "bigs"},
        "transaction": "/interactions/other-dogs/new-dog",
    }
def test_doesnt_add_new_tracestate_to_transaction_when_none_given(sentry_init):
    """Constructing a Transaction without a tracestate leaves it unset."""
    sentry_init(
        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
        environment="dogpark",
        release="off.leash.park",
    )
    txn = Transaction(
        name="/interactions/other-dogs/new-dog",
        op="greeting.sniff",
        # sentry_tracestate=< value would be passed here >
    )
    assert txn._sentry_tracestate is None
def test_adds_tracestate_to_transaction_when_to_traceparent_called(sentry_init):
    """Calling to_tracestate() populates the lazily computed tracestate."""
    sentry_init(
        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
        environment="dogpark",
        release="off.leash.park",
    )
    txn = Transaction(
        name="/interactions/other-dogs/new-dog",
        op="greeting.sniff",
    )
    # no inherited tracestate, and none created in Transaction constructor
    assert txn._sentry_tracestate is None
    txn.to_tracestate()
    assert txn._sentry_tracestate is not None
def test_adds_tracestate_to_transaction_when_getting_trace_context(sentry_init):
    """Calling get_trace_context() populates the lazily computed tracestate."""
    sentry_init(
        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
        environment="dogpark",
        release="off.leash.park",
    )
    txn = Transaction(
        name="/interactions/other-dogs/new-dog",
        op="greeting.sniff",
    )
    # no inherited tracestate, and none created in Transaction constructor
    assert txn._sentry_tracestate is None
    txn.get_trace_context()
    assert txn._sentry_tracestate is not None
@pytest.mark.parametrize(
    "set_by", ["inheritance", "to_tracestate", "get_trace_context"]
)
def test_tracestate_is_immutable_once_set(sentry_init, monkeypatch, set_by):
    """However the tracestate is first set, later scope changes (like setting
    a user) must not cause it to be recomputed."""
    # Pin the computed entry so any recomputation would be detectable.
    monkeypatch.setattr(
        sentry_sdk.tracing,
        "compute_tracestate_entry",
        mock.Mock(return_value="sentry=doGsaREgReaT"),
    )
    sentry_init(
        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
        environment="dogpark",
        release="off.leash.park",
    )
    # for each scenario, get to the point where tracestate has been set
    if set_by == "inheritance":
        transaction = Transaction(
            name="/interactions/other-dogs/new-dog",
            op="greeting.sniff",
            sentry_tracestate=("sentry=doGsaREgReaT"),
        )
    else:
        transaction = Transaction(
            name="/interactions/other-dogs/new-dog",
            op="greeting.sniff",
        )
        if set_by == "to_tracestate":
            transaction.to_tracestate()
        if set_by == "get_trace_context":
            transaction.get_trace_context()
    assert transaction._sentry_tracestate == "sentry=doGsaREgReaT"
    # user data would be included in tracestate if it were recomputed at this point
    sentry_sdk.set_user({"id": 12312013, "segment": "bigs"})
    # value hasn't changed
    assert transaction._sentry_tracestate == "sentry=doGsaREgReaT"
@pytest.mark.parametrize("sampled", [True, False, None])
def test_to_traceparent(sentry_init, sampled):
    """to_traceparent() renders "trace_id-span_id-flag", with an empty flag
    when the sampling decision is still deferred (sampled is None)."""
    txn = Transaction(
        name="/interactions/other-dogs/new-dog",
        op="greeting.sniff",
        trace_id="12312012123120121231201212312012",
        sampled=sampled,
    )
    trace_id, parent_span_id, parent_sampled = txn.to_traceparent().split("-")
    assert trace_id == "12312012123120121231201212312012"
    assert parent_span_id == txn.span_id
    if sampled is None:
        assert parent_sampled == ""
    else:
        assert parent_sampled == ("1" if sampled else "0")
def test_to_tracestate(sentry_init):
    """to_tracestate() must reuse the containing transaction's value, append
    any third-party entries, and compute from scratch for orphan spans."""
    sentry_init(
        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
        environment="dogpark",
        release="off.leash.park",
    )
    # it correctly uses the value from the transaction itself or the span's
    # containing transaction
    transaction_no_third_party = Transaction(
        trace_id="12312012123120121231201212312012",
        sentry_tracestate="sentry=doGsaREgReaT",
    )
    non_orphan_span = Span()
    non_orphan_span._containing_transaction = transaction_no_third_party
    assert transaction_no_third_party.to_tracestate() == "sentry=doGsaREgReaT"
    assert non_orphan_span.to_tracestate() == "sentry=doGsaREgReaT"
    # it combines sentry and third-party values correctly
    transaction_with_third_party = Transaction(
        trace_id="12312012123120121231201212312012",
        sentry_tracestate="sentry=doGsaREgReaT",
        third_party_tracestate="maisey=silly",
    )
    assert (
        transaction_with_third_party.to_tracestate()
        == "sentry=doGsaREgReaT,maisey=silly"
    )
    # it computes a tracestate from scratch for orphan transactions
    orphan_span = Span(
        trace_id="12312012123120121231201212312012",
    )
    assert orphan_span._containing_transaction is None
    assert orphan_span.to_tracestate() == "sentry=" + compute_tracestate_value(
        {
            "trace_id": "12312012123120121231201212312012",
            "environment": "dogpark",
            "release": "off.leash.park",
            "public_key": "dogsarebadatkeepingsecrets",
        }
    )
@pytest.mark.parametrize("sampling_decision", [True, False])
def test_sentrytrace_extraction(sampling_decision):
    """extract_sentrytrace_data() splits a sentry-trace header into trace id,
    parent span id, and the parent's boolean sampling decision."""
    header = "12312012123120121231201212312012-0415201309082013-{}".format(
        int(sampling_decision)
    )
    assert extract_sentrytrace_data(header) == {
        "trace_id": "12312012123120121231201212312012",
        "parent_span_id": "0415201309082013",
        "parent_sampled": sampling_decision,
    }
@pytest.mark.parametrize(
    ("incoming_header", "expected_sentry_value", "expected_third_party"),
    [
        # sentry only
        ("sentry=doGsaREgReaT", "sentry=doGsaREgReaT", None),
        # sentry only, invalid (`!` isn't a valid base64 character)
        ("sentry=doGsaREgReaT!", None, None),
        # stuff before
        ("maisey=silly,sentry=doGsaREgReaT", "sentry=doGsaREgReaT", "maisey=silly"),
        # stuff after
        ("sentry=doGsaREgReaT,maisey=silly", "sentry=doGsaREgReaT", "maisey=silly"),
        # stuff before and after
        (
            "charlie=goofy,sentry=doGsaREgReaT,maisey=silly",
            "sentry=doGsaREgReaT",
            "charlie=goofy,maisey=silly",
        ),
        # multiple before
        (
            "charlie=goofy,maisey=silly,sentry=doGsaREgReaT",
            "sentry=doGsaREgReaT",
            "charlie=goofy,maisey=silly",
        ),
        # multiple after
        (
            "sentry=doGsaREgReaT,charlie=goofy,maisey=silly",
            "sentry=doGsaREgReaT",
            "charlie=goofy,maisey=silly",
        ),
        # multiple before and after
        (
            "charlie=goofy,maisey=silly,sentry=doGsaREgReaT,bodhi=floppy,cory=loyal",
            "sentry=doGsaREgReaT",
            "charlie=goofy,maisey=silly,bodhi=floppy,cory=loyal",
        ),
        # only third-party data
        ("maisey=silly", None, "maisey=silly"),
        # invalid third-party data, valid sentry data
        ("maisey_is_silly,sentry=doGsaREgReaT", "sentry=doGsaREgReaT", None),
        # valid third-party data, invalid sentry data
        ("maisey=silly,sentry=doGsaREgReaT!", None, "maisey=silly"),
        # nothing valid at all
        ("maisey_is_silly,sentry=doGsaREgReaT!", None, None),
    ],
)
def test_tracestate_extraction(
    incoming_header, expected_sentry_value, expected_third_party
):
    # extract_tracestate_data() splits an incoming tracestate header into the
    # sentry-owned entry and the remaining third-party entries, discarding
    # whatever is invalid.
    assert extract_tracestate_data(incoming_header) == {
        "sentry_tracestate": expected_sentry_value,
        "third_party_tracestate": expected_third_party,
    }
# TODO (kmclb) remove this parameterization once tracestate is a real feature
@pytest.mark.parametrize("tracestate_enabled", [True, False])
def test_iter_headers(sentry_init, monkeypatch, tracestate_enabled):
    """iter_headers() always emits sentry-trace, and emits tracestate only
    when the feature flag is enabled."""
    # Pin both header values so the assertions are exact.
    monkeypatch.setattr(
        Transaction,
        "to_traceparent",
        mock.Mock(return_value="12312012123120121231201212312012-0415201309082013-0"),
    )
    monkeypatch.setattr(
        Transaction,
        "to_tracestate",
        mock.Mock(return_value="sentry=doGsaREgReaT,charlie=goofy"),
    )
    monkeypatch.setattr(
        sentry_sdk.tracing,
        "has_tracestate_enabled",
        mock.Mock(return_value=tracestate_enabled),
    )
    transaction = Transaction(
        name="/interactions/other-dogs/new-dog",
        op="greeting.sniff",
    )
    headers = dict(transaction.iter_headers())
    assert (
        headers["sentry-trace"] == "12312012123120121231201212312012-0415201309082013-0"
    )
    if tracestate_enabled:
        assert "tracestate" in headers
        assert headers["tracestate"] == "sentry=doGsaREgReaT,charlie=goofy"
    else:
        assert "tracestate" not in headers
@pytest.mark.parametrize(
    "data",
    [  # comes out with no trailing `=`
        {"name": "Maisey", "birthday": "12/31/12"},
        # comes out with one trailing `=`
        {"dogs": "yes", "cats": "maybe"},
        # comes out with two trailing `=`
        {"name": "Charlie", "birthday": "11/21/12"},
    ],
)
def test_tracestate_reinflation(data):
    """reinflate_tracestate() round-trips base64ed JSON even when the base64
    padding has been stripped off."""
    stripped = to_base64(json.dumps(data)).strip("=")
    assert reinflate_tracestate(stripped) == data
sentry-python-1.4.3/tests/tracing/test_integration_tests.py 0000664 0000000 0000000 00000012504 14125057761 0024343 0 ustar 00root root 0000000 0000000 import weakref
import gc
import pytest
from sentry_sdk import (
capture_message,
configure_scope,
Hub,
start_span,
start_transaction,
)
from sentry_sdk.transport import Transport
from sentry_sdk.tracing import Transaction
@pytest.mark.parametrize("sample_rate", [0.0, 1.0])
def test_basic(sentry_init, capture_events, sample_rate):
    """A sampled transaction reports its spans (with their statuses); an
    unsampled one reports nothing at all."""
    sentry_init(traces_sample_rate=sample_rate)
    events = capture_events()
    with start_transaction(name="hi") as transaction:
        transaction.set_status("ok")
        with pytest.raises(ZeroDivisionError):
            with start_span(op="foo", description="foodesc"):
                1 / 0
        with start_span(op="bar", description="bardesc"):
            pass
    if sample_rate:
        assert len(events) == 1
        event = events[0]
        span1, span2 = event["spans"]
        parent_span = event
        # The span that raised gets an internal_error status tag...
        assert span1["tags"]["status"] == "internal_error"
        assert span1["op"] == "foo"
        assert span1["description"] == "foodesc"
        # ...while the clean span carries no status tag at all.
        assert "status" not in span2.get("tags", {})
        assert span2["op"] == "bar"
        assert span2["description"] == "bardesc"
        assert parent_span["transaction"] == "hi"
        assert "status" not in event["tags"]
        assert event["contexts"]["trace"]["status"] == "ok"
    else:
        assert not events
@pytest.mark.parametrize("sampled", [True, False, None])
@pytest.mark.parametrize("sample_rate", [0.0, 1.0])
def test_continue_from_headers(sentry_init, capture_events, sampled, sample_rate):
    """
    Ensure data is actually passed along via headers, and that they are read
    correctly.
    """
    sentry_init(traces_sample_rate=sample_rate)
    events = capture_events()

    # make a parent transaction (normally this would be in a different service)
    with start_transaction(
        name="hi", sampled=True if sample_rate == 0 else None
    ) as parent_transaction:
        with start_span() as old_span:
            old_span.sampled = sampled
            # these headers are what would travel to the downstream service
            headers = dict(Hub.current.iter_trace_propagation_headers(old_span))
            tracestate = parent_transaction._sentry_tracestate

    # child transaction, to prove that we can read 'sentry-trace' and
    # `tracestate` header data correctly
    child_transaction = Transaction.continue_from_headers(headers, name="WRONG")
    assert child_transaction is not None
    assert child_transaction.parent_sampled == sampled
    assert child_transaction.trace_id == old_span.trace_id
    assert child_transaction.same_process_as_parent is False
    assert child_transaction.parent_span_id == old_span.span_id
    assert child_transaction.span_id != old_span.span_id
    assert child_transaction._sentry_tracestate == tracestate

    # add child transaction to the scope, to show that the captured message will
    # be tagged with the trace id (since it happens while the transaction is
    # open)
    with start_transaction(child_transaction):
        with configure_scope() as scope:
            # change the transaction name from "WRONG" to make sure the change
            # is reflected in the final data
            scope.transaction = "ho"
        capture_message("hello")

    # in this case the child transaction won't be captured
    if sampled is False or (sample_rate == 0 and sampled is None):
        trace1, message = events

        assert trace1["transaction"] == "hi"
    else:
        trace1, message, trace2 = events

        assert trace1["transaction"] == "hi"
        assert trace2["transaction"] == "ho"

        # parent, child, and message must all share one trace id
        assert (
            trace1["contexts"]["trace"]["trace_id"]
            == trace2["contexts"]["trace"]["trace_id"]
            == child_transaction.trace_id
            == message["contexts"]["trace"]["trace_id"]
        )

    assert message["message"] == "hello"
@pytest.mark.parametrize(
    "args,expected_refcount",
    [({"traces_sample_rate": 1.0}, 100), ({"traces_sample_rate": 0.0}, 0)],
)
def test_memory_usage(sentry_init, capture_events, args, expected_refcount):
    # Sampled spans keep their tag values alive (100 weakrefs survive);
    # unsampled spans must not retain anything (0 weakrefs survive).
    sentry_init(**args)

    references = weakref.WeakSet()

    with start_transaction(name="hi"):
        for i in range(100):
            with start_span(op="helloworld", description="hi {}".format(i)) as span:

                def foo():
                    pass

                references.add(foo)
                span.set_tag("foo", foo)
                pass

    # drop the last loop iteration's local references as well
    del foo
    del span

    # required only for pypy (cpython frees immediately)
    gc.collect()

    assert len(references) == expected_refcount
def test_transactions_do_not_go_through_before_send(sentry_init, capture_events):
    # `before_send` applies to error events only; a transaction must be
    # delivered without ever invoking it.
    def exploding_before_send(event, hint):
        raise RuntimeError("should not be called")

    sentry_init(traces_sample_rate=1.0, before_send=exploding_before_send)
    events = capture_events()

    with start_transaction(name="/"):
        pass

    assert len(events) == 1
def test_start_span_after_finish(sentry_init, capture_events):
    # Starting a span from inside the transport's capture_event (i.e. after
    # the transaction has finished) must not break delivery of the event.
    class CustomTransport(Transport):
        def capture_envelope(self, envelope):
            pass

        def capture_event(self, event):
            # deliberately start a span "too late", while the event is
            # already being captured
            start_span(op="toolate", description="justdont")
            pass

    sentry_init(traces_sample_rate=1, transport=CustomTransport())
    events = capture_events()

    with start_transaction(name="hi"):
        with start_span(op="bar", description="bardesc"):
            pass

    assert len(events) == 1
sentry-python-1.4.3/tests/tracing/test_misc.py 0000664 0000000 0000000 00000020350 14125057761 0021527 0 ustar 00root root 0000000 0000000 import pytest
import gc
import uuid
import os
import sentry_sdk
from sentry_sdk import Hub, start_span, start_transaction
from sentry_sdk.tracing import Span, Transaction
from sentry_sdk.tracing_utils import has_tracestate_enabled
try:
from unittest import mock # python 3.3 and above
except ImportError:
import mock # python < 3.3
def test_span_trimming(sentry_init, capture_events):
    # With `max_spans` set to 3, only the first three of the ten spans
    # should survive trimming.
    sentry_init(traces_sample_rate=1.0, _experiments={"max_spans": 3})
    events = capture_events()

    with start_transaction(name="hi"):
        for idx in range(10):
            with start_span(op="foo{}".format(idx)):
                pass

    (event,) = events
    recorded_ops = [recorded["op"] for recorded in event["spans"]]
    assert recorded_ops == ["foo0", "foo1", "foo2"]
def test_transaction_naming(sentry_init, capture_events):
    # Transactions can be named at creation, renamed while open, or left
    # unnamed (in which case the SDK's placeholder name is used).
    sentry_init(traces_sample_rate=1.0)
    events = capture_events()

    # only transactions have names - spans don't
    with pytest.raises(TypeError):
        start_span(name="foo")
    assert len(events) == 0

    # default name in event if no name is passed
    with start_transaction() as transaction:
        pass
    assert len(events) == 1
    # NOTE(review): the previous assertion compared against "" — the
    # "<unlabeled transaction>" literal appears to have been lost; the SDK
    # uses that placeholder for unnamed transactions.
    assert events[0]["transaction"] == "<unlabeled transaction>"

    # the name can be set once the transaction's already started
    with start_transaction() as transaction:
        transaction.name = "name-known-after-transaction-started"
    assert len(events) == 2
    assert events[1]["transaction"] == "name-known-after-transaction-started"

    # passing in a name works, too
    with start_transaction(name="a"):
        pass
    assert len(events) == 3
    assert events[2]["transaction"] == "a"
def test_start_transaction(sentry_init):
    # start_transaction either builds a Transaction from kwargs or returns
    # the very instance it was handed.
    sentry_init(traces_sample_rate=1.0)

    # you can have it start a transaction for you
    created = start_transaction(
        name="/interactions/other-dogs/new-dog", op="greeting.sniff"
    )
    assert isinstance(created, Transaction)
    assert created.name == "/interactions/other-dogs/new-dog"
    assert created.op == "greeting.sniff"

    # or you can pass it an already-created transaction
    existing = Transaction(
        name="/interactions/other-dogs/new-dog", op="greeting.sniff"
    )
    assert start_transaction(existing) is existing
def test_finds_transaction_on_scope(sentry_init):
    # A transaction assigned to the scope's `_span` slot must be retrievable
    # through both the `transaction` property and `_span` itself.
    sentry_init(traces_sample_rate=1.0)

    transaction = start_transaction(name="dogpark")

    scope = Hub.current.scope

    # See note in Scope class re: getters and setters of the `transaction`
    # property. For the moment, assigning to scope.transaction merely sets the
    # transaction name, rather than putting the transaction on the scope, so we
    # have to assign to _span directly.
    scope._span = transaction

    # Reading scope.property, however, does what you'd expect, and returns the
    # transaction on the scope.
    assert scope.transaction is not None
    assert isinstance(scope.transaction, Transaction)
    assert scope.transaction.name == "dogpark"

    # If the transaction is also set as the span on the scope, it can be found
    # by accessing _span, too.
    assert scope._span is not None
    assert isinstance(scope._span, Transaction)
    assert scope._span.name == "dogpark"
def test_finds_transaction_when_descendent_span_is_on_scope(
    sentry_init,
):
    # With only a child span on the scope, the `transaction` property must
    # still walk up and find the enclosing transaction.
    sentry_init(traces_sample_rate=1.0)

    transaction = start_transaction(name="dogpark")
    child_span = transaction.start_child(op="sniffing")

    scope = Hub.current.scope
    scope._span = child_span

    # this is the same whether it's the transaction itself or one of its
    # descendants directly attached to the scope
    assert scope.transaction is not None
    assert isinstance(scope.transaction, Transaction)
    assert scope.transaction.name == "dogpark"

    # here we see that it is in fact the span on the scope, rather than the
    # transaction itself
    assert scope._span is not None
    assert isinstance(scope._span, Span)
    assert scope._span.op == "sniffing"
def test_finds_orphan_span_on_scope(sentry_init):
    # this is deprecated behavior which may be removed at some point (along with
    # the start_span function)
    sentry_init(traces_sample_rate=1.0)

    orphan = start_span(op="sniffing")
    Hub.current.scope._span = orphan

    attached = Hub.current.scope._span
    assert attached is not None
    assert isinstance(attached, Span)
    assert attached.op == "sniffing"
def test_finds_non_orphan_span_on_scope(sentry_init):
    # A child span with a live parent transaction is found on the scope's
    # `_span` slot just like an orphan span would be.
    sentry_init(traces_sample_rate=1.0)

    parent = start_transaction(name="dogpark")
    Hub.current.scope._span = parent.start_child(op="sniffing")

    attached = Hub.current.scope._span
    assert attached is not None
    assert isinstance(attached, Span)
    assert attached.op == "sniffing"
def test_circular_references(monkeypatch, sentry_init, request):
    # Prove that finished and unfinished spans alike leave no reference
    # cycles behind: with gc disabled, a full collection after everything is
    # deleted must find nothing to free.
    #
    # TODO: We discovered while writing this test about transaction/span
    # reference cycles that there's actually also a circular reference in
    # `serializer.py`, between the functions `_serialize_node` and
    # `_serialize_node_impl`, both of which are defined inside of the main
    # `serialize` function, and each of which calls the other one. For now, in
    # order to avoid having those ref cycles give us a false positive here, we
    # can mock out `serialize`. In the long run, though, we should probably fix
    # that. (Whenever we do work on fixing it, it may be useful to add
    #
    #     gc.set_debug(gc.DEBUG_LEAK)
    #     request.addfinalizer(lambda: gc.set_debug(~gc.DEBUG_LEAK))
    #
    # immediately after the initial collection below, so we can see what new
    # objects the garbage collector has to clean up once `transaction.finish` is
    # called and the serializer runs.)
    monkeypatch.setattr(
        sentry_sdk.client,
        "serialize",
        mock.Mock(
            return_value=None,
        ),
    )

    # In certain versions of python, in some environments (specifically, python
    # 3.4 when run in GH Actions), we run into a `ctypes` bug which creates
    # circular references when `uuid4()` is called, as happens when we're
    # generating event ids. Mocking it with an implementation which doesn't use
    # the `ctypes` function lets us avoid having false positives when garbage
    # collecting. See https://bugs.python.org/issue20519.
    monkeypatch.setattr(
        uuid,
        "uuid4",
        mock.Mock(
            return_value=uuid.UUID(bytes=os.urandom(16)),
        ),
    )

    gc.disable()
    request.addfinalizer(gc.enable)

    sentry_init(traces_sample_rate=1.0)

    # Make sure that we're starting with a clean slate before we start creating
    # transaction/span reference cycles
    gc.collect()

    dogpark_transaction = start_transaction(name="dogpark")
    sniffing_span = dogpark_transaction.start_child(op="sniffing")
    wagging_span = dogpark_transaction.start_child(op="wagging")

    # At some point, you have to stop sniffing - there are balls to chase! - so finish
    # this span while the dogpark transaction is still open
    sniffing_span.finish()

    # The wagging, however, continues long past the dogpark, so that span will
    # NOT finish before the transaction ends. (Doing it in this order proves
    # that both finished and unfinished spans get their cycles broken.)
    dogpark_transaction.finish()

    # Eventually you gotta sleep...
    wagging_span.finish()

    # assuming there are no cycles by this point, these should all be able to go
    # out of scope and get their memory deallocated without the garbage
    # collector having anything to do
    del sniffing_span
    del wagging_span
    del dogpark_transaction

    assert gc.collect() == 0
# TODO (kmclb) remove this test once tracestate is a real feature
@pytest.mark.parametrize("tracestate_enabled", [True, False, None])
def test_has_tracestate_enabled(sentry_init, tracestate_enabled):
    # only an explicit True in the experiments dict turns the feature on;
    # both False and "not configured at all" leave it off
    if tracestate_enabled is None:
        sentry_init(_experiments={})
    else:
        sentry_init(_experiments={"propagate_tracestate": tracestate_enabled})

    assert has_tracestate_enabled() is (tracestate_enabled is True)
sentry-python-1.4.3/tests/tracing/test_sampling.py 0000664 0000000 0000000 00000023066 14125057761 0022415 0 ustar 00root root 0000000 0000000 import random
import pytest
from sentry_sdk import Hub, start_span, start_transaction
from sentry_sdk.tracing import Transaction
from sentry_sdk.tracing_utils import is_valid_sample_rate
from sentry_sdk.utils import logger
try:
from unittest import mock # python 3.3 and above
except ImportError:
import mock # python < 3.3
def test_sampling_decided_only_for_transactions(sentry_init, capture_events):
    # Sampling decisions are made at the transaction level; child spans
    # inherit the decision, while orphan spans get none at all.
    sentry_init(traces_sample_rate=0.5)

    with start_transaction(name="hi") as transaction:
        assert transaction.sampled is not None

        with start_span() as span:
            assert span.sampled == transaction.sampled

    # a span started outside any transaction has no sampling decision
    with start_span() as span:
        assert span.sampled is None
@pytest.mark.parametrize("sampled", [True, False])
def test_nested_transaction_sampling_override(sentry_init, sampled):
    # an explicit decision on a nested transaction neither inherits from
    # nor disturbs the enclosing transaction's decision
    sentry_init(traces_sample_rate=1.0)

    with start_transaction(name="outer", sampled=sampled) as outer:
        assert outer.sampled is sampled
        with start_transaction(name="inner", sampled=(not sampled)) as inner:
            assert inner.sampled is not sampled
        assert outer.sampled is sampled
def test_no_double_sampling(sentry_init, capture_events):
    # Transactions should not be subject to the global/error sample rate.
    # Only the traces_sample_rate should apply.
    sentry_init(traces_sample_rate=1.0, sample_rate=0.0)
    captured = capture_events()

    with start_transaction(name="/"):
        pass

    assert len(captured) == 1
@pytest.mark.parametrize(
    "rate",
    [0.0, 0.1231, 1.0, True, False],
)
def test_accepts_valid_sample_rate(rate):
    # a legitimate rate passes validation without any warning being logged
    with mock.patch.object(logger, "warning", mock.Mock()):
        outcome = is_valid_sample_rate(rate)
        assert outcome is True
        assert logger.warning.called is False
@pytest.mark.parametrize(
    "rate",
    [
        "dogs are great",  # wrong type
        (0, 1),  # wrong type
        {"Maisey": "Charllie"},  # wrong type
        [True, True],  # wrong type
        {0.2012},  # wrong type
        float("NaN"),  # wrong type
        None,  # wrong type
        -1.121,  # wrong value
        1.231,  # wrong value
    ],
)
def test_warns_on_invalid_sample_rate(rate, StringContaining):  # noqa: N803
    # invalid rates must be rejected *and* warned about
    with mock.patch.object(logger, "warning", mock.Mock()):
        outcome = is_valid_sample_rate(rate)
        logger.warning.assert_any_call(StringContaining("Given sample rate is invalid"))
        assert outcome is False
@pytest.mark.parametrize("sampling_decision", [True, False])
def test_get_transaction_and_span_from_scope_regardless_of_sampling_decision(
    sentry_init, sampling_decision
):
    # Scope lookups for the active span/transaction must work even for
    # unsampled traces.
    sentry_init(traces_sample_rate=1.0)

    with start_transaction(name="/", sampled=sampling_decision):
        with start_span(op="child-span"):
            with start_span(op="child-child-span"):
                scope = Hub.current.scope
                # the innermost span is the one on the scope...
                assert scope.span.op == "child-child-span"
                # ...while `transaction` walks up to the root
                assert scope.transaction.name == "/"
@pytest.mark.parametrize(
    "traces_sample_rate,expected_decision",
    [(0.0, False), (0.25, False), (0.75, True), (1.00, True)],
)
def test_uses_traces_sample_rate_correctly(
    sentry_init,
    traces_sample_rate,
    expected_decision,
):
    # pin the random draw at 0.5 so rates below it reject and rates above
    # it accept, deterministically
    sentry_init(traces_sample_rate=traces_sample_rate)

    with mock.patch.object(random, "random", return_value=0.5):
        assert start_transaction(name="dogpark").sampled is expected_decision
@pytest.mark.parametrize(
    "traces_sampler_return_value,expected_decision",
    [(0.0, False), (0.25, False), (0.75, True), (1.00, True)],
)
def test_uses_traces_sampler_return_value_correctly(
    sentry_init,
    traces_sampler_return_value,
    expected_decision,
):
    # a numeric sampler return value is treated as a sample rate; pin the
    # random draw at 0.5 so the outcome is deterministic
    sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value))

    with mock.patch.object(random, "random", return_value=0.5):
        assert start_transaction(name="dogpark").sampled is expected_decision
@pytest.mark.parametrize("traces_sampler_return_value", [True, False])
def test_tolerates_traces_sampler_returning_a_boolean(
    sentry_init, traces_sampler_return_value
):
    # a bool from the sampler is taken directly as the sampling decision
    sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value))

    assert start_transaction(name="dogpark").sampled is traces_sampler_return_value
@pytest.mark.parametrize("sampling_decision", [True, False])
def test_only_captures_transaction_when_sampled_is_true(
    sentry_init, sampling_decision, capture_events
):
    # finishing a transaction only produces an event if it was sampled in
    sentry_init(traces_sampler=mock.Mock(return_value=sampling_decision))
    events = capture_events()

    start_transaction(name="dogpark").finish()

    expected_count = 1 if sampling_decision else 0
    assert len(events) == expected_count
@pytest.mark.parametrize(
    "traces_sample_rate,traces_sampler_return_value", [(0, True), (1, False)]
)
def test_prefers_traces_sampler_to_traces_sample_rate(
    sentry_init,
    traces_sample_rate,
    traces_sampler_return_value,
):
    # make traces_sample_rate imply the opposite of traces_sampler, to prove
    # that traces_sampler takes precedence
    sampler_spy = mock.Mock(return_value=traces_sampler_return_value)
    sentry_init(
        traces_sample_rate=traces_sample_rate,
        traces_sampler=sampler_spy,
    )

    decision = start_transaction(name="dogpark").sampled
    assert sampler_spy.called is True
    assert decision is traces_sampler_return_value
@pytest.mark.parametrize("parent_sampling_decision", [True, False])
def test_ignores_inherited_sample_decision_when_traces_sampler_defined(
    sentry_init, parent_sampling_decision
):
    # make traces_sampler pick the opposite of the inherited decision, to prove
    # that traces_sampler takes precedence
    sentry_init(traces_sampler=mock.Mock(return_value=not parent_sampling_decision))

    transaction = start_transaction(
        name="dogpark", parent_sampled=parent_sampling_decision
    )
    assert transaction.sampled is not parent_sampling_decision
@pytest.mark.parametrize("explicit_decision", [True, False])
def test_traces_sampler_doesnt_overwrite_explicitly_passed_sampling_decision(
    sentry_init, explicit_decision
):
    # make traces_sampler pick the opposite of the explicit decision, to prove
    # that the explicit decision takes precedence
    sentry_init(traces_sampler=mock.Mock(return_value=not explicit_decision))

    transaction = start_transaction(name="dogpark", sampled=explicit_decision)
    assert transaction.sampled is explicit_decision
@pytest.mark.parametrize("parent_sampling_decision", [True, False])
def test_inherits_parent_sampling_decision_when_traces_sampler_undefined(
    sentry_init, parent_sampling_decision
):
    # make sure the parent sampling decision is the opposite of what
    # traces_sample_rate would produce, to prove the inheritance takes
    # precedence
    sentry_init(traces_sample_rate=0.5)
    rigged_random = 0.75 if parent_sampling_decision else 0.25

    with mock.patch.object(random, "random", return_value=rigged_random):
        transaction = start_transaction(
            name="dogpark", parent_sampled=parent_sampling_decision
        )

    assert transaction.sampled is parent_sampling_decision
@pytest.mark.parametrize("parent_sampling_decision", [True, False])
def test_passes_parent_sampling_decision_in_sampling_context(
    sentry_init, parent_sampling_decision
):
    # The inherited decision from the `sentry-trace` header must show up as
    # `parent_sampled` in the sampling context.
    sentry_init(traces_sample_rate=1.0)

    # header format: traceid-spanid-sampledflag
    sentry_trace_header = (
        "12312012123120121231201212312012-1121201211212012-{sampled}".format(
            sampled=int(parent_sampling_decision)
        )
    )

    transaction = Transaction.continue_from_headers(
        headers={"sentry-trace": sentry_trace_header}, name="dogpark"
    )
    # wrap the transaction in a spy so we can inspect the internal call
    spy = mock.Mock(wraps=transaction)
    start_transaction(transaction=spy)

    # there's only one call (so index at 0) and kwargs are always last in a call
    # tuple (so index at -1)
    sampling_context = spy._set_initial_sampling_decision.mock_calls[0][-1][
        "sampling_context"
    ]
    assert "parent_sampled" in sampling_context
    # because we passed in a spy, attribute access requires unwrapping
    assert sampling_context["parent_sampled"]._mock_wraps is parent_sampling_decision
def test_passes_custom_samling_context_from_start_transaction_to_traces_sampler(
    sentry_init, DictionaryContaining  # noqa: N803
):
    # whatever is handed to `custom_sampling_context` must appear in the
    # sampling context the traces_sampler receives
    sampler_spy = mock.Mock()
    sentry_init(traces_sampler=sampler_spy)

    start_transaction(custom_sampling_context={"dogs": "yes", "cats": "maybe"})

    sampler_spy.assert_any_call(
        DictionaryContaining({"dogs": "yes", "cats": "maybe"})
    )
@pytest.mark.parametrize(
    "traces_sampler_return_value",
    [
        "dogs are great",  # wrong type
        (0, 1),  # wrong type
        {"Maisey": "Charllie"},  # wrong type
        [True, True],  # wrong type
        {0.2012},  # wrong type
        float("NaN"),  # wrong type
        None,  # wrong type
        -1.121,  # wrong value
        1.231,  # wrong value
    ],
)
def test_warns_and_sets_sampled_to_false_on_invalid_traces_sampler_return_value(
    sentry_init, traces_sampler_return_value, StringContaining  # noqa: N803
):
    # a bogus sampler return value must be warned about and treated as
    # "don't sample"
    sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value))

    with mock.patch.object(logger, "warning", mock.Mock()):
        transaction = start_transaction(name="dogpark")
        logger.warning.assert_any_call(StringContaining("Given sample rate is invalid"))
        assert transaction.sampled is False
sentry-python-1.4.3/tests/utils/ 0000775 0000000 0000000 00000000000 14125057761 0016674 5 ustar 00root root 0000000 0000000 sentry-python-1.4.3/tests/utils/__init__.py 0000664 0000000 0000000 00000000050 14125057761 0021000 0 ustar 00root root 0000000 0000000 # Make this a module for test_abs_path.
sentry-python-1.4.3/tests/utils/test_contextvars.py 0000664 0000000 0000000 00000001432 14125057761 0022665 0 ustar 00root root 0000000 0000000 import pytest
import random
import time
@pytest.mark.forked
def test_leaks(maybe_monkeypatched_threading):
    # Each thread sets its own value in a ContextVar; no thread may ever
    # observe another thread's value (i.e. the var must not "leak" across
    # threads), whether or not gevent has monkeypatched `threading`.
    import threading

    # Need to explicitly call _get_contextvars because the SDK has already
    # decided upon gevent on import.
    from sentry_sdk import utils

    _, ContextVar = utils._get_contextvars()  # noqa: N806

    ts = []
    var = ContextVar("test_contextvar_leaks")
    success = []

    def run():
        value = int(random.random() * 1000)
        var.set(value)

        # yield repeatedly so other threads get a chance to interleave
        for _ in range(100):
            time.sleep(0)
            assert var.get(None) == value

        success.append(1)

    for _ in range(20):
        t = threading.Thread(target=run)
        t.start()
        ts.append(t)

    for t in ts:
        t.join()

    # every thread must have made it through all of its checks
    assert len(success) == 20
sentry-python-1.4.3/tests/utils/test_general.py 0000664 0000000 0000000 00000014212 14125057761 0021722 0 ustar 00root root 0000000 0000000 # coding: utf-8
import sys
import os
import pytest
from sentry_sdk.utils import (
BadDsn,
Dsn,
safe_repr,
exceptions_from_error_tuple,
filename_for_module,
handle_in_app_impl,
iter_event_stacktraces,
to_base64,
from_base64,
)
from sentry_sdk._compat import text_type, string_types
try:
from hypothesis import given
import hypothesis.strategies as st
except ImportError:
pass
else:
any_string = st.one_of(st.binary(), st.text())
@given(x=any_string)
def test_safe_repr_never_broken_for_strings(x):
r = safe_repr(x)
assert isinstance(r, text_type)
assert u"broken repr" not in r
def test_safe_repr_regressions():
    # non-ascii text must survive safe_repr intact
    rendered = safe_repr(u"лошадь")
    assert u"лошадь" in rendered
@pytest.mark.xfail(
    sys.version_info < (3,),
    reason="Fixing this in Python 2 would break other behaviors",
)
@pytest.mark.parametrize("prefix", (u"", u"abcd", u"лошадь"))
@pytest.mark.parametrize("character", u"\x00\x07\x1b\n")
def test_safe_repr_non_printable(prefix, character):
    """Check that non-printable characters are escaped"""
    text_input = prefix + character
    # both the str and its utf-8 bytes form must come back escaped
    for candidate in (text_input, text_input.encode("utf-8")):
        assert character not in safe_repr(candidate)
def test_abs_path():
    """Check if abs_path is actually an absolute path. This can happen either
    with eval/exec like here, or when the file in the frame is relative to
    __main__"""
    # compile() with a relative filename produces frames whose abs_path
    # must nonetheless come out absolute
    code = compile("1/0", "test.py", "exec")
    try:
        exec(code, {})
    except Exception:
        exceptions = exceptions_from_error_tuple(sys.exc_info())

    (exception,) = exceptions
    frame1, frame2 = frames = exception["stacktrace"]["frames"]
    for frame in frames:
        assert os.path.abspath(frame["abs_path"]) == frame["abs_path"]

    # frame1 is this test module; frame2 is the exec'd "test.py" code object
    assert frame1["filename"] == "tests/utils/test_general.py"
    assert frame2["filename"] == "test.py"
def test_filename():
    # unknown module: the path comes back untouched
    assert filename_for_module("bogus", "bogus") == "bogus"

    # stdlib module: reduced to its basename
    assert filename_for_module("os", os.__file__) == "os.py"

    import sentry_sdk.utils

    # package module: reduced to a package-relative path
    assert (
        filename_for_module("sentry_sdk.utils", sentry_sdk.utils.__file__)
        == "sentry_sdk/utils.py"
    )
@pytest.mark.parametrize(
    "given,expected_store,expected_envelope",
    [
        (
            "https://foobar@sentry.io/123",
            "https://sentry.io/api/123/store/",
            "https://sentry.io/api/123/envelope/",
        ),
        (
            "https://foobar@sentry.io/bam/123",
            "https://sentry.io/bam/api/123/store/",
            "https://sentry.io/bam/api/123/envelope/",
        ),
        (
            "https://foobar@sentry.io/bam/baz/123",
            "https://sentry.io/bam/baz/api/123/store/",
            "https://sentry.io/bam/baz/api/123/envelope/",
        ),
    ],
)
def test_parse_dsn_paths(given, expected_store, expected_envelope):
    # any path prefix in the DSN must be preserved in the derived API URLs
    auth = Dsn(given).to_auth()
    assert auth.store_api_url == expected_store
    assert auth.get_api_url("store") == expected_store
    assert auth.get_api_url("envelope") == expected_envelope
@pytest.mark.parametrize(
    "dsn",
    [
        # BUG FIX: the original list had no commas between the string
        # literals, so Python concatenated all five into ONE string and only
        # a single (garbled) case ever ran. Each entry below lacks a valid
        # numeric project id, so each must raise BadDsn on its own.
        "https://foobar@sentry.io",
        "https://foobar@sentry.io/",
        "https://foobar@sentry.io/asdf",
        "https://foobar@sentry.io/asdf/",
        "https://foobar@sentry.io/asdf/123/",
    ],
)
def test_parse_invalid_dsn(dsn):
    # parsing must reject each malformed DSN
    with pytest.raises(BadDsn):
        Dsn(dsn)
@pytest.mark.parametrize("empty", [None, []])
def test_in_app(empty):
    # `handle_in_app_impl` flags frames as in-app based on include/exclude
    # module lists; `empty` checks that None and [] behave identically.
    #
    # include only: listed modules flagged in-app, others untouched
    assert (
        handle_in_app_impl(
            [{"module": "foo"}, {"module": "bar"}],
            in_app_include=["foo"],
            in_app_exclude=empty,
        )
        == [{"module": "foo", "in_app": True}, {"module": "bar"}]
    )

    # include wins when a module is both included and excluded
    assert (
        handle_in_app_impl(
            [{"module": "foo"}, {"module": "bar"}],
            in_app_include=["foo"],
            in_app_exclude=["foo"],
        )
        == [{"module": "foo", "in_app": True}, {"module": "bar"}]
    )

    # exclude only: listed modules flagged out, the rest default to in-app
    assert (
        handle_in_app_impl(
            [{"module": "foo"}, {"module": "bar"}],
            in_app_include=empty,
            in_app_exclude=["foo"],
        )
        == [{"module": "foo", "in_app": False}, {"module": "bar", "in_app": True}]
    )
def test_iter_stacktraces():
    # stacktraces must be yielded from all three possible locations:
    # thread values, the event's top level, and exception values
    event = {
        "threads": {"values": [{"stacktrace": 1}]},
        "stacktrace": 2,
        "exception": {"values": [{"stacktrace": 3}]},
    }
    assert set(iter_event_stacktraces(event)) == {1, 2, 3}
@pytest.mark.parametrize(
    ("original", "base64_encoded"),
    [
        # ascii only
        ("Dogs are great!", "RG9ncyBhcmUgZ3JlYXQh"),
        # emoji
        (u"🐶", "8J+Qtg=="),
        # non-ascii
        (
            u"Καλό κορίτσι, Μάιζεϊ!",
            "zprOsc67z4wgzrrOv8+Bzq/PhM+DzrksIM6czqzOuc62zrXPiiE=",
        ),
        # mix of ascii and non-ascii
        (
            u"Of margir hundar! Ég geri ráð fyrir að ég þurfi stærra rúm.",
            "T2YgbWFyZ2lyIGh1bmRhciEgw4lnIGdlcmkgcsOhw7AgZnlyaXIgYcOwIMOpZyDDvnVyZmkgc3TDpnJyYSByw7ptLg==",
        ),
    ],
)
def test_successful_base64_conversion(original, base64_encoded):
    # all unicode characters should be handled correctly
    encoded = to_base64(original)
    decoded = from_base64(base64_encoded)
    assert encoded == base64_encoded
    assert decoded == original

    # "to" and "from" should be inverses
    assert from_base64(encoded) == original
    assert to_base64(decoded) == base64_encoded
@pytest.mark.parametrize(
    "input",
    [
        1231,  # incorrect type
        True,  # incorrect type
        [],  # incorrect type
        {},  # incorrect type
        None,  # incorrect type
        "yayfordogs",  # wrong length
        "#dog",  # invalid ascii character
        "🐶",  # non-ascii character
    ],
)
def test_failed_base64_conversion(input):
    # conversion from base64 should fail if given input of the wrong type or
    # input which isn't a valid base64 string
    assert from_base64(input) is None

    # any string can be converted to base64, so only type errors will cause
    # failures
    is_stringlike = type(input) in string_types
    if not is_stringlike:
        assert to_base64(input) is None
sentry-python-1.4.3/tests/utils/test_transaction.py 0000664 0000000 0000000 00000000776 14125057761 0022644 0 ustar 00root root 0000000 0000000 from sentry_sdk.utils import transaction_from_function
class MyClass:
    """Fixture class; only its qualified name matters to the test below."""

    def myfunc(self):
        """Fixture method used to exercise method-name resolution."""
        pass


def myfunc():
    """Module-level fixture function used to exercise function-name resolution."""
    pass
def test_transaction_from_function():
x = transaction_from_function
assert x(MyClass) == "tests.utils.test_transaction.MyClass"
assert x(MyClass.myfunc) == "tests.utils.test_transaction.MyClass.myfunc"
assert x(myfunc) == "tests.utils.test_transaction.myfunc"
assert x(None) is None
assert x(42) is None
assert x(lambda: None).endswith("")
sentry-python-1.4.3/tox.ini 0000664 0000000 0000000 00000022565 14125057761 0015717 0 ustar 00root root 0000000 0000000 # Tox (http://codespeak.net/~hpk/tox/) is a tool for running tests
# in multiple virtualenvs. This configuration file will run the
# test suite on all supported python versions. To use it, "pip install tox"
# and then run "tox" from this directory.
[tox]
envlist =
# === Core ===
py{2.7,3.4,3.5,3.6,3.7,3.8,3.9}
pypy
# === Integrations ===
# General format is {pythonversion}-{integrationname}-{frameworkversion}
# 1 blank line between different integrations
# Each framework version should only be mentioned once. I.e:
# {py2.7,py3.7}-django-{1.11}
# {py3.7}-django-{2.2}
# instead of:
# {py2.7}-django-{1.11}
# {py2.7,py3.7}-django-{1.11,2.2}
{pypy,py2.7}-django-{1.6,1.7}
{pypy,py2.7,py3.5}-django-{1.8,1.9,1.10}
{pypy,py2.7}-django-{1.8,1.9,1.10,1.11}
{py3.5,py3.6,py3.7}-django-{2.0,2.1}
{py3.7,py3.8,py3.9}-django-{2.2,3.0,3.1,3.2}
{pypy,py2.7,py3.4,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.10,0.11,0.12,1.0}
{pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-1.1
{py3.6,py3.8,py3.9}-flask-2.0
{pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-bottle-0.12
{pypy,py2.7,py3.5,py3.6,py3.7}-falcon-1.4
{pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-falcon-2.0
{py3.5,py3.6,py3.7}-sanic-{0.8,18}
{py3.6,py3.7}-sanic-19
{py3.6,py3.7,py3.8}-sanic-20
{py3.7,py3.8,py3.9}-sanic-21
# TODO: Add py3.9
{pypy,py2.7}-celery-3
{pypy,py2.7,py3.5,py3.6}-celery-{4.1,4.2}
{pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-celery-{4.3,4.4}
{py3.6,py3.7,py3.8}-celery-5.0
py3.7-beam-{2.12,2.13}
# The aws_lambda tests deploy to the real AWS and have their own matrix of Python versions.
py3.7-aws_lambda
py3.7-gcp
{pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-pyramid-{1.6,1.7,1.8,1.9,1.10}
{pypy,py2.7,py3.5,py3.6}-rq-{0.6,0.7,0.8,0.9,0.10,0.11}
{pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-rq-{0.12,0.13,1.0,1.1,1.2,1.3}
{py3.5,py3.6,py3.7,py3.8,py3.9}-rq-{1.4,1.5}
py3.7-aiohttp-3.5
{py3.7,py3.8,py3.9}-aiohttp-3.6
{py3.7,py3.8,py3.9}-tornado-{5,6}
{py3.5,py3.6,py3.7,py3.8,py3.9}-trytond-{4.6,5.0,5.2}
{py3.6,py3.7,py3.8,py3.9}-trytond-{5.4}
{py2.7,py3.8,py3.9}-requests
{py2.7,py3.7,py3.8,py3.9}-redis
{py2.7,py3.7,py3.8,py3.9}-rediscluster-{1,2}
py{3.7,3.8,3.9}-asgi
{py2.7,py3.7,py3.8,py3.9}-sqlalchemy-{1.2,1.3}
{py3.5,py3.6,py3.7,py3.8,py3.9}-pure_eval
{py3.6,py3.7,py3.8}-chalice-{1.16,1.17,1.18,1.19,1.20}
{py2.7,py3.6,py3.7,py3.8}-boto3-{1.9,1.10,1.11,1.12,1.13,1.14,1.15,1.16}
{py3.6,py3.7,py3.8,py3.9}-httpx-{0.16,0.17}
[testenv]
deps =
# if you change test-requirements.txt and your change is not being reflected
# in what's installed by tox (when running tox locally), try running tox
# with the -r flag
-r test-requirements.txt
django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: djangorestframework>=3.0.0,<4.0.0
{py3.7,py3.8,py3.9}-django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: channels>2
{py3.7,py3.8,py3.9}-django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: pytest-asyncio
{py2.7,py3.7,py3.8,py3.9}-django-{1.11,2.2,3.0,3.1,3.2}: psycopg2-binary
django-{1.6,1.7}: pytest-django<3.0
django-{1.8,1.9,1.10,1.11,2.0,2.1}: pytest-django<4.0
django-{2.2,3.0,3.1,3.2}: pytest-django>=4.0
django-{2.2,3.0,3.1,3.2}: Werkzeug<2.0
django-1.6: Django>=1.6,<1.7
django-1.7: Django>=1.7,<1.8
django-1.8: Django>=1.8,<1.9
django-1.9: Django>=1.9,<1.10
django-1.10: Django>=1.10,<1.11
django-1.11: Django>=1.11,<1.12
django-2.0: Django>=2.0,<2.1
django-2.1: Django>=2.1,<2.2
django-2.2: Django>=2.2,<2.3
django-3.0: Django>=3.0,<3.1
django-3.1: Django>=3.1,<3.2
flask: flask-login
flask-0.10: Flask>=0.10,<0.11
flask-0.11: Flask>=0.11,<0.12
flask-0.12: Flask>=0.12,<0.13
flask-1.0: Flask>=1.0,<1.1
flask-1.1: Flask>=1.1,<1.2
flask-2.0: Flask>=2.0,<2.1
bottle-0.12: bottle>=0.12,<0.13
falcon-1.4: falcon>=1.4,<1.5
falcon-2.0: falcon>=2.0.0rc3,<3.0
sanic-0.8: sanic>=0.8,<0.9
sanic-18: sanic>=18.0,<19.0
sanic-19: sanic>=19.0,<20.0
sanic-20: sanic>=20.0,<21.0
sanic-21: sanic>=21.0,<22.0
{py3.7,py3.8,py3.9}-sanic-21: sanic_testing
{py3.5,py3.6}-sanic: aiocontextvars==0.2.1
sanic: aiohttp
py3.5-sanic: ujson<4
beam-2.12: apache-beam>=2.12.0, <2.13.0
beam-2.13: apache-beam>=2.13.0, <2.14.0
beam-master: git+https://github.com/apache/beam#egg=apache-beam&subdirectory=sdks/python
celery: redis
celery-3: Celery>=3.1,<4.0
celery-4.1: Celery>=4.1,<4.2
celery-4.2: Celery>=4.2,<4.3
celery-4.3: Celery>=4.3,<4.4
# https://github.com/celery/vine/pull/29#issuecomment-689498382
celery-4.3: vine<5.0.0
# https://github.com/celery/celery/issues/6153
celery-4.4: Celery>=4.4,<4.5,!=4.4.4
celery-5.0: Celery>=5.0,<5.1
py3.5-celery: newrelic<6.0.0
{pypy,py2.7,py3.6,py3.7,py3.8,py3.9}-celery: newrelic
requests: requests>=2.0
aws_lambda: boto3
pyramid-1.6: pyramid>=1.6,<1.7
pyramid-1.7: pyramid>=1.7,<1.8
pyramid-1.8: pyramid>=1.8,<1.9
pyramid-1.9: pyramid>=1.9,<1.10
pyramid-1.10: pyramid>=1.10,<1.11
# https://github.com/jamesls/fakeredis/issues/245
rq-{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: fakeredis<1.0
rq-{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: redis<3.2.2
rq-{0.13,1.0,1.1,1.2,1.3,1.4,1.5}: fakeredis>=1.0
rq-0.6: rq>=0.6,<0.7
rq-0.7: rq>=0.7,<0.8
rq-0.8: rq>=0.8,<0.9
rq-0.9: rq>=0.9,<0.10
rq-0.10: rq>=0.10,<0.11
rq-0.11: rq>=0.11,<0.12
rq-0.12: rq>=0.12,<0.13
rq-0.13: rq>=0.13,<0.14
rq-1.0: rq>=1.0,<1.1
rq-1.1: rq>=1.1,<1.2
rq-1.2: rq>=1.2,<1.3
rq-1.3: rq>=1.3,<1.4
rq-1.4: rq>=1.4,<1.5
rq-1.5: rq>=1.5,<1.6
aiohttp-3.4: aiohttp>=3.4.0,<3.5.0
aiohttp-3.5: aiohttp>=3.5.0,<3.6.0
aiohttp: pytest-aiohttp
tornado-5: tornado>=5,<6
tornado-6: tornado>=6.0a1
trytond-5.4: trytond>=5.4,<5.5
trytond-5.2: trytond>=5.2,<5.3
trytond-5.0: trytond>=5.0,<5.1
trytond-4.6: trytond>=4.6,<4.7
trytond-{4.6,4.8,5.0,5.2,5.4}: werkzeug<2.0
redis: fakeredis
rediscluster-1: redis-py-cluster>=1.0.0,<2.0.0
rediscluster-2: redis-py-cluster>=2.0.0,<3.0.0
asgi: starlette
asgi: requests
sqlalchemy-1.2: sqlalchemy>=1.2,<1.3
sqlalchemy-1.3: sqlalchemy>=1.3,<1.4
linters: -r linter-requirements.txt
py3.8: hypothesis
pure_eval: pure_eval
chalice-1.16: chalice>=1.16.0,<1.17.0
chalice-1.17: chalice>=1.17.0,<1.18.0
chalice-1.18: chalice>=1.18.0,<1.19.0
chalice-1.19: chalice>=1.19.0,<1.20.0
chalice-1.20: chalice>=1.20.0,<1.21.0
chalice: pytest-chalice==0.0.5
boto3-1.9: boto3>=1.9,<1.10
boto3-1.10: boto3>=1.10,<1.11
boto3-1.11: boto3>=1.11,<1.12
boto3-1.12: boto3>=1.12,<1.13
boto3-1.13: boto3>=1.13,<1.14
boto3-1.14: boto3>=1.14,<1.15
boto3-1.15: boto3>=1.15,<1.16
boto3-1.16: boto3>=1.16,<1.17
httpx-0.16: httpx>=0.16,<0.17
httpx-0.17: httpx>=0.17,<0.18
setenv =
PYTHONDONTWRITEBYTECODE=1
TESTPATH=tests
beam: TESTPATH=tests/integrations/beam
django: TESTPATH=tests/integrations/django
flask: TESTPATH=tests/integrations/flask
bottle: TESTPATH=tests/integrations/bottle
falcon: TESTPATH=tests/integrations/falcon
celery: TESTPATH=tests/integrations/celery
requests: TESTPATH=tests/integrations/requests
aws_lambda: TESTPATH=tests/integrations/aws_lambda
gcp: TESTPATH=tests/integrations/gcp
sanic: TESTPATH=tests/integrations/sanic
pyramid: TESTPATH=tests/integrations/pyramid
rq: TESTPATH=tests/integrations/rq
aiohttp: TESTPATH=tests/integrations/aiohttp
tornado: TESTPATH=tests/integrations/tornado
trytond: TESTPATH=tests/integrations/trytond
redis: TESTPATH=tests/integrations/redis
rediscluster: TESTPATH=tests/integrations/rediscluster
asgi: TESTPATH=tests/integrations/asgi
sqlalchemy: TESTPATH=tests/integrations/sqlalchemy
pure_eval: TESTPATH=tests/integrations/pure_eval
chalice: TESTPATH=tests/integrations/chalice
boto3: TESTPATH=tests/integrations/boto3
httpx: TESTPATH=tests/integrations/httpx
COVERAGE_FILE=.coverage-{envname}
passenv =
SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID
SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY
SENTRY_PYTHON_TEST_AWS_IAM_ROLE
SENTRY_PYTHON_TEST_POSTGRES_USER
SENTRY_PYTHON_TEST_POSTGRES_PASSWORD
SENTRY_PYTHON_TEST_POSTGRES_NAME
usedevelop = True
extras =
flask: flask
bottle: bottle
falcon: falcon
basepython =
py2.7: python2.7
py3.4: python3.4
py3.5: python3.5
py3.6: python3.6
py3.7: python3.7
py3.8: python3.8
py3.9: python3.9
# Python version is pinned here because flake8 actually behaves differently
# depending on which version is used. You can patch this out to point to
# some random Python 3 binary, but then you get guaranteed mismatches with
# CI. Other tools such as mypy and black have options that pin the Python
# version.
linters: python3.9
pypy: pypy
commands =
django-{1.6,1.7}: pip install pytest<4
; https://github.com/pytest-dev/pytest/issues/5532
{py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.10,0.11,0.12}: pip install pytest<5
{py3.6,py3.7,py3.8,py3.9}-flask-{0.11}: pip install Werkzeug<2
py.test {env:TESTPATH} {posargs}
[testenv:linters]
commands =
flake8 tests examples sentry_sdk
black --check tests examples sentry_sdk
mypy examples sentry_sdk