pax_global_header00006660000000000000000000000064147666157370014540gustar00rootroot0000000000000052 comment=63b05e52b6687e7371551d57a525f090e65ec73a prometheus-fastapi-instrumentator-7.1.0/000077500000000000000000000000001476661573700204615ustar00rootroot00000000000000prometheus-fastapi-instrumentator-7.1.0/.commitlintrc.yaml000066400000000000000000000002241476661573700241250ustar00rootroot00000000000000extends: ["@commitlint/config-conventional"] rules: subject-case: - 2 - always - [sentence-case, start-case, pascal-case, upper-case] prometheus-fastapi-instrumentator-7.1.0/.editorconfig000066400000000000000000000003001476661573700231270ustar00rootroot00000000000000root = true [*] charset = utf-8 end_of_line = lf indent_size = 2 indent_style = space insert_final_newline = true trim_trailing_whitespace = true [*.py] indent_style = space indent_size = 4 prometheus-fastapi-instrumentator-7.1.0/.flake8000066400000000000000000000001351476661573700216330ustar00rootroot00000000000000[flake8] exclude = .git,.lock max-complexity = 12 max-line-length = 90 ignore=E501,W503,E231 prometheus-fastapi-instrumentator-7.1.0/.github/000077500000000000000000000000001476661573700220215ustar00rootroot00000000000000prometheus-fastapi-instrumentator-7.1.0/.github/PULL_REQUEST_TEMPLATE.md000066400000000000000000000020421476661573700256200ustar00rootroot00000000000000 ## What does this do? > Add a brief description of what the feature or update does. ## Why do we need it? > Add a description of the problem the feature is trying to solve. ## Who is this for? > Add information on what kind of persona the feature is for. ## Linked issues > Resolves #X, Fixes #X, Addresses #X ## Reviewer notes > Add special notes for your reviewer. prometheus-fastapi-instrumentator-7.1.0/.github/dependabot.yml000066400000000000000000000003051476661573700246470ustar00rootroot00000000000000version: 2 updates: - package-ecosystem: github-actions directory: / schedule: interval: monthly - package-ecosystem: pip directory: / schedule: interval: monthly prometheus-fastapi-instrumentator-7.1.0/.github/workflows/000077500000000000000000000000001476661573700240565ustar00rootroot00000000000000prometheus-fastapi-instrumentator-7.1.0/.github/workflows/ci.yaml000066400000000000000000000053421476661573700253410ustar00rootroot00000000000000name: CI # # Primary workflow for continues integration. # on: workflow_dispatch: pull_request: push: branches: [master, dev] schedule: # At 04:04 on Monday. - cron: 4 4 * * 1 env: poetry_version: "2.0.1" jobs: ci: name: CI strategy: fail-fast: false matrix: python-version: ["3.12", "3.9"] runs-on: ubuntu-latest steps: - name: Checkout repository uses: actions/checkout@v4 # ------------------------------------------------------------------------ # Python & Poetry. 
- name: Setup Python uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Cache Poetry installation uses: actions/cache@v4 with: path: | ~/.local/bin/poetry ~/.local/share/pypoetry key: |- ${{ runner.os }}-poetry-installation-${{ matrix.python-version }}-${{ env.poetry_version }}-0 - name: Install Poetry run: curl -sSL https://install.python-poetry.org | python - env: POETRY_VERSION: ${{ env.poetry_version }} - name: Cache Poetry cache uses: actions/cache@v4 with: path: ~/.cache/pypoetry key: |- ${{ runner.os }}-poetry-cache-${{ matrix.python-version }}-${{ env.poetry_version }}-${{ hashFiles('poetry.lock') }}-0 restore-keys: | ${{ runner.os }}-poetry-cache-${{ matrix.python-version }}-${{ env.poetry_version }}- ${{ runner.os }}-poetry-cache-${{ matrix.python-version }}- - name: Install deps with Poetry run: poetry install --no-interaction # ------------------------------------------------------------------------ # Tests. - name: Run tests with Pytest run: | poetry run pytest --cov-report=term-missing --cov-report=xml --cov=src - name: Run multi process tests with Pytest run: | export PROMETHEUS_MULTIPROC_DIR=/tmp/pfi-tests/multiproc rm -rf $PROMETHEUS_MULTIPROC_DIR mkdir -p $PROMETHEUS_MULTIPROC_DIR poetry run pytest -k test_multiproc \ --cov-append --cov-report=term-missing --cov-report=xml --cov=src - name: Upload coverage to Codecov if: strategy.job-index == 0 uses: codecov/codecov-action@v3 with: token: ${{ secrets.CODECOV_TOKEN }} fail_ci_if_error: true - name: Check version equality run: | pattern="__version__ = \"$(poetry version --short)\"" if ! grep -q -R -F --include="__init__.py" "$pattern" src; then echo "::error::Version set with Poetry does not match __version__ variable."; exit 1 fi prometheus-fastapi-instrumentator-7.1.0/.github/workflows/codeql.yaml000066400000000000000000000012261476661573700262120ustar00rootroot00000000000000name: CodeQL # # Scans code with CodeQL on a scheduled basis. # # - https://codeql.github.com # - https://github.com/github/codeql-action # on: workflow_dispatch: schedule: # At 06:01 on day-of-month 8. - cron: 1 6 8 * * jobs: codeql: name: CodeQL strategy: fail-fast: false matrix: language: [python] runs-on: ubuntu-latest permissions: security-events: write steps: - name: Checkout repository uses: actions/checkout@v4 - name: Initialize CodeQL uses: github/codeql-action/init@v2 - name: Perform CodeQL analysis uses: github/codeql-action/analyze@v2 prometheus-fastapi-instrumentator-7.1.0/.github/workflows/lint-pr-title.yaml000066400000000000000000000006721476661573700274530ustar00rootroot00000000000000name: Lint PR Title # # Ensures that PR title matches the Conventional Commits spec. # # - https://github.com/amannn/action-semantic-pull-request # on: pull_request_target: types: - edited - opened - synchronize jobs: lint-pr-title: name: Lint PR Title runs-on: ubuntu-latest steps: - uses: amannn/action-semantic-pull-request@v5 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} prometheus-fastapi-instrumentator-7.1.0/.github/workflows/pre-commit.yaml000066400000000000000000000045721476661573700270260ustar00rootroot00000000000000name: Pre-commit # # Runs pre-commit hooks and auto commit fixes. # # - https://pre-commit.ci/lite.html # - https://github.com/pre-commit-ci/lite-action # - https://github.com/apps/pre-commit-ci-lite # # Relies on the pre-commit.ci lite GitHub app to be configured with this repo. # # Why is the non-lite pre-commit.ci not used instead? 
It does not support local # hooks using "system" as language which is heavily used within this project. # # Unlike the full version, lite does not perform automatic upgrades of hooks. # on: workflow_dispatch: pull_request: push: branches: [master, dev] schedule: # At 04:04 on Monday. - cron: 4 4 * * 1 env: poetry_version: "2.0.1" python_version: "3.12" jobs: pre-commit: name: Pre-commit runs-on: ubuntu-latest steps: - name: Checkout repository uses: actions/checkout@v4 # ------------------------------------------------------------------------ # Python & Poetry. - name: Setup Python uses: actions/setup-python@v5 with: python-version: ${{ env.python_version }} - name: Cache Poetry installation uses: actions/cache@v4 with: path: | ~/.local/bin/poetry ~/.local/share/pypoetry key: |- ${{ runner.os }}-poetry-installation-${{ env.python_version }}-${{ env.poetry_version }}-0 - name: Install Poetry run: curl -sSL https://install.python-poetry.org | python - env: POETRY_VERSION: ${{ env.poetry_version }} - name: Cache Poetry cache uses: actions/cache@v4 with: path: ~/.cache/pypoetry key: |- ${{ runner.os }}-poetry-cache-${{ env.python_version }}-${{ env.poetry_version }}-${{ hashFiles('poetry.lock') }}-0 restore-keys: | ${{ runner.os }}-poetry-cache-${{ env.python_version }}-${{ env.poetry_version }}- ${{ runner.os }}-poetry-cache-${{ env.python_version }}- - name: Install deps with Poetry run: poetry install --no-interaction # ------------------------------------------------------------------------ # Pre-commit. - name: Install, cache, and run pre-commit uses: pre-commit/action@v3.0.0 - uses: pre-commit-ci/lite-action@v1.0.1 if: always() with: msg: "ci(pre-commit): Apply hook auto fixes" prometheus-fastapi-instrumentator-7.1.0/.github/workflows/release.yaml000066400000000000000000000044501476661573700263650ustar00rootroot00000000000000name: Release # # Releases new version of this project. # # Make sure to prepare project for a new release (see docs for more). After # publishing package a GitHub release is drafted. Will fail if the the version # is already available on PyPI. # # Project page on PyPI: https://pypi.org/project/prometheus-fastapi-instrumentator # on: push: tags: - v*.*.* permissions: contents: write env: poetry_version: "2.0.1" python_version: "3.12" jobs: release: name: Release runs-on: ubuntu-latest steps: - name: Checkout repository uses: actions/checkout@v4 # ------------------------------------------------------------------------ # Python & Poetry. - name: Setup Python uses: actions/setup-python@v5 with: python-version: ${{ env.python_version }} - name: Cache Poetry installation uses: actions/cache@v4 with: path: | ~/.local/bin/poetry ~/.local/share/pypoetry key: |- ${{ runner.os }}-poetry-installation-${{ env.python_version }}-${{ env.poetry_version }}-0 - name: Install Poetry run: curl -sSL https://install.python-poetry.org | python - env: POETRY_VERSION: ${{ env.poetry_version }} # ------------------------------------------------------------------------ # Publishing & draft release. - name: Check version equality between Poetry and __version__ run: | pattern="__version__ = \"$(poetry version --short)\"" if ! 
grep -q -R -F --include="__init__.py" "$pattern" src; then echo "::error::No version equality between Poetry and __version__."; exit 1 fi - name: Check version equality between Poetry and Git tag run: | poetry_version=$(poetry version --short) git_tag=${GITHUB_REF#refs/*/} if [[ "v$poetry_version" != "$git_tag" ]]; then echo "::error::No version equality between Poetry and Git tag."; exit 1 fi - name: Publish to PyPI with Poetry run: poetry publish --build -n -u __token__ -p "$TOKEN" env: TOKEN: ${{ secrets.PYPI_TOKEN }} - name: Draft GitHub release uses: softprops/action-gh-release@v2 with: draft: true prometheus-fastapi-instrumentator-7.1.0/.github/workflows/testpypi.yaml000066400000000000000000000047131476661573700266300ustar00rootroot00000000000000name: TestPyPI # # Publishes package to TestPyPI using Poetry. # # - Can be used safely on all types of commit references. # - Will fail if the the version is already available on TestPyPI. # - No tests, sanity checks, or anything like that before release. # # Project page on TestPyPI: https://test.pypi.org/project/prometheus-fastapi-instrumentator # on: workflow_dispatch: inputs: version: description: Unique version to release to TestPyPI. type: string default: "" env: poetry_version: "2.0.1" python_version: "3.12" jobs: testpypi: name: TestPyPI runs-on: ubuntu-latest steps: - name: Checkout repository uses: actions/checkout@v4 # ------------------------------------------------------------------------ # Python & Poetry. - name: Setup Python uses: actions/setup-python@v5 with: python-version: ${{ env.python_version }} - name: Cache Poetry installation uses: actions/cache@v4 with: path: | ~/.local/bin/poetry ~/.local/share/pypoetry key: |- ${{ runner.os }}-poetry-installation-${{ env.python_version }}-${{ env.poetry_version }}-0 - name: Install Poetry run: curl -sSL https://install.python-poetry.org | python - env: POETRY_VERSION: ${{ env.poetry_version }} # ------------------------------------------------------------------------ # Release to TestPyPI. - name: Adjust package version if: ${{ inputs.version != '' }} run: poetry version -- ${{ inputs.version }} - name: Adjust __version__ variable if: ${{ inputs.version != '' }} run: | pversion="$(poetry version --short)" sed -i "/^__version__/c\__version__ = \"$pversion\"" src/*/__init__.py - name: Check version equality between Poetry and __version__ run: | pattern="__version__ = \"$(poetry version --short)\"" if ! grep -q -R -F --include="__init__.py" "$pattern" src; then echo "::error::No version equality between Poetry and __version__."; exit 1 fi - name: Configure Poetry repo for TestPyPI run: poetry config repositories.testpypi https://test.pypi.org/legacy/ - name: Publish to TestPyPI with Poetry run: poetry publish --build -n -r testpypi -u __token__ -p "$TOKEN" env: TOKEN: ${{ secrets.TESTPYPI_TOKEN }} prometheus-fastapi-instrumentator-7.1.0/.gitignore000066400000000000000000000064621476661573700224610ustar00rootroot00000000000000# ------------------------------------------------------------------------------ # Entries specific to this repository. 
.vscode/ tmp/ # ------------------------------------------------------------------------------ # Sourced from: https://github.com/github/gitignore/blob/main/Python.gitignore # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ wheels/ share/python-wheels/ *.egg-info/ .installed.cfg *.egg MANIFEST # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .nox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover *.py,cover .hypothesis/ .pytest_cache/ cover/ # Translations *.mo *.pot # Django stuff: *.log local_settings.py db.sqlite3 db.sqlite3-journal # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation docs/_build/ # PyBuilder .pybuilder/ target/ # Jupyter Notebook .ipynb_checkpoints # IPython profile_default/ ipython_config.py # pyenv # For a library or package, you might want to ignore these files since the code is # intended to run in multiple environments; otherwise, check them in: # .python-version # pipenv # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. # However, in case of collaboration, if having platform-specific dependencies or dependencies # having no cross-platform support, pipenv may install dependencies that don't work, or not # install all needed dependencies. #Pipfile.lock # poetry # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. # This is especially recommended for binary packages to ensure reproducibility, and is more # commonly ignored for libraries. # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control #poetry.lock # pdm # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. #pdm.lock # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it # in version control. # https://pdm.fming.dev/#use-with-ide .pdm.toml # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm __pypackages__/ # Celery stuff celerybeat-schedule celerybeat.pid # SageMath parsed files *.sage.py # Environments .env .venv env/ venv/ ENV/ env.bak/ venv.bak/ # Spyder project settings .spyderproject .spyproject # Rope project settings .ropeproject # mkdocs documentation /site # mypy .mypy_cache/ .dmypy.json dmypy.json # Pyre type checker .pyre/ # pytype static type analyzer .pytype/ # Cython debug symbols cython_debug/ # PyCharm # JetBrains specific template is maintained in a separate JetBrains.gitignore that can # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore # and can be added to the global gitignore or merged into this file. For a more nuclear # option (not recommended) you can uncomment the following to ignore the entire idea folder. #.idea/ prometheus-fastapi-instrumentator-7.1.0/.markdownlint.yaml000066400000000000000000000007031476661573700241340ustar00rootroot00000000000000# Default state for all rules. default: true # MD024/no-duplicate-heading/no-duplicate-header - Multiple headings with the same content. MD024: # Only check sibling headings. siblings_only: true # MD013/line-length: Line length. 
MD013: false # MD041/first-line-heading/first-line-h1: First line in a file should be a top-level heading. MD041: false # MD040/fenced-code-language Fenced code blocks should have a language specified MD040: false prometheus-fastapi-instrumentator-7.1.0/.pre-commit-config.yaml000066400000000000000000000035111476661573700247420ustar00rootroot00000000000000repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.5.0 hooks: - id: check-ast - id: check-executables-have-shebangs - id: check-merge-conflict - id: check-shebang-scripts-are-executable - id: check-symlinks - id: end-of-file-fixer - id: fix-byte-order-marker - id: trailing-whitespace - repo: https://github.com/alessandrojcm/commitlint-pre-commit-hook rev: v9.13.0 hooks: - id: commitlint stages: [commit-msg] additional_dependencies: ["@commitlint/config-conventional"] - repo: https://github.com/pre-commit/mirrors-prettier rev: v4.0.0-alpha.8 hooks: - id: prettier types_or: [json, yaml, markdown] args: [--prose-wrap=always] - repo: https://github.com/DavidAnson/markdownlint-cli2 rev: v0.12.1 hooks: - id: markdownlint-cli2 - repo: local hooks: - id: black name: black language: system entry: poetry run black require_serial: true types: [python] - id: flake8 name: flake8 language: system entry: poetry run flake8 --config .flake8 --statistics require_serial: true types: [python] - id: isort name: isort language: system entry: poetry run isort require_serial: true types: [python] - id: mypy name: mypy language: system entry: poetry run dmypy run --timeout 3600 src require_serial: true pass_filenames: false files: ^src/.+$ exclude: ^tests/.+$ exclude: | (?x)^( poetry.lock )$ ci: autofix_commit_msg: "refactor(pre-commit): Apply hook auto fixes" autoupdate_commit_msg: "build(pre-commit): Apply hook rev updates" skip: - black - flake8 - isort - mypy prometheus-fastapi-instrumentator-7.1.0/CHANGELOG.md000066400000000000000000000667401476661573700223070ustar00rootroot00000000000000# Changelog All notable changes to this project are documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0), and adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0). ## Unreleased Nothing. ## [7.1.0](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v7.0.2...v7.1.0) / 2025-03-19 ### Added - Included metrics closure creators have new optional parameter `custom_labels` to set additional custom static labels. Parameter has not been added to the `Instrumentator()` constructor. Thanks to [@iocentos](https://github.com/iocentos) for requesting this in [#279](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/279) and [@martian711](https://github.com/martian711) in [#287](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/287) as well as [@Budlee](https://github.com/Budlee) in [#326](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/326) for implementing it. ## [7.0.2](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v7.0.1...v7.0.2) / 2025-01-14 ### Fixed - **Replaced incorrect license identifier in `pyproject.toml`.** Problem introduced with the migration to Poetry 2.0 in the last patch release. ## [7.0.1](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v7.0.0...v7.0.1) / 2025-01-14 ### Changed - **Migrated `pyproject.toml` to support PEP 621.** This comes with a migration to Poetry 2.0. 
Thanks to [@alexted](https://github.com/alexted) for bringing this up in
  [#323](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/323).

## [7.0.0](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v6.1.0...v7.0.0) / 2023-07-15

Major release with a single breaking change: Python 3.7 is not supported
anymore. Beyond that, three improvements based on various pull requests.

### Added

- **Instrumentator now works without FastAPI**. This is possible because every
  FastAPI app is also a Starlette app (but not the other way around). Or to be
  more specific: FastAPI uses Starlette for things like routing and middleware
  this package relies on. The change is backwards compatible, even type
  checkers like mypy should continue working. Thanks to
  [@mvanderlee](https://github.com/mvanderlee) for proposing this in
  [#280](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/280)
  and implementing it in
  [#288](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/288).

- **Middleware also records duration without streaming** in addition to the
  already existing total latency (i.e. the time consumed for streaming is not
  included in the duration value). The differentiation can be valuable as it
  shows the time to first byte. This mode is opt-in and can be enabled / used
  in several ways: The `Instrumentator()` constructor, the `metrics.default()`
  closure, and the `metrics.latency()` closure now come with the flag
  `should_exclude_streaming_duration`. The attribute
  `modified_duration_without_streaming` has been added to the `metrics.Info`
  class. Instances of `metrics.Info` are passed to instrumentation functions,
  where the added value can be used to set metrics. Thanks to
  [@dosuken123](https://github.com/dosuken123) for proposing this in
  [#291](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/291)
  and implementing it in
  [#290](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/290).

- **Relaxed type of `get_route_name` argument to `HTTPConnection`**. This
  allows developers to use the `get_route_name` function for getting the name
  of websocket routes as well. Thanks to [@pajowu](https://github.com/pajowu)
  for proposing and implementing this feature in
  [#276](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/276).

### Removed

- **BREAKING:** Dropped support for Python 3.7, which has reached end-of-life.

## [6.1.0](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v6.0.0...v6.1.0) / 2023-07-15

### Added

- Added label `method` to metric `http_request_duration_seconds` from default
  metrics. Thanks to [@alcidesmig](https://github.com/alcidesmig) for
  implementing this in
  [#251](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/251).

## [6.0.0](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v5.11.2...v6.0.0) / 2023-03-20

Small release with a small breaking change leading to an increase of the major
version according to semantic versioning. The breaking change only affects
users that have custom instrumentations that access `info.response.body`, a
feature introduced with [5.10.0](#5100--2023-02-26) a few weeks ago. See below
for more information.

Ask or discuss anything quick about the release in the discussion
[#239](https://github.com/trallnag/prometheus-fastapi-instrumentator/discussions/239).

### Added

- **BREAKING:** Disabled passing response body to instrumentation functions.
  Moved behind a whitelist that is empty by default.
  Changes a feature introduced with [5.10.0](#5100--2023-02-26) a few weeks
  ago. Only affects users that have custom instrumentations that access
  `info.response.body`. Opt-in via the new parameter `body_handlers` added to
  the instrumentator constructor. The parameter takes a list of pattern strings
  to match handlers. For the old behavior, pass the argument `[r".*"]` to match
  all handlers:

  ```python
  instrumentator = Instrumentator(body_handlers=[r".*"])
  ```

  Motivation for the change: Collecting the body negatively impacts performance
  of responses with a largish body. Thanks to
  [@bbeattie-phxlabs](https://github.com/bbeattie-phxlabs) for raising this
  issue in
  [#234](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/234)
  and implementing it in
  [#233](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/233)
  /
  [#238](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/238).

## [5.11.2](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v5.11.1...v5.11.2) / 2023-03-19

### Fixed

- Fixed `info.response.body` in instrumentation functions being wrongfully
  empty if the response is not streamed. Affects a feature that was introduced
  with release [5.10.0](#5100--2023-02-26) a few weeks ago. Closed issue
  [#236](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/236)
  and implemented in pull request
  [#237](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/237).

## [5.11.1](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v5.11.0...v5.11.1) / 2023-03-11

### Fixed

- Improved typing hints and enabled stricter rules for MyPy. Thanks to
  [@tomtom103](https://github.com/tomtom103) for implementing this in
  [#231](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/231).

## [5.11.0](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v5.10.0...v5.11.0) / 2023-03-08

Minor release containing several fixes and a small enhancement. Fixes are
related to multi process mode, a regression introduced with the previous
release, and errors that started to occur with current versions of Starlette
and FastAPI.

Ask or discuss anything quick about the release in the discussion
[#221](https://github.com/trallnag/prometheus-fastapi-instrumentator/discussions/221).

### Added

- Adjusted the `add()` method to accept an arbitrary number of instrumentation
  functions as arguments instead of a single one. Non-breaking change.
  Implemented in pull request
  [#230](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/230).

### Fixed

- Fixed multi process mode in the `expose()` method that handles the `/metrics`
  endpoint. Due to reusing the registry assigned to the instrumentator it could
  lead to duplicated metrics. Now the endpoint follows the recommendation from
  the Prometheus client library documentation. Also improved multi process unit
  tests. Closed issues
  [#228](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/228)
  and
  [#227](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/227).
  Fixed in pull request
  [#229](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/229).

- Fixed `NameError` and "Duplicated timeseries..." errors that started to occur
  with latest versions of Starlette / FastAPI in combination with multiple
  middlewares. Instrumentation closures are now optional and the instrumentator
  handles this accordingly. Thanks to [@alexted](https://github.com/alexted)
  and others for reporting errors. Thanks to
  [@frankie567](https://github.com/frankie567) for pointing out the change in
  Starlette.
Related to pull request [#153](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/153) and issue [#214](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/214). Closed issue [#219](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/219). Done in pull request [#220](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/220). - Added missing `registry` parameter to remaining metrics functions. This enables passing custom registry to other metrics functions than default. Related to pull request [#153](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/153). Closed issue [#219](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/219). Done in pull request [#220](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/220). ## [5.10.0](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v5.9.1...v5.10.0) / 2023-02-26 First release in several months. Includes new features and fixes from various contributors. Notable changes that might have an impact on existing setups is the automatic instrumentation of mounted apps and the deprecation of the lowercase `prometheus_multiproc_dir` environment variable. Ask or discuss anything quick about the release in the discussion [#221](https://github.com/trallnag/prometheus-fastapi-instrumentator/discussions/221). ### Added - Added smart **handling of mounted apps**. Previously the URL handler logic did not handle mounted apps and always returned just the prefix in that case. Based on code from [elastic/apm-agent-python](https://github.com/elastic/apm-agent-python) licensed under the permissive BSD-3-Clause License. Thanks to [@LordGaav](https://github.com/LordGaav) for proposing this enhancement / fix and implementing it in [#208](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/208). Related to issues [#31](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/31) and [#121](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/121). - Added optional parameters `metric_namespace` and `metric_subsystem` to `instrument()` method to **configure namespace and subsystem** for all metric names. Check the [`README.md`](README.md#specify-namespace-and-subsystem) for more information. Thanks to [@phbernardes](https://github.com/phbernardes) for proposing this enhancement and implementing it in [#193](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/193). - Added **passthrough of body** to `info.response`. This enables metrics that work based on data in the body. Thanks to everyone who brought this up in [#76](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/76) and to [@HadilD](https://github.com/HadilD) for implementing it in [#203](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/203). - Allowed **passing a custom registry** to be used instead of using the default one. This would be useful in particular when testing multiple FastAPI apps (e.g. microservices) in the same tests run. Note that there are issues with the current implementation in certain corner cases. Thanks to [@tiangolo](https://github.com/tiangolo) for proposing this enhancement and implementing it in [#153](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/153). - Environment variable used by `should_respect_env_var` (default `ENABLE_METRICS`) now **accepts truthy values** like `1` and `true` and not just `True`. 
  Thanks to [@chbndrhnns](https://github.com/chbndrhnns) for proposing this
  enhancement in
  [#27](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/27)
  and implementing it in
  [#28](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/28).

- Added support for **asynchronous instrumentation functions**. The `add()`
  method now accepts them in addition to "normal" functions and the
  instrumentator middleware will await them appropriately. Thanks to
  [@AndreasPB](https://github.com/AndreasPB) for proposing this enhancement and
  implementing it in
  [#61](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/61).
  Thanks to [@Skeen](https://github.com/Skeen) for contributing to the
  discussion.

### Changed

- Licensed part of the project under the BSD-3-Clause License. This is due to
  code being used from a repo licensed under BSD-3-Clause (see the "Added"
  section). The default ISC License and the BSD-3-Clause License are both
  permissive. So there should be no user impact.

### Fixed

- Fixed status code in metric being "Hxx" when the `http.HTTPStatus`
  enumeration is used in combination with grouping of status codes. Thanks to
  [@Leem0sh](https://github.com/Leem0sh) and others for raising the issue in
  [#190](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/190).
  Thanks to [@nikstuckenbrock](https://github.com/nikstuckenbrock) and
  [@blag](https://github.com/blag) for fixing it in
  [#192](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/192).

- Fixed documentation in main README on how to use
  prometheus-fastapi-instrumentator with current versions of FastAPI. Related
  to issues
  [#214](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/214)
  and
  [#80](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/80).
  Thanks to [@alfaro28](https://github.com/alfaro28) and
  [@harochau](https://github.com/harochau).

### Deprecated

- Deprecated environment variable `prometheus_multiproc_dir` and replaced it
  with `PROMETHEUS_MULTIPROC_DIR`. This matches the behavior of the Prometheus
  Python client library. This fixes
  [#89](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/89)
  and
  [#50](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/50).
  Thanks to all the people who brought this up. Thanks to
  [@michaelusner](https://github.com/michaelusner) for implementing the
  deprecation in
  [#42](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/42)
  /
  [#217](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/217).

## [5.9.1](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v5.9.0...v5.9.1) / 2022-08-23

### Fixed

- Corrected documentation on how to use the package. Instrumentation should
  happen in a function decorated with `@app.on_event("startup")` to prevent
  crashes on startup in certain situations. Done in
  [#168](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/168).
  Thanks to [@mdczaplicki](https://github.com/mdczaplicki) and others.

## [5.9.0](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v5.8.2...v5.9.0) / 2022-08-23

### Added

- Added `py.typed` file to package to improve typing annotations. Done in
  [#137](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/137).
  Thanks to [@mmaslowskicc](https://github.com/mmaslowskicc) for proposing and
  implementing this.

### Changed

- Changed license from MIT to ISC, which is just like MIT but shorter.

- Coding style improvements.
  Replaced a few for loops with list comprehensions. Defaulting an argument to
  `None` instead of an empty list. Done in
  [#155](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/155).
  Thanks to [@yezz123](https://github.com/yezz123).

- Several improvements to the documentation. Thanks to
  [@jabertuhin](https://github.com/jabertuhin),
  [@frodrigo](https://github.com/frodrigo), and
  [@murphp15](https://github.com/murphp15).

### Fixed

- Removed print statement polluting logs. Done in
  [#157](https://github.com/trallnag/prometheus-fastapi-instrumentator/pull/157).
  Thanks to [@nikstuckenbrock](https://github.com/nikstuckenbrock) and others.

## [5.8.2](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v5.8.1...v5.8.2) / 2022-06-12

### Changed

- Refactored the middleware to an ASGI implementation. Related to
  [#139](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/139).
  Thanks to [@Kludex](https://github.com/Kludex) and
  [@adriangb](https://github.com/adriangb) for the proposal and implementation.

## [5.8.1](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v5.8.0...v5.8.1) / 2022-05-03

### Fixed

- Fixed a regression that made the required FastAPI version too strict for no
  reason. Related to
  [#136](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/136).
  Thanks to [@graipher](https://github.com/graipher) for raising this issue.

## [5.8.0](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v5.7.1...v5.8.0) / 2022-05-01

### Removed

- **BREAKING:** Dropped support for Python 3.6, which has reached end-of-life.

## [5.7.1](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v5.7.0...v5.7.1) / 2021-01-12

### Fixed

- Updated `prometheus-client` dependency version constraint `^0.8.0` that only
  allows versions in the range `[0.8.0, 0.9.0)`. This is not correct and leads
  to conflicts when you want to install the newest prometheus client library
  version and this package. Switched to explicit constraints to ensure this
  does not happen again.

## [5.7.0](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v5.6.0...v5.7.0) / 2020-12-13

### Added

- Added passthrough of kwargs to the FastAPI route that exposes metrics.

## [5.6.0](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v5.5.1...v5.6.0) / 2020-12-03

### Added

- Added parameter `tags` to method `expose()`. Passthrough to FastAPI to
  support tagging. Related to
  [#17](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/17).
  Thanks to [@chisaipete](https://github.com/chisaipete) for proposing this
  enhancement.

## [5.5.1](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v5.5.0...v5.5.1) / 2020-11-13

### Fixed

- Fixed error "Duplicate mime type charset=utf-8 on Response Header". Done by
  changing the way the content type header is set. Seems like when Starlette's
  `media_type` parameter is used to provide the content type, the charset is
  appended again automatically even if it already is part of `Content-Type`.
  Thanks to [@flobaader](https://github.com/flobaader) for raising this issue
  in
  [#16](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/16).

## [5.5.0](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v5.4.1...v5.5.0) / 2020-11-01

### Added

- Added new metrics closure `requests`.
  Thanks to [@jpslopes](https://github.com/jpslopes) for proposing this
  enhancement in
  [#15](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/15).

### Changed

- Adjusted docstrings.

## [5.4.1](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v5.4.0...v5.4.1) / 2020-10-22

### Fixed

- Fixed dependency regression introduced in 5.4.0 by pinning FastAPI dependency
  to `fastapi = "0.38.1, <=1.0.0"` instead of `fastapi = ">=0.38.1, <=1.0.0"`.
  Thanks to
  [@PaulFlanaganGenscape](https://github.com/PaulFlanaganGenscape) for raising
  this issue in
  [#14](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/14).

## [5.4.0](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v5.3.1...v5.4.0) / 2020-10-20

### Added

- Added new metric that monitors the number of requests in progress. Can be
  configured to have the labels `handler` and `method`. It can be activated
  with `should_instrument_requests_inprogress` and configured with
  `inprogress_name` and `inprogress_labels`.

## [5.3.1](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v5.3.0...v5.3.1) / 2020-09-25

### Fixed

- Fixed the `expose` method in the instrumentator ignoring the `endpoint`
  argument and always creating the endpoint on the `/metrics` path. A variable
  was missing. Thanks to [@funkybase](https://github.com/funkybase) for raising
  this issue in
  [#9](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/9).

## [5.3.0](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v5.2.3...v5.3.0) / 2020-09-09

### Added

- Added parameter `should_gzip` to the `expose` method. It will check for
  `gzip` in the `Accept-Encoding` header and gzip the metrics data. You can
  expect a reduction of around 90 % in bytes.

## [5.2.3](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v5.2.2...v5.2.3) / 2020-09-03

### Changed

- Improved `README.md`.

## [5.2.2](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v5.2.1...v5.2.2) / 2020-09-03

### Changed

- Improved `README.md`.

## [5.2.1](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v5.2.0...v5.2.1) / 2020-08-27

### Fixed

- Fixed lack of error wrapping when a runtime error is raised inside the
  endpoint. In addition, this instrumentator assumed that `info.response` will
  always contain the `headers` attribute, which is not the case if a runtime
  error is thrown. Now the metrics check if the response is `None` and that the
  `headers` attribute actually exists. Tests have been added as well. Thanks to
  [@stepf](https://github.com/stepf) for raising this issue in
  [#7](https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/7).

### Changed

- Adjusted behavior: Metrics `response_size` and `combined_size` no longer skip
  if the content length is not found. Now the content length will default to
  zero bytes.

## [5.2.0](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v5.1.0...v5.2.0) / 2020-08-22

### Added

- Added parameter `should_only_respect_2xx_for_highr` to `default` metrics.
  Allows you to only put successful requests into the high resolution bucket.

## [5.1.0](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v5.0.0...v5.1.0) / 2020-08-19

### Added

- Added parameters to set namespace and subsystem to all available metrics.
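  As a hedged sketch of how this can be used with one of the included metrics
  closures (`metric_namespace` and `metric_subsystem` as documented in the
  README):

  ```python
  from prometheus_fastapi_instrumentator import Instrumentator, metrics

  Instrumentator().add(
      metrics.latency(metric_namespace="myapp", metric_subsystem="api")
  )
  ```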
## [5.0.0](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v4.0.0...v5.0.0) / 2020-08-18

### Added

- Added labels `handler`, `status`, `method` to `http_requests_total` in
  `default`.
- Added label `handler` to `http_request_size_bytes`.
- Added label `handler` to `http_response_size_bytes`.

### Changed

- **BREAKING:** Renamed instrumentation `full` to `default`.
- **BREAKING:** Renamed `http_in_bytes_total` to `http_request_size_bytes`.
- **BREAKING:** Renamed `http_out_bytes_total` to `http_response_size_bytes`.
- **BREAKING:** Renamed `http_highr_request_duration_seconds` to
  `http_request_duration_highr_seconds`.
- **BREAKING:** Renamed `http_lowr_request_duration_seconds` to
  `http_request_duration_seconds`.
- **BREAKING:** Turned `http_request_size_bytes` and
  `http_response_size_bytes` into summaries.

### Removed

- **BREAKING:** Removed labels `method` and `status` from
  `http_request_duration_seconds`.

## [4.0.0](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v3.0.0...v4.0.0) / 2020-08-16

### Added

- Added proper documentation.

### Changed

- **BREAKING:** Switched the default fast track metric to a more advanced one.
- **BREAKING:** Reworked available metrics. Made them more modular while
  improving code structure.

## [3.0.0](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v2.0.1...v3.0.0) / 2020-08-15

A lot of breaking changes in this release. Prometheus FastAPI Instrumentator is
now more modular than before and there are multiple different metrics one can
choose from out of the box or add custom metrics that will be automatically
applied to the FastAPI.

If you just use the default instrumentator without setting any parameters,
nothing changes. The defaults stay the same.

If you use any of the parameters that were available in the Instrumentator
constructor you have to check if they are still available or not. Some of them
have been moved to the corresponding `metric` closure / function.

### Changed

- **BREAKING:** Endpoint `/metrics` is not excluded by default anymore.
- **BREAKING:** Rework instrumentator layout.

## [2.0.1](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v2.0.0...v2.0.1) / 2020-08-14

### Changed

- Fixed wrong var name in `README.md`.

## [2.0.0](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v1.3.0...v2.0.0) / 2020-08-14

### Added

- Added option to exclude optional `/metrics` endpoint from schema.

### Changed

- **BREAKING:** Renamed `should_respect_env_var_existence` to
  `should_respect_env_var`.
- **BREAKING:** If `should_respect_env_var` is `True`, the respective env var
  must be `true` and not just any random value.
- **BREAKING:** Renamed default env var if `should_respect_env_var` from
  `PROMETHEUS` to `ENABLE_METRICS`.

## [1.3.0](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v1.2.0...v1.3.0) / 2020-08-12

### Added

- Added option `should_respect_env_var_existence`. This makes it possible to
  only instrument and expose your FastAPI if a given environment variable is
  set. Use case: A base FastAPI app that is used by multiple distinct apps. The
  apps only have to set the variable to be instrumented. Deactivated by default
  and the default env var is `PROMETHEUS`.

## [1.2.0](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v1.1.1...v1.2.0) / 2020-08-06

### Added

- The observed latency values can now be rounded to a certain number of
  decimals as an opt-in feature.
  This can improve bytes per sample required in storage solutions like
  VictoriaMetrics.

## [1.1.1](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v1.1.0...v1.1.1) / 2020-07-19

Nothing. Dummy release.

## [1.1.0](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v1.0.2...v1.1.0) / 2020-07-16

### Changed

- Renamed project from _Prometheus FastAPI Exporter_ to _Prometheus FastAPI
  Instrumentator_. Reasoning behind this change: The focus of this project is
  the instrumentation, not the exposition of metrics.

## [1.0.2](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v1.0.1...v1.0.2) / 2020-07-15

### Changed

- Updated `README.md`.

## [1.0.1](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/v1.0.0...v1.0.1) / 2020-07-15

### Changed

- Updated `README.md`.

## [1.0.0](https://github.com/trallnag/prometheus-fastapi-instrumentator/compare/1d4421f66e0e3600e3607f353cf183096bc09304...v1.0.0) / 2020-07-15

### Added

- Explicit method to expose metrics by adding an endpoint to a FastAPI app.

### Changed

- **BREAKING:** Switched to semantic versioning. All older versions have been
  yanked.
- **BREAKING:** Split instrumentation and exposition into two parts. Why? There
  exist many ways to expose metrics. Now this package enables the
  instrumentation of FastAPI without enforcing a certain method of exposition.
  It is still possible with the new method `expose()`.
- **BREAKING:** Moved pass of FastAPI object from constructor to `instrument()`
  method.

### Removed

- **BREAKING:** Exposition of metrics endpoint from `instrument()` call.

prometheus-fastapi-instrumentator-7.1.0/CONTRIBUTING.md

# Contributing

Thank you for your interest in improving this project. Your contributions are
appreciated. This document is a guide to help you through the process of
contributing.

## Become a contributor

You can contribute in several ways. Here are some examples:

- Contribute to the codebase.
- Report and triage bugs.
- Write documentation for users and devs.
- Help others by answering questions.

For more ways to contribute, check out the
[Open Source Guides](https://opensource.guide/how-to-contribute).

## Submit changes

If you have a trivial fix or improvement, go ahead and create a pull request.
If you plan something more involved, first raise an issue to discuss. Should
you wish to work on an issue, please claim it first by commenting on it.

When submitting changes, respect the following guidelines:

- Branch from the master branch and, if needed, rebase to the master branch
  before submitting your pull request.
- Update [`CHANGELOG.md`](CHANGELOG.md) if the contribution is relevant for the
  changelog according to the
  [Keep a Changelog](https://keepachangelog.com/en/1.1.0) specification.
- Ensure that your code has an appropriate set of tests which all pass.
- Ensure that your code adheres to the existing style in the code to which you
  are contributing.
- Title your pull request following
  [Conventional Commits](https://www.conventionalcommits.org/) styling.

## Where to go from here?

There are several other documents to check out:

- Consult [`DEVELOPMENT.md`](DEVELOPMENT.md) for guidance regarding
  development.
- Read [`RELEASE.md`](RELEASE.md) for details about the release process.

If some things are unclear, feel free to reach out via GitHub Discussions.
prometheus-fastapi-instrumentator-7.1.0/DEVELOPMENT.md

# Development

This document is targeted at project developers. It helps people make their
first steps. It also serves as a general entry to development documentation
like tooling configuration and usage.

## Requirements

Your environment should fulfill the following basic requirements:

- [Python](https://www.python.org). See [`pyproject.toml`](pyproject.toml) for
  the minimum required version.
- [Poetry](https://python-poetry.org). Python packaging and dependency
  management tool.
- [Pre-commit](https://pre-commit.com). For managing and maintaining pre-commit
  Git hooks. Optional.
- [Task](https://taskfile.dev). Task runner as a simple alternative to Make.
  Optional.
- Unix-like. Not required by itself, but assumed as the standard.

In addition to the following sections in this document, note that the
[`devel`](devel) directory contains more documentation including further
information about the tooling listed above.

## Initial Setup

### Python Environment

Ensure that [Python Poetry](https://python-poetry.org) is installed. After
cloning this repo (probably your own fork) execute:

```sh
poetry install
```

Read [`devel/poetry.md`](devel/poetry.md) for more info.

### Pre-commit Hooks

Ensure that [pre-commit](https://pre-commit.com) is installed globally. Set up
the pre-commit hooks:

```sh
pre-commit install --install-hooks
pre-commit install --install-hooks --hook-type commit-msg
```

Run all hooks to make sure things are alright:

```sh
pre-commit run -a
```

Read [`devel/pre-commit.md`](devel/pre-commit.md) for more info.

### Running Tests

Ensure that [Task](https://taskfile.dev) is installed. Run tests to make sure
everything is set up correctly:

```sh
task test
```

Read [`devel/task.md`](devel/task.md) for more info.

prometheus-fastapi-instrumentator-7.1.0/LICENSE

ISC License

Copyright (c) 2022 Tim Schwenke

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
prometheus-fastapi-instrumentator-7.1.0/README.md

# Prometheus FastAPI Instrumentator

[![pypi-version](https://badge.fury.io/py/prometheus-fastapi-instrumentator.svg)](https://pypi.python.org/pypi/prometheus-fastapi-instrumentator)
[![python-versions](https://img.shields.io/pypi/pyversions/prometheus-fastapi-instrumentator.svg)](https://pypi.python.org/pypi/prometheus-fastapi-instrumentator)
[![downloads](https://pepy.tech/badge/prometheus-fastapi-instrumentator/month)](https://pepy.tech/project/prometheus-fastapi-instrumentator/month)
[![build](https://img.shields.io/github/actions/workflow/status/trallnag/prometheus-fastapi-instrumentator/ci.yaml?branch=master)](https://github.com/trallnag/prometheus-fastapi-instrumentator/actions)
[![codecov](https://codecov.io/gh/trallnag/prometheus-fastapi-instrumentator/branch/master/graph/badge.svg)](https://codecov.io/gh/trallnag/prometheus-fastapi-instrumentator)

A configurable and modular Prometheus Instrumentator for your FastAPI. Install
`prometheus-fastapi-instrumentator` from
[PyPI](https://pypi.python.org/pypi/prometheus-fastapi-instrumentator/).

Here is the fast track to get started with a pre-configured instrumentator.
Import the instrumentator class:

```python
from prometheus_fastapi_instrumentator import Instrumentator
```

Instrument your app with default metrics and expose the metrics:

```python
Instrumentator().instrument(app).expose(app)
```

Depending on your code you might have to use the following instead:

```python
instrumentator = Instrumentator().instrument(app)

@app.on_event("startup")
async def _startup():
    instrumentator.expose(app)
```

With this, your FastAPI is instrumented and metrics are ready to be scraped.
The defaults give you:

- Counter `http_requests_total` with `handler`, `status` and `method`. Total
  number of requests.
- Summary `http_request_size_bytes` with `handler`. Added up total of the
  content lengths of all incoming requests.
- Summary `http_response_size_bytes` with `handler`. Added up total of the
  content lengths of all outgoing responses.
- Histogram `http_request_duration_seconds` with `handler` and `method`. Only a
  few buckets to keep cardinality low.
- Histogram `http_request_duration_highr_seconds` without any labels. Large
  number of buckets (>20).

In addition, the following behavior is active:

- Status codes are grouped into `2xx`, `3xx` and so on.
- Requests without a matching template are grouped into the handler `none`.

If one of these presets does not suit your needs you can do one of multiple
things:

- Pick one of the already existing closures from
  [`metrics`](./src/prometheus_fastapi_instrumentator/metrics.py) and pass it
  to the instrumentator instance. See [here](#adding-metrics) how to do that.
- Create your own instrumentation function that you can pass to an
  instrumentator instance. See [here](#creating-new-metrics) to learn more.
- Don't use this package at all and just use the source code as inspiration on
  how to instrument your FastAPI.
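For reference, here is a minimal self-contained app using the fast track setup.
This is a sketch: the `/ping` route is just an example, and Uvicorn or any
other ASGI server can be used to run it.

```python
from fastapi import FastAPI

from prometheus_fastapi_instrumentator import Instrumentator

app = FastAPI()


@app.get("/ping")
async def ping():
    return {"message": "pong"}


# Instrument the app with the default metrics and expose them on /metrics.
Instrumentator().instrument(app).expose(app)
```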
## Table of Contents

- [Disclaimer](#disclaimer)
- [Features](#features)
- [Advanced Usage](#advanced-usage)
  - [Creating the Instrumentator](#creating-the-instrumentator)
  - [Adding metrics](#adding-metrics)
  - [Creating new metrics](#creating-new-metrics)
  - [Perform instrumentation](#perform-instrumentation)
  - [Specify namespace and subsystem](#specify-namespace-and-subsystem)
  - [Exposing endpoint](#exposing-endpoint)
- [Contributing](#contributing)
- [Licensing](#licensing)

## Disclaimer

Not made for generic Prometheus instrumentation in Python. Use the Prometheus
client library for that. This package uses it as well. All the generic
middleware and instrumentation code comes with a cost in performance that can
become noticeable.

## Features

Beyond the fast track, this instrumentator is **highly configurable** and it is
very easy to customize and adapt to your specific use case. Here is a list of
some of the options you may opt in to:

- Regex patterns to ignore certain routes.
- Completely ignore untemplated routes.
- Control instrumentation and exposition with an env var.
- Rounding of latencies to a certain decimal number.
- Renaming of labels and the metric.
- Metrics endpoint can compress data with gzip.
- Opt-in metric to monitor the number of requests in progress.

It also features a **modular approach to metrics** that should instrument all
FastAPI endpoints. You can either choose from a set of already existing metrics
or create your own. And every metric function by itself can be configured as
well.

## Advanced Usage

This chapter contains an example of the advanced usage of the Prometheus
FastAPI Instrumentator to showcase most of its features.

### Creating the Instrumentator

We start by creating an instance of the Instrumentator. Notice the additional
`metrics` import. This will come in handy later.

```python
from prometheus_fastapi_instrumentator import Instrumentator, metrics

instrumentator = Instrumentator(
    should_group_status_codes=False,
    should_ignore_untemplated=True,
    should_respect_env_var=True,
    should_instrument_requests_inprogress=True,
    excluded_handlers=[".*admin.*", "/metrics"],
    env_var_name="ENABLE_METRICS",
    inprogress_name="inprogress",
    inprogress_labels=True,
    custom_labels={"service": "example-label"}
)
```

Unlike in the fast track example, now the instrumentation and exposition will
only take place if the environment variable `ENABLE_METRICS` is `true` at
run-time. This can be helpful in larger deployments with multiple services
depending on the same base FastAPI.

### Adding metrics

Let's say we also want to instrument the size of requests and responses. For
this we use the `add()` method. This method does nothing more than take a
function and add it to a list. Then, during run-time, every time FastAPI
handles a request, all functions in this list will be called while giving them
a single argument that stores useful information like the request and response
objects. If no `add()` at all is used, the default metric gets added in the
background. This is what happens in the fast track example.

All instrumentation functions are stored as closures in the `metrics` module.
Closures come in handy here because they allow us to configure the functions
within.

```python
instrumentator.add(metrics.latency(buckets=(1, 2, 3,)))
```

This simply adds the metric you also get in the fast track example with a
modified buckets argument. But we would also like to record the size of all
requests and responses.
```python
instrumentator.add(
    metrics.request_size(
        should_include_handler=True,
        should_include_method=False,
        should_include_status=True,
        metric_namespace="a",
        metric_subsystem="b",
        custom_labels={"service": "example-label"}
    )
).add(
    metrics.response_size(
        should_include_handler=True,
        should_include_method=False,
        should_include_status=True,
        metric_namespace="namespace",
        metric_subsystem="subsystem",
        custom_labels={"service": "example-label"}
    )
)
```

You can add as many metrics as you like to the instrumentator.

### Creating new metrics

As already mentioned, it is possible to create custom functions to pass on to
`add()`. This is also how the default metrics are implemented.

The basic idea is that the instrumentator creates an `info` object that
contains everything necessary for instrumentation based on the configuration of
the instrumentator. This includes the raw request and response objects but also
the modified handler, grouped status code and duration. Next, all registered
instrumentation functions are called. They get `info` as their single argument.

Let's say we want to count the number of times a certain language has been
requested.

```python
from typing import Callable

from prometheus_client import Counter

from prometheus_fastapi_instrumentator.metrics import Info


def http_requested_languages_total() -> Callable[[Info], None]:
    METRIC = Counter(
        "http_requested_languages_total",
        "Number of times a certain language has been requested.",
        labelnames=("langs",)
    )

    def instrumentation(info: Info) -> None:
        langs = set()
        lang_str = info.request.headers["Accept-Language"]
        for element in lang_str.split(","):
            element = element.split(";")[0].strip().lower()
            langs.add(element)
        for language in langs:
            METRIC.labels(language).inc()

    return instrumentation
```

The function `http_requested_languages_total` is used for persistent elements
that are stored between all instrumentation executions (for example the metric
instance itself). Next comes the closure. This function must adhere to the
shown interface. It will always get an `Info` object that contains the request,
response and some other modified information, for example the (grouped) status
code or the handler. Finally, the closure is returned.

**Important:** The response object inside `info` can either be the response
object or `None`. In addition, errors thrown in the handler are not caught by
the instrumentator. I recommend checking the documentation and/or the source
code before creating your own metrics.

To use it, we hand over the closure to the instrumentator object.

```python
instrumentator.add(http_requested_languages_total())
```

### Perform instrumentation

Up to this point, the FastAPI has not been touched at all. Everything has been
stored in the `instrumentator` only. To actually register the instrumentation
with FastAPI, the `instrument()` method has to be called.

```python
instrumentator.instrument(app)
```

Notice that this will do nothing if `should_respect_env_var` has been set
during construction of the instrumentator object and the respective env var is
not found.

### Specify namespace and subsystem

You can specify the namespace and subsystem of the metrics by passing them in
the instrument method.

```python
from prometheus_fastapi_instrumentator import Instrumentator

@app.on_event("startup")
async def startup():
    Instrumentator().instrument(
        app, metric_namespace="myproject", metric_subsystem="myservice"
    ).expose(app)
```

Then your metrics will contain the namespace and subsystem in the metric name.
```sh
# TYPE myproject_myservice_http_request_duration_highr_seconds histogram
myproject_myservice_http_request_duration_highr_seconds_bucket{le="0.01"} 0.0
```

### Exposing endpoint

To expose an endpoint for the metrics, either follow the [Prometheus Python Client](https://github.com/prometheus/client_python) documentation and add the endpoint manually to the FastAPI app, or serve it on a separate server. You can also use the included `expose` method. It will add an endpoint to the given FastAPI app. With `should_gzip` you can instruct the endpoint to compress the data as long as the client accepts gzip encoding. Prometheus, for example, does so by default. Beware that network bandwidth is often cheaper than CPU cycles.

```python
instrumentator.expose(app, include_in_schema=False, should_gzip=True)
```

Notice that this will do nothing if `should_respect_env_var` has been set during construction of the instrumentator object and the respective env var is not found.

## Contributing

Please refer to [`CONTRIBUTING.md`](CONTRIBUTING.md). Consult [`DEVELOPMENT.md`](DEVELOPMENT.md) for guidance regarding development. Read [`RELEASE.md`](RELEASE.md) for details about the release process.

## Licensing

The default license for this project is the [ISC License](https://choosealicense.com/licenses/isc). A permissive license functionally equivalent to the BSD 2-Clause and MIT licenses, removing some language that is no longer necessary. See [`LICENSE`](LICENSE) for the license text.

The [BSD 3-Clause License](https://choosealicense.com/licenses/bsd-3-clause) is used as the license for the [`routing`](src/prometheus_fastapi_instrumentator/routing.py) module. This is because it contains code from [elastic/apm-agent-python](https://github.com/elastic/apm-agent-python). BSD 3-Clause is a permissive license similar to the BSD 2-Clause License, but with a 3rd clause that prohibits others from using the name of the copyright holder or its contributors to promote derived products without written consent. The license text is included in the module itself.

prometheus-fastapi-instrumentator-7.1.0/RELEASE.md000066400000000000000000000045441476661573700220710ustar00rootroot00000000000000

# Release

This document describes the release process and is targeted at maintainers.

## Preparation

Pick a version for the new release. It must follow [Semantic Versioning](https://semver.org):

```shell
VERSION=1.0.1
```

Make sure that the "Unreleased" section in the [changelog](CHANGELOG.md) is up-to-date. Feel free to adjust entries, for example by adding additional examples or highlighting breaking changes.

Move the content of the "Unreleased" section that will be included in the new release to a new section with an appropriate title for the release. Should the "Unreleased" section now be empty, add "Nothing." to it.

Set the `__version__` variable in `__init__.py` to `$VERSION`:

```shell
sed -i "/^__version__/c\__version__ = \"$VERSION\"" src/*/__init__.py
grep -n -H __version__ src/*/__init__.py
```

Bump the version using Poetry:

```shell
poetry version $VERSION
```

Continue with the next section.

## Trigger

Commit the changes. Make sure to sign the commit:

```shell
git add CHANGELOG.md src/*/__init__.py pyproject.toml
git commit -S -m "chore: Prepare release v$VERSION"
git log --show-signature -1
```

Push changes:

```shell
git push origin master
```

Check [workflow runs](https://github.com/trallnag/prometheus-fastapi-instrumentator/actions?query=branch%3Amaster) in GitHub Actions and ensure everything is fine.
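The latest runs can also be checked from the terminal with the GitHub CLI, which is used later in this process anyway. A small sketch, assuming `gh` is installed and authenticated:

```shell
gh run list --branch master --limit 5
```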
Tag the latest commit with an annotated and signed tag:

```shell
git tag -s v$VERSION -m ""
git show v$VERSION
```

Make sure that the tree looks good:

```shell
git log --graph --oneline --all -n 5
```

Push the tag itself:

```shell
git push origin v$VERSION
```

This triggers the [release workflow](https://github.com/trallnag/prometheus-fastapi-instrumentator/actions/workflows/release.yaml), which will build a package, publish it to PyPI, and draft a GitHub release. Monitor the workflow run:

```shell
gh workflow view release --web
```

## Wrap Up

Ensure that the new package version has been published to PyPI [here](https://pypi.org/project/prometheus-fastapi-instrumentator).

Go to the release page of this project on GitHub [here](https://github.com/trallnag/prometheus-fastapi-instrumentator/releases) and review the automatically created release draft. Set the release title to "$VERSION / $DATE". For example "1.0.0 / 2023-01-01". Add release notes by extracting them from the [changelog](CHANGELOG.md).

Publish the release draft.

prometheus-fastapi-instrumentator-7.1.0/Taskfile.yaml000066400000000000000000000022671476661573700231160ustar00rootroot00000000000000

version: 3

tasks:
  default:
    - task: init
    - task: fmt
    - task: lint
    - task: test
    - task: test-mp

  init:
    desc: Init environment.
    cmds:
      - poetry install
      - pre-commit install --install-hooks
      - pre-commit install --install-hooks --hook-type commit-msg

  update:
    desc: Update environment.
    cmds:
      - poetry update
      - pre-commit autoupdate

  fmt:
    desc: Run formatters.
    cmds:
      - poetry run black .
      - poetry run isort .
      - pre-commit run -a prettier

  lint:
    desc: Run linters.
    cmds:
      - poetry run flake8 --config .flake8
      - poetry run dmypy run --timeout 3600 src

  test:
    desc: Run tests.
    cmds:
      - poetry run pytest {{ .COVERAGE }}
    vars:
      COVERAGE: |-
        --cov-report=term-missing --cov-report=xml --cov=src

  test-mp:
    desc: Run multi process tests.
    cmds:
      - rm -rf $PROMETHEUS_MULTIPROC_DIR
      - mkdir -p $PROMETHEUS_MULTIPROC_DIR
      - poetry run pytest -k test_multiproc {{ .COVERAGE }}
    vars:
      COVERAGE: |-
        --cov-append --cov-report=term-missing --cov-report=xml --cov=src
    env:
      PROMETHEUS_MULTIPROC_DIR: /tmp/pfi-tests/multiproc

prometheus-fastapi-instrumentator-7.1.0/codecov.yaml000066400000000000000000000000221476661573700227610ustar00rootroot00000000000000

ignore:
  - tests

prometheus-fastapi-instrumentator-7.1.0/devel/000077500000000000000000000000001476661573700215605ustar00rootroot00000000000000
prometheus-fastapi-instrumentator-7.1.0/devel/examples/000077500000000000000000000000001476661573700233765ustar00rootroot00000000000000
prometheus-fastapi-instrumentator-7.1.0/devel/examples/README.md000066400000000000000000000011341476661573700246540ustar00rootroot00000000000000

# Examples

- **[metrics-diff-port-uvicorn](./metrics-diff-port-uvicorn/):** Instrumented FastAPI app run with Uvicorn, but the `/metrics` endpoint is exposed on a separate port using Prometheus' `start_http_server` function.
- **[default-metrics-diff-labels](./default-metrics-diff-labels/):** Usage of PFI with a custom instrumentation function that mimics the default metrics but with custom label names.
- **[prom-multi-proc-gunicorn](./prom-multi-proc-gunicorn/):** How to use a FastAPI app run with Gunicorn in combination with the Prometheus client library. Focus on multiprocessing mode.
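All examples assume that the development environment has been set up in the repository root first. A minimal sketch of that setup, using the same commands documented in [`../poetry.md`](../poetry.md):

```shell
# Executed from the root of this repository.
poetry install
poetry shell
```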
prometheus-fastapi-instrumentator-7.1.0/devel/examples/default-metrics-diff-labels/000077500000000000000000000000001476661573700306345ustar00rootroot00000000000000
prometheus-fastapi-instrumentator-7.1.0/devel/examples/default-metrics-diff-labels/README.md000066400000000000000000000007541476661573700321170ustar00rootroot00000000000000

# Example `default-metrics-diff-labels`

Example that shows usage of PFI with a custom instrumentation function that mimics the default metrics but with custom label names.

To run the example, you must have run `poetry install` and `poetry shell` in the root of this repository. The following commands are executed relative to this directory.

Start the app with Uvicorn:

```shell
uvicorn main:app
```

Interact with the app:

```shell
curl localhost:8000/ping
curl localhost:8000/metrics
```

prometheus-fastapi-instrumentator-7.1.0/devel/examples/default-metrics-diff-labels/main.py000066400000000000000000000114131476661573700321320ustar00rootroot00000000000000

from typing import Callable, Optional, Sequence, Union

from fastapi import FastAPI
from prometheus_client import REGISTRY, CollectorRegistry, Counter, Histogram, Summary

from prometheus_fastapi_instrumentator import Instrumentator
from prometheus_fastapi_instrumentator.metrics import Info

PING_TOTAL = Counter("ping", "Number of ping calls.")


def my_metrics(
    latency_highr_buckets: Sequence[Union[float, str]] = (
        0.01,
        0.025,
        0.05,
        0.075,
        0.1,
        0.25,
        0.5,
        0.75,
        1,
        1.5,
        2,
        2.5,
        3,
        3.5,
        4,
        4.5,
        5,
        7.5,
        10,
        30,
        60,
    ),
    latency_lowr_buckets: Sequence[Union[float, str]] = (0.1, 0.5, 1),
    registry: CollectorRegistry = REGISTRY,
) -> Optional[Callable[[Info], None]]:
    def is_duplicated_time_series(error: ValueError) -> bool:
        return any(
            map(
                error.args[0].__contains__,
                [
                    "Duplicated timeseries in CollectorRegistry:",
                    "Duplicated time series in CollectorRegistry:",
                ],
            )
        )

    if latency_highr_buckets[-1] != float("inf"):
        latency_highr_buckets = [*latency_highr_buckets, float("inf")]

    if latency_lowr_buckets[-1] != float("inf"):
        latency_lowr_buckets = [*latency_lowr_buckets, float("inf")]

    # Starlette will call app.build_middleware_stack() with every new middleware
    # added, which will call all this again, which will make the registry
    # complain about duplicated metrics.
    #
    # The Python Prometheus client currently doesn't seem to have a way to
    # verify if adding a metric will cause errors or not, so the only way to
    # handle it seems to be with this try block.
    try:
        TOTAL = Counter(
            name="http_requests_total",
            documentation="Total number of requests by method, status and handler.",
            labelnames=(
                "my_method",
                "my_status",
                "my_handler",
            ),
            registry=registry,
        )

        IN_SIZE = Summary(
            name="http_request_size_bytes",
            documentation=(
                "Content length of incoming requests by handler. "
                "Only value of header is respected. Otherwise ignored. "
                "No percentile calculated. "
            ),
            labelnames=("my_handler",),
            registry=registry,
        )

        OUT_SIZE = Summary(
            name="http_response_size_bytes",
            documentation=(
                "Content length of outgoing responses by handler. "
                "Only value of header is respected. Otherwise ignored. "
                "No percentile calculated. "
            ),
            labelnames=("my_handler",),
            registry=registry,
        )

        LATENCY_HIGHR = Histogram(
            name="http_request_duration_highr_seconds",
            documentation=(
                "Latency with many buckets but no API specific labels. "
                "Made for more accurate percentile calculations. "
            ),
            buckets=latency_highr_buckets,
            registry=registry,
        )

        LATENCY_LOWR = Histogram(
            name="http_request_duration_seconds",
            documentation=(
                "Latency with only few buckets by handler. "
                "Made to be only used if aggregation by handler is important. "
            ),
            buckets=latency_lowr_buckets,
            labelnames=(
                "my_method",
                "my_handler",
            ),
            registry=registry,
        )

        def instrumentation(info: Info) -> None:
            TOTAL.labels(info.method, info.modified_status, info.modified_handler).inc()

            IN_SIZE.labels(info.modified_handler).observe(
                int(info.request.headers.get("Content-Length", 0))
            )

            if info.response and hasattr(info.response, "headers"):
                OUT_SIZE.labels(info.modified_handler).observe(
                    int(info.response.headers.get("Content-Length", 0))
                )
            else:
                OUT_SIZE.labels(info.modified_handler).observe(0)

            if info.modified_status.startswith("2"):
                LATENCY_HIGHR.observe(info.modified_duration)

            # Label values must be passed in the order the label names were
            # declared above: first the method, then the handler.
            LATENCY_LOWR.labels(info.method, info.modified_handler).observe(
                info.modified_duration
            )

        return instrumentation

    except ValueError as e:
        if not is_duplicated_time_series(e):
            raise e

    return None


app = FastAPI()
Instrumentator().instrument(app).add(my_metrics()).expose(app)


@app.get("/ping")
def get_ping():
    PING_TOTAL.inc()
    return "pong"

prometheus-fastapi-instrumentator-7.1.0/devel/examples/metrics-diff-port-uvicorn/000077500000000000000000000000001476661573700304175ustar00rootroot00000000000000
prometheus-fastapi-instrumentator-7.1.0/devel/examples/metrics-diff-port-uvicorn/README.md000066400000000000000000000013521476661573700316770ustar00rootroot00000000000000

# Example `metrics-diff-port-uvicorn`

Minimal example that shows usage of the Prometheus client library and the Prometheus FastAPI Instrumentator with FastAPI and Uvicorn, where the `/metrics` endpoint is exposed on another port and not on the FastAPI app itself. Note that this does not work with multiproc mode.

To run the example, you must have run `poetry install` and `poetry shell` in the root of this repository. The following commands are executed relative to this directory.

Start app with Uvicorn:

```shell
uvicorn main:app
```

This will start two servers:

- FastAPI app listening on port `8000`.
- Prometheus `/metrics` endpoint on port `9000`.

Interact with the app:

```shell
curl localhost:8000/ping
curl localhost:9000/metrics
```

prometheus-fastapi-instrumentator-7.1.0/devel/examples/metrics-diff-port-uvicorn/main.py000066400000000000000000000005561476661573700317230ustar00rootroot00000000000000

from fastapi import FastAPI
from prometheus_client import Counter, start_http_server

from prometheus_fastapi_instrumentator import Instrumentator

start_http_server(9000)

PING_TOTAL = Counter("ping", "Number of ping calls.")

app = FastAPI()
Instrumentator().instrument(app).expose(app)


@app.get("/ping")
def get_ping():
    PING_TOTAL.inc()
    return "pong"

prometheus-fastapi-instrumentator-7.1.0/devel/examples/prom-multi-proc-gunicorn/000077500000000000000000000000001476661573700302665ustar00rootroot00000000000000
prometheus-fastapi-instrumentator-7.1.0/devel/examples/prom-multi-proc-gunicorn/README.md000066400000000000000000000034331476661573700315500ustar00rootroot00000000000000

# Example `prom-multi-proc-gunicorn`

Minimal example that shows integration of FastAPI (Gunicorn) with the Prometheus client library in multi process mode, without the Prometheus FastAPI Instrumentator. Highlights missing metrics that are not supported in multi process mode.

To run the example, you must have run `poetry install` and `poetry shell` in the root of this repository. The following commands are executed relative to this directory.
Set the environment variable to an unused location:

```shell
export PROMETHEUS_MULTIPROC_DIR=/tmp/python-testing-pfi/560223ba-887f-429a-9c48-933df56a68ba
```

Start the app with Gunicorn using two Uvicorn workers:

```shell
rm -rf "$PROMETHEUS_MULTIPROC_DIR"
mkdir -p "$PROMETHEUS_MULTIPROC_DIR"

gunicorn main:app \
  --config gunicorn.conf.py \
  --workers 2 \
  --worker-class uvicorn.workers.UvicornWorker \
  --bind 0.0.0.0:8080
```

Interact with the app:

```shell
for i in {1..5}; do curl localhost:8080/ping; done
curl localhost:8080/metrics
```

You should see something like this:

```txt
# TYPE ping_total counter
ping_total 5.0
# HELP metrics_total Number of metrics calls.
# TYPE metrics_total counter
metrics_total 1.0
# HELP main_total Counts of main executions.
# TYPE main_total counter
main_total 2.0
```

Check the returned metrics:

- `main_total` is `2`, because Gunicorn is using two workers.
- There are no `_created` metrics. These are not supported by the Prometheus client library in multi process mode.
- No metrics for things like CPU and memory. They come from components like the `ProcessCollector` and `PlatformCollector`, which are not supported by the Prometheus client library in multi process mode.

Links:

- 
- 

prometheus-fastapi-instrumentator-7.1.0/devel/examples/prom-multi-proc-gunicorn/gunicorn.conf.py000066400000000000000000000001741476661573700334120ustar00rootroot00000000000000

from prometheus_client import multiprocess


def child_exit(server, worker):
    multiprocess.mark_process_dead(worker.pid)

prometheus-fastapi-instrumentator-7.1.0/devel/examples/prom-multi-proc-gunicorn/main.py000066400000000000000000000023071476661573700315660ustar00rootroot00000000000000

import os

from fastapi import FastAPI
from prometheus_client import (
    CONTENT_TYPE_LATEST,
    CollectorRegistry,
    Counter,
    generate_latest,
    multiprocess,
)
from starlette.responses import Response

if "PROMETHEUS_MULTIPROC_DIR" not in os.environ:
    raise ValueError("PROMETHEUS_MULTIPROC_DIR must be set to existing empty dir.")

PING_TOTAL = Counter("ping", "Number of ping calls.")
METRICS_TOTAL = Counter("metrics", "Number of metrics calls.")
MAIN_TOTAL = Counter("main", "Counts of main executions.")
MAIN_TOTAL.inc()

app = FastAPI()


@app.get("/ping")
def get_ping():
    PING_TOTAL.inc()
    return "pong"


@app.get("/metrics")
def get_metrics():
    METRICS_TOTAL.inc()

    # Note the ephemeral registry being used here. This follows the Prometheus
    # client library documentation. It comes with multiple caveats. Using a
    # persistent registry might work at first glance, but it will lead to
    # issues. For a long time PFI used a persistent registry, which was wrong.
    registry = CollectorRegistry()
    multiprocess.MultiProcessCollector(registry)

    resp = Response(content=generate_latest(registry))
    resp.headers["Content-Type"] = CONTENT_TYPE_LATEST
    return resp

prometheus-fastapi-instrumentator-7.1.0/devel/poetry.md000066400000000000000000000024631476661573700234310ustar00rootroot00000000000000

# Poetry

Used for Python dependency management and packaging. Must be installed manually.

- [python-poetry.org](https://python-poetry.org)
- [github.com/python-poetry/poetry](https://github.com/python-poetry/poetry)

Whenever this repository is cloned, the environment needs to be installed. The same goes for pulling changes that include dependency updates. The following command installs dependencies, including development dependencies:

```
poetry install
```

You can jump into the created environment:

```
poetry shell
```

Another alternative is to prepend commands with `poetry run`.
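For example, the test suite can be run without entering the environment first (Pytest is part of this repository's development dependencies):

```
poetry run pytest
```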
Poetry is configured via [`../pyproject.toml`](../pyproject.toml). In general, Poetry-related configuration should be done via the Poetry CLI. The lockfile [`../poetry.lock`](../poetry.lock) should never be adjusted by hand.

## Housekeeping

### Update dependencies

To automatically update dependencies and bump versions in `pyproject.toml`, plugins can be used. For example [poetry-plugin-up](https://github.com/MousaZeidBaker/poetry-plugin-up). To install the plugin, execute:

```
poetry self add poetry-plugin-up
```

Now it can be used like this:

```
poetry up
```

### Update Poetry itself

```
poetry self update
```

## Cheat Sheet

### Setup shell

```
poetry shell
```

### Run arbitrary commands

```
poetry run <command>
```

prometheus-fastapi-instrumentator-7.1.0/devel/pre-commit.md000066400000000000000000000016031476661573700241560ustar00rootroot00000000000000

# Pre-Commit

Used for maintaining Git hooks. Must be installed globally on the respective system. As it is written in Python, it can be installed with, for example, [`pipx`](https://github.com/pypa/pipx).

- [pre-commit.com](https://pre-commit.com)
- [github.com/pre-commit/pre-commit](https://github.com/pre-commit/pre-commit)

Whenever this repository is initially cloned, the following should be executed:

```
pre-commit install --install-hooks
pre-commit install --install-hooks --hook-type commit-msg
```

Pre-commit should now run on every commit. It is also used in GitHub Actions.

Pre-commit is configured via [`.pre-commit-config.yaml`](../.pre-commit-config.yaml).

## Housekeeping

### Update hooks

```
pre-commit autoupdate
```

## Cheat Sheet

### Run pre-commit against all files

```
pre-commit run -a
```

### Run specific hook against all files

```
pre-commit run -a <hook-id>
```

prometheus-fastapi-instrumentator-7.1.0/devel/task.md000066400000000000000000000013761476661573700230510ustar00rootroot00000000000000

# Task

Task is a task runner / build tool that aims to be simpler and easier to use than, for example, GNU Make.

- [taskfile.dev](https://taskfile.dev)
- [github.com/go-task/task](https://github.com/go-task/task)

Having Task installed is not a hard requirement for developing. It is mainly used to collect common scripts / commands. It can be installed with Homebrew (other options are available as well).

```
brew install go-task
```

Task is configured via [`Taskfile.yaml`](../Taskfile.yaml). When adding new tasks to the task file, try to keep individual tasks simple and small. More complicated things should be put into individual scripts and then just called from Task.

## Cheat Sheet

### List tasks

```
task --list
```

### Run task

```
task <task>
```

prometheus-fastapi-instrumentator-7.1.0/poetry.lock000066400000000000000000002455511476661573700226630ustar00rootroot00000000000000

# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand.
[[package]] name = "annotated-types" version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, ] [package.dependencies] typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} [[package]] name = "anyio" version = "4.5.2" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.8" groups = ["main", "dev"] files = [ {file = "anyio-4.5.2-py3-none-any.whl", hash = "sha256:c011ee36bc1e8ba40e5a81cb9df91925c218fe9b778554e0b56a21e1b5d4716f"}, {file = "anyio-4.5.2.tar.gz", hash = "sha256:23009af4ed04ce05991845451e11ef02fc7c5ed29179ac9a420e5ad0ac7ddc5b"}, ] [package.dependencies] exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] trio = ["trio (>=0.26.1)"] [[package]] name = "asgiref" version = "3.8.1" description = "ASGI specs, helper code, and adapters" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"}, {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"}, ] [package.dependencies] typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} [package.extras] tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] [[package]] name = "asttokens" version = "2.4.1" description = "Annotate AST trees with source code positions" optional = false python-versions = "*" groups = ["dev"] files = [ {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, ] [package.dependencies] six = ">=1.12.0" [package.extras] astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] [[package]] name = "black" version = "24.8.0" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"}, {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"}, {file = "black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"}, {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"}, {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"}, {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"}, {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"}, {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"}, {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"}, {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"}, {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"}, {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"}, {file = "black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd"}, {file = "black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2"}, {file = "black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e"}, {file = "black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920"}, {file = "black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c"}, {file = "black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e"}, {file = "black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47"}, {file = "black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb"}, {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"}, {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"}, ] [package.dependencies] click = ">=8.0.0" mypy-extensions = ">=0.4.3" packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing-extensions = {version = 
">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" version = "2024.12.14" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" groups = ["dev"] files = [ {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, ] [[package]] name = "charset-normalizer" version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7" groups = ["dev"] files = [ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = 
"sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, ] [[package]] name = "click" version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" groups = ["dev"] files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, ] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" groups = ["dev"] markers = "platform_system == \"Windows\" or sys_platform == \"win32\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] [[package]] name = "coverage" version = "7.6.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = 
"sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", 
hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, ] [package.dependencies] tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] toml = ["tomli"] [[package]] name = "devtools" version = "0.10.0" description = "Python's missing debug print command, and more." optional = false python-versions = ">=3.7" groups = ["dev"] files = [ {file = "devtools-0.10.0-py3-none-any.whl", hash = "sha256:b0bc02043bb032cdfb93e227226e2fea1aaea8f5a31fca25fabc4eadca22f228"}, {file = "devtools-0.10.0.tar.gz", hash = "sha256:6eb7c4fa7c4b90e5cfe623537a9961d1dc3199d8be0981802c6931cd8f02418f"}, ] [package.dependencies] asttokens = ">=2.0.0,<3.0.0" executing = ">=1.1.1" [package.extras] pygments = ["pygments (>=2.2.0)"] [[package]] name = "exceptiongroup" version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" groups = ["main", "dev"] markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] test = ["pytest (>=6)"] [[package]] name = "executing" version = "2.1.0" description = "Get the currently executing AST node of a frame, and other information" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf"}, {file = "executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab"}, ] [package.extras] tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] [[package]] name = "fastapi" version = "0.110.3" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "fastapi-0.110.3-py3-none-any.whl", hash = "sha256:fd7600612f755e4050beb74001310b5a7e1796d149c2ee363124abdfa0289d32"}, {file = "fastapi-0.110.3.tar.gz", hash = "sha256:555700b0159379e94fdbfc6bb66a0f1c43f4cf7060f25239af3d84b63a656626"}, ] 
[package.dependencies] pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" starlette = ">=0.37.2,<0.38.0" typing-extensions = ">=4.8.0" [package.extras] all = ["email_validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] [[package]] name = "flake8" version = "7.1.1" description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.8.1" groups = ["dev"] markers = "python_full_version >= \"3.8.1\"" files = [ {file = "flake8-7.1.1-py2.py3-none-any.whl", hash = "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213"}, {file = "flake8-7.1.1.tar.gz", hash = "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38"}, ] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" pycodestyle = ">=2.12.0,<2.13.0" pyflakes = ">=3.2.0,<3.3.0" [[package]] name = "gunicorn" version = "21.2.0" description = "WSGI HTTP Server for UNIX" optional = false python-versions = ">=3.5" groups = ["dev"] files = [ {file = "gunicorn-21.2.0-py3-none-any.whl", hash = "sha256:3213aa5e8c24949e792bcacfc176fef362e7aac80b76c56f6b5122bf350722f0"}, {file = "gunicorn-21.2.0.tar.gz", hash = "sha256:88ec8bff1d634f98e61b9f65bc4bf3cd918a90806c6f5c48bc5603849ec81033"}, ] [package.dependencies] packaging = "*" [package.extras] eventlet = ["eventlet (>=0.24.1)"] gevent = ["gevent (>=1.4.0)"] setproctitle = ["setproctitle"] tornado = ["tornado (>=0.2)"] [[package]] name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.7" groups = ["dev"] files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, ] [[package]] name = "httpcore" version = "0.16.3" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.7" groups = ["dev"] files = [ {file = "httpcore-0.16.3-py3-none-any.whl", hash = "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0"}, {file = "httpcore-0.16.3.tar.gz", hash = "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb"}, ] [package.dependencies] anyio = ">=3.0,<5.0" certifi = "*" h11 = ">=0.13,<0.15" sniffio = "==1.*" [package.extras] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] [[package]] name = "httpx" version = "0.23.3" description = "The next generation HTTP client." 
optional = false python-versions = ">=3.7" groups = ["dev"] files = [ {file = "httpx-0.23.3-py3-none-any.whl", hash = "sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6"}, {file = "httpx-0.23.3.tar.gz", hash = "sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9"}, ] [package.dependencies] certifi = "*" httpcore = ">=0.15.0,<0.17.0" rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} sniffio = "*" [package.extras] brotli = ["brotli", "brotlicffi"] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<13)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] [[package]] name = "idna" version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" groups = ["main", "dev"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] [[package]] name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" groups = ["dev"] files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] [[package]] name = "isort" version = "5.13.2" description = "A Python utility / library to sort Python imports." optional = false python-versions = ">=3.8.0" groups = ["dev"] files = [ {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, ] [package.extras] colors = ["colorama (>=0.4.6)"] [[package]] name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" optional = false python-versions = ">=3.6" groups = ["dev"] markers = "python_full_version >= \"3.8.1\"" files = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] [[package]] name = "mypy" version = "1.14.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "mypy-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52686e37cf13d559f668aa398dd7ddf1f92c5d613e4f8cb262be2fb4fedb0fcb"}, {file = "mypy-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1fb545ca340537d4b45d3eecdb3def05e913299ca72c290326be19b3804b39c0"}, {file = "mypy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:90716d8b2d1f4cd503309788e51366f07c56635a3309b0f6a32547eaaa36a64d"}, {file = "mypy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ae753f5c9fef278bcf12e1a564351764f2a6da579d4a81347e1d5a15819997b"}, {file = "mypy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e0fe0f5feaafcb04505bcf439e991c6d8f1bf8b15f12b05feeed96e9e7bf1427"}, {file = "mypy-1.14.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:7d54bd85b925e501c555a3227f3ec0cfc54ee8b6930bd6141ec872d1c572f81f"}, {file = "mypy-1.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f995e511de847791c3b11ed90084a7a0aafdc074ab88c5a9711622fe4751138c"}, {file = "mypy-1.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d64169ec3b8461311f8ce2fd2eb5d33e2d0f2c7b49116259c51d0d96edee48d1"}, {file = "mypy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba24549de7b89b6381b91fbc068d798192b1b5201987070319889e93038967a8"}, {file = "mypy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:183cf0a45457d28ff9d758730cd0210419ac27d4d3f285beda038c9083363b1f"}, {file = "mypy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f2a0ecc86378f45347f586e4163d1769dd81c5a223d577fe351f26b179e148b1"}, {file = "mypy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:ad3301ebebec9e8ee7135d8e3109ca76c23752bac1e717bc84cd3836b4bf3eae"}, {file = "mypy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:30ff5ef8519bbc2e18b3b54521ec319513a26f1bba19a7582e7b1f58a6e69f14"}, {file = "mypy-1.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb9f255c18052343c70234907e2e532bc7e55a62565d64536dbc7706a20b78b9"}, {file = "mypy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b4e3413e0bddea671012b063e27591b953d653209e7a4fa5e48759cda77ca11"}, {file = "mypy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:553c293b1fbdebb6c3c4030589dab9fafb6dfa768995a453d8a5d3b23784af2e"}, {file = "mypy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fad79bfe3b65fe6a1efaed97b445c3d37f7be9fdc348bdb2d7cac75579607c89"}, {file = "mypy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:8fa2220e54d2946e94ab6dbb3ba0a992795bd68b16dc852db33028df2b00191b"}, {file = "mypy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:92c3ed5afb06c3a8e188cb5da4984cab9ec9a77ba956ee419c68a388b4595255"}, {file = "mypy-1.14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dbec574648b3e25f43d23577309b16534431db4ddc09fda50841f1e34e64ed34"}, {file = "mypy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8c6d94b16d62eb3e947281aa7347d78236688e21081f11de976376cf010eb31a"}, {file = "mypy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d4b19b03fdf54f3c5b2fa474c56b4c13c9dbfb9a2db4370ede7ec11a2c5927d9"}, {file = "mypy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0c911fde686394753fff899c409fd4e16e9b294c24bfd5e1ea4675deae1ac6fd"}, {file = "mypy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:8b21525cb51671219f5307be85f7e646a153e5acc656e5cebf64bfa076c50107"}, {file = "mypy-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7084fb8f1128c76cd9cf68fe5971b37072598e7c31b2f9f95586b65c741a9d31"}, {file = "mypy-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f845a00b4f420f693f870eaee5f3e2692fa84cc8514496114649cfa8fd5e2c6"}, {file = "mypy-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44bf464499f0e3a2d14d58b54674dee25c031703b2ffc35064bd0df2e0fac319"}, {file = "mypy-1.14.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c99f27732c0b7dc847adb21c9d47ce57eb48fa33a17bc6d7d5c5e9f9e7ae5bac"}, {file = 
"mypy-1.14.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:bce23c7377b43602baa0bd22ea3265c49b9ff0b76eb315d6c34721af4cdf1d9b"}, {file = "mypy-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:8edc07eeade7ebc771ff9cf6b211b9a7d93687ff892150cb5692e4f4272b0837"}, {file = "mypy-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3888a1816d69f7ab92092f785a462944b3ca16d7c470d564165fe703b0970c35"}, {file = "mypy-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46c756a444117c43ee984bd055db99e498bc613a70bbbc120272bd13ca579fbc"}, {file = "mypy-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:27fc248022907e72abfd8e22ab1f10e903915ff69961174784a3900a8cba9ad9"}, {file = "mypy-1.14.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:499d6a72fb7e5de92218db961f1a66d5f11783f9ae549d214617edab5d4dbdbb"}, {file = "mypy-1.14.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57961db9795eb566dc1d1b4e9139ebc4c6b0cb6e7254ecde69d1552bf7613f60"}, {file = "mypy-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:07ba89fdcc9451f2ebb02853deb6aaaa3d2239a236669a63ab3801bbf923ef5c"}, {file = "mypy-1.14.1-py3-none-any.whl", hash = "sha256:b66a60cc4073aeb8ae00057f9c1f64d49e90f918fbcef9a977eb121da8b8f1d1"}, {file = "mypy-1.14.1.tar.gz", hash = "sha256:7ec88144fe9b510e8475ec2f5f251992690fcf89ccb4500b214b4226abcd32d6"}, ] [package.dependencies] mypy_extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing_extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] faster-cache = ["orjson"] install-types = ["pip"] mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." optional = false python-versions = ">=3.5" groups = ["dev"] files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] [[package]] name = "packaging" version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] name = "pathspec" version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] [[package]] name = "platformdirs" version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] [[package]] name = "prometheus-client" version = "0.21.1" description = "Python client for the Prometheus monitoring system." optional = false python-versions = ">=3.8" groups = ["main"] files = [ {file = "prometheus_client-0.21.1-py3-none-any.whl", hash = "sha256:594b45c410d6f4f8888940fe80b5cc2521b305a1fafe1c58609ef715a001f301"}, {file = "prometheus_client-0.21.1.tar.gz", hash = "sha256:252505a722ac04b0456be05c05f75f45d760c2911ffc45f2a06bcaed9f3ae3fb"}, ] [package.extras] twisted = ["twisted"] [[package]] name = "pycodestyle" version = "2.12.1" description = "Python style guide checker" optional = false python-versions = ">=3.8" groups = ["dev"] markers = "python_full_version >= \"3.8.1\"" files = [ {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, {file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, ] [[package]] name = "pydantic" version = "2.10.5" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "pydantic-2.10.5-py3-none-any.whl", hash = "sha256:4dd4e322dbe55472cb7ca7e73f4b63574eecccf2835ffa2af9021ce113c83c53"}, {file = "pydantic-2.10.5.tar.gz", hash = "sha256:278b38dbbaec562011d659ee05f63346951b3a248a6f3642e1bc68894ea2b4ff"}, ] [package.dependencies] annotated-types = ">=0.6.0" pydantic-core = "2.27.2" typing-extensions = ">=4.12.2" [package.extras] email = ["email-validator (>=2.0.0)"] timezone = ["tzdata"] [[package]] name = "pydantic-core" version = "2.27.2" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, {file = 
"pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = 
"sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, {file = 
"pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, {file = 
"pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, ] [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pyflakes" version = "3.2.0" description = "passive checker of Python programs" optional = false python-versions = ">=3.8" groups = ["dev"] markers = "python_full_version >= \"3.8.1\"" files = [ {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, ] [[package]] name = "pytest" version = "8.3.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, ] [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" pluggy = ">=1.5,<2" tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-asyncio" version = "0.24.0" description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b"}, {file = "pytest_asyncio-0.24.0.tar.gz", hash = "sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276"}, ] [package.dependencies] pytest = ">=8.2,<9" [package.extras] docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] [[package]] name = "pytest-cov" version = "4.1.0" description = "Pytest plugin for measuring coverage." 
optional = false python-versions = ">=3.7" groups = ["dev"] files = [ {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, ] [package.dependencies] coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] [[package]] name = "pytoolconfig" version = "1.3.1" description = "Python tool configuration" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "pytoolconfig-1.3.1-py3-none-any.whl", hash = "sha256:5d8cea8ae1996938ec3eaf44567bbc5ef1bc900742190c439a44a704d6e1b62b"}, {file = "pytoolconfig-1.3.1.tar.gz", hash = "sha256:51e6bd1a6f108238ae6aab6a65e5eed5e75d456be1c2bf29b04e5c1e7d7adbae"}, ] [package.dependencies] packaging = ">=23.2" platformdirs = {version = ">=3.11.0", optional = true, markers = "extra == \"global\""} tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} [package.extras] doc = ["sphinx (>=7.1.2)", "tabulate (>=0.9.0)"] gendocs = ["pytoolconfig[doc]", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.25.2)", "sphinx-rtd-theme (>=2.0.0)"] global = ["platformdirs (>=3.11.0)"] validation = ["pydantic (>=2.5.3)"] [[package]] name = "requests" version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] certifi = ">=2017.4.17" charset-normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rfc3986" version = "1.5.0" description = "Validating URI References per RFC 3986" optional = false python-versions = "*" groups = ["dev"] files = [ {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, ] [package.dependencies] idna = {version = "*", optional = true, markers = "extra == \"idna2008\""} [package.extras] idna2008 = ["idna"] [[package]] name = "rope" version = "1.13.0" description = "a python refactoring library..." 
optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "rope-1.13.0-py3-none-any.whl", hash = "sha256:b435a0c0971244fdcd8741676a9fae697ae614c20cc36003678a7782f25c0d6c"}, {file = "rope-1.13.0.tar.gz", hash = "sha256:51437d2decc8806cd5e9dd1fd9c1306a6d9075ecaf78d191af85fc1dfface880"}, ] [package.dependencies] pytoolconfig = {version = ">=1.2.2", extras = ["global"]} [package.extras] dev = ["build (>=0.7.0)", "pre-commit (>=2.20.0)", "pytest (>=7.0.1)", "pytest-cov (>=4.1.0)", "pytest-timeout (>=2.1.0)"] doc = ["pytoolconfig[doc]", "sphinx (>=4.5.0)", "sphinx-autodoc-typehints (>=1.18.1)", "sphinx-rtd-theme (>=1.0.0)"] release = ["pip-tools (>=6.12.1)", "toml (>=0.10.2)", "twine (>=4.0.2)"] [[package]] name = "six" version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" groups = ["dev"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] [[package]] name = "sniffio" version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" groups = ["main", "dev"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] [[package]] name = "starlette" version = "0.37.2" description = "The little ASGI library that shines." optional = false python-versions = ">=3.8" groups = ["main", "dev"] files = [ {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"}, {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"}, ] [package.dependencies] anyio = ">=3.4.0,<5" typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} [package.extras] full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] [[package]] name = "tomli" version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" groups = ["dev"] markers = "python_full_version <= \"3.11.0a6\"" files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, 
{file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, {file = "tomli-2.2.1.tar.gz", 
hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] [[package]] name = "typing-extensions" version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" groups = ["main", "dev"] files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] markers = {main = "python_version < \"3.11\""} [[package]] name = "urllib3" version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvicorn" version = "0.33.0" description = "The lightning-fast ASGI server." optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "uvicorn-0.33.0-py3-none-any.whl", hash = "sha256:2c30de4aeea83661a520abab179b24084a0019c0c1bbe137e5409f741cbde5f8"}, {file = "uvicorn-0.33.0.tar.gz", hash = "sha256:3577119f82b7091cf4d3d4177bfda0bae4723ed92ab1439e8d779de880c9cc59"}, ] [package.dependencies] click = ">=7.0" h11 = ">=0.8" typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} [package.extras] standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] [metadata] lock-version = "2.1" python-versions = ">=3.8" content-hash = "82239e7265d51f4fcf9ba32cfe1c89a4b6373b3d3c247c643926eeeed5272082" prometheus-fastapi-instrumentator-7.1.0/pyproject.toml000066400000000000000000000025701476661573700234010ustar00rootroot00000000000000[build-system] requires = ["poetry-core>=2.0"] build-backend = "poetry.core.masonry.api" [project] name = "prometheus-fastapi-instrumentator" version = "7.1.0" description = "Instrument your FastAPI app with Prometheus metrics" authors = [ {name = "Tim Schwenke", email = "tim@trallnag.com"} ] readme = "README.md" requires-python = ">=3.8" license = { text = "ISC" } keywords = ["prometheus", "instrumentation", "fastapi", "exporter", "metrics"] dependencies = [ "starlette (>=0.30.0,<1.0.0)", "prometheus-client (>=0.8.0,<1.0.0)" ] [project.urls] homepage = 'https://github.com/trallnag/prometheus-fastapi-instrumentator' [tool.poetry.group.dev.dependencies] httpx = "^0.23.3" black = "^24.2.0" flake8 = { version = "^7.0.0", python = ">=3.8.1" } requests = "^2.31.0" pytest = "^8.1.1" pytest-cov = "^4.1.0" rope = "^1.12.0" isort = "^5.13.2" mypy = "^1.9.0" devtools = "^0.10.0" asgiref = "^3.7.2" uvicorn = ">=0.28.0" gunicorn = "^21.2.0" pytest-asyncio = ">=0.23.5.post1" fastapi = "^0.110.0" [tool.black] line-length = 90 [tool.isort] profile = "black" [tool.mypy] allow_redefinition = true disallow_untyped_defs = true disallow_any_unimported = true no_implicit_optional = true check_untyped_defs = true warn_unused_ignores = true show_error_codes = true [tool.pytest.ini_options] norecursedirs = "tests/helpers" 
markers = ["slow: mark test as slow."] asyncio_mode = "auto" prometheus-fastapi-instrumentator-7.1.0/src/000077500000000000000000000000001476661573700212505ustar00rootroot00000000000000prometheus-fastapi-instrumentator-7.1.0/src/prometheus_fastapi_instrumentator/000077500000000000000000000000001476661573700303305ustar00rootroot00000000000000prometheus-fastapi-instrumentator-7.1.0/src/prometheus_fastapi_instrumentator/__init__.py000066400000000000000000000002061476661573700324370ustar00rootroot00000000000000from .instrumentation import PrometheusFastApiInstrumentator __version__ = "7.1.0" Instrumentator = PrometheusFastApiInstrumentator prometheus-fastapi-instrumentator-7.1.0/src/prometheus_fastapi_instrumentator/instrumentation.py000066400000000000000000000326661476661573700341620ustar00rootroot00000000000000import asyncio import gzip import importlib.util import os import re import warnings from enum import Enum from typing import Any, Awaitable, Callable, List, Optional, Sequence, Union, cast from prometheus_client import ( CONTENT_TYPE_LATEST, REGISTRY, CollectorRegistry, generate_latest, multiprocess, ) from starlette.applications import Starlette from starlette.requests import Request from starlette.responses import Response from prometheus_fastapi_instrumentator import metrics from prometheus_fastapi_instrumentator.middleware import ( PrometheusInstrumentatorMiddleware, ) class PrometheusFastApiInstrumentator: def __init__( self, should_group_status_codes: bool = True, should_ignore_untemplated: bool = False, should_group_untemplated: bool = True, should_round_latency_decimals: bool = False, should_respect_env_var: bool = False, should_instrument_requests_inprogress: bool = False, should_exclude_streaming_duration: bool = False, excluded_handlers: List[str] = [], body_handlers: List[str] = [], round_latency_decimals: int = 4, env_var_name: str = "ENABLE_METRICS", inprogress_name: str = "http_requests_inprogress", inprogress_labels: bool = False, registry: Union[CollectorRegistry, None] = None, ) -> None: """Create a Prometheus FastAPI (and Starlette) Instrumentator. Args: should_group_status_codes (bool): Should status codes be grouped into `2xx`, `3xx` and so on? Defaults to `True`. should_ignore_untemplated (bool): Should requests without a matching template be ignored? Defaults to `False`. This means that by default a request like `curl -X GET localhost:80/doesnotexist` will be ignored. should_group_untemplated (bool): Should requests without a matching template be grouped to handler `none`? Defaults to `True`. should_round_latency_decimals: Should recorded latencies be rounded to a certain number of decimals? should_respect_env_var (bool): Should the instrumentator only work - for example the methods `instrument()` and `expose()` - if a certain environment variable is set to `true`? Usecase: A base FastAPI app that is used by multiple distinct apps. The apps only have to set the variable to be instrumented. Defaults to `False`. should_instrument_requests_inprogress (bool): Enables a gauge that shows the inprogress requests. See also the related args starting with `inprogress`. Defaults to `False`. should_exclude_streaming_duration: Should the streaming duration be excluded? Only relevant if default metrics are used. Defaults to `False`. excluded_handlers (List[str]): List of strings that will be compiled to regex patterns. All matches will be skipped and not instrumented. Defaults to `[]`. 
body_handlers (List[str]): List of strings that will be compiled to regex patterns to match handlers for the middleware to pass through response bodies to instrumentations. So only relevant for instrumentations that access `info.response.body`. Note that this has a noticeable negative impact on performance with responses larger than a few MBs. Defaults to `[]`. round_latency_decimals (int): Number of decimals latencies should be rounded to. Ignored unless `should_round_latency_decimals` is `True`. Defaults to `4`. env_var_name (str): Any valid os environment variable name that will be checked for existence before instrumentation. Ignored unless `should_respect_env_var` is `True`. Defaults to `"ENABLE_METRICS"`. inprogress_name (str): Name of the gauge. Defaults to `http_requests_inprogress`. Ignored unless `should_instrument_requests_inprogress` is `True`. inprogress_labels (bool): Should labels `method` and `handler` be part of the inprogress label? Ignored unless `should_instrument_requests_inprogress` is `True`. Defaults to `False`. registry (CollectorRegistry): A custom Prometheus registry to use. If not provided, the default `REGISTRY` will be used. This can be useful if you need to run multiple apps at the same time, with their own registries, for example during testing. Raises: ValueError: If `PROMETHEUS_MULTIPROC_DIR` env var is found but doesn't point to a valid directory. """ self.should_group_status_codes = should_group_status_codes self.should_ignore_untemplated = should_ignore_untemplated self.should_group_untemplated = should_group_untemplated self.should_round_latency_decimals = should_round_latency_decimals self.should_respect_env_var = should_respect_env_var self.should_instrument_requests_inprogress = should_instrument_requests_inprogress self.should_exclude_streaming_duration = should_exclude_streaming_duration self.round_latency_decimals = round_latency_decimals self.env_var_name = env_var_name self.inprogress_name = inprogress_name self.inprogress_labels = inprogress_labels self.excluded_handlers = [re.compile(path) for path in excluded_handlers] self.body_handlers = [re.compile(path) for path in body_handlers] self.instrumentations: List[Callable[[metrics.Info], None]] = [] self.async_instrumentations: List[Callable[[metrics.Info], Awaitable[None]]] = [] if ( "prometheus_multiproc_dir" in os.environ and "PROMETHEUS_MULTIPROC_DIR" not in os.environ ): os.environ["PROMETHEUS_MULTIPROC_DIR"] = os.environ[ "prometheus_multiproc_dir" ] warnings.warn( "prometheus_multiproc_dir variable has been deprecated in favor of the upper case naming PROMETHEUS_MULTIPROC_DIR", DeprecationWarning, ) if registry: self.registry = registry else: self.registry = REGISTRY if "PROMETHEUS_MULTIPROC_DIR" in os.environ: pmd = os.environ["PROMETHEUS_MULTIPROC_DIR"] if not os.path.isdir(pmd): raise ValueError( f"Env var PROMETHEUS_MULTIPROC_DIR='{pmd}' not a directory." ) def instrument( self, app: Starlette, metric_namespace: str = "", metric_subsystem: str = "", should_only_respect_2xx_for_highr: bool = False, latency_highr_buckets: Sequence[Union[float, str]] = ( 0.01, 0.025, 0.05, 0.075, 0.1, 0.25, 0.5, 0.75, 1, 1.5, 2, 2.5, 3, 3.5, 4, 4.5, 5, 7.5, 10, 30, 60, ), latency_lowr_buckets: Sequence[Union[float, str]] = (0.1, 0.5, 1), ) -> "PrometheusFastApiInstrumentator": """Performs the instrumentation by adding middleware. The middleware iterates through all `instrumentations` and executes them. Args: app: Starlette app instance. Note that every FastAPI app is a Starlette app. 
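
            metric_namespace (str): Namespace of all metrics in this metric
                function. Defaults to `""`.

            metric_subsystem (str): Subsystem of all metrics in this metric
                function. Defaults to `""`.

            should_only_respect_2xx_for_highr (bool): Should the
                high-resolution latency histogram only observe requests with a
                `2xx` status code? Defaults to `False`.

            latency_highr_buckets (Sequence[Union[float, str]]): Buckets for
                the high-resolution latency histogram. Defaults to a
                fine-grained set of buckets from 0.01 to 60 seconds.

            latency_lowr_buckets (Sequence[Union[float, str]]): Buckets for
                the low-resolution latency histogram. Defaults to
                `(0.1, 0.5, 1)`.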
        Raises:
            e: Only raised if the app itself raises an exception.

        Returns:
            self: Instrumentator. Builder Pattern.
        """
        if self.should_respect_env_var and not self._should_instrumentate():
            return self

        app.add_middleware(
            PrometheusInstrumentatorMiddleware,
            should_group_status_codes=self.should_group_status_codes,
            should_ignore_untemplated=self.should_ignore_untemplated,
            should_group_untemplated=self.should_group_untemplated,
            should_round_latency_decimals=self.should_round_latency_decimals,
            should_respect_env_var=self.should_respect_env_var,
            should_instrument_requests_inprogress=self.should_instrument_requests_inprogress,
            should_exclude_streaming_duration=self.should_exclude_streaming_duration,
            round_latency_decimals=self.round_latency_decimals,
            env_var_name=self.env_var_name,
            inprogress_name=self.inprogress_name,
            inprogress_labels=self.inprogress_labels,
            instrumentations=self.instrumentations,
            async_instrumentations=self.async_instrumentations,
            excluded_handlers=self.excluded_handlers,
            body_handlers=self.body_handlers,
            metric_namespace=metric_namespace,
            metric_subsystem=metric_subsystem,
            should_only_respect_2xx_for_highr=should_only_respect_2xx_for_highr,
            latency_highr_buckets=latency_highr_buckets,
            latency_lowr_buckets=latency_lowr_buckets,
            registry=self.registry,
        )
        return self

    def expose(
        self,
        app: Starlette,
        should_gzip: bool = False,
        endpoint: str = "/metrics",
        include_in_schema: bool = True,
        tags: Optional[List[Union[str, Enum]]] = None,
        **kwargs: Any,
    ) -> "PrometheusFastApiInstrumentator":
        """Exposes endpoint for metrics.

        Args:
            app: App instance. Endpoint will be added to this app. This can be
                a Starlette app or a FastAPI app. If it is a plain Starlette
                app, `tags` and `kwargs` will be ignored.

            should_gzip: Should the endpoint return compressed data? It will
                also check for `gzip` in the `Accept-Encoding` header.
                Compression consumes more CPU cycles. In most cases it's best
                to just leave this option off since network bandwidth is
                usually cheaper than CPU cycles. Defaults to `False`.

            endpoint: Endpoint on which metrics should be exposed.

            include_in_schema: Should the endpoint show up in the
                documentation?

            tags (List[str], optional): Tags to apply to the endpoint, if you
                manage your routes with tags. Defaults to `None`. Only passed
                to FastAPI apps.

            kwargs: Will be passed to the app when registering the route. Only
                passed to FastAPI apps.

        Returns:
            self: Instrumentator. Builder Pattern.
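
        Example:
            A minimal sketch of the builder pattern in action. The chained
            calls follow the usage documented for this library; the variable
            name `app` is just illustrative:

                from fastapi import FastAPI

                from prometheus_fastapi_instrumentator import Instrumentator

                app = FastAPI()

                # Adds the middleware and exposes GET /metrics on the same app.
                Instrumentator().instrument(app).expose(app)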
""" if self.should_respect_env_var and not self._should_instrumentate(): return self def metrics(request: Request) -> Response: """Endpoint that serves Prometheus metrics.""" ephemeral_registry = self.registry if "PROMETHEUS_MULTIPROC_DIR" in os.environ: ephemeral_registry = CollectorRegistry() multiprocess.MultiProcessCollector(ephemeral_registry) if should_gzip and "gzip" in request.headers.get("Accept-Encoding", ""): resp = Response( content=gzip.compress(generate_latest(ephemeral_registry)) ) resp.headers["Content-Type"] = CONTENT_TYPE_LATEST resp.headers["Content-Encoding"] = "gzip" else: resp = Response(content=generate_latest(ephemeral_registry)) resp.headers["Content-Type"] = CONTENT_TYPE_LATEST return resp route_configured = False if importlib.util.find_spec("fastapi"): from fastapi import FastAPI if isinstance(app, FastAPI): fastapi_app: FastAPI = app fastapi_app.get( endpoint, include_in_schema=include_in_schema, tags=tags, **kwargs )(metrics) route_configured = True if not route_configured: app.add_route( path=endpoint, route=metrics, include_in_schema=include_in_schema ) return self def add( self, *instrumentation_function: Optional[ Callable[[metrics.Info], Union[None, Awaitable[None]]] ], ) -> "PrometheusFastApiInstrumentator": """Adds function to list of instrumentations. Args: instrumentation_function: Function that will be executed during every request handler call (if not excluded). See above for detailed information on the interface of the function. Returns: self: Instrumentator. Builder Pattern. """ for func in instrumentation_function: if func: if asyncio.iscoroutinefunction(func): self.async_instrumentations.append( cast( Callable[[metrics.Info], Awaitable[None]], func, ) ) else: self.instrumentations.append( cast(Callable[[metrics.Info], None], func) ) return self def _should_instrumentate(self) -> bool: """Checks if instrumentation should be performed based on env var.""" return os.getenv(self.env_var_name, "False").lower() in ["true", "1"] prometheus-fastapi-instrumentator-7.1.0/src/prometheus_fastapi_instrumentator/metrics.py000066400000000000000000000721521476661573700323570ustar00rootroot00000000000000""" This module contains ready-to-use functions that can be passed on to the instrumentator instance with the `add()` method. The idea behind this is to make the types of metrics you want to export with the instrumentation easily customizable. The default instrumentation function `default` can also be found here. If your requirements are really specific or very extensive it makes sense to create your own instrumentation function instead of combining several functions from this module. """ from typing import Callable, List, Optional, Sequence, Tuple, Union from prometheus_client import REGISTRY, CollectorRegistry, Counter, Histogram, Summary from starlette.requests import Request from starlette.responses import Response # ------------------------------------------------------------------------------ class Info: def __init__( self, request: Request, response: Optional[Response], method: str, modified_handler: str, modified_status: str, modified_duration: float, modified_duration_without_streaming: float = 0.0, ): """Creates Info object that is used for instrumentation functions. This is the only argument that is passed to the instrumentation functions. Args: request (Request): Python Requests request object. response (Response or None): Python Requests response object. method (str): Unmodified method of the request. 
modified_handler (str): Handler representation after processing by instrumentator. For example grouped to `none` if not templated. modified_status (str): Status code representation after processing by instrumentator. For example grouping into `2xx`, `3xx` and so on. modified_duration (float): Latency representation after processing by instrumentator. For example rounding of decimals. In seconds. modified_duration_without_streaming (float): Latency in seconds between request arrival and start of the response (i.e., the duration until the first chunk), excluding the streaming duration. Defaults to 0. """ self.request = request self.response = response self.method = method self.modified_handler = modified_handler self.modified_status = modified_status self.modified_duration = modified_duration self.modified_duration_without_streaming = modified_duration_without_streaming def _build_label_attribute_names( should_include_handler: bool, should_include_method: bool, should_include_status: bool, ) -> Tuple[List[str], List[str]]: """Builds up a tuple with the label and attribute names to be used. Args: should_include_handler (bool): Should the `handler` label be part of the metric? should_include_method (bool): Should the `method` label be part of the metric? should_include_status (bool): Should the `status` label be part of the metric? Returns: Tuple with two list elements. First element: List with all labels to be used. Second element: List with all attribute names to be used from the `Info` object. Done like this to enable dynamically turning labels on and off. """ label_names = [] info_attribute_names = [] if should_include_handler: label_names.append("handler") info_attribute_names.append("modified_handler") if should_include_method: label_names.append("method") info_attribute_names.append("method") if should_include_status: label_names.append("status") info_attribute_names.append("modified_status") return label_names, info_attribute_names def _is_duplicated_time_series(error: ValueError) -> bool: return any( map( error.args[0].__contains__, [ "Duplicated timeseries in CollectorRegistry:", "Duplicated time series in CollectorRegistry:", ], ) ) # ------------------------------------------------------------------------------ # Instrumentation / Metrics functions def latency( metric_name: str = "http_request_duration_seconds", metric_doc: str = "Duration of HTTP requests in seconds", metric_namespace: str = "", metric_subsystem: str = "", should_include_handler: bool = True, should_include_method: bool = True, should_include_status: bool = True, should_exclude_streaming_duration: bool = False, buckets: Sequence[Union[float, str]] = Histogram.DEFAULT_BUCKETS, registry: CollectorRegistry = REGISTRY, custom_labels: dict = {}, ) -> Optional[Callable[[Info], None]]: """Default metric for the Prometheus FastAPI Instrumentator. Args: metric_name (str, optional): Name of the metric to be created. Must be unique. Defaults to "http_request_duration_seconds". metric_doc (str, optional): Documentation of the metric. Defaults to "Duration of HTTP requests in seconds". metric_namespace (str, optional): Namespace of all metrics in this metric function. Defaults to "". metric_subsystem (str, optional): Subsystem of all metrics in this metric function. Defaults to "". should_include_handler: Should the `handler` label be part of the metric? Defaults to `True`. should_include_method: Should the `method` label be part of the metric? Defaults to `True`. should_include_status: Should the `status` label be part of the metric? Defaults to `True`.
should_exclude_streaming_duration: Should the streaming duration be excluded? Defaults to `False`. buckets: Buckets for the histogram. Defaults to the default buckets from the Prometheus client library. Returns: Function that takes a single parameter `Info`. """ if buckets[-1] != float("inf"): buckets = [*buckets, float("inf")] label_names, info_attribute_names = _build_label_attribute_names( should_include_handler, should_include_method, should_include_status ) for key in custom_labels: label_names.append(key) info_attribute_names.append(key) # Starlette will call app.build_middleware_stack() with every new middleware # added, which will call all this again, which will make the registry # complain about duplicated metrics. # # The Python Prometheus client currently doesn't seem to have a way to # verify if adding a metric will cause errors or not, so the only way to # handle it seems to be with this try block. try: if label_names: METRIC = Histogram( metric_name, metric_doc, labelnames=label_names, buckets=buckets, namespace=metric_namespace, subsystem=metric_subsystem, registry=registry, ) else: METRIC = Histogram( metric_name, metric_doc, buckets=buckets, namespace=metric_namespace, subsystem=metric_subsystem, registry=registry, ) def instrumentation(info: Info) -> None: duration = info.modified_duration if should_exclude_streaming_duration: duration = info.modified_duration_without_streaming else: duration = info.modified_duration if label_names: label_values = [ getattr(info, attribute_name) for attribute_name in info_attribute_names ] METRIC.labels(*label_values).observe(duration) else: METRIC.observe(duration) return instrumentation except ValueError as e: if not _is_duplicated_time_series(e): raise e return None def request_size( metric_name: str = "http_request_size_bytes", metric_doc: str = "Content bytes of requests.", metric_namespace: str = "", metric_subsystem: str = "", should_include_handler: bool = True, should_include_method: bool = True, should_include_status: bool = True, registry: CollectorRegistry = REGISTRY, custom_labels: dict = {}, ) -> Optional[Callable[[Info], None]]: """Record the content length of incoming requests. If content length is missing 0 will be assumed. Args: metric_name (str, optional): Name of the metric to be created. Must be unique. Defaults to "http_request_size_bytes". metric_doc (str, optional): Documentation of the metric. Defaults to "Content bytes of requests.". metric_namespace (str, optional): Namespace of all metrics in this metric function. Defaults to "". metric_subsystem (str, optional): Subsystem of all metrics in this metric function. Defaults to "". should_include_handler: Should the `handler` label be part of the metric? Defaults to `True`. should_include_method: Should the `method` label be part of the metric? Defaults to `True`. should_include_status: Should the `status` label be part of the metric? Defaults to `True`. Returns: Function that takes a single parameter `Info`. """ label_names, info_attribute_names = _build_label_attribute_names( should_include_handler, should_include_method, should_include_status ) for key in custom_labels: label_names.append(key) info_attribute_names.append(key) # Starlette will call app.build_middleware_stack() with every new middleware # added, which will call all this again, which will make the registry # complain about duplicated metrics.
# # The Python Prometheus client currently doesn't seem to have a way to # verify if adding a metric will cause errors or not, so the only way to # handle it seems to be with this try block. try: if label_names: METRIC = Summary( metric_name, metric_doc, labelnames=label_names, namespace=metric_namespace, subsystem=metric_subsystem, registry=registry, ) else: METRIC = Summary( metric_name, metric_doc, namespace=metric_namespace, subsystem=metric_subsystem, registry=registry, ) def instrumentation(info: Info) -> None: content_length = info.request.headers.get("Content-Length", 0) if label_names: label_values = [ getattr(info, attribute_name) for attribute_name in info_attribute_names ] METRIC.labels(*label_values).observe(int(content_length)) else: METRIC.observe(int(content_length)) return instrumentation except ValueError as e: if not _is_duplicated_time_series(e): raise e return None def response_size( metric_name: str = "http_response_size_bytes", metric_doc: str = "Content bytes of responses.", metric_namespace: str = "", metric_subsystem: str = "", should_include_handler: bool = True, should_include_method: bool = True, should_include_status: bool = True, registry: CollectorRegistry = REGISTRY, custom_labels: dict = {}, ) -> Optional[Callable[[Info], None]]: """Record the content length of outgoing responses. If content length is missing 0 will be assumed. Args: metric_name (str, optional): Name of the metric to be created. Must be unique. Defaults to "http_response_size_bytes". metric_doc (str, optional): Documentation of the metric. Defaults to "Content bytes of responses.". metric_namespace (str, optional): Namespace of all metrics in this metric function. Defaults to "". metric_subsystem (str, optional): Subsystem of all metrics in this metric function. Defaults to "". should_include_handler: Should the `handler` label be part of the metric? Defaults to `True`. should_include_method: Should the `method` label be part of the metric? Defaults to `True`. should_include_status: Should the `status` label be part of the metric? Defaults to `True`. Returns: Function that takes a single parameter `Info`. """ label_names, info_attribute_names = _build_label_attribute_names( should_include_handler, should_include_method, should_include_status ) for key in custom_labels: label_names.append(key) info_attribute_names.append(key) # Starlette will call app.build_middleware_stack() with every new middleware # added, which will call all this again, which will make the registry # complain about duplicated metrics. # # The Python Prometheus client currently doesn't seem to have a way to # verify if adding a metric will cause errors or not, so the only way to # handle it seems to be with this try block. 
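    # For example (a sketch, assuming a FastAPI instance `app`), this helper is
    # registered with the instrumentator like the other functions in this module:
    #
    #     Instrumentator().add(response_size()).instrument(app).expose(app)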
try: if label_names: METRIC = Summary( metric_name, metric_doc, labelnames=label_names, namespace=metric_namespace, subsystem=metric_subsystem, registry=registry, ) else: METRIC = Summary( metric_name, metric_doc, namespace=metric_namespace, subsystem=metric_subsystem, registry=registry, ) def instrumentation(info: Info) -> None: if info.response and hasattr(info.response, "headers"): content_length = info.response.headers.get("Content-Length", 0) else: content_length = 0 if label_names: label_values = [ getattr(info, attribute_name) for attribute_name in info_attribute_names ] METRIC.labels(*label_values).observe(int(content_length)) else: METRIC.observe(int(content_length)) return instrumentation except ValueError as e: if not _is_duplicated_time_series(e): raise e return None def combined_size( metric_name: str = "http_combined_size_bytes", metric_doc: str = "Content bytes of requests and responses.", metric_namespace: str = "", metric_subsystem: str = "", should_include_handler: bool = True, should_include_method: bool = True, should_include_status: bool = True, registry: CollectorRegistry = REGISTRY, custom_labels: dict = {}, ) -> Optional[Callable[[Info], None]]: """Record the combined content length of requests and responses. If content length is missing 0 will be assumed. Args: metric_name (str, optional): Name of the metric to be created. Must be unique. Defaults to "http_combined_size_bytes". metric_doc (str, optional): Documentation of the metric. Defaults to "Content bytes of requests and responses.". metric_namespace (str, optional): Namespace of all metrics in this metric function. Defaults to "". metric_subsystem (str, optional): Subsystem of all metrics in this metric function. Defaults to "". should_include_handler: Should the `handler` label be part of the metric? Defaults to `True`. should_include_method: Should the `method` label be part of the metric? Defaults to `True`. should_include_status: Should the `status` label be part of the metric? Defaults to `True`. Returns: Function that takes a single parameter `Info`. """ label_names, info_attribute_names = _build_label_attribute_names( should_include_handler, should_include_method, should_include_status ) for key in custom_labels: label_names.append(key) info_attribute_names.append(key) # Starlette will call app.build_middleware_stack() with every new middleware # added, which will call all this again, which will make the registry # complain about duplicated metrics. # # The Python Prometheus client currently doesn't seem to have a way to # verify if adding a metric will cause errors or not, so the only way to # handle it seems to be with this try block. 
try: if label_names: METRIC = Summary( metric_name, metric_doc, labelnames=label_names, namespace=metric_namespace, subsystem=metric_subsystem, registry=registry, ) else: METRIC = Summary( metric_name, metric_doc, namespace=metric_namespace, subsystem=metric_subsystem, registry=registry, ) def instrumentation(info: Info) -> None: request_cl = info.request.headers.get("Content-Length", 0) if info.response and hasattr(info.response, "headers"): response_cl = info.response.headers.get("Content-Length", 0) else: response_cl = 0 content_length = int(request_cl) + int(response_cl) if label_names: label_values = [ getattr(info, attribute_name) for attribute_name in info_attribute_names ] METRIC.labels(*label_values).observe(int(content_length)) else: METRIC.observe(int(content_length)) return instrumentation except ValueError as e: if not _is_duplicated_time_series(e): raise e return None def requests( metric_name: str = "http_requests_total", metric_doc: str = "Total number of requests by method, status and handler.", metric_namespace: str = "", metric_subsystem: str = "", should_include_handler: bool = True, should_include_method: bool = True, should_include_status: bool = True, registry: CollectorRegistry = REGISTRY, custom_labels: dict = {}, ) -> Optional[Callable[[Info], None]]: """Record the number of requests. Args: metric_name (str, optional): Name of the metric to be created. Must be unique. Defaults to "http_requests_total". metric_doc (str, optional): Documentation of the metric. Defaults to "Total number of requests by method, status and handler.". metric_namespace (str, optional): Namespace of all metrics in this metric function. Defaults to "". metric_subsystem (str, optional): Subsystem of all metrics in this metric function. Defaults to "". should_include_handler (bool, optional): Should the `handler` label be part of the metric? Defaults to `True`. should_include_method (bool, optional): Should the `method` label be part of the metric? Defaults to `True`. should_include_status (bool, optional): Should the `status` label be part of the metric? Defaults to `True`. Returns: Function that takes a single parameter `Info`. """ label_names, info_attribute_names = _build_label_attribute_names( should_include_handler, should_include_method, should_include_status ) for key in custom_labels: label_names.append(key) info_attribute_names.append(key) # Starlette will call app.build_middleware_stack() with every new middleware # added, which will call all this again, which will make the registry # complain about duplicated metrics. # # The Python Prometheus client currently doesn't seem to have a way to # verify if adding a metric will cause errors or not, so the only way to # handle it seems to be with this try block. 
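    # On a repeated registration attempt the Prometheus client raises a
    # ValueError whose message starts with "Duplicated timeseries in
    # CollectorRegistry:" (or "Duplicated time series ..."), which
    # _is_duplicated_time_series() above matches so that this function can
    # return None instead of failing.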
try: if label_names: METRIC = Counter( metric_name, metric_doc, labelnames=label_names, namespace=metric_namespace, subsystem=metric_subsystem, registry=registry, ) else: METRIC = Counter( metric_name, metric_doc, namespace=metric_namespace, subsystem=metric_subsystem, registry=registry, ) def instrumentation(info: Info) -> None: if label_names: label_values = [ getattr(info, attribute_name) for attribute_name in info_attribute_names ] METRIC.labels(*label_values).inc() else: METRIC.inc() return instrumentation except ValueError as e: if not _is_duplicated_time_series(e): raise e return None def _map_label_name_value(label_name: tuple) -> list[str]: attribute_names = [] mapping = { "handler": "modified_handler", "status": "modified_status", "duration": "modified_duration", } for item in label_name: if item in mapping: attribute_names.append(mapping[item]) else: attribute_names.append(item) return attribute_names def default( metric_namespace: str = "", metric_subsystem: str = "", should_only_respect_2xx_for_highr: bool = False, should_exclude_streaming_duration: bool = False, latency_highr_buckets: Sequence[Union[float, str]] = ( 0.01, 0.025, 0.05, 0.075, 0.1, 0.25, 0.5, 0.75, 1, 1.5, 2, 2.5, 3, 3.5, 4, 4.5, 5, 7.5, 10, 30, 60, ), latency_lowr_buckets: Sequence[Union[float, str]] = (0.1, 0.5, 1), registry: CollectorRegistry = REGISTRY, custom_labels: dict = {}, ) -> Optional[Callable[[Info], None]]: """Contains multiple metrics to cover multiple things. Combines several metrics into a single function. Also more efficient than multiple separate instrumentation functions that do more or less the same. You get the following: * `http_requests_total` (`handler`, `status`, `method`): Total number of requests by handler, status and method. * `http_request_size_bytes` (`handler`): Total number of incoming content length bytes by handler. * `http_response_size_bytes` (`handler`): Total number of outgoing content length bytes by handler. * `http_request_duration_highr_seconds` (no labels): High number of buckets leading to more accurate calculation of percentiles. * `http_request_duration_seconds` (`handler`, `method`): Keep the bucket count very low. Only put in SLIs. Args: metric_namespace (str, optional): Namespace of all metrics in this metric function. Defaults to "". metric_subsystem (str, optional): Subsystem of all metrics in this metric function. Defaults to "". should_only_respect_2xx_for_highr (bool, optional): Should the metric `http_request_duration_highr_seconds` only include latencies of requests / responses that have a status code starting with `2`? Defaults to `False`. should_exclude_streaming_duration: Should the streaming duration be excluded? Defaults to `False`. latency_highr_buckets (tuple[float], optional): Buckets tuple for high res histogram. Can be large because no labels are used. Defaults to (0.01, 0.025, 0.05, 0.075, 0.1, 0.25, 0.5, 0.75, 1, 1.5, 2, 2.5, 3, 3.5, 4, 4.5, 5, 7.5, 10, 30, 60). latency_lowr_buckets (tuple[float], optional): Buckets tuple for low res histogram. Should be very small as all possible labels are included. Defaults to `(0.1, 0.5, 1)`. Returns: Function that takes a single parameter `Info`.
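Example (a sketch, assuming a FastAPI instance `app`; the subsystem name is illustrative): `default` is what the middleware falls back to if no other instrumentations are configured, but it can also be added explicitly:

    Instrumentator().add(default(metric_subsystem="api")).instrument(app)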
""" if latency_highr_buckets[-1] != float("inf"): latency_highr_buckets = [*latency_highr_buckets, float("inf")] if latency_lowr_buckets[-1] != float("inf"): latency_lowr_buckets = [*latency_lowr_buckets, float("inf")] # Starlette will call app.build_middleware_stack() with every new middleware # added, which will call all this again, which will make the registry # complain about duplicated metrics. # # The Python Prometheus client currently doesn't seem to have a way to # verify if adding a metric will cause errors or not, so the only way to # handle it seems to be with this try block. additional_label_names = tuple([key for key in custom_labels]) try: total_label_names = ( "method", "status", "handler", ) TOTAL = Counter( name="http_requests_total", documentation="Total number of requests by method, status and handler.", labelnames=total_label_names + additional_label_names, namespace=metric_namespace, subsystem=metric_subsystem, registry=registry, ) in_size_names = ("handler",) IN_SIZE = Summary( name="http_request_size_bytes", documentation=( "Content length of incoming requests by handler. " "Only value of header is respected. Otherwise ignored. " "No percentile calculated. " ), labelnames=in_size_names + additional_label_names, namespace=metric_namespace, subsystem=metric_subsystem, registry=registry, ) out_size_names = ("handler",) OUT_SIZE = Summary( name="http_response_size_bytes", documentation=( "Content length of outgoing responses by handler. " "Only value of header is respected. Otherwise ignored. " "No percentile calculated. " ), labelnames=out_size_names + additional_label_names, namespace=metric_namespace, subsystem=metric_subsystem, registry=registry, ) LATENCY_HIGHR = Histogram( name="http_request_duration_highr_seconds", documentation=( "Latency with many buckets but no API specific labels. " "Made for more accurate percentile calculations. " ), buckets=latency_highr_buckets, namespace=metric_namespace, subsystem=metric_subsystem, registry=registry, ) latency_lower_names = ( "method", "handler", ) LATENCY_LOWR = Histogram( name="http_request_duration_seconds", documentation=( "Latency with only few buckets by handler. " "Made to be only used if aggregation by handler is important. 
" ), buckets=latency_lowr_buckets, labelnames=latency_lower_names + additional_label_names, namespace=metric_namespace, subsystem=metric_subsystem, registry=registry, ) def instrumentation(info: Info) -> None: duration = info.modified_duration if should_exclude_streaming_duration: duration = info.modified_duration_without_streaming else: duration = info.modified_duration label_values = [ getattr(info, attribute_name) for attribute_name in _map_label_name_value(total_label_names) ] + list(custom_labels.values()) TOTAL.labels(*label_values).inc() label_values = [ getattr(info, attribute_name) for attribute_name in _map_label_name_value(in_size_names) ] + list(custom_labels.values()) IN_SIZE.labels(*label_values).observe( int(info.request.headers.get("Content-Length", 0)) ) label_values = [ getattr(info, attribute_name) for attribute_name in _map_label_name_value(out_size_names) ] + list(custom_labels.values()) if info.response and hasattr(info.response, "headers"): OUT_SIZE.labels(*label_values).observe( int(info.response.headers.get("Content-Length", 0)) ) else: OUT_SIZE.labels(*label_values).observe(0) if not should_only_respect_2xx_for_highr or info.modified_status.startswith( "2" ): LATENCY_HIGHR.observe(duration) label_values = [ getattr(info, attribute_name) for attribute_name in _map_label_name_value(latency_lower_names) ] + list(custom_labels.values()) LATENCY_LOWR.labels(*label_values).observe(duration) return instrumentation except ValueError as e: if not _is_duplicated_time_series(e): raise e return None prometheus-fastapi-instrumentator-7.1.0/src/prometheus_fastapi_instrumentator/middleware.py000066400000000000000000000226061476661573700330250ustar00rootroot00000000000000from __future__ import annotations import asyncio import re from http import HTTPStatus from timeit import default_timer from typing import Awaitable, Callable, Optional, Sequence, Tuple, Union from prometheus_client import REGISTRY, CollectorRegistry, Gauge from starlette.applications import Starlette from starlette.datastructures import Headers from starlette.requests import Request from starlette.responses import Response from starlette.types import Message, Receive, Scope, Send from prometheus_fastapi_instrumentator import metrics, routing class PrometheusInstrumentatorMiddleware: def __init__( self, app: Starlette, *, should_group_status_codes: bool = True, should_ignore_untemplated: bool = False, should_group_untemplated: bool = True, should_round_latency_decimals: bool = False, should_respect_env_var: bool = False, should_instrument_requests_inprogress: bool = False, should_exclude_streaming_duration: bool = False, excluded_handlers: Sequence[str] = (), body_handlers: Sequence[str] = (), round_latency_decimals: int = 4, env_var_name: str = "ENABLE_METRICS", inprogress_name: str = "http_requests_inprogress", inprogress_labels: bool = False, instrumentations: Sequence[Callable[[metrics.Info], None]] = (), async_instrumentations: Sequence[Callable[[metrics.Info], Awaitable[None]]] = (), metric_namespace: str = "", metric_subsystem: str = "", should_only_respect_2xx_for_highr: bool = False, latency_highr_buckets: Sequence[Union[float, str]] = ( 0.01, 0.025, 0.05, 0.075, 0.1, 0.25, 0.5, 0.75, 1, 1.5, 2, 2.5, 3, 3.5, 4, 4.5, 5, 7.5, 10, 30, 60, ), latency_lowr_buckets: Sequence[Union[float, str]] = (0.1, 0.5, 1), registry: CollectorRegistry = REGISTRY, custom_labels: dict = {}, ) -> None: self.app = app self.should_group_status_codes = should_group_status_codes self.should_ignore_untemplated = 
should_ignore_untemplated self.should_group_untemplated = should_group_untemplated self.should_round_latency_decimals = should_round_latency_decimals self.should_respect_env_var = should_respect_env_var self.should_instrument_requests_inprogress = should_instrument_requests_inprogress self.round_latency_decimals = round_latency_decimals self.env_var_name = env_var_name self.inprogress_name = inprogress_name self.inprogress_labels = inprogress_labels self.registry = registry self.custom_labels = custom_labels self.excluded_handlers = [re.compile(path) for path in excluded_handlers] self.body_handlers = [re.compile(path) for path in body_handlers] if instrumentations: self.instrumentations = instrumentations else: default_instrumentation = metrics.default( metric_namespace=metric_namespace, metric_subsystem=metric_subsystem, should_only_respect_2xx_for_highr=should_only_respect_2xx_for_highr, should_exclude_streaming_duration=should_exclude_streaming_duration, latency_highr_buckets=latency_highr_buckets, latency_lowr_buckets=latency_lowr_buckets, registry=self.registry, custom_labels=custom_labels, ) if default_instrumentation: self.instrumentations = [default_instrumentation] else: self.instrumentations = [] self.async_instrumentations = async_instrumentations self.inprogress: Optional[Gauge] = None if self.should_instrument_requests_inprogress: labels = ( ( "method", "handler", ) if self.inprogress_labels else () ) self.inprogress = Gauge( name=self.inprogress_name, documentation="Number of HTTP requests in progress.", labelnames=labels, multiprocess_mode="livesum", ) async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: if scope["type"] != "http": return await self.app(scope, receive, send) request = Request(scope) start_time = default_timer() handler, is_templated = self._get_handler(request) is_excluded = self._is_handler_excluded(handler, is_templated) handler = ( "none" if not is_templated and self.should_group_untemplated else handler ) if not is_excluded and self.inprogress: if self.inprogress_labels: inprogress = self.inprogress.labels(request.method, handler) else: inprogress = self.inprogress inprogress.inc() status_code = 500 headers = [] body = b"" response_start_time = None # Message body collected for handlers matching body_handlers patterns. 
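        # The patterns are the regular expressions compiled from the
        # `body_handlers` option and are matched against the handler with
        # re.search, e.g. body_handlers=["^/api/"] (an illustrative pattern)
        # collects response bodies for all handlers under /api/.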
if any(pattern.search(handler) for pattern in self.body_handlers): async def send_wrapper(message: Message) -> None: if message["type"] == "http.response.start": nonlocal status_code, headers, response_start_time headers = message["headers"] status_code = message["status"] response_start_time = default_timer() elif message["type"] == "http.response.body" and message["body"]: nonlocal body body += message["body"] await send(message) else: async def send_wrapper(message: Message) -> None: if message["type"] == "http.response.start": nonlocal status_code, headers, response_start_time headers = message["headers"] status_code = message["status"] response_start_time = default_timer() await send(message) try: await self.app(scope, receive, send_wrapper) except Exception as exc: raise exc finally: status = ( str(status_code.value) if isinstance(status_code, HTTPStatus) else str(status_code) ) if not is_excluded: duration = max(default_timer() - start_time, 0.0) duration_without_streaming = 0.0 if response_start_time: duration_without_streaming = max( response_start_time - start_time, 0.0 ) if self.should_instrument_requests_inprogress: inprogress.dec() if self.should_round_latency_decimals: duration = round(duration, self.round_latency_decimals) duration_without_streaming = round( duration_without_streaming, self.round_latency_decimals ) if self.should_group_status_codes: status = status[0] + "xx" response = Response( content=body, headers=Headers(raw=headers), status_code=status_code ) info = metrics.Info( request=request, response=response, method=request.method, modified_handler=handler, modified_status=status, modified_duration=duration, modified_duration_without_streaming=duration_without_streaming, ) for instrumentation in self.instrumentations: instrumentation(info) await asyncio.gather( *[ instrumentation(info) for instrumentation in self.async_instrumentations ] ) def _get_handler(self, request: Request) -> Tuple[str, bool]: """Extracts either template or (if no template) path. Args: request (Request): Python Requests request object. Returns: Tuple[str, bool]: Tuple with two elements. First element is either template or if no template the path. Second element tells you if the path is templated or not. """ route_name = routing.get_route_name(request) return route_name or request.url.path, True if route_name else False def _is_handler_excluded(self, handler: str, is_templated: bool) -> bool: """Determines if the handler should be ignored. Args: handler (str): Handler that handles the request. is_templated (bool): Shows if the request is templated. Returns: bool: `True` if excluded, `False` if not. """ if not is_templated and self.should_ignore_untemplated: return True if any(pattern.search(handler) for pattern in self.excluded_handlers): return True return False prometheus-fastapi-instrumentator-7.1.0/src/prometheus_fastapi_instrumentator/py.typed000066400000000000000000000000001476661573700320150ustar00rootroot00000000000000prometheus-fastapi-instrumentator-7.1.0/src/prometheus_fastapi_instrumentator/routing.py000066400000000000000000000076741476661573700324070ustar00rootroot00000000000000# BSD 3-Clause License # # Copyright (c) 2012, the Sentry Team, see AUTHORS for more details # Copyright (c) 2019, Elasticsearch BV # All rights reserved. 
# # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # * Neither the name of the copyright holder nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE """Helper module for routing. The two functions in this module are licensed under the BSD 3-Clause License instead of the ISC License like the rest of the project. Therefore the code is contained in a dedicated module. Based on code from [elastic/apm-agent-python](https://github.com/elastic/apm-agent-python/blob/527f62c0c50842f94ef90fda079853372539319a/elasticapm/contrib/starlette/__init__.py). """ from typing import List, Optional from starlette.requests import HTTPConnection from starlette.routing import Match, Mount, Route from starlette.types import Scope def _get_route_name( scope: Scope, routes: List[Route], route_name: Optional[str] = None ) -> Optional[str]: """Gets route name for given scope taking mounts into account.""" for route in routes: match, child_scope = route.matches(scope) if match == Match.FULL: route_name = route.path child_scope = {**scope, **child_scope} if isinstance(route, Mount) and route.routes: child_route_name = _get_route_name(child_scope, route.routes, route_name) if child_route_name is None: route_name = None else: route_name += child_route_name return route_name elif match == Match.PARTIAL and route_name is None: route_name = route.path return None def get_route_name(request: HTTPConnection) -> Optional[str]: """Gets route name for given request taking mounts into account.""" app = request.app scope = request.scope routes = app.routes route_name = _get_route_name(scope, routes) # Starlette magically redirects requests if the path matches a route name # with a trailing slash appended or removed. To not spam the transaction # names list, we do the same here and put these redirects all in the # same "redirect trailing slashes" transaction name. 
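    # For example (illustrative paths): with redirect_slashes enabled, a
    # request to "/users/" that only matches the route "/users" is reported
    # as "/users/" here, mirroring the redirect instead of leaving the
    # request without a route name.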
if not route_name and app.router.redirect_slashes and scope["path"] != "/": redirect_scope = dict(scope) if scope["path"].endswith("/"): redirect_scope["path"] = scope["path"][:-1] trim = True else: redirect_scope["path"] = scope["path"] + "/" trim = False route_name = _get_route_name(redirect_scope, routes) if route_name is not None: route_name = route_name + "/" if trim else route_name[:-1] return route_name prometheus-fastapi-instrumentator-7.1.0/tests/000077500000000000000000000000001476661573700216235ustar00rootroot00000000000000prometheus-fastapi-instrumentator-7.1.0/tests/conftest.py000066400000000000000000000037561476661573700240350ustar00rootroot00000000000000import os import pathlib import shutil from typing import Optional import pytest from devtools import debug class Helpers: """Contains (possibly) useful random utilities Combined with the fixture that returns this class it is easy to share common functions across multiple tests. Simply use `helpers` as a parameter for the respective test function. Helpers should be static methods generally. """ separator = "-" * 80 should_debug = True @staticmethod def wrapped_debug(element, description: Optional[str] = None) -> None: """Calls devtools `debug` and adds horizontal lines and description. Args: element: Whatever that should be printed by devtools `debug`. description (Optional[str], optional): Description. Defaults to None. """ if Helpers.should_debug: print(f"\n{Helpers.separator}\n") if description: print(f"{description}\n") debug(element) print(f"\n{Helpers.separator}\n") @pytest.fixture def helpers(): """Fixture that returns `Helpers` class. Returns: Helpers: Helpers class. """ return Helpers FILE = __file__ @pytest.fixture def data_path(tmp_path: pathlib.Path): """Fixture that returns a temporary path with data. If the directory `data` exists, its content will be copied to a temporary location. Args: tmp_path (pathlib.Path): Path to temporary location. Returns: pathlib.Path: Path to temporary location. """ source = pathlib.Path(FILE).parent.joinpath("data") destination = tmp_path if source.is_dir(): for item in os.listdir(source): s = os.path.join(source, item) print(s) d = os.path.join(destination, item) print(d) if os.path.isdir(s): shutil.copytree(s, d, symlinks=False, ignore=None) else: shutil.copy2(s, d) return tmp_path prometheus-fastapi-instrumentator-7.1.0/tests/helpers/000077500000000000000000000000001476661573700232655ustar00rootroot00000000000000prometheus-fastapi-instrumentator-7.1.0/tests/helpers/__init__.py000066400000000000000000000000001476661573700253640ustar00rootroot00000000000000prometheus-fastapi-instrumentator-7.1.0/tests/helpers/utils.py000066400000000000000000000030041476661573700247740ustar00rootroot00000000000000import os import shutil from prometheus_client import REGISTRY def reset_collectors() -> None: """Resets collectors in the default Prometheus registry. Modifies the `REGISTRY` registry. Supposed to be called at the beginning of individual test functions. Else registry is reused across test functions and so we can run into issues like duplicate metrics or unexpected values for metrics. """ # Unregister all collectors. collectors = list(REGISTRY._collector_to_names.keys()) print(f"before unregister collectors={collectors}") for collector in collectors: REGISTRY.unregister(collector) # Import default collectors. from prometheus_client import gc_collector, platform_collector, process_collector # Re-register default collectors. 
process_collector.ProcessCollector() platform_collector.PlatformCollector() gc_collector.GCCollector() print(f"after re-register collectors={list(REGISTRY._collector_to_names.keys())}") def is_prometheus_multiproc_valid() -> bool: """Checks if PROMETHEUS_MULTIPROC_DIR is set and a directory.""" if "PROMETHEUS_MULTIPROC_DIR" in os.environ: pmd = os.environ["PROMETHEUS_MULTIPROC_DIR"] if os.path.isdir(pmd): return True else: return False def delete_dir_content(dirpath): for filename in os.listdir(dirpath): filepath = os.path.join(dirpath, filename) try: shutil.rmtree(filepath) except OSError: os.remove(filepath) prometheus-fastapi-instrumentator-7.1.0/tests/test_expose.py000066400000000000000000000057721476661573700245520ustar00rootroot00000000000000import asyncio from typing import Any, Dict, Optional from fastapi import FastAPI, HTTPException from starlette.applications import Starlette from starlette.responses import PlainTextResponse from starlette.routing import Route from starlette.testclient import TestClient from prometheus_fastapi_instrumentator import Instrumentator # ------------------------------------------------------------------------------ # Setup def create_fastapi_app() -> FastAPI: app = FastAPI() @app.get("/") def read_root(): return "Hello World!" @app.get("/sleep") async def sleep(seconds: float): await asyncio.sleep(seconds) return f"I have slept for {seconds}s" @app.get("/always_error") def read_always_error(): raise HTTPException(status_code=404, detail="Not really error") @app.get("/ignore") def read_ignore(): return "Should be ignored" @app.get("/items/{item_id}") def read_item(item_id: int, q: Optional[str] = None): return {"item_id": item_id, "q": q} @app.get("/just_another_endpoint") def read_just_another_endpoint(): return "Green is my pepper" @app.post("/items") def create_item(item: Dict[Any, Any]): return None return app def create_starlette_app() -> Starlette: async def homepage(request): return PlainTextResponse("Homepage") return Starlette(routes=[Route("/", endpoint=homepage)]) def reset_prometheus() -> None: from prometheus_client import REGISTRY # Unregister all collectors. collectors = list(REGISTRY._collector_to_names.keys()) print(f"before unregister collectors={collectors}") for collector in collectors: REGISTRY.unregister(collector) print(f"after unregister collectors={list(REGISTRY._collector_to_names.keys())}") # Import default collectors. from prometheus_client import gc_collector, platform_collector, process_collector # Re-register default collectors. 
process_collector.ProcessCollector() platform_collector.PlatformCollector() gc_collector.GCCollector() # ------------------------------------------------------------------------------ # Tests def test_expose_default_content_type(): reset_prometheus() app = create_fastapi_app() Instrumentator().instrument(app).expose(app) client = TestClient(app) response = client.get("/metrics") print(response.headers.items()) assert ( "text/plain; version=0.0.4; charset=utf-8; charset=utf-8" not in response.headers.values() ) def test_fastapi_app_expose(): reset_prometheus() app = create_fastapi_app() Instrumentator().instrument(app).expose(app) client = TestClient(app) response = client.get("/metrics") assert response.status_code == 200 def test_starlette_app_expose(): reset_prometheus() app = create_starlette_app() Instrumentator().instrument(app).expose(app) client = TestClient(app) response = client.get("/metrics") assert response.status_code == 200 prometheus-fastapi-instrumentator-7.1.0/tests/test_instrumentation.py000066400000000000000000000407411476661573700265050ustar00rootroot00000000000000import asyncio import os from http import HTTPStatus from typing import Any, Dict, Optional from fastapi import FastAPI, HTTPException from prometheus_client import CONTENT_TYPE_LATEST, REGISTRY, Info, generate_latest from requests import Response as TestClientResponse from starlette.responses import Response from starlette.testclient import TestClient from prometheus_fastapi_instrumentator import Instrumentator, metrics setattr(TestClientResponse, "__test__", False) # ------------------------------------------------------------------------------ # Setup CUSTOM_METRICS = ["http_request_duration_seconds"] def create_app() -> FastAPI: app = FastAPI() # Unregister all collectors. collectors = list(REGISTRY._collector_to_names.keys()) print(f"before unregister collectors={collectors}") for collector in collectors: REGISTRY.unregister(collector) print(f"after unregister collectors={list(REGISTRY._collector_to_names.keys())}") # Import default collectors. from prometheus_client import gc_collector, platform_collector, process_collector # Re-register default collectors. process_collector.ProcessCollector() platform_collector.PlatformCollector() gc_collector.GCCollector() print(f"after re-register collectors={list(REGISTRY._collector_to_names.keys())}") @app.get("/") def read_root(): return "Hello World!" 
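    # The routes below cover the cases exercised by the tests: artificial
    # latency, error responses, path parameters, and POST bodies.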
@app.get("/sleep") async def sleep(seconds: float): await asyncio.sleep(seconds) return f"I have slept for {seconds}s" @app.get("/always_error") def read_always_error(): raise HTTPException(status_code=404, detail="Not really error") @app.get("/always_error_httpstatus_enum") def read_always_error_httpstatus_enum(): raise HTTPException( status_code=HTTPStatus.NOT_FOUND, detail="Not really an error" ) @app.get("/ignore") def read_ignore(): return "Should be ignored" @app.get("/items/{item_id}") def read_item(item_id: int, q: Optional[str] = None): return {"item_id": item_id, "q": q} @app.get("/just_another_endpoint") def read_just_another_endpoint(): return "Green is my pepper" @app.post("/items") def create_item(item: Dict[Any, Any]): return None return app def expose_metrics(app: FastAPI) -> None: if "PROMETHEUS_MULTIPROC_DIR" in os.environ: pmd = os.environ["PROMETHEUS_MULTIPROC_DIR"] print(f"Env var PROMETHEUS_MULTIPROC_DIR='{pmd}' detected.") if os.path.isdir(pmd): print(f"Env var PROMETHEUS_MULTIPROC_DIR='{pmd}' is a dir.") from prometheus_client import CollectorRegistry, multiprocess registry = CollectorRegistry() multiprocess.MultiProcessCollector(registry) else: raise ValueError(f"Env var PROMETHEUS_MULTIPROC_DIR='{pmd}' not a directory.") else: registry = REGISTRY @app.get("/metrics") def metrics(): return Response(generate_latest(registry), media_type=CONTENT_TYPE_LATEST) return registry def get_response(client: TestClient, path: str) -> TestClientResponse: response = client.get(path) print(f"\nResponse path='{path}' status='{response.status_code}':\n") for line in response.content.split(b"\n"): print(line.decode()) return response def assert_is_not_multiprocess(response: TestClientResponse) -> None: assert response.status_code == 200 assert b"Multiprocess" not in response.content assert b"# HELP process_cpu_seconds_total" in response.content def assert_request_count( expected: float, name: str = "http_request_duration_seconds_count", handler: str = "/", method: str = "GET", status: str = "2xx", ) -> None: result = REGISTRY.get_sample_value( name, {"handler": handler, "method": method, "status": status} ) print( ( f"{name} handler={handler} method={method} status={status} " f"result={result} expected={expected}" ) ) assert result == expected assert result + 1.0 != expected # ------------------------------------------------------------------------------ # Tests def test_app(): app = create_app() client = TestClient(app) response = get_response(client, "/") assert response.status_code == 200 assert b"Hello World!" 
in response.content response = get_response(client, "/always_error") assert response.status_code == 404 assert b"Not really error" in response.content response = get_response(client, "/always_error_httpstatus_enum") assert response.status_code == 404 assert b"Not really an error" in response.content response = get_response(client, "/items/678?q=43243") assert response.status_code == 200 assert b"678" in response.content response = get_response(client, "/items/hallo") assert response.status_code == 422 assert b"integer" in response.content response = get_response(client, "/just_another_endpoint") assert response.status_code == 200 assert b"Green" in response.content def test_metrics_endpoint_availability(): app = create_app() Instrumentator(excluded_handlers=["/metrics"]).add(metrics.latency()).instrument(app) expose_metrics(app) client = TestClient(app) get_response(client, "/") get_response(client, "/") response = get_response(client, "/metrics") assert_is_not_multiprocess(response) assert_request_count(2) # ------------------------------------------------------------------------------ # Test gzip def test_gzip_accepted(): app = create_app() Instrumentator().instrument(app).expose(app, should_gzip=True) client = TestClient(app) get_response(client, "/") get_response(client, "/") response = get_response(client, "/metrics") assert response.headers["Content-Encoding"] == "gzip" assert int(response.headers["Content-Length"]) <= 2000 def test_gzip_not_accepted(): app = create_app() Instrumentator().instrument(app).expose(app, should_gzip=False) client = TestClient(app) get_response(client, "/") get_response(client, "/") response = get_response(client, "/metrics") assert response.headers.get("Content-Encoding") is None assert int(response.headers["Content-Length"]) >= 2000 # ------------------------------------------------------------------------------ # Test metric name def test_default_metric_name(): app = create_app() Instrumentator(excluded_handlers=["/metrics"]).add(metrics.latency()).instrument(app) expose_metrics(app) client = TestClient(app) get_response(client, "/") response = get_response(client, "/metrics") assert_is_not_multiprocess(response) assert_request_count(1) assert b"http_request_duration_seconds" in response.content def test_default_without_add(): app = create_app() Instrumentator(excluded_handlers=["/metrics"]).add(metrics.latency()).instrument(app) expose_metrics(app) client = TestClient(app) get_response(client, "/") response = get_response(client, "/metrics") assert_is_not_multiprocess(response) assert_request_count(1) assert b"http_request_duration_seconds" in response.content def test_custom_metric_name(): app = create_app() Instrumentator(excluded_handlers=["/metrics"]).add( metrics.latency(metric_name="fastapi_latency") ).instrument(app) expose_metrics(app) client = TestClient(app) get_response(client, "/") response = get_response(client, "/metrics") assert_is_not_multiprocess(response) assert_request_count(1, name="fastapi_latency_count") assert b"fastapi_latency" in response.content assert b"http_request_duration_seconds" not in response.content # ------------------------------------------------------------------------------ # Test grouping of status codes. 
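# With should_group_status_codes enabled (the default), individual codes are
# collapsed into classes by the middleware, e.g. 200 -> "2xx" and 404 -> "4xx".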
def test_grouped_status_codes(): app = create_app() Instrumentator(excluded_handlers=["/metrics"]).add(metrics.latency()).instrument(app) expose_metrics(app) client = TestClient(app) get_response(client, "/") response = get_response(client, "/metrics") assert_is_not_multiprocess(response) assert_request_count(1) assert b'status="2xx"' in response.content assert b'status="200"' not in response.content def test_grouped_status_codes_with_enumeration(): app = create_app() Instrumentator(excluded_handlers=["/metrics"]).add(metrics.latency()).instrument(app) expose_metrics(app) client = TestClient(app) get_response(client, "/always_error_httpstatus_enum") response = get_response(client, "/metrics") assert_is_not_multiprocess(response) assert b'status="4xx"' in response.content assert b'status="H00"' not in response.content def test_ungrouped_status_codes(): app = create_app() Instrumentator(should_group_status_codes=False).add(metrics.latency()).instrument(app) expose_metrics(app) client = TestClient(app) get_response(client, "/") response = get_response(client, "/metrics") assert_is_not_multiprocess(response) assert_request_count(1, status="200") assert b'status="2xx"' not in response.content assert b'status="200"' in response.content # ------------------------------------------------------------------------------ # Test handling of templates / untemplated. def test_ignore_untemplated(): app = create_app() Instrumentator(should_ignore_untemplated=True).add(metrics.latency()).instrument(app) expose_metrics(app) client = TestClient(app) get_response(client, "/") get_response(client, "/items/678?q=43243") get_response(client, "/does_not_exist") response = get_response(client, "/metrics") assert_is_not_multiprocess(response) assert_request_count(1) assert b'handler="/does_not_exist"' not in response.content assert b'handler="none"' not in response.content def test_dont_ignore_untemplated_ungrouped(): app = create_app() Instrumentator(should_ignore_untemplated=False, should_group_untemplated=False).add( metrics.latency() ).instrument(app) expose_metrics(app) client = TestClient(app) get_response(client, "/") get_response(client, "/") get_response(client, "/items/678?q=43243") get_response(client, "/does_not_exist") response = get_response(client, "/metrics") assert_is_not_multiprocess(response) assert_request_count(2) assert b'handler="/does_not_exist"' in response.content assert b'handler="none"' not in response.content def test_grouping_untemplated(): app = create_app() Instrumentator(excluded_handlers=["/metrics"]).add(metrics.latency()).instrument(app) expose_metrics(app) client = TestClient(app) get_response(client, "/") get_response(client, "/items/678?q=43243") get_response(client, "/does_not_exist") response = get_response(client, "/metrics") assert_is_not_multiprocess(response) assert_request_count(1) assert b'handler="/does_not_exist"' not in response.content assert b'handler="none"' in response.content def test_excluding_handlers(): app = create_app() Instrumentator(excluded_handlers=["fefefwefwe"]).add(metrics.latency()).instrument( app ) expose_metrics(app) client = TestClient(app) get_response(client, "/") get_response(client, "/metrics") get_response(client, "/fefefwefwe") response = get_response(client, "/metrics") assert_is_not_multiprocess(response) assert_request_count(1) assert b'handler="/metrics"' in response.content assert b'handler="/fefefwefwe"' not in response.content assert b'handler="none"' not in response.content def test_excluding_handlers_regex(): app = create_app() 
Instrumentator(excluded_handlers=["^/$"]).add(metrics.latency()).instrument(app) expose_metrics(app) client = TestClient(app) get_response(client, "/ignore") get_response(client, "/ignore") get_response(client, "/") response = get_response(client, "/metrics") assert b'handler="/"' not in response.content assert b'handler="none"' not in response.content assert b'handler="/ignore"' in response.content def test_excluded_handlers_none(): app = create_app() exporter = Instrumentator(excluded_handlers=[]).add(metrics.latency()).instrument(app) expose_metrics(app) assert len(exporter.excluded_handlers) == 0 assert isinstance(exporter.excluded_handlers, list) assert exporter.excluded_handlers is not None # ------------------------------------------------------------------------------ # Test bucket without infinity. def test_bucket_without_inf(): app = create_app() Instrumentator(excluded_handlers=["/metrics"]).add( metrics.latency( buckets=( 1, 2, 3, ) ) ).instrument(app).expose(app) client = TestClient(app) get_response(client, "/") response = get_response(client, "/metrics") assert_is_not_multiprocess(response) assert b"http_request_duration_seconds" in response.content # ------------------------------------------------------------------------------ # Test env var option. def test_should_respect_env_var_existence_exists(): app = create_app() Instrumentator(should_respect_env_var=True, env_var_name="eoioerwjioGFIUONEIO").add( metrics.latency() ).instrument(app).expose(app) client = TestClient(app) get_response(client, "/") response = get_response(client, "/metrics") assert response.status_code == 404 def test_should_respect_env_var_existence_not_exists(): app = create_app() os.environ["eoioerwjioGFIUONEIO"] = "true" Instrumentator(should_respect_env_var=True, env_var_name="eoioerwjioGFIUONEIO").add( metrics.latency() ).instrument(app).expose(app) client = TestClient(app) get_response(client, "/") response = get_response(client, "/metrics") assert response.status_code == 200 # ------------------------------------------------------------------------------ # Test decimal rounding. 
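# calc_entropy below sums the absolute differences between neighboring digits.
# The tests feed it the decimal digits after the fourth decimal place of the
# recorded latency sum: unrounded measurements still have noisy digits there
# (high score), while should_round_latency_decimals=True cuts them off
# (score close to zero).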
def calc_entropy(decimal_str: str): decimals = [int(x) for x in decimal_str] print(decimals) return sum(abs(decimals[i] - decimals[i - 1]) for i in range(len(decimals)) if i != 0) def test_entropy(): assert calc_entropy([1, 0, 0, 4, 0]) == 9 def test_default_no_rounding(): app = create_app() Instrumentator(excluded_handlers=["/metrics"]).add( metrics.latency( buckets=( 1, 2, 3, ) ) ).instrument(app).expose(app) client = TestClient(app) get_response(client, "/") get_response(client, "/") get_response(client, "/") _ = get_response(client, "/metrics") result = REGISTRY.get_sample_value( "http_request_duration_seconds_sum", {"handler": "/", "method": "GET", "status": "2xx"}, ) entropy = calc_entropy(str(result).split(".")[1][4:]) assert entropy > 15 def test_rounding(): app = create_app() Instrumentator(should_round_latency_decimals=True).add( metrics.latency( buckets=( 1, 2, 3, ) ) ).instrument(app).expose(app) client = TestClient(app) get_response(client, "/") get_response(client, "/") get_response(client, "/") _ = get_response(client, "/metrics") result = REGISTRY.get_sample_value( "http_request_duration_seconds_sum", {"handler": "/", "method": "GET", "status": "2xx"}, ) entropy = calc_entropy(str(result).split(".")[1][4:]) assert entropy < 10 def test_custom_async_instrumentation(): app = create_app() client = TestClient(app) sync_metric = Info("sync_metric", "Documentation") async_metric = Info("async_metric", "Documentation") async def get_value(): return "X_ASYNC_X" async def async_function(x): value = await get_value() async_metric.info({"type": value}) def sync_function(_): sync_metric.info({"type": "X_SYNC_X"}) instrumentator = Instrumentator() instrumentator.add(sync_function) instrumentator.add(async_function) instrumentator.instrument(app).expose(app) get_response(client, "/") get_response(client, "/metrics") result_async = REGISTRY.get_sample_value( "async_metric_info", {"type": "X_ASYNC_X"}, ) assert result_async > 0 result_sync = REGISTRY.get_sample_value( "sync_metric_info", {"type": "X_SYNC_X"}, ) assert result_sync > 0 prometheus-fastapi-instrumentator-7.1.0/tests/test_instrumentator_expose.py000066400000000000000000000043251476661573700277210ustar00rootroot00000000000000from fastapi import FastAPI from prometheus_client import REGISTRY from requests import Response as TestClientResponse from starlette.testclient import TestClient from prometheus_fastapi_instrumentator import Instrumentator # ------------------------------------------------------------------------------ # Setup def create_app() -> FastAPI: app = FastAPI() # Unregister all collectors. collectors = list(REGISTRY._collector_to_names.keys()) print(f"before unregister collectors={collectors}") for collector in collectors: REGISTRY.unregister(collector) print(f"after unregister collectors={list(REGISTRY._collector_to_names.keys())}") # Import default collectors. from prometheus_client import gc_collector, platform_collector, process_collector # Re-register default collectors. process_collector.ProcessCollector() platform_collector.PlatformCollector() gc_collector.GCCollector() print(f"after re-register collectors={list(REGISTRY._collector_to_names.keys())}") @app.get("/") def read_root(): return "Hello World!" 
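    # A single root route is enough for the expose tests below.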
return app def get_response(client: TestClient, path: str) -> TestClientResponse: response = client.get(path) print(f"\nResponse path='{path}' status='{response.status_code}':\n") for line in response.content.split(b"\n"): print(line.decode()) return response # ------------------------------------------------------------------------------ # Tests def test_expose_defaults(): app = create_app() Instrumentator().instrument(app).expose(app) client = TestClient(app) get_response(client, "/") response = get_response(client, "/metrics") assert response.status_code == 200 assert b"http_request" in response.content def test_expose_custom_path(): app = create_app() Instrumentator().instrument(app).expose(app, endpoint="/custom_metrics") client = TestClient(app) get_response(client, "/") response = get_response(client, "/metrics") assert response.status_code == 404 assert b"http_request" not in response.content response = get_response(client, "/custom_metrics") assert response.status_code == 200 assert b"http_request" in response.content prometheus-fastapi-instrumentator-7.1.0/tests/test_instrumentator_mounted_apps.py000066400000000000000000000057771476661573700311300ustar00rootroot00000000000000from fastapi import FastAPI from helpers import utils from prometheus_client import Counter from starlette.testclient import TestClient from prometheus_fastapi_instrumentator import Instrumentator, metrics def test_mounted_app_with_app(): """Tests handling of mounted app when root app is instrumented.""" utils.reset_collectors() app = FastAPI() @app.get("/app") def read_main(): return {"message": "Hello World from main app"} subapp = FastAPI() @subapp.get("/sub") def read_sub(): return {"message": "Hello World from sub API"} app.mount("/subapi", subapp) metric = Counter("test", "Test.", ("modified_handler", "handler")) def instrumentation(info: metrics.Info) -> None: metric.labels( modified_handler=info.modified_handler, handler=str(info.request.url) ).inc() Instrumentator().add(instrumentation).instrument(app).expose(app) client = TestClient(app) for url in ["/subapi/sub", "/subapi", "/app"]: print(f"GET {url} " + client.get(url).content.decode()) response = client.get("/metrics").content.decode() print("GET /metrics\n" + response) want = '{handler="http://testserver/subapi/sub",modified_handler="/subapi/sub"} 1.0\n' assert want in response want = '{handler="http://testserver/subapi",modified_handler="none"} 1.0\n' assert want in response want = '{handler="http://testserver/subapi/",modified_handler="none"} 1.0\n' assert want in response want = '{handler="http://testserver/app",modified_handler="/app"} 1.0\n' assert want in response def test_mounted_app_instrumented_only(): """Tests case when mounted app is instrumented and not root app.""" utils.reset_collectors() app = FastAPI() @app.get("/app") def read_main(): return {"message": "Hello World from main app"} subapp = FastAPI() @subapp.get("/sub") def read_sub(): return {"message": "Hello World from sub API"} app.mount("/subapi", subapp) metric = Counter("test", "Test.", ("modified_handler", "handler")) def instrumentation(info: metrics.Info) -> None: metric.labels( modified_handler=info.modified_handler, handler=str(info.request.url) ).inc() Instrumentator().add(instrumentation).instrument(subapp).expose(app) client = TestClient(app) for url in ["/subapi/sub", "/subapi", "/app"]: print(f"GET {url} " + client.get(url).content.decode()) response = client.get("/metrics").content.decode() print("GET /metrics\n" + response) # Note the modified_handler. 
# It is relative to the instrumented subapp.
    want = '{handler="http://testserver/subapi/sub",modified_handler="/sub"} 1.0\n'
    assert want in response

    want = '{handler="http://testserver/subapi/",modified_handler="none"} 1.0\n'
    assert want in response

    want = '{handler="http://testserver/subapi"'
    assert want not in response

    want = '{handler="http://testserver/app"'
    assert want not in response
prometheus-fastapi-instrumentator-7.1.0/tests/test_instrumentator_multiple_apps.py000066400000000000000000000120451476661573700312720ustar00rootroot00000000000000from fastapi import FastAPI
from prometheus_client import CollectorRegistry, Counter
from starlette.testclient import TestClient

from prometheus_fastapi_instrumentator import Instrumentator, metrics


def test_multiple_apps_custom_registry():
    """
    Tests instrumentation of multiple apps in combination with middlewares
    where each app gets its own registry. In addition it tests that custom
    metrics are not shared between apps' metrics endpoints.
    """

    app1 = FastAPI()
    app2 = FastAPI()

    @app1.get("/dummy")
    def read_dummy_app1():
        return "Hello from app1"

    @app2.get("/dummy")
    def read_dummy_app2():
        return "Hello from app2"

    registry1 = CollectorRegistry(auto_describe=True)
    registry2 = CollectorRegistry(auto_describe=True)

    Instrumentator(registry=registry1).instrument(app1).expose(app1)
    Instrumentator(registry=registry2).instrument(app2).expose(app2)

    Counter("test_app1_only", "In app1 metrics only.", registry=registry1).inc()

    # Add middleware after adding the instrumentator, this triggers another
    # app.build_middleware_stack(), which creates the middleware again, but it
    # will use the same Prometheus registry again, which could try to create the
    # same metrics again causing duplication errors.
    @app1.middleware("http")
    @app2.middleware("http")
    async def dummy_middleware(request, call_next):
        response = await call_next(request)
        return response

    client1 = TestClient(app1)
    client2 = TestClient(app2)

    client1.get("/dummy")
    client2.get("/dummy")

    metrics1 = client1.get("/metrics").content.decode()
    metrics2 = client2.get("/metrics").content.decode()

    print("app1 GET /metrics\n" + metrics1)
    print("app2 GET /metrics\n" + metrics2)

    want = 'http_requests_total{handler="/dummy",method="GET",status="2xx"} 1.0\n'
    assert want in metrics1
    assert want in metrics2

    want = "test_app1_only_total 1.0\n"
    assert want in metrics1
    assert want not in metrics2


def test_multiple_apps_expose_defaults():
    """Tests instrumentation of multiple apps in combination with middlewares."""

    app1 = FastAPI()
    app2 = FastAPI()

    @app1.get("/dummy")
    def read_dummy_app1():
        return "Hello from app1"

    @app2.get("/dummy")
    def read_dummy_app2():
        return "Hello from app2"

    Instrumentator().instrument(app1).expose(app1)
    Instrumentator().instrument(app2).expose(app2)

    # Add middleware after adding the instrumentator, this triggers another
    # app.build_middleware_stack(), which creates the middleware again, but it
    # will use the same Prometheus registry again, which could try to create the
    # same metrics again causing duplication errors.
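# NOTE: Illustrative sketch, not part of the test suite. It condenses the
# pattern exercised by test_multiple_apps_custom_registry above: give each
# app its own CollectorRegistry so several instrumented apps can live in one
# process without duplicated-timeseries errors. App and route names are made
# up for illustration.
def build_isolated_app():
    from fastapi import FastAPI
    from prometheus_client import CollectorRegistry

    from prometheus_fastapi_instrumentator import Instrumentator

    app = FastAPI()

    @app.get("/hello")
    def read_hello():
        return "hello"

    # A dedicated registry keeps this app's metrics separate from the
    # process-wide default REGISTRY and from any other instrumented app.
    registry = CollectorRegistry(auto_describe=True)
    Instrumentator(registry=registry).instrument(app).expose(app)
    return app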
@app1.middleware("http") @app2.middleware("http") async def dummy_middleware(request, call_next): response = await call_next(request) return response client1 = TestClient(app1) client2 = TestClient(app2) client1.get("/dummy") client2.get("/dummy") metrics1 = client1.get("/metrics").content.decode() metrics2 = client2.get("/metrics").content.decode() print("app1 GET /metrics\n" + metrics1) print("app2 GET /metrics\n" + metrics2) want = 'http_requests_total{handler="/dummy",method="GET",status="2xx"} 1.0\n' assert want in metrics1 assert want in metrics2 def test_multiple_apps_expose_full(): """Tests instrumentation of multiple apps in combination with middlewares.""" app1 = FastAPI() app2 = FastAPI() @app1.get("/dummy") def read_dummy_app1(): return "Hello from app1" @app2.get("/dummy") def read_dummy_app2(): return "Hello from app2" Instrumentator().add( metrics.request_size(), metrics.requests(), metrics.combined_size(), metrics.response_size(), metrics.latency(), metrics.default(), ).instrument(app1).expose(app1) Instrumentator().add( metrics.request_size(), metrics.requests(), metrics.combined_size(), metrics.response_size(), metrics.latency(), metrics.default(), ).instrument(app2).expose(app2) # Add middleware after adding the instrumentator, this triggers another # app.build_middleware_stack(), which creates the middleware again, but it # will use the same Prometheus registry again, which could try to create the # same metrics again causing duplication errors. @app1.middleware("http") @app2.middleware("http") async def dummy_middleware(request, call_next): response = await call_next(request) return response client1 = TestClient(app1) client2 = TestClient(app2) client1.get("/dummy") client2.get("/dummy") metrics1 = client1.get("/metrics").content.decode() metrics2 = client2.get("/metrics").content.decode() print("app1 GET /metrics\n" + metrics1) print("app2 GET /metrics\n" + metrics2) want = 'http_requests_total{handler="/dummy",method="GET",status="2xx"} 1.0\n' assert want in metrics1 assert want in metrics2 prometheus-fastapi-instrumentator-7.1.0/tests/test_instrumentator_multiproc.py000066400000000000000000000151571476661573700304410ustar00rootroot00000000000000""" Testing things in multi process mode is super weird, at least to me. I don't understand how the registries in the Prometheus client library work once multi process mode is activated. For now I seem to get by trying to reset collectors, even though it does not fully reset everything. """ import asyncio from datetime import datetime import pytest from fastapi import FastAPI from helpers import utils from httpx import AsyncClient from starlette.testclient import TestClient from prometheus_fastapi_instrumentator import Instrumentator @pytest.mark.skipif( utils.is_prometheus_multiproc_valid(), reason="Environment variable must be not set in parent process.", ) def test_multiproc_dir_not_found(monkeypatch): """Tests failing early if env var is set but dir does not exist.""" monkeypatch.setenv("PROMETHEUS_MULTIPROC_DIR", "/DOES/NOT/EXIST") with pytest.raises(ValueError, match="not a directory"): Instrumentator().instrument(FastAPI()) @pytest.mark.skipif( utils.is_prometheus_multiproc_valid(), reason="Environment variable must be not set in parent process.", ) def test_multiproc_expose_no_dir(monkeypatch): """ Tests that metrics endpoint will raise exception if dir does not exist. Method expose that contains closure does not check for existence. 
""" app = FastAPI() instrumentator = Instrumentator() monkeypatch.setenv("PROMETHEUS_MULTIPROC_DIR", "/DOES/NOT/EXIST") instrumentator.instrument(app).expose(app) with pytest.raises(ValueError, match="env PROMETHEUS_MULTIPROC_DIR"): TestClient(app).get("/metrics") @pytest.mark.skipif( utils.is_prometheus_multiproc_valid(), reason="Environment variable must be not set in parent process.", ) def test_multiproc_anti_test(monkeypatch, tmp_path): """ Shows weird behavior of Prometheus client library. If env var is monkey patched, no errors whatsoever occur, but the metrics endpoint returns nothing. Also the internal registry contains no metrics. The moment I run this test with the env var set manually in the parent process, things start to work. """ app = FastAPI() monkeypatch.setenv("PROMETHEUS_MULTIPROC_DIR", str(tmp_path)) Instrumentator().instrument(app).expose(app) client = TestClient(app) client.get("/dummy") metrics_response = client.get("/metrics") assert metrics_response.status_code == 200 metrics_content = metrics_response.content.decode() print("GET /metrics\n" + metrics_content) assert len(metrics_content) == 0 @pytest.mark.skipif( not utils.is_prometheus_multiproc_valid(), reason="Environment variable must be set in parent process.", ) def test_multiproc_no_default_stuff(): """Tests that multi process mode is activated. It is checked indirectly by asserting that metrics that are not supported in multi process mode are not exposed by Prometheus. """ assert utils.is_prometheus_multiproc_valid() utils.reset_collectors() app = FastAPI() Instrumentator().instrument(app).expose(app) client = TestClient(app) metrics_response = client.get("/metrics") assert metrics_response.status_code == 200 metrics_content = metrics_response.content.decode() print("GET /metrics\n" + metrics_content) assert "process_open_fds" not in metrics_content @pytest.mark.skipif( not utils.is_prometheus_multiproc_valid(), reason="Environment variable must be set in parent process.", ) def test_multiproc_correct_count(): """Tests that counter metric has expected value with multi process mode.""" assert utils.is_prometheus_multiproc_valid() utils.reset_collectors() app = FastAPI() Instrumentator().instrument(app).expose(app) client = TestClient(app) @app.get("/ping") def get_ping(): return "pong" client.get("/ping") client.get("/ping") client.get("/ping") metrics_response = client.get("/metrics") assert metrics_response.status_code == 200 metrics_content = metrics_response.content.decode() print("GET /metrics\n" + metrics_content) want = 'http_requests_total{handler="/ping",method="GET",status="2xx"} 3.0\n' assert want in metrics_content @pytest.mark.skipif( not utils.is_prometheus_multiproc_valid(), reason="Environment variable must be set in parent process.", ) async def test_multiproc_inprogress_metric(): """ Tests that in-progress metric is counting correctly in multi process mode. Depends on sleeping to get metrics while other requests are still running. 
""" assert utils.is_prometheus_multiproc_valid() utils.reset_collectors() app = FastAPI() @app.get("/sleep") async def get_sleep(seconds: float): await asyncio.sleep(seconds) return f"Slept for {seconds}s" Instrumentator( should_instrument_requests_inprogress=True, inprogress_labels=True ).instrument(app).expose(app) async with AsyncClient(app=app, base_url="http://test") as ac: tasks = [] for i in range(3): tasks.append(asyncio.create_task(ac.get("/sleep?seconds=1"))) print("1:", datetime.utcnow()) await asyncio.sleep(0.5) print("2:", datetime.utcnow()) metrics_response = await ac.get("/metrics") await asyncio.gather(*tasks) assert metrics_response.status_code == 200 metrics_content = metrics_response.content.decode() print("3:", datetime.utcnow()) print("GET /metrics\n" + metrics_content) assert ( 'http_requests_inprogress{handler="/sleep",method="GET"} 3.0' in metrics_content ) assert ( 'http_requests_inprogress{handler="/metrics",method="GET"} 1.0' in metrics_content ) @pytest.mark.skipif( not utils.is_prometheus_multiproc_valid(), reason="Environment variable must be set in parent process.", ) def test_multiproc_no_duplicates(): """ Tests that metrics endpoint does not contain duplicate metrics. According to documentation of Prometheus client library this can happen if metrics endpoint is setup incorrectly and multi process mode is activated. Can be done by having an endpoint that uses registry created outside of function body. """ assert utils.is_prometheus_multiproc_valid() utils.reset_collectors() app = FastAPI() Instrumentator().instrument(app).expose(app) client = TestClient(app) metrics_response = client.get("/metrics") assert metrics_response.status_code == 200 metrics_content = metrics_response.content.decode() print("GET /metrics\n" + metrics_content) substring = "# TYPE http_requests_total counter" assert metrics_content.count(substring) == 1 prometheus-fastapi-instrumentator-7.1.0/tests/test_markers.py000066400000000000000000000001341476661573700246760ustar00rootroot00000000000000import pytest @pytest.mark.slow def test_slow(): pass def test_not_slow(): pass prometheus-fastapi-instrumentator-7.1.0/tests/test_metrics.py000066400000000000000000000445431476661573700247140ustar00rootroot00000000000000from typing import Any, Dict, Optional import pytest from fastapi import FastAPI, HTTPException, responses from prometheus_client import REGISTRY, Histogram from requests import Response as TestClientResponse from starlette.testclient import TestClient from prometheus_fastapi_instrumentator import Instrumentator, metrics # ------------------------------------------------------------------------------ # Setup CUSTOM_METRICS = ["http_request_duration_seconds"] def create_app() -> FastAPI: app = FastAPI() # Unregister all collectors. collectors = list(REGISTRY._collector_to_names.keys()) print(f"before unregister collectors={collectors}") for collector in collectors: REGISTRY.unregister(collector) print(f"after unregister collectors={list(REGISTRY._collector_to_names.keys())}") # Import default collectors. from prometheus_client import gc_collector, platform_collector, process_collector # Re-register default collectors. process_collector.ProcessCollector() platform_collector.PlatformCollector() gc_collector.GCCollector() print(f"after re-register collectors={list(REGISTRY._collector_to_names.keys())}") @app.get("/") def read_root(): return "Hello World!" 
@app.get("/always_error") def read_always_error(): raise HTTPException(status_code=404, detail="Not really error") @app.get("/ignore") def read_ignore(): return "Should be ignored" @app.get("/items/{item_id}") def read_item(item_id: int, q: Optional[str] = None): return {"item_id": item_id, "q": q} @app.get("/just_another_endpoint") def read_just_another_endpoint(): return "Green is my pepper" @app.post("/items") def create_item(item: Dict[Any, Any]): return None @app.get("/runtime_error") def always_error(): raise RuntimeError() return app def get_response(client: TestClient, path: str) -> TestClientResponse: response = client.get(path) print(f"\nResponse path='{path}' status='{response.status_code}':\n") for line in response.content.split(b"\n"): print(line.decode()) return response # ------------------------------------------------------------------------------ # Test helpers / misc def test_is_duplicated_time_series(): error = ValueError("xx Duplicated timeseries in CollectorRegistry: xx") assert metrics._is_duplicated_time_series(error) error = ValueError("xx Duplicated time series in CollectorRegistry: xx") assert metrics._is_duplicated_time_series(error) error = ValueError("xx xx") assert not metrics._is_duplicated_time_series(error) def test_existence_of_attributes(): info = metrics.Info( request=None, response=None, method=None, modified_duration=None, modified_status=None, modified_handler=None, ) assert info.request is None assert info.response is None assert info.method is None assert info.modified_duration is None assert info.modified_status is None assert info.modified_handler is None assert info.modified_duration_without_streaming == 0.0 def test_build_label_attribute_names_all_false(): label_names, info_attribute_names = metrics._build_label_attribute_names( should_include_handler=False, should_include_method=False, should_include_status=False, ) assert label_names == [] assert info_attribute_names == [] def test_build_label_attribute_names_all_true(): label_names, info_attribute_names = metrics._build_label_attribute_names( should_include_handler=True, should_include_method=True, should_include_status=True, ) assert label_names == ["handler", "method", "status"] assert info_attribute_names == [ "modified_handler", "method", "modified_status", ] def test_build_label_attribute_names_mixed(): label_names, info_attribute_names = metrics._build_label_attribute_names( should_include_handler=True, should_include_method=False, should_include_status=True, ) assert label_names == ["handler", "status"] assert info_attribute_names == ["modified_handler", "modified_status"] def test_api_throwing_error(): app = create_app() client = TestClient(app) with pytest.raises(RuntimeError): get_response(client, "/runtime_error") # ------------------------------------------------------------------------------ # request_size def test_request_size_all_labels(): app = create_app() Instrumentator().add(metrics.request_size()).instrument(app) client = TestClient(app) client.request(method="GET", url="/", content="some data") assert ( REGISTRY.get_sample_value( "http_request_size_bytes_sum", {"handler": "/", "method": "GET", "status": "2xx"}, ) == 9 ) def test_request_size_no_labels(): app = create_app() Instrumentator().add( metrics.request_size( should_include_handler=False, should_include_method=False, should_include_status=False, ) ).instrument(app) client = TestClient(app) client.request(method="GET", url="/", content="some data") assert REGISTRY.get_sample_value("http_request_size_bytes_sum", {}) 
== 9 def test_namespace_subsystem(): app = create_app() Instrumentator().add( metrics.request_size( should_include_handler=False, should_include_method=False, should_include_status=False, metric_namespace="namespace", metric_subsystem="subsystem", ) ).instrument(app).expose(app) client = TestClient(app) response = get_response(client, "/metrics") assert b" http_request_size_bytes" not in response.content assert b" namespace_subsystem_http_request_size_bytes" in response.content def test_request_size_no_cl(): app = create_app() Instrumentator(excluded_handlers=["/metrics"]).add(metrics.request_size()).instrument( app ).expose(app) client = TestClient(app) client.get("/") response = get_response(client, "/metrics") assert b"http_request_size_bytes" in response.content assert b"http_request_size_bytes_count{" in response.content # ------------------------------------------------------------------------------ # response_size def test_response_size_all_labels(): app = create_app() Instrumentator().add(metrics.response_size()).instrument(app).expose(app) client = TestClient(app) client.get("/") _ = get_response(client, "/metrics") assert ( REGISTRY.get_sample_value( "http_response_size_bytes_sum", {"handler": "/", "method": "GET", "status": "2xx"}, ) == 14 ) def test_response_size_no_labels(): app = create_app() Instrumentator(excluded_handlers=["/metrics"]).add( metrics.response_size( should_include_handler=False, should_include_method=False, should_include_status=False, ) ).instrument(app).expose(app) client = TestClient(app) client.get("/") _ = get_response(client, "/metrics") assert REGISTRY.get_sample_value("http_response_size_bytes_sum", {}) == 14 def test_response_size_with_runtime_error(): app = create_app() Instrumentator().add(metrics.response_size()).instrument(app).expose(app) client = TestClient(app) try: get_response(client, "/runtime_error") except RuntimeError: pass response = get_response(client, "/metrics") assert ( b'http_response_size_bytes_count{handler="/runtime_error",method="GET",status="5xx"} 1.0' in response.content ) # ------------------------------------------------------------------------------ # combined_size def test_combined_size_all_labels(): app = create_app() Instrumentator().add(metrics.combined_size()).instrument(app).expose(app) client = TestClient(app) client.get("/") _ = get_response(client, "/metrics") assert ( REGISTRY.get_sample_value( "http_combined_size_bytes_sum", {"handler": "/", "method": "GET", "status": "2xx"}, ) == 14 ) def test_combined_size_all_labels_with_data(): app = create_app() Instrumentator().add(metrics.combined_size()).instrument(app).expose(app) client = TestClient(app) client.request(method="GET", url="/", content="fegfgeegeg") _ = get_response(client, "/metrics") assert ( REGISTRY.get_sample_value( "http_combined_size_bytes_sum", {"handler": "/", "method": "GET", "status": "2xx"}, ) == 24 ) def test_combined_size_no_labels(): app = create_app() Instrumentator().add( metrics.combined_size( should_include_handler=False, should_include_method=False, should_include_status=False, ) ).instrument(app) client = TestClient(app) client.get("/") assert REGISTRY.get_sample_value("http_combined_size_bytes_sum", {}) == 14 def test_combined_size_with_runtime_error(): app = create_app() Instrumentator().add(metrics.combined_size()).instrument(app).expose(app) client = TestClient(app) try: get_response(client, "/runtime_error") except RuntimeError: pass response = get_response(client, "/metrics") assert ( 
b'http_combined_size_bytes_count{handler="/runtime_error",method="GET",status="5xx"} 1.0' in response.content ) # ------------------------------------------------------------------------------ # latency def test_latency_all_labels(): app = create_app() Instrumentator().add(metrics.latency()).instrument(app).expose(app) client = TestClient(app) client.get("/") _ = get_response(client, "/metrics") assert ( REGISTRY.get_sample_value( "http_request_duration_seconds_sum", {"handler": "/", "method": "GET", "status": "2xx"}, ) > 0 ) def test_latency_no_labels(): app = create_app() Instrumentator().add( metrics.latency( should_include_handler=False, should_include_method=False, should_include_status=False, ) ).instrument(app).expose(app) client = TestClient(app) client.get("/") _ = get_response(client, "/metrics") assert ( REGISTRY.get_sample_value( "http_request_duration_seconds_sum", {}, ) > 0 ) def test_latency_with_bucket_no_inf(): app = create_app() Instrumentator().add( metrics.latency( should_include_handler=False, should_include_method=False, should_include_status=False, buckets=(1, 2, 3), ) ).instrument(app).expose(app) client = TestClient(app) client.get("/") _ = get_response(client, "/metrics") assert ( REGISTRY.get_sample_value( "http_request_duration_seconds_sum", {}, ) > 0 ) def test_latency_duration_without_streaming(): _ = create_app() app = FastAPI() client = TestClient(app) @app.get("/") def root(): return responses.StreamingResponse(("x" * 1_000 for _ in range(5))) METRIC = Histogram( "http_request_duration_with_streaming_seconds", "x", ) def instrumentation(info: metrics.Info) -> None: METRIC.observe(info.modified_duration) Instrumentator().add( metrics.latency( should_include_handler=False, should_include_method=False, should_include_status=False, should_exclude_streaming_duration=True, ), instrumentation, ).instrument(app).expose(app) client = TestClient(app) client.get("/") _ = get_response(client, "/metrics") assert REGISTRY.get_sample_value( "http_request_duration_seconds_sum", {}, ) < REGISTRY.get_sample_value( "http_request_duration_with_streaming_seconds_sum", {}, ) # ------------------------------------------------------------------------------ # default def test_default(): app = create_app() Instrumentator().add(metrics.default()).instrument(app).expose(app) client = TestClient(app) client.request(method="GET", url="/", content="fefeef") client.request(method="GET", url="/") _ = get_response(client, "/metrics") assert ( REGISTRY.get_sample_value( "http_requests_total", {"handler": "/", "method": "GET", "status": "2xx"}, ) > 0 ) assert ( REGISTRY.get_sample_value( "http_request_size_bytes_sum", {"handler": "/"}, ) > 0 ) assert ( REGISTRY.get_sample_value( "http_response_size_bytes_sum", {"handler": "/"}, ) > 0 ) assert ( REGISTRY.get_sample_value( "http_request_duration_highr_seconds_sum", {}, ) > 0 ) assert ( REGISTRY.get_sample_value( "http_request_duration_seconds_sum", {"handler": "/", "method": "GET"}, ) > 0 ) def test_default_should_only_respect_2xx_for_highr(): app = create_app() Instrumentator(excluded_handlers=["/metrics"]).add( metrics.default(should_only_respect_2xx_for_highr=True) ).instrument(app).expose(app) client = TestClient(app) client.request(method="GET", url="/efefewffe", content="fefeef") client.request(method="GET", url="/ffe04904nfiuo-ni") response = get_response(client, "/metrics") assert b"http_request_duration_highr_seconds_count 0.0" in response.content def test_default_should_not_only_respect_2xx_for_highr(): app = create_app() 
Instrumentator(excluded_handlers=["/metrics"]).add(
        metrics.default(should_only_respect_2xx_for_highr=False)
    ).instrument(app).expose(app)
    client = TestClient(app)

    client.get("/efefewffe")
    client.get("/ffe04904nfiuo-ni")

    response = get_response(client, "/metrics")
    assert b"http_request_duration_highr_seconds_count 0.0" not in response.content
    assert b"http_request_duration_highr_seconds_count 2.0" in response.content


def test_default_with_runtime_error():
    app = create_app()
    Instrumentator().instrument(app).expose(app)
    client = TestClient(app)

    try:
        get_response(client, "/runtime_error")
    except RuntimeError:
        pass

    response = get_response(client, "/metrics")
    assert (
        b'http_request_size_bytes_count{handler="/runtime_error"} 1.0' in response.content
    )


def test_default_duration_without_streaming():
    _ = create_app()
    app = FastAPI()

    @app.get("/")
    def root():
        return responses.StreamingResponse(("x" * 1_000 for _ in range(5)))

    METRIC = Histogram(
        "http_request_duration_with_streaming_seconds", "x", labelnames=["handler"]
    )

    def instrumentation(info: metrics.Info) -> None:
        METRIC.labels(info.modified_handler).observe(info.modified_duration)

    Instrumentator().add(
        metrics.default(should_exclude_streaming_duration=True), instrumentation
    ).instrument(app).expose(app)
    client = TestClient(app)

    client.get("/")

    _ = get_response(client, "/metrics")

    assert REGISTRY.get_sample_value(
        "http_request_duration_with_streaming_seconds_sum",
        {"handler": "/"},
    ) > REGISTRY.get_sample_value(
        "http_request_duration_seconds_sum",
        {"handler": "/", "method": "GET"},
    )


def test_custom_labels():
    app = create_app()
    Instrumentator().add(
        metrics.default(custom_labels={"a_custom_label": "a_custom_value"})
    ).instrument(app).expose(app)
    client = TestClient(app)

    client.request(method="GET", url="/", content="foo")
    client.request(method="GET", url="/")

    _ = get_response(client, "/metrics")

    assert (
        REGISTRY.get_sample_value(
            "http_requests_total",
            {
                "handler": "/",
                "method": "GET",
                "status": "2xx",
                "a_custom_label": "a_custom_value",
            },
        )
        > 0
    )

    assert (
        REGISTRY.get_sample_value(
            "http_request_size_bytes_sum",
            {
                "handler": "/",
                "a_custom_label": "a_custom_value",
            },
        )
        > 0
    )

    assert (
        REGISTRY.get_sample_value(
            "http_response_size_bytes_sum",
            {"handler": "/", "a_custom_label": "a_custom_value"},
        )
        > 0
    )

    assert (
        REGISTRY.get_sample_value(
            "http_request_duration_highr_seconds_sum",
            {},
        )
        > 0
    )

    assert (
        REGISTRY.get_sample_value(
            "http_request_duration_seconds_sum",
            {"handler": "/", "method": "GET", "a_custom_label": "a_custom_value"},
        )
        > 0
    )


# ------------------------------------------------------------------------------
# requests


def test_requests_all_labels():
    app = create_app()
    Instrumentator().add(metrics.requests()).instrument(app).expose(app)
    client = TestClient(app)

    client.get("/")

    _ = get_response(client, "/metrics")

    assert (
        REGISTRY.get_sample_value(
            "http_requests_total",
            {"handler": "/", "method": "GET", "status": "2xx"},
        )
        == 1
    )


def test_requests_no_labels():
    app = create_app()
    Instrumentator().add(
        metrics.requests(
            should_include_handler=False,
            should_include_method=False,
            should_include_status=False,
        )
    ).instrument(app).expose(app)
    client = TestClient(app)

    client.get("/")

    _ = get_response(client, "/metrics")

    assert (
        REGISTRY.get_sample_value(
            "http_requests_total",
            {},
        )
        == 2
    )


def test_request_custom_namespace():
    app = create_app()
    Instrumentator(excluded_handlers=["/metrics"]).instrument(
        app, metric_namespace="namespace", metric_subsystem="example"
    ).expose(app)
    client =
TestClient(app) client.get("/") response = get_response(client, "/metrics") assert ( b"namespace_example_http_request_duration_highr_seconds_bucket" in response.content ) prometheus-fastapi-instrumentator-7.1.0/tests/test_middleware.py000066400000000000000000000121211476661573700253460ustar00rootroot00000000000000from fastapi import FastAPI, responses, status from fastapi.testclient import TestClient from prometheus_fastapi_instrumentator import Instrumentator, metrics def test_info_body_default(): """ Tests that `info.response.body` is empty even if response body is not empty. This is the expected default that can be changed with `body_handlers`. """ app = FastAPI() client = TestClient(app) @app.get("/", response_class=responses.PlainTextResponse) def root(): return "123456789" instrumentation_executed = False def instrumentation(info: metrics.Info) -> None: nonlocal instrumentation_executed instrumentation_executed = True assert len(info.response.body) == 0 Instrumentator().instrument(app).add(instrumentation) client.get("/") assert instrumentation_executed def test_info_body_empty(): """ Tests that `info.response.body` is empty if actual response is also empty. """ app = FastAPI() client = TestClient(app) @app.get("/") def root(): return responses.Response(status_code=status.HTTP_200_OK) instrumentation_executed = False def instrumentation(info: metrics.Info) -> None: nonlocal instrumentation_executed instrumentation_executed = True assert len(info.response.body) == 0 Instrumentator(body_handlers=[r".*"]).instrument(app).add(instrumentation) client.get("/") assert instrumentation_executed def test_info_body_stream_small(): """ Tests that `info.response.body` is correct if small response is streamed. """ app = FastAPI() client = TestClient(app) @app.get("/") def root(): return responses.StreamingResponse((str(num) + "xxx" for num in range(5))) instrumentation_executed = False def instrumentation(info: metrics.Info) -> None: nonlocal instrumentation_executed instrumentation_executed = True assert len(info.response.body) == 20 assert info.response.body.decode() == "0xxx1xxx2xxx3xxx4xxx" Instrumentator(body_handlers=[r".*"]).instrument(app).add(instrumentation) response = client.get("/") assert instrumentation_executed assert len(response.content) == 20 assert response.content.decode() == "0xxx1xxx2xxx3xxx4xxx" def test_info_body_stream_large(): """ Tests that `info.response.body` is correct if large response is streamed. """ app = FastAPI() client = TestClient(app) @app.get("/") def root(): return responses.StreamingResponse(("x" * 1_000_000 for _ in range(5))) instrumentation_executed = False def instrumentation(info: metrics.Info) -> None: nonlocal instrumentation_executed instrumentation_executed = True assert len(info.response.body) == 5_000_000 Instrumentator(body_handlers=[r".*"]).instrument(app).add(instrumentation) response = client.get("/") assert instrumentation_executed assert len(response.content) == 5_000_000 def test_info_body_bulk_small(): """ Tests that `info.response.body` is correct if small response is returned. 
""" app = FastAPI() client = TestClient(app) @app.get("/", response_class=responses.PlainTextResponse) def root(): return "123456789" instrumentation_executed = False def instrumentation(info: metrics.Info) -> None: print(info.response.body) nonlocal instrumentation_executed instrumentation_executed = True assert len(info.response.body) == 9 assert info.response.body == b"123456789" Instrumentator(body_handlers=[r".*"]).instrument(app).add(instrumentation) response = client.get("/") assert instrumentation_executed assert len(response.content) == 9 assert response.content == b"123456789" def test_info_body_bulk_large(): """ Tests that `info.response.body` is correct if large response is returned. """ app = FastAPI() client = TestClient(app) @app.get("/", response_class=responses.PlainTextResponse) def root(): return "x" * 5_000_000 instrumentation_executed = False def instrumentation(info: metrics.Info) -> None: print(info.response.body) nonlocal instrumentation_executed instrumentation_executed = True assert len(info.response.body) == 5_000_000 Instrumentator(body_handlers=[r".*"]).instrument(app).add(instrumentation) response = client.get("/") assert instrumentation_executed assert len(response.content) == 5_000_000 def test_info_body_duration_without_streaming(): app = FastAPI() client = TestClient(app) @app.get("/") def root(): return responses.StreamingResponse(("x" * 1_000 for _ in range(5))) instrumentation_executed = False def instrumentation(info: metrics.Info) -> None: nonlocal instrumentation_executed instrumentation_executed = True assert info.modified_duration_without_streaming < info.modified_duration Instrumentator(body_handlers=[r".*"]).instrument(app).add(instrumentation) client.get("/") assert instrumentation_executed