pax_global_header 0000666 0000000 0000000 00000000064 14762515446 0014530 g ustar 00root root 0000000 0000000 52 comment=9e6cb679dfb56cb82d9a8cc91f956efd72427055
nhairs-python-json-logger-9e6cb67/ 0000775 0000000 0000000 00000000000 14762515446 0017206 5 ustar 00root root 0000000 0000000 nhairs-python-json-logger-9e6cb67/.github/ 0000775 0000000 0000000 00000000000 14762515446 0020546 5 ustar 00root root 0000000 0000000 nhairs-python-json-logger-9e6cb67/.github/workflows/ 0000775 0000000 0000000 00000000000 14762515446 0022603 5 ustar 00root root 0000000 0000000 nhairs-python-json-logger-9e6cb67/.github/workflows/test-suite.yml 0000664 0000000 0000000 00000001334 14762515446 0025435 0 ustar 00root root 0000000 0000000 name: Test python-json-logger
on:
push:
branches:
- main
pull_request:
branches:
- main
jobs:
lint:
name: "Python Lint"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: astral-sh/setup-uv@v3
- name: Lint with tox
run: uvx tox -e lint
test:
name: "Python Test ${{ matrix.os }}"
needs: [lint]
runs-on: "${{ matrix.os }}"
strategy:
fail-fast: false # allow tests to run on all platforms
matrix:
os:
- ubuntu-latest
- windows-latest
- macos-latest
steps:
- uses: actions/checkout@v4
- uses: astral-sh/setup-uv@v3
- name: Test with tox
run: uvx tox
nhairs-python-json-logger-9e6cb67/.gitignore 0000664 0000000 0000000 00000000246 14762515446 0021200 0 ustar 00root root 0000000 0000000 *.pyc
*.swp
build
dist
dist_uploaded
*.egg-info
# Tests and validation
.tox/
.mypy_cache
# Python's venv
.env
.venv
env
# IDE
.vscode
.idea
# generated docs
site
nhairs-python-json-logger-9e6cb67/CODE_OF_CONDUCT.md 0000777 0000000 0000000 00000000000 14762515446 0025761 2docs/contributing.md ustar 00root root 0000000 0000000 nhairs-python-json-logger-9e6cb67/LICENSE 0000664 0000000 0000000 00000002461 14762515446 0020216 0 ustar 00root root 0000000 0000000 Copyright (c) 2011, Zakaria Zajac and the python-json-logger Contributors
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
nhairs-python-json-logger-9e6cb67/MANIFEST.in 0000664 0000000 0000000 00000000077 14762515446 0020750 0 ustar 00root root 0000000 0000000 include LICENSE
include README.md
recursive-include tests *.py
nhairs-python-json-logger-9e6cb67/NOTICE 0000664 0000000 0000000 00000000321 14762515446 0020106 0 ustar 00root root 0000000 0000000 This software includes the following licenced software:
- mkdocstrings-python
Copyright (c) 2021, Timothée Mazzucotelli
Licenced under ISC Licence
Source: https://github.com/mkdocstrings/python
nhairs-python-json-logger-9e6cb67/README.md 0000664 0000000 0000000 00000003356 14762515446 0020474 0 ustar 00root root 0000000 0000000 [](https://pypi.python.org/pypi/python-json-logger/)
[](https://pypi.python.org/pypi/python-json-logger/)
[](https://pypi.python.org/pypi/python-json-logger/)
[](https://github.com/nhairs/python-json-logger)
[](https://github.com/nhairs/python-json-logger)

#
# Python JSON Logger
Python JSON Logger enables you produce JSON logs when using Python's `logging` package.
JSON logs are machine readable allowing for much easier parsing and ingestion into log aggregation tools.
## Documentation
- [Documentation](https://nhairs.github.io/python-json-logger/latest/)
- [Quickstart Guide](https://nhairs.github.io/python-json-logger/latest/quickstart/)
- [Change Log](https://nhairs.github.io/python-json-logger/latest/changelog/)
- [Contributing](https://nhairs.github.io/python-json-logger/latest/contributing/)
## License
This project is licensed under the BSD 2 Clause License - see [`LICENSE`](https://github.com/nhairs/python-json-logger/blob/main/LICENSE)
## Authors and Maintainers
This project was originally authored by [Zakaria Zajac](https://github.com/madzak) and our wonderful [contributors](https://github.com/nhairs/python-json-logger/graphs/contributors)
It is currently maintained by:
- [Nicholas Hairs](https://github.com/nhairs) - [nicholashairs.com](https://www.nicholashairs.com)
nhairs-python-json-logger-9e6cb67/SECURITY.md 0000777 0000000 0000000 00000000000 14762515446 0024113 2docs/security.md ustar 00root root 0000000 0000000 nhairs-python-json-logger-9e6cb67/docs/ 0000775 0000000 0000000 00000000000 14762515446 0020136 5 ustar 00root root 0000000 0000000 nhairs-python-json-logger-9e6cb67/docs/changelog.md 0000664 0000000 0000000 00000022163 14762515446 0022413 0 ustar 00root root 0000000 0000000 # Change Log
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [3.3.0](https://github.com/nhairs/python-json-logger/compare/v3.2.1...v3.3.0) - 2025-03-06
### Added
- `exc_info_as_array` and `stack_info_as_array` options are added to `pythonjsonlogger.core.BaseJsonFormatter` allowing both to be encoded as list of lines instead of a single multi-line string. [#35](https://github.com/nhairs/python-json-logger/issues/35)
### Security
- Remove `msgspec-python313-pre` from `dev` dependencies preventing potential RCE. Details: [GHSA-wmxh-pxcx-9w24](https://github.com/nhairs/python-json-logger/security/advisories/GHSA-wmxh-pxcx-9w24#advisory-comment-121307)
Thanks @1hakusai1 and @omnigodz
## [3.2.1](https://github.com/nhairs/python-json-logger/compare/v3.2.0...v3.2.1) - 2024-12-16
### Fixed
- Import error on `import pythonjsonlogger.jsonlogger` [#29](https://github.com/nhairs/python-json-logger/issues/29)
## [3.2.0](https://github.com/nhairs/python-json-logger/compare/v3.1.0...v3.2.0) - 2024-12-11
### Changed
- `pythonjsonlogger.[ORJSON,MSGSPEC]_AVAILABLE` no longer imports the respective package when determining availability.
- `pythonjsonlogger.[orjson,msgspec]` now throws a `pythonjsonlogger.exception.MissingPackageError` when required libraries are not available. These contain more information about what is missing whilst still being an `ImportError`.
- `defaults` parameter is no longer ignored and now conforms to the standard library. Setting a defaults dictionary will add the specified keys if the those keys do not exist in a record or weren't passed by the `extra` parameter when logging a message.
- `typing_extensions` is only installed on Python version < 3.10.
- Support Python 3.13
- `msgspec` has only been tested against pre-release versions.
Thanks @cjwatson and @bharel
## [3.1.0](https://github.com/nhairs/python-json-logger/compare/v3.0.1...v3.1.0) - 2023-05-28
This splits common funcitonality out to allow supporting other JSON encoders. Although this is a large refactor, backwards compatibility has been maintained.
### Added
- `pythonjsonlogger.core` - more details below.
- `pythonjsonlogger.defaults` module that provides many functions for handling unsupported types.
- Orjson encoder support via `pythonjsonlogger.orjson.OrjsonFormatter` with the following additions:
- bytes are URL safe base64 encoded.
- Exceptions are "pretty printed" using the exception name and message e.g. `"ValueError: bad value passed"`
- Enum values use their value, Enum classes now return all values as a list.
- Tracebacks are supported
- Classes (aka types) are support
- Will fallback on `__str__` if available, else `__repr__` if available, else will use `__could_not_encode__`
- MsgSpec encoder support via `pythonjsonlogger.msgspec.MsgspecFormatter` with the following additions:
- Exceptions are "pretty printed" using the exception name and message e.g. `"ValueError: bad value passed"`
- Enum classes now return all values as a list.
- Tracebacks are supported
- Classes (aka types) are support
- Will fallback on `__str__` if available, else `__repr__` if available, else will use `__could_not_encode__`
- Note: msgspec only supprts enum values of type `int` or `str` [jcrist/msgspec#680](https://github.com/jcrist/msgspec/issues/680)
### Changed
- `pythonjsonlogger.jsonlogger` has been moved to `pythonjsonlogger.json` with core functionality moved to `pythonjsonlogger.core`.
- `pythonjsonlogger.core.BaseJsonFormatter` properly supports all `logging.Formatter` arguments:
- `fmt` is unchanged.
- `datefmt` is unchanged.
- `style` can now support non-standard arguments by setting `validate` to `False`
- `validate` allows non-standard `style` arguments or prevents calling `validate` on standard `style` arguments.
- `default` is ignored.
- `pythonjsonlogger.json.JsonFormatter` default encodings changed:
- bytes are URL safe base64 encoded.
- Exception formatting detected using `BaseException` instead of `Exception`. Now "pretty prints" the exception using the exception name and message e.g. `"ValueError: bad value passed"`
- Dataclasses are now supported
- Enum values now use their value, Enum classes now return all values as a list.
- Will fallback on `__str__` if available, else `__repr__` if available, else will use `__could_not_encode__`
- Renaming fields now preserves order ([#7](https://github.com/nhairs/python-json-logger/issues/7)) and ignores missing fields ([#6](https://github.com/nhairs/python-json-logger/issues/6)).
- Documentation
- Generated documentation using `mkdocs` is stored in `docs/`
- Documentation within `README.md` has been moved to `docs/index.md` and `docs/qucikstart.md`.
- `CHANGELOG.md` has been moved to `docs/change-log.md`
- `SECURITY.md` has been moved and replaced with a symbolic link to `docs/security.md`.
### Deprecated
- `pythonjsonlogger.jsonlogger` is now `pythonjsonlogger.json`
- `pythonjsonlogger.jsonlogger.RESERVED_ATTRS` is now `pythonjsonlogger.core.RESERVED_ATTRS`.
- `pythonjsonlogger.jsonlogger.merge_record_extra` is now `pythonjsonlogger.core.merge_record_extra`.
### Removed
- Python 3.7 support dropped
- `pythonjsonlogger.jsonlogger.JsonFormatter._str_to_fn` replaced with `pythonjsonlogger.core.str_to_object`.
## [3.0.1](https://github.com/nhairs/python-json-logger/compare/v3.0.0...v3.0.1) - 2023-04-01
### Fixes
- Fix spelling of parameter `json_serialiser` -> `json_serializer` ([#8](https://github.com/nhairs/python-json-logger/issues/8)) - @juliangilbey
## [3.0.0](https://github.com/nhairs/python-json-logger/compare/v2.0.7...v3.0.0) - 2024-03-25
Note: using new major version to seperate changes from this fork and the original (upstream). See [#1](https://github.com/nhairs/python-json-logger/issues/1) for details.
### Changes
- Update supported Python versions - @nhairs
- Drop 3.6
- The following versions are supported and tested:
- CPython 3.7-3.12 (ubuntu, windows, mac)
- PyPy 3.7-3.10 (ubuntu, wundows, mac)
- `RESERVED_ATTRS` is now a list and version dependent
- Fix `JsonFormatter.__init__` return type (`None`) - @nhairs
- Moved to `pyproject.toml` - @nhairs
- Update linting and testing - @nhairs
- Split lint and test steps in GHA
- Use validate-pyproject, black, pylint, mypy
## [2.0.7](https://github.com/nhairs/python-json-logger/compare/v2.0.6...v2.0.7) - 2023-02-21
### Changed
- Fix inclusion of py.typed in pip packages - @sth
- Added pytest support with test file rename. Migrated to assertEqual
## [2.0.6](https://github.com/nhairs/python-json-logger/compare/v2.0.5...v2.0.6) - 2023-02-14
### Changed
- Parameter `rename_fields` in merge_record_extra is now optional - @afallou
## [2.0.5](https://github.com/nhairs/python-json-logger/compare/v2.0.4...v2.0.5) - 2023-02-12
### Added
- Allow reserved attrs to be renamed - @henkhogan
- Support added for Python 3.11
- Now verifying builds in Pypy 3.9 as well
- Type annotations are now in the package - @louis-jaris
### Changed
- Fix rename_fields for exc_info - @guilhermeferrari
- Cleaned up test file for PEP8 - @lopagela
- Cleaned up old Python 2 artifacts - @louis-jaris
- Dropped Python 3.5 support - @idomozes
- Moved type check via tox into 3.11 run only
- Added test run in Python3.6 (will keep for a little while longer, but it's EOL so upgrade)
## [2.0.4](https://github.com/nhairs/python-json-logger/compare/v2.0.3...v2.0.4) - 2022-07-11
### Changed
- Fix too strict regex for percentage style logging - @aberres
## [2.0.3](https://github.com/nhairs/python-json-logger/compare/v2.0.2...v2.0.3) - 2022-07-08
### Added
- Add PEP 561 marker/basic mypy configuration. - @bringhurst
- Workaround logging.LogRecord.msg type of string. - @bringhurst
### Changed
- Changed a link archive of the reference page in case it's down. - @ahonnecke
- Removed unnecessary try-except around OrderedDict usage - @sozofaan
- Update documentation link to json module + use https - @deronnax
- Dropped 3.5 support. - @bringhurst
## [2.0.2](https://github.com/nhairs/python-json-logger/compare/v2.0.1...v2.0.2) - 2021-07-27
### Added
- Officially supporting 3.9 - @felixonmars.
- You can now add static fields to log objects - @cosimomeli.
### Changed
- Dropped 3.4 support.
- Dropped Travis CI for Github Actions.
- Wheel should build for python 3 instead of just 3.4 now.
## [2.0.1](https://github.com/nhairs/python-json-logger/compare/v2.0.0...v2.0.1) - 2020-10-12
### Added
- Support Pypi long descripton - @ereli-cb
### Changed
- You can now rename output fields - @schlitzered
## [2.0.0](https://github.com/nhairs/python-json-logger/compare/v0.1.11...v2.0.0) - 2020-09-26
### Added
- New Changelog
- Added timezone support to timestamps - @lalten
- Refactored log record to function - @georgysavva
- Add python 3.8 support - @tommilligan
### Removed
- Support for Python 2.7
- Debian directory
## [0.1.11](https://github.com/nhairs/python-json-logger/compare/v0.1.10...v0.1.11) - 2019-03-29
### Added
- Support for Python 3.7
### Changed
- 'stack_info' flag in logging calls is now respected in JsonFormatter by [@ghShu](https://github.com/ghShu)
nhairs-python-json-logger-9e6cb67/docs/contributing.md 0000664 0000000 0000000 00000012273 14762515446 0023174 0 ustar 00root root 0000000 0000000 # Contributing
Contributions are welcome!
## Code of Conduct
In general we follow the [Python Software Foundation Code of Conduct](https://policies.python.org/python.org/code-of-conduct/). Please note that we are not affiliated with the PSF.
## Pull Request Process
**0. Before you begin**
If you're not familiar with contributing to open source software, [start by reading this guide](https://opensource.guide/how-to-contribute/).
Be aware that anything you contribute will be licenced under [the project's licence](https://github.com/nhairs/python-json-logger/blob/main/LICENSE). If you are making a change as a part of your job, be aware that your employer might own your work and you'll need their permission in order to licence the code.
### 1. Find something to work on
Where possible it's best to stick to established issues where discussion has already taken place. Contributions that haven't come from a discussed issue are less likely to be accepted.
The following are things that can be worked on without an existing issue:
- Updating documentation. This includes fixing in-code documentation / comments, and the overall docs.
- Small changes that don't change functionality such as refactoring or adding / updating tests.
### 2. Fork the repository and make your changes
We don't have styling documentation, so where possible try to match existing code. This includes the use of "headings" and "dividers" (this will make sense when you look at the code).
All devlopment tooling can be installed (usually into a virtual environment), using the `dev` optional dependency:
```shell
pip install -e '.[dev]'`
```
Before creating your pull request you'll want to format your code and run the linters and tests:
```shell
# Format
black src tests
# Lint
pylint --output-format=colorized src
mypy src tests
# Tests
pytest
```
If making changes to the documentation you can preview the changes locally using `mkdocs`. Changes to the README can be previewed using [`grip`](https://github.com/joeyespo/grip) (not included in `dev` dependencies).
```shell
mkdocs serve
```
!!! note
In general we will always squash merge pull requests so you do not need to worry about a "clean" commit history.
### 3. Checklist
Before pushing and creating your pull request, you should make sure you've done the following:
- Updated any relevant tests.
- Formatted your code and run the linters and tests.
- Updated the version number in `pyproject.toml`. In general using a `.devN` suffix is acceptable.
This is not required for changes that do no affect the code such as documentation.
- Add details of the changes to the change log (`docs/change-log.md`), creating a new section if needed.
- Add notes for new / changed features in the relevant docstring.
**4. Create your pull request**
When creating your pull request be aware that the title and description will be used for the final commit so pay attention to them.
Your pull request description should include the following:
- Why the pull request is being made
- Summary of changes
- How the pull request was tested - especially if not covered by unit testing.
Once you've submitted your pull request make sure that all CI jobs are passing. Pull requests with failing jobs will not be reviewed.
### 5. Code review
Your code will be reviewed by a maintainer.
If you're not familiar with code review start by reading [this guide](https://google.github.io/eng-practices/review/).
!!! tip "Remember you are not your work"
You might be asked to explain or justify your choices. This is not a criticism of your value as a person!
Often this is because there are multiple ways to solve the same problem and the reviewer would like to understand more about the way you solved.
## Common Topics
### Adding a new encoder
New encoders may be added, however how popular / common a library is will be taken into consideration before being added. You should open an issue before creating a pull request.
### Versioning and breaking compatability
This project uses semantic versioning.
In general backwards compatability is always preferred. This library is widely used and not particularly sophisticated and as such there must be a good reason for breaking changes.
Feature changes MUST be compatible with all [security supported versions of Python](https://endoflife.date/python) and SHOULD be compatible with all unsupported versions of Python where [recent downloads over the last 90 days exceeds 5% of all downloads](https://pypistats.org/packages/python-json-logger).
In general, only the latest `major.minor` version of Python JSON Logger is supported. Bug fixes and feature backports requiring a version branch may be considered but must be discussed with the maintainers first.
See also [Security Policy](security.md).
### Spelling
The original implementation of this project used US spelling so it will continue to use US spelling for all code.
Documentation is more flexible and may use a variety of English spellings.
### Contacting the Maintainers
In general it is preferred to keep communication to GitHub, e.g. through comments on issues and pull requests. If you do need to contact the maintainers privately, please do so using the email addresses in the maintainers section of the `pyproject.toml`.
nhairs-python-json-logger-9e6cb67/docs/cookbook.md 0000664 0000000 0000000 00000016727 14762515446 0022303 0 ustar 00root root 0000000 0000000 # Cookbook
Recipies for common tasks.
## Include all fields
By default Python JSON Logger will not include fields [defined in the standard library](https://docs.python.org/3/library/logging.html#logrecord-attributes) unless they are included in the format. Manually including all these fields is tedious and Python version specific. Instead of adding them as explicit fields, we can add them implicitly be ensuring they are not in the `reserver_attrs` argument of the formatter.
```python
all_fields_formatter = JsonFormatter(reserved_attrs=[])
```
## Custom Styles
It is possible to support custom `style`s by setting `validate=False` and overriding the `parse` method.
For example:
```python
class CommaSupport(JsonFormatter):
def parse(self) -> list[str]:
if isinstance(self._style, str) and self._style == ",":
return self._fmt.split(",")
return super().parse()
formatter = CommaSupport("message,asctime", style=",", validate=False)
```
## Modifying the logged data
You can modify the `dict` of data that will be logged by overriding the `process_log_record` method to modify fields before they are serialized to JSON.
```python
class SillyFormatter(JsonFormatter):
def process_log_record(log_record):
new_record = {k[::-1]: v for k, v in log_record.items()}
return new_record
```
## Request / Trace IDs
There are many ways to add consistent request IDs to your logging. The exact method will depend on your needs and application.
```python
## Common Setup
## -----------------------------------------------------------------------------
import logging
import uuid
from pythonjsonlogger.json import JsonFormatter
logger = logging.getLogger("test")
logger.setLevel(logging.INFO)
handler = logging.StreamHandler()
logger.addHandler(handler)
```
One method would be to inject the request ID into each log call using the `extra` argument.
```python
## Solution 1
## -----------------------------------------------------------------------------
formatter = JsonFormatter()
handler.setFormatter(formatter)
def main_1():
print("========== MAIN 1 ==========")
for i in range(3):
request_id = uuid.uuid4()
logger.info("loop start", extra={"request_id": request_id})
logger.info(f"loop {i}", extra={"request_id": request_id})
logger.info("loop end", extra={"request_id": request_id})
return
main_1()
```
Another method would be to use a filter to modify the `LogRecord` attributes. This would also allow us to use it in any other standard logging machinery. For this example I've manually set a `REQUEST_ID` global and some helper functions, but you might already have stuff available to you; for example, if you're using a web-framework with baked in request IDs.
This is based on the [logging cookbook filter recipie](https://docs.python.org/3/howto/logging-cookbook.html#using-filters-to-impart-contextual-information).
```python
## Solution 2
## -----------------------------------------------------------------------------
REQUEST_ID: str | None = None
def get_request_id() -> str:
return REQUEST_ID
def generate_request_id():
global REQUEST_ID
REQUEST_ID = str(uuid.uuid4())
class RequestIdFilter(logging.Filter):
def filter(self, record):
record.record_id = get_request_id()
return True
request_id_filter = RequestIdFilter()
logger.addFilter(request_id_filter)
def main_2():
print("========== MAIN 2 ==========")
for i in range(3):
generate_request_id()
logger.info("loop start")
logger.info(f"loop {i}")
logger.info("loop end")
return
main_2()
logger.removeFilter(request_id_filter)
```
Another method would be to create a custom formatter class and override the `process_log_record` method. This allows us to inject fields into the record before we log it without modifying the original `LogRecord`.
```python
## Solution 3
## -----------------------------------------------------------------------------
# Reuse REQUEST_ID stuff from solution 2
class MyFormatter(JsonFormatter):
def process_log_record(self, log_record):
log_record["request_id"] = get_request_id()
return log_record
handler.setFormatter(MyFormatter())
def main_3():
print("========== MAIN 3 ==========")
for i in range(3):
generate_request_id()
logger.info("loop start")
logger.info(f"loop {i}")
logger.info("loop end")
return
main_3()
```
## Using `fileConfig`
To use the module with a config file using the [`fileConfig` function](https://docs.python.org/3/library/logging.config.html#logging.config.fileConfig), use the class `pythonjsonlogger.json.JsonFormatter`. Here is a sample config file.
```ini
[loggers]
keys = root,custom
[logger_root]
handlers =
[logger_custom]
level = INFO
handlers = custom
qualname = custom
[handlers]
keys = custom
[handler_custom]
class = StreamHandler
level = INFO
formatter = json
args = (sys.stdout,)
[formatters]
keys = json
[formatter_json]
format = %(message)s
class = pythonjsonlogger.jsonlogger.JsonFormatter
```
## Logging Expensive to Compute Data
By the nature of Python's logging library, the JSON formatters will only ever run in handlers which are enabled for the given log level. This saves the performance hit of constructing JSON that is never used - but what about the data we pass into the logger? There are two options available to us: using if statements to avoid the call altogether, or using lazy string evaluation libraries.
!!! note
The below strategies will work for data passed in the `msg` and `extra` arguments.
To avoid the logging calls we use `logger.isEnabledFor` to ensure that we only start constructing our log messages if the logger is enabled:
```python
import logging
import time
from pythonjsonlogger.json import JsonFormatter
def expensive_to_compute():
time.sleep(5)
return "world"
## Setup
## -------------------------------------
logger = logging.getLogger()
handler = logging.StreamHandler()
formatter = JsonFormatter()
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.INFO)
## Log Using isEnabledFor
## -------------------------------------
start = time.time()
if logger.isEnabledFor(logging.INFO):
logger.info(
{
"data": "hello {}".format(expensive_to_compute())
}
)
print(f"Logging INFO using isEnabledFor took: {int(time.time() - start)}s")
start = time.time()
if logger.isEnabledFor(logging.DEBUG):
logger.debug(
{
"data": "hello {}".format(expensive_to_compute())
}
)
print(f"Logging DEBUG using isEnabledFor took: {int(time.time() - start)}s")
```
For lazy string evaluation we can take advantage of the fact that the default JSON encoders included in this package will call `str` on unkown objects. We can use this to build our own lazy string evaluators, or we can use an existing external package. Pre-existing solutions include: [`lazy-string`](https://pypi.org/project/lazy-string/)'s `LazyString` or [`stringlike`](https://pypi.org/project/stringlike/)'s `CachedLazyString`.
```python
## Log Using lazy-string
## -------------------------------------
from lazy_string import LazyString as L
start = time.time()
logger.info(
{
"data": L("hello {}".format, L(expensive_to_compute))
}
)
print(f"Logging INFO using LazyString took: {int(time.time() - start)}s")
start = time.time()
logger.debug(
{
"data": L("hello {}".format, L(expensive_to_compute))
}
)
print(f"Logging DEBUG using LazyString took: {int(time.time() - start)}s")
```
nhairs-python-json-logger-9e6cb67/docs/index.md 0000664 0000000 0000000 00000006532 14762515446 0021575 0 ustar 00root root 0000000 0000000 # Python JSON Logger
[](https://pypi.python.org/pypi/python-json-logger/)
[](https://pypi.python.org/pypi/python-json-logger/)
[](https://pypi.python.org/pypi/python-json-logger/)
[](https://github.com/nhairs/python-json-logger)
[](https://github.com/nhairs/python-json-logger)

## Introduction
Python JSON Logger enables you produce JSON logs when using Python's `logging` package.
JSON logs are machine readable allowing for much easier parsing and ingestion into log aggregation tools.
This library assumes that you are famliar with the `logging` standard library package; if you are not you should start by reading the official [Logging HOWTO](https://docs.python.org/3/howto/logging.html).
## Features
- **Standard Library Compatible:** Implement JSON logging without modifying your existing log setup.
- **Supports Multiple JSON Encoders:** In addition to the standard libary's `json` module, also supports the [`orjson`][pythonjsonlogger.orjson], [`msgspec`][pythonjsonlogger.msgspec] JSON encoders.
- **Fully Customizable Output Fields:** Control required, excluded, and static fields including automatically picking up custom attributes on `LogRecord` objects. Fields can be renamed before they are output.
- **Encode Any Type:** Encoders are customized to ensure that something sane is logged for any input including those that aren't supported by default. For example formatting UUID objects into their string representation and bytes objects into a base 64 encoded string.
## Quick Start
Follow our [Quickstart Guide](quickstart.md).
```python title="TLDR"
import logging
from pythonjsonlogger.json import JsonFormatter
logger = logging.getLogger()
logger.setLevel(logging.INFO)
handler = logging.StreamHandler()
handler.setFormatter(JsonFormatter())
logger.addHandler(handler)
logger.info("Logging using pythonjsonlogger!", extra={"more_data": True})
# {"message": "Logging using pythonjsonlogger!", "more_data": true}
```
## Bugs, Feature Requests etc
Please [submit an issue on github](https://github.com/nhairs/python-json-logger/issues).
In the case of bug reports, please help us help you by following best practices [^1^](https://marker.io/blog/write-bug-report/) [^2^](https://www.chiark.greenend.org.uk/~sgtatham/bugs.html).
In the case of feature requests, please provide background to the problem you are trying to solve so that we can a solution that makes the most sense for the library as well as your use case.
## License
This project is licensed under the BSD 2 Clause License - see [`LICENSE`](https://github.com/nhairs/python-json-logger/blob/main/LICENSE)
## Authors and Maintainers
This project was originally authored by [Zakaria Zajac](https://github.com/madzak) and our wonderful [contributors](https://github.com/nhairs/python-json-logger/graphs/contributors)
It is currently maintained by:
- [Nicholas Hairs](https://github.com/nhairs) - [nicholashairs.com](https://www.nicholashairs.com)
nhairs-python-json-logger-9e6cb67/docs/quickstart.md 0000664 0000000 0000000 00000010606 14762515446 0022655 0 ustar 00root root 0000000 0000000 # Quick Start
## Installation
!!! note
All versions of this fork use version `>=3.0.0`.
To use pre-fork versions use `python-json-logger<3`.
### Install via pip
```shell
pip install python-json-logger
```
### Install from GitHub
To install from [releases](https://github.com/nhairs/python-json-logger/releases) (including development releases), you can use the URL to the specific wheel.
```shell
# e.g. 3.0.0 wheel
pip install 'python-json-logger@https://github.com/nhairs/python-json-logger/releases/download/v3.0.0/python_json_logger-3.0.0-py3-none-any.whl'
```
## Usage
Python JSON Logger provides [`logging.Formatter`](https://docs.python.org/3/library/logging.html#logging.Formatter) classes that encode the logged message into JSON. Although [a variety of JSON encoders are supported](#alternate-json-encoders), the following examples will use the [JsonFormatter][pythonjsonlogger.json.JsonFormatter] which uses the the `json` module from the standard library.
### Integrating with Python's logging framework
To produce JSON output, attach the formatter to a logging handler:
```python
import logging
from pythonjsonlogger.json import JsonFormatter
logger = logging.getLogger()
logHandler = logging.StreamHandler()
formatter = JsonFormatter()
logHandler.setFormatter(formatter)
logger.addHandler(logHandler)
```
### Output fields
#### Required Fields
You can control the logged fields by setting the `fmt` argument when creating the formatter. By default formatters will follow the same `style` of `fmt` as the `logging` module: `%`, `$`, and `{`. All [`LogRecord` attributes](https://docs.python.org/3/library/logging.html#logrecord-attributes) can be output using their name.
```python
formatter = JsonFormatter("{message}{asctime}{exc_info}", style="{")
```
#### Message Fields
Instead of logging a string message you can log using a `dict`.
```python
logger.info({
"my_data": 1,
"message": "if you don't include this it will be an empty string",
"other_stuff": False,
})
```
!!! warning
Be aware that if you log using a `dict`, other formatters may not be able to handle it.
You can also add additional message fields using the `extra` argument.
```python
logger.info(
"this logs the same additional fields as above",
extra={
"my_data": 1,
"other_stuff": False,
},
)
```
Finally, any non-standard attributes added to a `LogRecord` will also be included in the logged data. See [Cookbook: Request / Trace IDs](cookbook.md#request-trace-ids) for an example.
#### Default Fields
Default fields that are added to every log record prior to any other field can be set using the `defaults` argument.
```python
formatter = JsonFormatter(
defaults={"environment": "dev"}
)
# ...
logger.info("this overwrites the environment field", extras={"environment": "dev"})
```
#### Static Fields
Static fields that are added to every log record can be set using the `static_fields` argument.
```python
formatter = JsonFormatter(
static_fields={"True gets logged on every record?": True}
)
```
### Excluding fields
You can prevent fields being added to the output data by adding them to `reserved_attrs`. By default all [`LogRecord` attributes](https://docs.python.org/3/library/logging.html#logrecord-attributes) are excluded.
```python
from pythonjsonlogger.core import RESERVED_ATTRS
formatter = JsonFormatter(
reserved_attrs=RESERVED_ATTRS+["request_id", "my_other_field"]
)
```
### Renaming fields
You can rename fields using the `rename_fields` argument.
```python
formatter = JsonFormatter(
"{message}{levelname}",
style="{",
rename_fields={"levelname": "LEVEL"},
)
```
### Custom object serialization
Most formatters support `json_default` which is used to control how objects are serialized.
```python
def my_default(obj):
if isinstance(obj, MyClass):
return {"special": obj.special}
formatter = JsonFormatter(json_default=my_default)
```
!!! note
When providing your own `json_default`, you likely want to call the original `json_default` for your encoder. Python JSON Logger provides custom default serializers for each encoder that tries very hard to ensure sane output is always logged.
### Alternate JSON Encoders
The following JSON encoders are also supported:
- [orjson](https://github.com/ijl/orjson) - [pythonjsonlogger.orjson.OrjsonFormatter][]
- [msgspec](https://github.com/jcrist/msgspec) - [pythonjsonlogger.msgspec.MsgspecFormatter][]
nhairs-python-json-logger-9e6cb67/docs/security.md 0000664 0000000 0000000 00000001062 14762515446 0022326 0 ustar 00root root 0000000 0000000 # Security Policy
## Supported Versions
Security support for Python JSON Logger is provided for all [security supported versions of Python](https://endoflife.date/python) and for unsupported versions of Python where [recent downloads over the last 90 days exceeds 5% of all downloads](https://pypistats.org/packages/python-json-logger).
As of 2024-04-24 security support is provided for Python versions `3.8+`.
## Reporting a Vulnerability
Please report vulnerabilities [using GitHub](https://github.com/nhairs/python-json-logger/security/advisories/new).
nhairs-python-json-logger-9e6cb67/mkdocs.yml 0000664 0000000 0000000 00000005320 14762515446 0021211 0 ustar 00root root 0000000 0000000 site_name: "Python JSON Logger"
site_url: https://nhairs.github.io/python-json-logger
repo_url: https://github.com/nhairs/python-json-logger
edit_uri: tree/main/docs
copyright: " Copyright © Python JSON Logger Contributors"
watch:
- mkdocs.yml
- README.md
- src/pythonjsonlogger
- docs
nav:
- "Home": index.md
- quickstart.md
- cookbook.md
- changelog.md
- security.md
- contributing.md
- API Reference:
- ... | reference/pythonjsonlogger/*
theme:
name: material
icon:
logo: material/code-braces
features:
- navigation.instant
- navigation.sections
- navigation.indexes
- navigation.expand
- navigation.top
- content.code.annotate
- content.code.copy
- toc.follow
palette:
- media: "(prefers-color-scheme: light)"
primary: amber
scheme: default
toggle:
icon: material/weather-night
name: Switch to dark mode
- media: "(prefers-color-scheme: dark)"
primary: amber
scheme: slate
toggle:
icon: material/weather-sunny
name: Switch to light mode
extra:
social:
- icon: fontawesome/brands/github
link: https://github.com/nhairs/python-json-logger
version:
provider: mike
markdown_extensions:
- toc:
permalink: "🔗"
- admonition
- def_list
- mdx_truly_sane_lists
- pymdownx.highlight:
anchor_linenums: true
- pymdownx.inlinehilite
- pymdownx.snippets
- pymdownx.superfences
- pymdownx.details
- pymdownx.caret
plugins:
- autorefs
- search:
lang: en
- awesome-pages:
collapse_single_pages: true
- gen-files:
scripts:
- scripts/gen_ref_nav.py
- mkdocstrings:
default_handler: python
handlers:
python:
paths:
- src
import:
- https://docs.python.org/3/objects.inv
# - https://mkdocstrings.github.io/objects.inv
# - https://mkdocstrings.github.io/griffe/objects.inv
options:
filters:
- "!^_"
heading_level: 1
inherited_members: true
merge_init_into_class: true
#preload_modules: []
separate_signature: true
show_root_heading: true
show_root_full_path: true
show_signature_annotations: true
show_symbol_type_heading: true
show_symbol_type_toc: true
signature_crossrefs: true
summary: true
unwrap_annotated: true
show_source: false
docstring_section_style: spacy
- literate-nav:
nav_file: SUMMARY.txt
- mike:
canonical_version: latest
nhairs-python-json-logger-9e6cb67/mypy.ini 0000664 0000000 0000000 00000000066 14762515446 0020707 0 ustar 00root root 0000000 0000000 [mypy]
[mypy-orjson.*]
ignore_missing_imports = True
nhairs-python-json-logger-9e6cb67/pylintrc 0000664 0000000 0000000 00000037357 14762515446 0021014 0 ustar 00root root 0000000 0000000 [MASTER]
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code.
extension-pkg-whitelist=orjson
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS
# Add files or directories matching the regex patterns to the blacklist. The
# regex matches against base names, not paths.
ignore-patterns=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
# number of processors available to use.
jobs=0
# Control the amount of potential inferred values when inferring a single
# object. This can help the performance when dealing with large functions or
# complex, nested conditions.
limit-inference-results=100
# List of plugins (as comma separated values of python module names) to load,
# usually to register additional checkers.
load-plugins=
# Pickle collected data for later comparisons.
persistent=yes
# Specify a configuration file.
#rcfile=
# When enabled, pylint would attempt to guess common misconfiguration and emit
# user-friendly hints instead of false-positive error messages.
suggestion-mode=yes
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
confidence=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once). You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W".
disable=raw-checker-failed,
bad-inline-option,
locally-disabled,
file-ignored,
suppressed-message,
useless-suppression,
deprecated-pragma,
use-symbolic-message-instead,
## General Changes
# Explicit is better than implicit so allow bare returns
useless-return,
# pylint and black sometimes disagree - we always prefer black in these
# cases. Disable rules that can cause conflicts
line-too-long,
# Module docstrings are not required
missing-module-docstring,
## Project Disables
duplicate-code
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=c-extension-no-member
[REPORTS]
# Python expression which should return a score less than or equal to 10. You
# have access to the variables 'error', 'warning', 'refactor', and 'convention'
# which contain the number of messages in each category, as well as 'statement'
# which is the total number of statements analyzed. This score is used by the
# global evaluation report (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details.
#msg-template=
# Set the output format. Available formats are text, parseable, colorized, json
# and msvs (visual studio). You can also give a reporter class, e.g.
# mypackage.mymodule.MyReporterClass.
output-format=text
# Tells whether to display a full report or only the messages.
reports=no
# Activate the evaluation score.
score=yes
[REFACTORING]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
# Complete name of functions that never returns. When checking for
# inconsistent-return-statements if a never returning function is called then
# it will be considered as an explicit return statement and no message will be
# printed.
never-returning-functions=sys.exit
[LOGGING]
# Format style used to check logging format string. `old` means using %
# formatting, `new` is for `{}` formatting,and `fstr` is for f-strings.
logging-format-style=old
# Logging modules to check that the string format arguments are in logging
# function parameter format.
logging-modules=logging
[VARIABLES]
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid defining new builtins when possible.
additional-builtins=
# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables=yes
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,
_cb
# A regular expression matching the name of dummy variables (i.e. expected to
# not be used).
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
# Argument names that match this expression will be ignored. Default to name
# with leading underscore.
ignored-argument-names=_.*|^ignored_|^unused_
# Tells whether we should check for unused import in __init__ files.
init-import=no
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
[BASIC]
# Naming style matching correct argument names.
argument-naming-style=snake_case
# Regular expression matching correct argument names. Overrides argument-
# naming-style.
#argument-rgx=
# Naming style matching correct attribute names.
attr-naming-style=snake_case
# Regular expression matching correct attribute names. Overrides attr-naming-
# style.
#attr-rgx=
# Bad variable names which should always be refused, separated by a comma.
bad-names=foo,
bar,
baz,
toto,
tutu,
tata
# Naming style matching correct class attribute names.
class-attribute-naming-style=any
# Regular expression matching correct class attribute names. Overrides class-
# attribute-naming-style.
#class-attribute-rgx=
# Naming style matching correct class names.
class-naming-style=PascalCase
# Regular expression matching correct class names. Overrides class-naming-
# style.
#class-rgx=
# Naming style matching correct constant names.
const-naming-style=UPPER_CASE
# Regular expression matching correct constant names. Overrides const-naming-
# style.
#const-rgx=
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
# Naming style matching correct function names.
function-naming-style=snake_case
# Regular expression matching correct function names. Overrides function-
# naming-style.
#function-rgx=
# Good variable names which should always be accepted, separated by a comma.
good-names=i,
j,
k,
ex,
Run,
_,
e,
r,
id,
f,
# Include a hint for the correct naming format with invalid-name.
include-naming-hint=no
# Naming style matching correct inline iteration names.
inlinevar-naming-style=any
# Regular expression matching correct inline iteration names. Overrides
# inlinevar-naming-style.
#inlinevar-rgx=
# Naming style matching correct method names.
method-naming-style=snake_case
# Regular expression matching correct method names. Overrides method-naming-
# style.
#method-rgx=
# Naming style matching correct module names.
module-naming-style=snake_case
# Regular expression matching correct module names. Overrides module-naming-
# style.
#module-rgx=
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
# These decorators are taken in consideration only for invalid-name.
property-classes=abc.abstractproperty
# Naming style matching correct variable names.
variable-naming-style=snake_case
# Regular expression matching correct variable names. Overrides variable-
# naming-style.
#variable-rgx=
[TYPECHECK]
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# Tells whether to warn about missing members when the owner of the attribute
# is inferred to be None.
ignore-none=yes
# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference
# can return multiple potential results while evaluating a Python object, but
# some branches might not be evaluated, which results in partial inference. In
# that case, it might be useful to still emit no-member and other checks for
# the rest of the inferred objects.
ignore-on-opaque-inference=yes
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis). It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
missing-member-hint=yes
# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance=1
# The total number of similar names that should be taken in consideration when
# showing a hint for a missing member.
missing-member-max-choices=1
# List of decorators that change the signature of a decorated function.
signature-mutators=
[STRING]
# This flag controls whether the implicit-str-concat-in-sequence should
# generate a warning on implicit string concatenation in sequences defined over
# several lines.
check-str-concat-over-line-jumps=no
[SIMILARITIES]
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
# Ignore imports when computing similarities.
ignore-imports=no
# Minimum lines number of a similarity.
min-similarity-lines=4
[SPELLING]
# Limits count of emitted suggestions for spelling mistakes.
max-spelling-suggestions=4
# Spelling dictionary name. Available dictionaries: none. To make it work,
# install the python-enchant package.
spelling-dict=
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains the private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to the private dictionary (see the
# --spelling-private-dict-file option) instead of raising a message.
spelling-store-unknown-words=no
[FORMAT]
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=LF # Force UNIX style new lines
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )??$
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
# Maximum number of characters on a single line.
max-line-length=100
# Maximum number of lines in a module.
max-module-lines=1000
# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt=no
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,
XXX
[IMPORTS]
# List of modules that can be imported at any level, not just the top level
# one.
allow-any-import-level=
# Allow wildcard imports from modules that define __all__.
allow-wildcard-with-all=no
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no
# Deprecated modules which should not be used, separated by a comma.
deprecated-modules=optparse,tkinter.tix
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled).
ext-import-graph=
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled).
import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled).
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
# Couples of modules and preferred modules, separated by a comma.
preferred-modules=
[CLASSES]
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,
__new__,
setUp,
__post_init__
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,
_fields,
_replace,
_source,
_make
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=cls
[DESIGN]
# Maximum number of arguments for function / method.
max-args=10
# Maximum number of attributes for a class (see R0902).
max-attributes=15
# Maximum number of boolean expressions in an if statement (see R0916).
max-bool-expr=5
# Maximum number of branch for function / method body.
max-branches=12
# Maximum number of locals for function / method body.
max-locals=15
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
# Maximum number of return / yield for function / method body.
max-returns=10
# Maximum number of statements in function / method body.
max-statements=50
# Minimum number of public methods for a class (see R0903).
min-public-methods=1
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "BaseException, Exception".
overgeneral-exceptions=builtins.BaseException,
builtins.Exception
nhairs-python-json-logger-9e6cb67/pyproject.toml 0000664 0000000 0000000 00000004111 14762515446 0022117 0 ustar 00root root 0000000 0000000 [build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"
[project]
name = "python-json-logger"
version = "3.3.0"
description = "JSON Log Formatter for the Python Logging Package"
authors = [
{name = "Zakaria Zajac", email = "zak@madzak.com"},
{name = "Nicholas Hairs", email = "info+python-json-logger@nicholashairs.com"},
]
maintainers = [
{name = "Nicholas Hairs", email = "info+python-json-logger@nicholashairs.com"},
]
# Dependency Information
requires-python = ">=3.8"
dependencies = [
"typing_extensions;python_version<'3.10'",
]
# Extra information
readme = "README.md"
license = {text = "BSD-2-Clause License"}
classifiers = [
"Development Status :: 6 - Mature",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
"Topic :: System :: Logging",
"Typing :: Typed",
]
[project.urls]
Homepage = "https://nhairs.github.io/python-json-logger"
GitHub = "https://github.com/nhairs/python-json-logger"
[project.optional-dependencies]
dev = [
## Optional but required for dev
"orjson;implementation_name!='pypy'",
"msgspec;implementation_name!='pypy'",
## Lint
"validate-pyproject[all]",
"black",
"pylint",
"mypy",
## Test
"pytest",
"freezegun",
"backports.zoneinfo;python_version<'3.9'",
"tzdata",
## Build
"build",
## Docs
"mkdocs",
"mkdocs-material>=8.5",
"mkdocs-awesome-pages-plugin",
"mdx_truly_sane_lists",
"mkdocstrings[python]",
"mkdocs-gen-files",
"mkdocs-literate-nav",
"mike",
]
[tool.setuptools.packages.find]
where = ["src"]
include = ["pythonjsonlogger*"]
[tool.setuptools.package-data]
pythonjsonlogger = ["py.typed"]
[tool.black]
line-length = 100
nhairs-python-json-logger-9e6cb67/scripts/ 0000775 0000000 0000000 00000000000 14762515446 0020675 5 ustar 00root root 0000000 0000000 nhairs-python-json-logger-9e6cb67/scripts/gen_ref_nav.py 0000664 0000000 0000000 00000002233 14762515446 0023520 0 ustar 00root root 0000000 0000000 # NOTICE: This file is from mkdocstrings-python see NOTICE for details
"""Generate the code reference pages and navigation."""
from pathlib import Path
import mkdocs_gen_files
nav = mkdocs_gen_files.Nav()
mod_symbol = '
'
for path in sorted(Path("src").rglob("*.py")):
module_path = path.relative_to("src").with_suffix("")
doc_path = path.relative_to("src").with_suffix(".md")
full_doc_path = Path("reference", doc_path)
parts = tuple(module_path.parts)
if parts[-1] == "__init__":
parts = parts[:-1]
doc_path = doc_path.with_name("index.md")
full_doc_path = full_doc_path.with_name("index.md")
elif parts[-1].startswith("_"):
continue
nav_parts = [f"{mod_symbol} {part}" for part in parts]
nav[tuple(nav_parts)] = doc_path.as_posix()
with mkdocs_gen_files.open(full_doc_path, "w") as fd:
ident = ".".join(parts)
fd.write(f"::: {ident}")
mkdocs_gen_files.set_edit_path(full_doc_path, ".." / path)
with mkdocs_gen_files.open("reference/SUMMARY.txt", "w") as nav_file:
nav_file.writelines(nav.build_literate_nav())
nhairs-python-json-logger-9e6cb67/src/ 0000775 0000000 0000000 00000000000 14762515446 0017775 5 ustar 00root root 0000000 0000000 nhairs-python-json-logger-9e6cb67/src/pythonjsonlogger/ 0000775 0000000 0000000 00000000000 14762515446 0023410 5 ustar 00root root 0000000 0000000 nhairs-python-json-logger-9e6cb67/src/pythonjsonlogger/__init__.py 0000664 0000000 0000000 00000000643 14762515446 0025524 0 ustar 00root root 0000000 0000000 ### IMPORTS
### ============================================================================
## Future
## Standard Library
import warnings
## Installed
## Application
from . import json
from . import utils
### CONSTANTS
### ============================================================================
# Flags recording whether the optional encoder backends are importable, so
# callers can guard use of the orjson/msgspec formatters.
# NOTE(review): assumes utils.package_is_available returns a bool — confirm in utils.
ORJSON_AVAILABLE = utils.package_is_available("orjson")
MSGSPEC_AVAILABLE = utils.package_is_available("msgspec")
nhairs-python-json-logger-9e6cb67/src/pythonjsonlogger/core.py 0000664 0000000 0000000 00000034375 14762515446 0024726 0 ustar 00root root 0000000 0000000 """Core functionality shared by all JSON loggers"""
### IMPORTS
### ============================================================================
## Future
from __future__ import annotations
## Standard Library
from datetime import datetime, timezone
import importlib
import logging
import re
import sys
from typing import Optional, Union, Callable, List, Dict, Container, Any, Sequence
if sys.version_info >= (3, 10):
from typing import TypeAlias
else:
from typing_extensions import TypeAlias
## Installed
## Application
### CONSTANTS
### ============================================================================
RESERVED_ATTRS: List[str] = [
    "args",
    "asctime",
    "created",
    "exc_info",
    "exc_text",
    "filename",
    "funcName",
    "levelname",
    "levelno",
    "lineno",
    "module",
    "msecs",
    "message",
    "msg",
    "name",
    "pathname",
    "process",
    "processName",
    "relativeCreated",
    "stack_info",
    "thread",
    "threadName",
]
"""Default reserved attributes.

These come from the [default attributes of `LogRecord` objects](http://docs.python.org/library/logging.html#logrecord-attributes).

Note:
    Although considered a constant, this list is dependent on the Python version due to
    different `LogRecord` objects having different attributes in different Python versions.

*Changed in 3.0*: `RESERVED_ATTRS` is now `list[str]` instead of `tuple[str, ...]`.
"""

if sys.version_info >= (3, 12):
    # taskName added in python 3.12
    RESERVED_ATTRS.append("taskName")
    # Keep the list alphabetically sorted after the conditional append.
    RESERVED_ATTRS.sort()
STYLE_STRING_TEMPLATE_REGEX = re.compile(r"\$\{(.+?)\}", re.IGNORECASE) # $ style
STYLE_STRING_FORMAT_REGEX = re.compile(r"\{(.+?)\}", re.IGNORECASE) # { style
STYLE_PERCENT_REGEX = re.compile(r"%\((.+?)\)", re.IGNORECASE) # % style
## Type Aliases
## -----------------------------------------------------------------------------
OptionalCallableOrStr: TypeAlias = Optional[Union[Callable, str]]
"""Type alias"""
LogRecord: TypeAlias = Dict[str, Any]
"""Type alias"""
### FUNCTIONS
### ============================================================================
def str_to_object(obj: Any) -> Any:
    """Resolve a dotted import path string to the object it names.

    Anything that is not a string is returned unchanged, which lets callers
    accept either an already-imported object or its import path.

    Args:
        obj: a dotted path such as ``"json.JSONEncoder"``, or any non-string
            object to pass through as-is.

    *New in 3.1*
    """
    if not isinstance(obj, str):
        return obj
    module_path, attr_name = obj.rsplit(".", 1)
    module = importlib.import_module(module_path)
    return getattr(module, attr_name)
def merge_record_extra(
    record: logging.LogRecord,
    target: Dict,
    reserved: Container[str],
    rename_fields: Optional[Dict[str, str]] = None,
) -> Dict:
    """Copy non-reserved attributes of a LogRecord into a target dictionary.

    Args:
        record: logging.LogRecord
        target: dict to update
        reserved: dict or list with reserved keys to skip
        rename_fields: an optional dict, used to rename field names in the output.
            e.g. Rename `levelname` to `log.level`: `{'levelname': 'log.level'}`

    *Changed in 3.1*: `reserved` is now `Container[str]`.
    """
    renames = rename_fields if rename_fields is not None else {}
    for attr_name, attr_value in record.__dict__.items():
        if attr_name in reserved:
            continue
        # Keys may be non-strings (e.g. numeric), so guard the startswith call;
        # names beginning with "_" are treated as private and skipped.
        if hasattr(attr_name, "startswith") and attr_name.startswith("_"):
            continue
        target[renames.get(attr_name, attr_name)] = attr_value
    return target
### CLASSES
### ============================================================================
class BaseJsonFormatter(logging.Formatter):
"""Base class for all formatters
Must not be used directly.
*New in 3.1*
*Changed in 3.2*: `defaults` argument is no longer ignored.
*Added in UNRELEASED*: `exc_info_as_array` and `stack_info_as_array` options are added.
"""
_style: Union[logging.PercentStyle, str] # type: ignore[assignment]
## Parent Methods
## -------------------------------------------------------------------------
# pylint: disable=too-many-arguments,super-init-not-called
def __init__(
    self,
    fmt: Optional[str] = None,
    datefmt: Optional[str] = None,
    style: str = "%",
    validate: bool = True,
    *,
    prefix: str = "",
    rename_fields: Optional[Dict[str, str]] = None,
    rename_fields_keep_missing: bool = False,
    static_fields: Optional[Dict[str, Any]] = None,
    reserved_attrs: Optional[Sequence[str]] = None,
    timestamp: Union[bool, str] = False,
    defaults: Optional[Dict[str, Any]] = None,
    exc_info_as_array: bool = False,
    stack_info_as_array: bool = False,
) -> None:
    """
    Args:
        fmt: string representing fields to log
        datefmt: format to use when formatting `asctime` field
        style: how to extract log fields from `fmt`
        validate: validate `fmt` against style, if implementing a custom `style` you
            must set this to `False`.
        defaults: a dictionary containing default fields that are added before all other fields and
            may be overridden. The supplied fields are still subject to `rename_fields`.
        prefix: an optional string prefix added at the beginning of
            the formatted string
        rename_fields: an optional dict, used to rename field names in the output.
            Rename `message` to `@message`: `{'message': '@message'}`
        rename_fields_keep_missing: When renaming fields, include missing fields in the output.
        static_fields: an optional dict, used to add fields with static values to all logs
        reserved_attrs: an optional list of fields that will be skipped when
            outputting json log record. Defaults to [all log record attributes][pythonjsonlogger.core.RESERVED_ATTRS].
        timestamp: an optional string/boolean field to add a timestamp when
            outputting the json log record. If string is passed, timestamp will be added
            to log record using string as key. If True boolean is passed, timestamp key
            will be "timestamp". Defaults to False/off.
        exc_info_as_array: break the exc_info into a list of lines based on line breaks.
        stack_info_as_array: break the stack_info into a list of lines based on line breaks.

    *Changed in 3.1*:

    - you can now use custom values for style by setting validate to `False`.
      The value is stored in `self._style` as a string. The `parse` method will need to be
      overridden in order to support the new style.
    - Renaming fields now preserves the order that fields were added in and avoids adding
      missing fields. The original behaviour, missing fields have a value of `None`, is still
      available by setting `rename_fields_keep_missing` to `True`.
    """
    ## logging.Formatter compatibility
    ## ---------------------------------------------------------------------
    # Note: validate added in 3.8, defaults added in 3.10
    # NOTE(review): relies on logging._STYLES, the stdlib's private registry
    # mapping style characters ("%", "{", "$") to their style classes.
    if style in logging._STYLES:
        _style = logging._STYLES[style][0](fmt)  # type: ignore[operator]
        if validate:
            _style.validate()
        self._style = _style
        self._fmt = _style._fmt
    elif not validate:
        # Custom style: stored as the raw string; parse() must be overridden
        # by a subclass to understand it.
        self._style = style
        self._fmt = fmt
    else:
        raise ValueError(f"Style must be one of: {','.join(logging._STYLES.keys())}")
    self.datefmt = datefmt

    ## JSON Logging specific
    ## ---------------------------------------------------------------------
    self.prefix = prefix
    self.rename_fields = rename_fields if rename_fields is not None else {}
    self.rename_fields_keep_missing = rename_fields_keep_missing
    self.static_fields = static_fields if static_fields is not None else {}
    self.reserved_attrs = set(reserved_attrs if reserved_attrs is not None else RESERVED_ATTRS)
    self.timestamp = timestamp

    # parse() reads self._style, so it must run after the style handling above.
    self._required_fields = self.parse()
    # Fields never copied from record.__dict__ as "extra" data: everything the
    # format string already requires plus all reserved attributes.
    self._skip_fields = set(self._required_fields)
    self._skip_fields.update(self.reserved_attrs)
    self.defaults = defaults if defaults is not None else {}
    self.exc_info_as_array = exc_info_as_array
    self.stack_info_as_array = stack_info_as_array
    return
def format(self, record: logging.LogRecord) -> str:
    """Formats a log record and serializes to json

    Args:
        record: the record to format

    Returns:
        The serialized (prefixed) JSON string for this record.
    """
    message_dict: Dict[str, Any] = {}
    # TODO: logging.LogRecord.msg and logging.LogRecord.message in typeshed
    # are always type of str. We shouldn't need to override that.
    if isinstance(record.msg, dict):
        # Dict messages are merged into the output (via add_fields) instead of
        # being stringified; "message" is blanked for this case.
        message_dict = record.msg
        record.message = ""
    else:
        record.message = record.getMessage()
    # only format time if needed
    if "asctime" in self._required_fields:
        record.asctime = self.formatTime(record, self.datefmt)
    # Display formatted exception, but allow overriding it in the
    # user-supplied dict.
    if record.exc_info and not message_dict.get("exc_info"):
        message_dict["exc_info"] = self.formatException(record.exc_info)
    if not message_dict.get("exc_info") and record.exc_text:
        # exc_text holds a previously formatted exception cached on the record
        message_dict["exc_info"] = record.exc_text
    # Display formatted record of stack frames
    # default format is a string returned from :func:`traceback.print_stack`
    if record.stack_info and not message_dict.get("stack_info"):
        message_dict["stack_info"] = self.formatStack(record.stack_info)
    log_record: LogRecord = {}
    self.add_fields(log_record, record, message_dict)
    log_record = self.process_log_record(log_record)
    return self.serialize_log_record(log_record)
## JSON Formatter Specific Methods
## -------------------------------------------------------------------------
def parse(self) -> List[str]:
    """Parses format string looking for substitutions

    This method is responsible for returning a list of fields (as strings)
    to include in all log messages.

    You can support custom styles by overriding this method.

    Returns:
        list of fields to be extracted and serialized
    """
    style = self._style
    # NOTE: StringTemplateStyle and StrFormatStyle are both subclasses of
    # PercentStyle, so the generic percent check must come last.
    if isinstance(style, logging.StringTemplateStyle):
        pattern = STYLE_STRING_TEMPLATE_REGEX
    elif isinstance(style, logging.StrFormatStyle):
        pattern = STYLE_STRING_FORMAT_REGEX
    elif isinstance(style, logging.PercentStyle):
        pattern = STYLE_PERCENT_REGEX
    else:
        raise ValueError(f"Style {style!r} is not supported")
    return pattern.findall(self._fmt) if self._fmt else []
def serialize_log_record(self, log_record: LogRecord) -> str:
    """Returns the final representation of the log record.

    This is the configured prefix followed by the JSON-encoded record.

    Args:
        log_record: the log record
    """
    serialized = self.jsonify_log_record(log_record)
    return self.prefix + serialized
def add_fields(
    self,
    log_record: Dict[str, Any],
    record: logging.LogRecord,
    message_dict: Dict[str, Any],
) -> None:
    """Extract fields from a LogRecord for logging

    This method can be overridden to implement custom logic for adding fields.

    Args:
        log_record: data that will be logged
        record: the record to extract data from
        message_dict: dictionary that was logged instead of a message. e.g
            `logger.info({"is_this_message_dict": True})`
    """
    # Precedence (later writes win): defaults -> required format fields ->
    # static fields -> message dict -> record extras -> timestamp.
    for name, value in self.defaults.items():
        log_record[self._get_rename(name)] = value

    for name in self._required_fields:
        log_record[self._get_rename(name)] = record.__dict__.get(name)

    for source in (self.static_fields, message_dict):
        for name, value in source.items():
            log_record[self._get_rename(name)] = value

    merge_record_extra(
        record,
        log_record,
        reserved=self._skip_fields,
        rename_fields=self.rename_fields,
    )

    if self.timestamp:
        timestamp_key = self.timestamp if isinstance(self.timestamp, str) else "timestamp"
        log_record[self._get_rename(timestamp_key)] = datetime.fromtimestamp(
            record.created, tz=timezone.utc
        )

    if self.rename_fields_keep_missing:
        # Ensure every rename target exists, defaulting absent ones to None.
        for renamed in self.rename_fields.values():
            if renamed not in log_record:
                log_record[renamed] = None
    return
def _get_rename(self, key: str) -> str:
    """Return the output name for `key`, applying any configured rename."""
    try:
        return self.rename_fields[key]
    except KeyError:
        return key
# Child Methods
# ..........................................................................
def jsonify_log_record(self, log_record: LogRecord) -> str:
    """Convert this log record into a JSON string.

    Child classes MUST override this method.

    Args:
        log_record: the data to serialize

    Raises:
        NotImplementedError: always, on this base class.
    """
    raise NotImplementedError()
def process_log_record(self, log_record: LogRecord) -> LogRecord:
    """Custom processing of the log record.

    Child classes can override this method to alter the log record before it
    is serialized.

    Args:
        log_record: incoming data

    Returns:
        The (possibly modified) log record; the base implementation returns
        it unchanged.
    """
    return log_record
def formatException(self, ei) -> Union[str, list[str]]:  # type: ignore
    """Format and return the specified exception information.

    If `exc_info_as_array` is set to True, this method returns an array of
    strings (one per line); otherwise a single string.
    """
    formatted = super().formatException(ei)
    if self.exc_info_as_array:
        return formatted.splitlines()
    return formatted
def formatStack(self, stack_info) -> Union[str, list[str]]:  # type: ignore
    """Format and return the specified stack information.

    If `stack_info_as_array` is set to True, this method returns an array of
    strings (one per line); otherwise a single string.
    """
    formatted = super().formatStack(stack_info)
    if self.stack_info_as_array:
        return formatted.splitlines()
    return formatted
nhairs-python-json-logger-9e6cb67/src/pythonjsonlogger/defaults.py 0000664 0000000 0000000 00000014661 14762515446 0025601 0 ustar 00root root 0000000 0000000 """Collection of functions for building custom `json_default` functions.
In general functions come in pairs of `use_x_default` and `x_default`, where the former is used
to determine if you should call the latter.
Most `use_x_default` functions also act as a [`TypeGuard`](https://mypy.readthedocs.io/en/stable/type_narrowing.html#user-defined-type-guards).
"""
### IMPORTS
### ============================================================================
## Future
from __future__ import annotations
## Standard Library
import base64
import dataclasses
import datetime
import enum
import sys
from types import TracebackType
from typing import Any
import traceback
import uuid
if sys.version_info >= (3, 10):
from typing import TypeGuard
else:
from typing_extensions import TypeGuard
## Installed
## Application
### FUNCTIONS
### ============================================================================
def unknown_default(obj: Any) -> str:
    """Backup default function for any object type.

    Will attempt to use `str` or `repr`. If both functions error will return
    the string `"__could_not_encode__"`.

    Args:
        obj: object to handle
    """
    for encode in (str, repr):
        try:
            return encode(obj)
        except Exception:  # pylint: disable=broad-exception-caught
            continue
    return "__could_not_encode__"
## Types
## -----------------------------------------------------------------------------
def use_type_default(obj: Any) -> TypeGuard[type]:
    """Default check function for `type` objects (aka classes).

    Args:
        obj: object to check
    """
    return isinstance(obj, type)


def type_default(obj: type) -> str:
    """Default function for `type` objects.

    Encodes a class as its (unqualified) name.

    Args:
        obj: object to handle
    """
    return obj.__name__
## Dataclasses
## -----------------------------------------------------------------------------
def use_dataclass_default(obj: Any) -> bool:
    """Default check function for dataclass instances.

    Dataclass *classes* are excluded; they are handled by `use_type_default`.
    """
    return dataclasses.is_dataclass(obj) and not isinstance(obj, type)


def dataclass_default(obj) -> dict[str, Any]:
    """Default function for dataclass instances

    Encodes the instance as a dict of its fields (recursively).

    Args:
        obj: object to handle
    """
    return dataclasses.asdict(obj)
## Dates and Times
## -----------------------------------------------------------------------------
def use_time_default(obj: Any) -> TypeGuard[datetime.time]:
    """Default check function for `datetime.time` instances.

    Args:
        obj: object to check
    """
    return isinstance(obj, datetime.time)


def time_default(obj: datetime.time) -> str:
    """Default function for `datetime.time` instances

    Encodes the time in ISO 8601 format.

    Args:
        obj: object to handle
    """
    return obj.isoformat()
def use_date_default(obj: Any) -> TypeGuard[datetime.date]:
    """Default check function for `datetime.date` instances.

    Note: `datetime.datetime` is a subclass of `datetime.date`, so this also
    matches datetimes.

    Args:
        obj: object to check
    """
    return isinstance(obj, datetime.date)


def date_default(obj: datetime.date) -> str:
    """Default function for `datetime.date` instances

    Encodes the date in ISO 8601 format.

    Args:
        obj: object to handle
    """
    return obj.isoformat()
def use_datetime_default(obj: Any) -> TypeGuard[datetime.datetime]:
    """Default check function for `datetime.datetime` instances.

    Args:
        obj: object to check
    """
    return isinstance(obj, datetime.datetime)


def datetime_default(obj: datetime.datetime) -> str:
    """Default function for `datetime.datetime` instances

    Encodes the datetime in ISO 8601 format.

    Args:
        obj: object to handle
    """
    return obj.isoformat()
def use_datetime_any(obj: Any) -> TypeGuard[datetime.time | datetime.date | datetime.datetime]:
    """Default check function for `datetime` related instances

    Args:
        obj: object to check
    """
    return isinstance(obj, (datetime.time, datetime.date, datetime.datetime))


# Fix: the annotation previously repeated `datetime.date` instead of including
# `datetime.datetime`, so it did not match the runtime check above.
def datetime_any(obj: datetime.time | datetime.date | datetime.datetime) -> str:
    """Default function for `datetime` related instances

    Encodes the value in ISO 8601 format.

    Args:
        obj: object to handle
    """
    return obj.isoformat()
## Exception and Tracebacks
## -----------------------------------------------------------------------------
def use_exception_default(obj: Any) -> TypeGuard[BaseException]:
    """Default check function for exception instances.

    Exception classes are not treated specially and should be handled by the
    `[use_]type_default` functions.

    Args:
        obj: object to check
    """
    return isinstance(obj, BaseException)


def exception_default(obj: BaseException) -> str:
    """Default function for exception instances

    Encodes as `"<ClassName>: <message>"`.

    Args:
        obj: object to handle
    """
    name = obj.__class__.__name__
    return f"{name}: {obj}"
def use_traceback_default(obj: Any) -> TypeGuard[TracebackType]:
    """Default check function for tracebacks.

    Args:
        obj: object to check
    """
    return isinstance(obj, TracebackType)


def traceback_default(obj: TracebackType) -> str:
    """Default function for tracebacks

    Encodes the traceback frames as a single (stripped) string.

    Args:
        obj: object to handle
    """
    frames = traceback.format_tb(obj)
    return "".join(frames).strip()
## Enums
## -----------------------------------------------------------------------------
def use_enum_default(obj: Any) -> TypeGuard[enum.Enum | enum.EnumMeta]:
    """Default check function for enums.

    Supports both enum classes and enum values.

    Args:
        obj: object to check
    """
    return isinstance(obj, (enum.Enum, enum.EnumMeta))


def enum_default(obj: enum.Enum | enum.EnumMeta) -> Any | list[Any]:
    """Default function for enums.

    An enum member encodes as its value; an enum class encodes as the list of
    its members' values.

    Args:
        obj: object to handle
    """
    if isinstance(obj, enum.Enum):
        return obj.value
    return [member.value for member in obj]  # type: ignore[var-annotated]
## UUIDs
## -----------------------------------------------------------------------------
def use_uuid_default(obj: Any) -> TypeGuard[uuid.UUID]:
    """Default check function for `uuid.UUID` instances.

    Args:
        obj: object to check
    """
    return isinstance(obj, uuid.UUID)


def uuid_default(obj: uuid.UUID) -> str:
    """Default function for `uuid.UUID` instances

    Formats the UUID using "hyphen" format.

    Args:
        obj: object to handle
    """
    return str(obj)
## Bytes
## -----------------------------------------------------------------------------
def use_bytes_default(obj: Any) -> TypeGuard[bytes | bytearray]:
    """Default check function for bytes.

    Args:
        obj: object to check
    """
    return isinstance(obj, (bytes, bytearray))


def bytes_default(obj: bytes | bytearray, url_safe: bool = True) -> str:
    """Default function for bytes

    Args:
        obj: object to handle
        url_safe: use URL safe base 64 character set.

    Returns:
        The byte data as a base 64 string.
    """
    encoder = base64.urlsafe_b64encode if url_safe else base64.b64encode
    return encoder(obj).decode("utf8")
nhairs-python-json-logger-9e6cb67/src/pythonjsonlogger/exception.py 0000664 0000000 0000000 00000001444 14762515446 0025763 0 ustar 00root root 0000000 0000000 ### IMPORTS
### ============================================================================
## Future
from __future__ import annotations
## Standard Library
## Installed
## Application
### CLASSES
### ============================================================================
class PythonJsonLoggerError(Exception):
    """Generic base class for all Python JSON Logger exceptions."""


class MissingPackageError(ImportError, PythonJsonLoggerError):
    """A required package is missing."""

    def __init__(self, name: str, extras_name: str | None = None) -> None:
        """
        Args:
            name: import name of the missing package
            extras_name: optional extras group that would install the package
        """
        message = f"The {name!r} package is required but could not be found."
        if extras_name is not None:
            message += f" It can be installed using 'python-json-logger[{extras_name}]'."
        super().__init__(message)
        return
nhairs-python-json-logger-9e6cb67/src/pythonjsonlogger/json.py 0000664 0000000 0000000 00000010105 14762515446 0024730 0 ustar 00root root 0000000 0000000 """JSON formatter using the standard library's `json` for encoding.
Module contains the `JsonFormatter` and a custom `JsonEncoder` which supports a greater
variety of types.
"""
### IMPORTS
### ============================================================================
## Future
from __future__ import annotations
## Standard Library
import datetime
import json
from typing import Any, Callable, Optional, Union
import warnings
## Application
from . import core
from . import defaults as d
### CLASSES
### ============================================================================
class JsonEncoder(json.JSONEncoder):
    """A custom encoder extending [json.JSONEncoder](https://docs.python.org/3/library/json.html#json.JSONEncoder)

    Adds support for datetime/date/time, exceptions, tracebacks, enums, bytes,
    dataclasses and classes; anything else falls back to `str()`/`repr()`.
    """

    def default(self, o: Any) -> Any:
        # Try each supported non-standard type in turn. Ordering matters: the
        # enum check covers EnumMeta (enum classes), which are also `type`
        # instances, so it must run before the generic type check.
        if d.use_datetime_any(o):
            return self.format_datetime_obj(o)

        if d.use_exception_default(o):
            return d.exception_default(o)

        if d.use_traceback_default(o):
            return d.traceback_default(o)

        if d.use_enum_default(o):
            return d.enum_default(o)

        if d.use_bytes_default(o):
            return d.bytes_default(o)

        if d.use_dataclass_default(o):
            return d.dataclass_default(o)

        if d.use_type_default(o):
            return d.type_default(o)

        try:
            return super().default(o)
        except TypeError:
            # Base encoder raised for an unknown type: encode best-effort via
            # str()/repr() instead of propagating the error.
            return d.unknown_default(o)

    def format_datetime_obj(self, o: datetime.time | datetime.date | datetime.datetime) -> str:
        """Format datetime objects found in `self.default`

        This allows subclasses to change the datetime format without understanding the
        internals of the default method.
        """
        return d.datetime_any(o)
class JsonFormatter(core.BaseJsonFormatter):
    """JSON formatter using the standard library's [`json`](https://docs.python.org/3/library/json.html) for encoding"""

    def __init__(
        self,
        *args,
        json_default: core.OptionalCallableOrStr = None,
        json_encoder: core.OptionalCallableOrStr = None,
        json_serializer: Union[Callable, str] = json.dumps,
        json_indent: Optional[Union[int, str]] = None,
        json_ensure_ascii: bool = True,
        **kwargs,
    ) -> None:
        """
        Args:
            args: see [BaseJsonFormatter][pythonjsonlogger.core.BaseJsonFormatter]
            json_default: a function for encoding non-standard objects
            json_encoder: custom JSON encoder
            json_serializer: a [`json.dumps`](https://docs.python.org/3/library/json.html#json.dumps)-compatible callable
                that will be used to serialize the log record.
            json_indent: indent parameter for the `json_serializer`
            json_ensure_ascii: `ensure_ascii` parameter for the `json_serializer`
            kwargs: see [BaseJsonFormatter][pythonjsonlogger.core.BaseJsonFormatter]
        """
        super().__init__(*args, **kwargs)

        # String arguments are resolved to objects via core.str_to_object —
        # presumably dotted import paths; see core for the exact semantics.
        self.json_default = core.str_to_object(json_default)
        self.json_encoder = core.str_to_object(json_encoder)
        self.json_serializer = core.str_to_object(json_serializer)
        self.json_indent = json_indent
        self.json_ensure_ascii = json_ensure_ascii
        if not self.json_encoder and not self.json_default:
            # Neither an encoder nor a default hook was given: use our
            # extended JsonEncoder so common non-JSON types work out of the box.
            self.json_encoder = JsonEncoder
        return

    def jsonify_log_record(self, log_record: core.LogRecord) -> str:
        """Returns a json string of the log record."""
        return self.json_serializer(
            log_record,
            default=self.json_default,
            cls=self.json_encoder,
            indent=self.json_indent,
            ensure_ascii=self.json_ensure_ascii,
        )
### DEPRECATED COMPATIBILITY
### ============================================================================
def __getattr__(name: str):
    """Module-level attribute hook: deprecated access to `RESERVED_ATTRS`."""
    if name != "RESERVED_ATTRS":
        raise AttributeError(f"module {__name__} has no attribute {name}")
    warnings.warn(
        "RESERVED_ATTRS has been moved to pythonjsonlogger.core",
        DeprecationWarning,
    )
    return core.RESERVED_ATTRS
nhairs-python-json-logger-9e6cb67/src/pythonjsonlogger/jsonlogger.py 0000664 0000000 0000000 00000000641 14762515446 0026134 0 ustar 00root root 0000000 0000000 """Stub module retained for compatibility.
It retains access to old names whilst sending deprecation warnings.
"""
# pylint: disable=wrong-import-position,unused-import
import warnings

## Throw warning
# Emitted at import time: this module exists only for backwards compatibility.
warnings.warn(
    "pythonjsonlogger.jsonlogger has been moved to pythonjsonlogger.json",
    DeprecationWarning,
)

## Import names
# Re-export the old public names from their new locations.
from .json import JsonFormatter, JsonEncoder
from .core import RESERVED_ATTRS
nhairs-python-json-logger-9e6cb67/src/pythonjsonlogger/msgspec.py 0000664 0000000 0000000 00000004161 14762515446 0025425 0 ustar 00root root 0000000 0000000 """JSON Formatter using [`msgspec`](https://github.com/jcrist/msgspec)"""
### IMPORTS
### ============================================================================
## Future
from __future__ import annotations
## Standard Library
from typing import Any
## Installed
## Application
from . import core
from . import defaults as d
from .utils import package_is_available
# We import msgspec after checking it is available
package_is_available("msgspec", throw_error=True)
import msgspec.json # pylint: disable=wrong-import-position,wrong-import-order
### FUNCTIONS
### ============================================================================
def msgspec_default(obj: Any) -> Any:
    """msgspec default encoder function for non-standard types

    Args:
        obj: object to encode
    """
    # Order matters: enum classes are also `type` instances, so the enum check
    # must run before the generic type check.
    handlers = (
        (d.use_exception_default, d.exception_default),
        (d.use_traceback_default, d.traceback_default),
        (d.use_enum_default, d.enum_default),
        (d.use_type_default, d.type_default),
    )
    for use_default, default in handlers:
        if use_default(obj):
            return default(obj)
    return d.unknown_default(obj)
### CLASSES
### ============================================================================
class MsgspecFormatter(core.BaseJsonFormatter):
    """JSON formatter using [`msgspec.json.Encoder`](https://jcristharif.com/msgspec/api.html#msgspec.json.Encoder) for encoding."""

    def __init__(
        self,
        *args,
        json_default: core.OptionalCallableOrStr = msgspec_default,
        **kwargs,
    ) -> None:
        """
        Args:
            args: see [BaseJsonFormatter][pythonjsonlogger.core.BaseJsonFormatter]
            json_default: a function for encoding non-standard objects
            kwargs: see [BaseJsonFormatter][pythonjsonlogger.core.BaseJsonFormatter]
        """
        super().__init__(*args, **kwargs)

        self.json_default = core.str_to_object(json_default)
        # The encoder is created once here (with the default hook attached)
        # and reused for every record.
        self._encoder = msgspec.json.Encoder(enc_hook=self.json_default)
        return

    def jsonify_log_record(self, log_record: core.LogRecord) -> str:
        """Returns a json string of the log record."""
        # msgspec encodes to bytes; decode to match the str-based interface.
        return self._encoder.encode(log_record).decode("utf8")
nhairs-python-json-logger-9e6cb67/src/pythonjsonlogger/orjson.py 0000664 0000000 0000000 00000004465 14762515446 0025305 0 ustar 00root root 0000000 0000000 """JSON Formatter using [orjson](https://github.com/ijl/orjson)"""
### IMPORTS
### ============================================================================
## Future
from __future__ import annotations
## Standard Library
from typing import Any
## Installed
## Application
from . import core
from . import defaults as d
from .utils import package_is_available
# We import orjson after checking it is available
package_is_available("orjson", throw_error=True)

import orjson  # pylint: disable=wrong-import-position,wrong-import-order
### FUNCTIONS
### ============================================================================
def orjson_default(obj: Any) -> Any:
    """orjson default encoder function for non-standard types

    Args:
        obj: object to encode
    """
    # Order matters: enum classes are also `type` instances, so the enum check
    # must run before the generic type check.
    handlers = (
        (d.use_exception_default, d.exception_default),
        (d.use_traceback_default, d.traceback_default),
        (d.use_bytes_default, d.bytes_default),
        (d.use_enum_default, d.enum_default),
        (d.use_type_default, d.type_default),
    )
    for use_default, default in handlers:
        if use_default(obj):
            return default(obj)
    return d.unknown_default(obj)
### CLASSES
### ============================================================================
class OrjsonFormatter(core.BaseJsonFormatter):
    """JSON formatter using [orjson](https://github.com/ijl/orjson) for encoding."""

    def __init__(
        self,
        *args,
        json_default: core.OptionalCallableOrStr = orjson_default,
        json_indent: bool = False,
        **kwargs,
    ) -> None:
        """
        Args:
            args: see [BaseJsonFormatter][pythonjsonlogger.core.BaseJsonFormatter]
            json_default: a function for encoding non-standard objects
            json_indent: indent output with 2 spaces.
            kwargs: see [BaseJsonFormatter][pythonjsonlogger.core.BaseJsonFormatter]
        """
        super().__init__(*args, **kwargs)

        self.json_default = core.str_to_object(json_default)
        self.json_indent = json_indent
        return

    def jsonify_log_record(self, log_record: core.LogRecord) -> str:
        """Returns a json string of the log record."""
        # OPT_NON_STR_KEYS lets orjson serialize non-`str` dict keys
        # (e.g. the int keys exercised by the test suite).
        opt = orjson.OPT_NON_STR_KEYS
        if self.json_indent:
            opt |= orjson.OPT_INDENT_2

        # orjson encodes to bytes; decode to match the str-based interface.
        return orjson.dumps(log_record, default=self.json_default, option=opt).decode("utf8")
nhairs-python-json-logger-9e6cb67/src/pythonjsonlogger/py.typed 0000664 0000000 0000000 00000000120 14762515446 0025100 0 ustar 00root root 0000000 0000000 # PEP-561 marker. https://mypy.readthedocs.io/en/latest/installed_packages.html
nhairs-python-json-logger-9e6cb67/src/pythonjsonlogger/utils.py 0000664 0000000 0000000 00000002146 14762515446 0025125 0 ustar 00root root 0000000 0000000 """Utilities for Python JSON Logger"""
### IMPORTS
### ============================================================================
## Future
from __future__ import annotations
## Standard Library
import importlib.util
## Installed
## Application
from .exception import MissingPackageError
### FUNCTIONS
### ============================================================================
def package_is_available(
    name: str, *, throw_error: bool = False, extras_name: str | None = None
) -> bool:
    """Determine if the given package is available for import.

    Args:
        name: Import name of the package to check.
        throw_error: Throw an error if the package is unavailable.
        extras_name: Extra dependency name to use in `throw_error`'s message.

    Raises:
        MissingPackageError: When `throw_error` is `True` and the return value would be `False`

    Returns:
        If the package is available for import.
    """
    if importlib.util.find_spec(name) is not None:
        return True
    if throw_error:
        raise MissingPackageError(name, extras_name)
    return False
nhairs-python-json-logger-9e6cb67/tests/ 0000775 0000000 0000000 00000000000 14762515446 0020350 5 ustar 00root root 0000000 0000000 nhairs-python-json-logger-9e6cb67/tests/__init__.py 0000664 0000000 0000000 00000000000 14762515446 0022447 0 ustar 00root root 0000000 0000000 nhairs-python-json-logger-9e6cb67/tests/test_deprecation.py 0000664 0000000 0000000 00000002315 14762515446 0024257 0 ustar 00root root 0000000 0000000 ### IMPORTS
### ============================================================================
## Future
from __future__ import annotations
## Standard Library
import subprocess
import sys
## Installed
import pytest
## Application
import pythonjsonlogger
### TESTS
### ============================================================================
def test_jsonlogger_deprecated():
    # Importing the legacy module must emit a DeprecationWarning.
    with pytest.deprecated_call():
        import pythonjsonlogger.jsonlogger
    return
def test_jsonlogger_reserved_attrs_deprecated():
    # Accessing the moved RESERVED_ATTRS must emit a DeprecationWarning.
    with pytest.deprecated_call():
        # Note: We use json instead of jsonlogger as jsonlogger will also produce
        # a DeprecationWarning and we specifically want the one for RESERVED_ATTRS
        pythonjsonlogger.json.RESERVED_ATTRS
    return
@pytest.mark.parametrize(
    "command",
    [
        "from pythonjsonlogger import jsonlogger",
        "import pythonjsonlogger.jsonlogger",
        "from pythonjsonlogger.jsonlogger import JsonFormatter",
        "from pythonjsonlogger.jsonlogger import RESERVED_ATTRS",
    ],
)
def test_import(command: str):
    # Run each legacy import in a fresh interpreter so module caching and
    # warning filters in this process can't interfere.
    output = subprocess.check_output([sys.executable, "-c", f"{command};print('OK')"])
    assert output.strip() == b"OK"
    return
nhairs-python-json-logger-9e6cb67/tests/test_formatters.py 0000664 0000000 0000000 00000052250 14762515446 0024153 0 ustar 00root root 0000000 0000000 ### IMPORTS
### ============================================================================
## Future
from __future__ import annotations
## Standard Library
from dataclasses import dataclass
import datetime
import enum
import io
import json
import logging
import sys
import traceback
from types import TracebackType
from typing import Any, Generator
import uuid
if sys.version_info >= (3, 9):
import zoneinfo
else:
from backports import zoneinfo
## Installed
import freezegun
import pytest
## Application
import pythonjsonlogger
import pythonjsonlogger.defaults
from pythonjsonlogger.core import RESERVED_ATTRS, BaseJsonFormatter, merge_record_extra
from pythonjsonlogger.json import JsonFormatter
if pythonjsonlogger.ORJSON_AVAILABLE:
from pythonjsonlogger.orjson import OrjsonFormatter
if pythonjsonlogger.MSGSPEC_AVAILABLE:
from pythonjsonlogger.msgspec import MsgspecFormatter
### SETUP
### ============================================================================
ALL_FORMATTERS: list[type[BaseJsonFormatter]] = [JsonFormatter]
if pythonjsonlogger.ORJSON_AVAILABLE:
ALL_FORMATTERS.append(OrjsonFormatter)
if pythonjsonlogger.MSGSPEC_AVAILABLE:
ALL_FORMATTERS.append(MsgspecFormatter)
_LOGGER_COUNT = 0
@dataclass
class LoggingEnvironment:
    """Bundles a logger with an in-memory buffer capturing its output."""

    logger: logging.Logger
    buffer: io.StringIO
    handler: logging.Handler

    def set_formatter(self, formatter: BaseJsonFormatter) -> None:
        """Attach the given formatter to the captured handler."""
        self.handler.setFormatter(formatter)
        return

    def load_json(self) -> Any:
        """Parse everything written to the buffer as a single JSON document."""
        return json.loads(self.buffer.getvalue())
@pytest.fixture
def env() -> Generator[LoggingEnvironment, None, None]:
    # Each test gets a uniquely named logger so handlers and levels never
    # leak between tests.
    global _LOGGER_COUNT  # pylint: disable=global-statement
    _LOGGER_COUNT += 1

    logger = logging.getLogger(f"pythonjsonlogger.tests.{_LOGGER_COUNT}")
    logger.setLevel(logging.DEBUG)
    buffer = io.StringIO()
    handler = logging.StreamHandler(buffer)
    logger.addHandler(handler)

    yield LoggingEnvironment(logger=logger, buffer=buffer, handler=handler)

    # Teardown: detach the handler and reset the logger level.
    logger.removeHandler(handler)
    logger.setLevel(logging.NOTSET)
    buffer.close()
    return
def get_traceback_from_exception_followed_by_log_call(env_: LoggingEnvironment) -> str:
    """Raise an exception, log it via `logger.exception`, and return the
    traceback string the formatter is expected to have emitted."""
    try:
        raise Exception("test")
    except Exception as e:
        env_.logger.exception("hello")
        str_traceback = traceback.format_exc()
        # Formatter removes trailing new line
        if str_traceback.endswith("\n"):
            str_traceback = str_traceback[:-1]

    return str_traceback
class SomeClass:
    """Minimal object with one attribute, used as an opaque log argument."""

    def __init__(self, thing: int):
        self.thing = thing
        return
class BrokenClass:
    """Object whose string conversions always fail.

    Used to exercise the ``__could_not_encode__`` fallback path.
    """

    def __str__(self) -> str:
        raise ValueError("hahah sucker")

    def __repr__(self) -> str:
        return str(self)
@dataclass
class SomeDataclass:
    """Plain dataclass used to exercise dataclass encoding in the formatters."""

    things: str
    stuff: int
    junk: bool
# Build a real traceback object once at import time for reuse across tests.
try:
    raise ValueError
except ValueError as e:
    STATIC_TRACEBACK = e.__traceback__
    del e  # drop the exception so only the traceback object is kept alive
class MultiEnum(enum.Enum):
    """Enum covering the different value types the encoders must handle."""

    NONE = None
    BOOL = False
    STR = "somestring"
    INT = 99
    BYTES = b"some-bytes"


NO_TEST = object()  # Sentinel
### TESTS
### ============================================================================
def test_merge_record_extra():
    # merge_record_extra should keep existing target keys and copy record
    # attributes into the returned mapping when nothing is reserved.
    record = logging.LogRecord(
        "name", level=1, pathname="", lineno=1, msg="Some message", args=None, exc_info=None
    )
    output = merge_record_extra(record, target={"foo": "bar"}, reserved=[])
    assert output["foo"] == "bar"
    assert output["msg"] == "Some message"
    return
## Common Formatter Tests
## -----------------------------------------------------------------------------
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_default_format(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    # A formatter with no format string must still emit the "message" field.
    env.set_formatter(class_())

    msg = "testing logging format"
    env.logger.info(msg)
    log_json = env.load_json()

    assert log_json["message"] == msg
    return


@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_percentage_format(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    env.set_formatter(
        class_(
            # All kind of different styles to check the regex
            # (including "%(asctime)" without a conversion on purpose)
            "[%(levelname)8s] %(message)s %(filename)s:%(lineno)d %(asctime)"
        )
    )

    msg = "testing logging format"
    env.logger.info(msg)
    log_json = env.load_json()

    assert log_json["message"] == msg
    assert log_json.keys() == {"levelname", "message", "filename", "lineno", "asctime"}
    return


@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_defaults_field(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    # Values passed via `extra` override colliding `defaults`.
    env.set_formatter(class_(defaults={"first": 1, "second": 2}))

    env.logger.info("testing defaults field", extra={"first": 1234})
    log_json = env.load_json()

    assert log_json["first"] == 1234
    assert log_json["second"] == 2
    return


@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_rename_base_field(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    # Built-in fields such as "message" can be renamed in the output.
    env.set_formatter(class_(rename_fields={"message": "@message"}))

    msg = "testing logging format"
    env.logger.info(msg)
    log_json = env.load_json()

    assert log_json["@message"] == msg
    return
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_rename_with_defaults(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """Make sure that the default fields are also renamed."""
    env.set_formatter(class_(rename_fields={"custom": "@custom"}, defaults={"custom": 1234}))

    msg = "testing rename with defaults"
    env.logger.info(msg)
    log_json = env.load_json()

    assert log_json["@custom"] == 1234
    assert "custom" not in log_json
    return


@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_rename_missing(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    # By default, renaming a field that is absent adds nothing to the output.
    env.set_formatter(class_(rename_fields={"missing_field": "new_field"}))

    msg = "test rename missing field"
    env.logger.info(msg)
    log_json = env.load_json()

    assert log_json["message"] == msg
    assert "missing_field" not in log_json
    assert "new_field" not in log_json
    return


@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_rename_keep_missing(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    # With rename_fields_keep_missing=True the rename target appears with None.
    env.set_formatter(
        class_(rename_fields={"missing_field": "new_field"}, rename_fields_keep_missing=True)
    )

    msg = "test keep rename missing field"
    env.logger.info(msg)
    log_json = env.load_json()

    assert log_json["message"] == msg
    assert "missing_field" not in log_json
    assert log_json["new_field"] is None
    return


@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_rename_preserve_order(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    # Renaming must not change a field's position in the output.
    env.set_formatter(
        class_("{levelname}{message}{asctime}", style="{", rename_fields={"levelname": "LEVEL"})
    )

    env.logger.info("testing logging rename order")
    log_json = env.load_json()

    assert list(log_json.keys())[0] == "LEVEL"
    return


@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_rename_once(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    # A field is renamed at most once even when the rename map "chains"
    # (levelname -> LEVEL while message -> levelname).
    env.set_formatter(
        class_(
            "{levelname}{message}{asctime}",
            style="{",
            rename_fields={"levelname": "LEVEL", "message": "levelname"},
        )
    )

    msg = "something"
    env.logger.info(msg)
    log_json = env.load_json()

    assert log_json["LEVEL"] == "INFO"
    assert log_json["levelname"] == msg
    return
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_add_static_fields(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    # static_fields are added verbatim to every record.
    env.set_formatter(class_(static_fields={"log_stream": "kafka"}))

    msg = "testing static fields"
    env.logger.info(msg)
    log_json = env.load_json()

    assert log_json["log_stream"] == "kafka"
    assert log_json["message"] == msg
    return


@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_format_keys(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    # Every standard LogRecord attribute listed here should be extractable
    # via a %-style format string.
    supported_keys = [
        "asctime",
        "created",
        "filename",
        "funcName",
        "levelname",
        "levelno",
        "lineno",
        "module",
        "msecs",
        "message",
        "name",
        "pathname",
        "process",
        "processName",
        "relativeCreated",
        "thread",
        "threadName",
    ]

    # Build "%(key)s %(key)s ..." covering all supported keys.
    log_format = lambda x: [f"%({i:s})s" for i in x]
    custom_format = " ".join(log_format(supported_keys))

    env.set_formatter(class_(custom_format))

    msg = "testing logging format"
    env.logger.info(msg)
    log_json = env.load_json()

    for key in supported_keys:
        assert key in log_json
    return


@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_unknown_format_key(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    # An unknown key in the format string must not raise when logging.
    env.set_formatter(class_("%(unknown_key)s %(message)s"))

    env.logger.info("testing unknown logging format")
    # make sure no error occurs
    return
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_log_dict(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """Logging a dict merges its items into the record output."""
    env.set_formatter(class_())
    payload = {"text": "testing logging", "num": 1, 5: "9", "nested": {"more": "data"}}
    env.logger.info(payload)
    log_json = env.load_json()
    assert log_json["text"] == payload["text"]
    assert log_json["num"] == payload["num"]
    # The integer key 5 is serialised under the string "5".
    assert log_json["5"] == payload[5]
    assert log_json["nested"] == payload["nested"]
    # When the msg argument is a dict, no message text remains.
    assert log_json["message"] == ""
    return
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_log_dict_defaults(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """Default fields fill in missing keys but are overridden by logged values."""
    env.set_formatter(class_(defaults={"d1": 1234, "d2": "hello"}))
    env.logger.info({"d2": "world"})
    log_json = env.load_json()
    assert log_json["d1"] == 1234  # untouched default
    assert log_json["d2"] == "world"  # overridden by the logged dict
    return
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_log_extra(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """Items passed via ``extra`` are merged into the JSON output."""
    env.set_formatter(class_())
    extra = {"text": "testing logging", "num": 1, 5: "9", "nested": {"more": "data"}}
    env.logger.info("hello", extra=extra)  # type: ignore[arg-type]
    log_json = env.load_json()
    # Note: the integer key 5 comes out stringified as "5".
    expectations = [
        ("text", extra["text"]),
        ("num", extra["num"]),
        ("5", extra[5]),
        ("nested", extra["nested"]),
        ("message", "hello"),
    ]
    for key, value in expectations:
        assert log_json[key] == value
    return
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_custom_logic_adds_field(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """Subclasses can inject fields by overriding ``process_log_record``."""

    class CustomJsonFormatter(class_):  # type: ignore[valid-type,misc]
        def process_log_record(self, log_record):
            # Add our field before deferring to the parent implementation.
            log_record["custom"] = "value"
            return super().process_log_record(log_record)

    env.set_formatter(CustomJsonFormatter())
    env.logger.info("message")
    assert env.load_json()["custom"] == "value"
    return
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_exc_info(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """An exception logged with exc_info serialises its formatted traceback."""
    env.set_formatter(class_())
    expected = get_traceback_from_exception_followed_by_log_call(env)
    assert env.load_json()["exc_info"] == expected
    return
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_exc_info_renamed(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """rename_fields moves exc_info under the new key when named in the format."""
    env.set_formatter(class_("%(exc_info)s", rename_fields={"exc_info": "stack_trace"}))
    expected = get_traceback_from_exception_followed_by_log_call(env)
    log_json = env.load_json()
    # Only the new key may be present.
    assert "exc_info" not in log_json
    assert log_json["stack_trace"] == expected
    return
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_exc_info_renamed_not_required(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """Renaming exc_info works even when it is absent from the format string."""
    env.set_formatter(class_(rename_fields={"exc_info": "stack_trace"}))
    expected = get_traceback_from_exception_followed_by_log_call(env)
    log_json = env.load_json()
    # Only the new key may be present.
    assert "exc_info" not in log_json
    assert log_json["stack_trace"] == expected
    return
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_exc_info_renamed_no_error(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """A rename mapping for exc_info must not add keys when nothing was raised."""
    env.set_formatter(class_(rename_fields={"exc_info": "stack_trace"}))
    env.logger.info("message")
    log_json = env.load_json()
    for absent_key in ("stack_trace", "exc_info"):
        assert absent_key not in log_json
    return
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_custom_object_serialization(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """A ``json_default`` hook can serialise otherwise unsupported objects."""

    def encode_complex(z):
        # Encode complex numbers as a (real, imag) pair; JSON turns it into a list.
        if isinstance(z, complex):
            return (z.real, z.imag)
        # Fixed wording: previously read "is no JSON serializable".
        raise TypeError(f"Object of type {type(z)} is not JSON serializable")

    env.set_formatter(class_(json_default=encode_complex))  # type: ignore[call-arg]
    env.logger.info("foo", extra={"special": complex(3, 8)})
    log_json = env.load_json()
    assert log_json["special"] == [3.0, 8.0]
    return
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_rename_reserved_attrs(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """Reserved attrs excluded from suppression can be renamed to dotted keys."""
    reserved_attrs_map = {
        "exc_info": "error.type",
        "exc_text": "error.message",
        "funcName": "log.origin.function",
        "levelname": "log.level",
        "module": "log.origin.file.name",
        "processName": "process.name",
        "threadName": "process.thread.name",
        "msg": "log.message",
    }
    # Generator expression replaces the previous lambda-assigned-to-a-name
    # (PEP 8 E731); iterating a dict yields its keys directly.
    custom_format = " ".join(f"%({key:s})s" for key in reserved_attrs_map)
    # Keep every reserved attr suppressed except the ones we rename.
    # (Membership test on the dict itself; no need for list(...keys()).)
    reserved_attrs = [attr for attr in RESERVED_ATTRS if attr not in reserved_attrs_map]
    env.set_formatter(
        class_(custom_format, reserved_attrs=reserved_attrs, rename_fields=reserved_attrs_map)
    )
    env.logger.info("message")
    log_json = env.load_json()
    for old_name, new_name in reserved_attrs_map.items():
        assert new_name in log_json
        assert old_name not in log_json
    return
@freezegun.freeze_time(datetime.datetime(2017, 7, 14, 2, 40))
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_default_encoder_with_timestamp(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """``timestamp=True`` adds an ISO-8601 timestamp field to the output."""
    needs_fake_date_workaround = (
        pythonjsonlogger.ORJSON_AVAILABLE and class_ is OrjsonFormatter
    ) or (pythonjsonlogger.MSGSPEC_AVAILABLE and class_ is MsgspecFormatter)
    if needs_fake_date_workaround:
        # FakeDatetime not supported
        # https://github.com/ijl/orjson/issues/481
        # https://github.com/jcrist/msgspec/issues/678
        def json_default(obj: Any) -> Any:
            if isinstance(obj, freezegun.api.FakeDate):
                return obj.isoformat()
            raise ValueError(f"Unexpected object: {obj!r}")

        env.set_formatter(class_(timestamp=True, json_default=json_default))  # type: ignore[call-arg]
    else:
        env.set_formatter(class_(timestamp=True))
    env.logger.info("Hello")
    assert env.load_json()["timestamp"] == "2017-07-14T02:40:00+00:00"
    return
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
@pytest.mark.parametrize(
    ["obj", "type_", "expected"],
    [
        # Each case: (object to log, expected decoded type, expected decoded value).
        ("somestring", str, "somestring"),
        ("some unicode Привет", str, "some unicode Привет"),
        (1234, int, 1234),
        (1234.5, float, 1234.5),
        (False, bool, False),
        (None, type(None), None),
        # bytes come out base64 encoded
        (b"some-bytes", str, "c29tZS1ieXRlcw=="),
        # date/time objects come out in ISO format
        (datetime.time(16, 45, 30, 100), str, "16:45:30.000100"),
        (datetime.date(2024, 5, 5), str, "2024-05-05"),
        (datetime.datetime(2024, 5, 5, 16, 45, 30, 100), str, "2024-05-05T16:45:30.000100"),
        (
            datetime.datetime(2024, 5, 5, 16, 45, 30, 100, zoneinfo.ZoneInfo("Australia/Sydney")),
            str,
            "2024-05-05T16:45:30.000100+10:00",
        ),
        (
            uuid.UUID("urn:uuid:12345678-1234-5678-1234-567812345678"),
            str,
            "12345678-1234-5678-1234-567812345678",
        ),
        # exception classes encode to their name; instances include the message
        (Exception, str, "Exception"),
        (Exception("Foo occurred"), str, "Exception: Foo occurred"),
        (BaseException, str, "BaseException"),
        (BaseException("BaseFoo occurred"), str, "BaseException: BaseFoo occurred"),
        (STATIC_TRACEBACK, str, pythonjsonlogger.defaults.traceback_default(STATIC_TRACEBACK)),  # type: ignore[arg-type]
        (
            SomeDataclass(things="le_things", stuff=99, junk=False),
            dict,
            {"things": "le_things", "stuff": 99, "junk": False},
        ),
        (SomeDataclass, str, "SomeDataclass"),
        (SomeClass, str, "SomeClass"),
        # NO_TEST sentinel: only the decoded type is checked, not the value
        (SomeClass(1234), str, NO_TEST),
        (BrokenClass(), str, "__could_not_encode__"),
        # enum members encode to their (encoded) value; enum classes to a list
        (MultiEnum.NONE, type(None), None),
        (MultiEnum.BOOL, bool, MultiEnum.BOOL.value),
        (MultiEnum.STR, str, MultiEnum.STR.value),
        (MultiEnum.INT, int, MultiEnum.INT.value),
        (MultiEnum.BYTES, str, "c29tZS1ieXRlcw=="),
        (MultiEnum, list, [None, False, "somestring", 99, "c29tZS1ieXRlcw=="]),
    ],
)
def test_common_types_encoded(
    env: LoggingEnvironment,
    class_: type[BaseJsonFormatter],
    obj: object,
    type_: type,
    expected: Any,
):
    """Common Python types encode consistently at top level and when nested."""
    ## Known bad cases
    if pythonjsonlogger.MSGSPEC_AVAILABLE and class_ is MsgspecFormatter:
        # Dataclass: https://github.com/jcrist/msgspec/issues/681
        # Enum: https://github.com/jcrist/msgspec/issues/680
        # These have been fixed in msgspec 0.19.0, however they also dropped python 3.8 support.
        # https://github.com/jcrist/msgspec/releases/tag/0.19.0
        if sys.version_info < (3, 9) and (
            obj is SomeDataclass
            or (
                isinstance(obj, enum.Enum)
                and obj in {MultiEnum.BYTES, MultiEnum.NONE, MultiEnum.BOOL}
            )
        ):
            pytest.xfail()
    ## Test
    env.set_formatter(class_())
    # Exercise the object at top level, inside a dict, and inside a list.
    extra = {
        "extra": obj,
        "extra_dict": {"item": obj},
        "extra_list": [obj],
    }
    env.logger.info("hello", extra=extra)
    log_json = env.load_json()
    assert isinstance(log_json["extra"], type_)
    assert isinstance(log_json["extra_dict"]["item"], type_)
    assert isinstance(log_json["extra_list"][0], type_)
    if expected is NO_TEST:
        return
    # Identity checks for None/bool catch 0/1 vs False/True confusion.
    if expected is None or isinstance(expected, bool):
        assert log_json["extra"] is expected
        assert log_json["extra_dict"]["item"] is expected
        assert log_json["extra_list"][0] is expected
    else:
        assert log_json["extra"] == expected
        assert log_json["extra_dict"]["item"] == expected
        assert log_json["extra_list"][0] == expected
    return
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_custom_default(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """A user supplied ``json_default`` is applied to custom classes."""

    def custom_default(obj):
        # Map SomeClass instances to a small dict; anything else falls back to None.
        if isinstance(obj, SomeClass):
            return {"TYPE": obj.thing}
        return None

    env.set_formatter(class_(json_default=custom_default))  # type: ignore[call-arg]
    env.logger.info("hello", extra={"extra": SomeClass(999)})
    assert env.load_json()["extra"] == {"TYPE": 999}
    return
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_exc_info_as_array(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """``exc_info_as_array=True`` serialises the traceback as a list."""
    env.set_formatter(class_(exc_info_as_array=True))
    try:
        raise Exception("Error")
    except BaseException:
        env.logger.exception("Error occurs")
    assert isinstance(env.load_json()["exc_info"], list)
    return
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_exc_info_as_array_no_exc_info(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """``exc_info_as_array`` must not invent a field when nothing was raised."""
    env.set_formatter(class_(exc_info_as_array=True))
    env.logger.info("hello")
    assert "exc_info" not in env.load_json()
    return
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_stack_info_as_array(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """``stack_info_as_array=True`` serialises the stack as a list."""
    env.set_formatter(class_(stack_info_as_array=True))
    env.logger.info("hello", stack_info=True)
    assert isinstance(env.load_json()["stack_info"], list)
    return
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_stack_info_as_array_no_stack_info(
    env: LoggingEnvironment, class_: type[BaseJsonFormatter]
):
    """``stack_info_as_array`` must not add a field when stack_info is off."""
    env.set_formatter(class_(stack_info_as_array=True))
    env.logger.info("hello", stack_info=False)
    assert "stack_info" not in env.load_json()
    return
## JsonFormatter Specific
## -----------------------------------------------------------------------------
def test_json_ensure_ascii_true(env: LoggingEnvironment):
    """By default non-ASCII characters are escaped in the raw output."""
    env.set_formatter(JsonFormatter())
    env.logger.info("Привет")
    # Note: we don't use env.load_json as we want to know the raw output
    raw = env.buffer.getvalue()
    message_value = raw.split('"message": "', 1)[1].split('"', 1)[0]
    assert message_value == r"\u041f\u0440\u0438\u0432\u0435\u0442"
    return
def test_json_ensure_ascii_false(env: LoggingEnvironment):
    """With json_ensure_ascii=False non-ASCII characters pass through raw."""
    env.set_formatter(JsonFormatter(json_ensure_ascii=False))
    env.logger.info("Привет")
    # Note: we don't use env.load_json as we want to know the raw output
    raw = env.buffer.getvalue()
    message_value = raw.split('"message": "', 1)[1].split('"', 1)[0]
    assert message_value == "Привет"
    return
nhairs-python-json-logger-9e6cb67/tests/test_missing.py 0000664 0000000 0000000 00000003660 14762515446 0023437 0 ustar 00root root 0000000 0000000 ### IMPORTS
### ============================================================================
## Future
from __future__ import annotations
## Standard Library
## Installed
import pytest
## Application
import pythonjsonlogger
from pythonjsonlogger.utils import package_is_available
from pythonjsonlogger.exception import MissingPackageError
### CONSTANTS
### ============================================================================
# Names guaranteed not to match any installed package or declared extra, so
# availability checks against them must always fail.
MISSING_PACKAGE_NAME = "package_name_is_definintely_not_available"
MISSING_PACKAGE_EXTRA = "package_extra_that_is_unique"
### TESTS
### ============================================================================
def test_package_is_available():
    """A standard-library module is always reported as available."""
    json_available = package_is_available("json")
    assert json_available
    return
def test_package_not_available():
    """A nonsense package name is reported as unavailable."""
    available = package_is_available(MISSING_PACKAGE_NAME)
    assert not available
    return
def test_package_not_available_throw():
    """``throw_error=True`` raises MissingPackageError naming only the package."""
    with pytest.raises(MissingPackageError) as caught:
        package_is_available(MISSING_PACKAGE_NAME, throw_error=True)
    message = caught.value.msg
    assert MISSING_PACKAGE_NAME in message
    assert MISSING_PACKAGE_EXTRA not in message
    return
def test_package_not_available_throw_extras():
    """When ``extras_name`` is given the error message mentions both names."""
    with pytest.raises(MissingPackageError) as caught:
        package_is_available(
            MISSING_PACKAGE_NAME, throw_error=True, extras_name=MISSING_PACKAGE_EXTRA
        )
    message = caught.value.msg
    assert MISSING_PACKAGE_NAME in message
    assert MISSING_PACKAGE_EXTRA in message
    return
## Python JSON Logger Specific
## -----------------------------------------------------------------------------
# Only register this test when orjson is absent: importing the orjson
# formatter module should then fail with a MissingPackageError naming orjson.
if not pythonjsonlogger.ORJSON_AVAILABLE:

    def test_orjson_import_error():
        # The import itself (not later use) must trigger the error.
        with pytest.raises(MissingPackageError, match="orjson"):
            import pythonjsonlogger.orjson
        return
# Only register this test when msgspec is absent: importing the msgspec
# formatter module should then fail with a MissingPackageError naming msgspec.
if not pythonjsonlogger.MSGSPEC_AVAILABLE:

    def test_msgspec_import_error():
        # The import itself (not later use) must trigger the error.
        with pytest.raises(MissingPackageError, match="msgspec"):
            import pythonjsonlogger.msgspec
        return
nhairs-python-json-logger-9e6cb67/tox.ini 0000664 0000000 0000000 00000000655 14762515446 0020527 0 ustar 00root root 0000000 0000000 [tox]
requires = tox>=3,tox-uv
envlist = py{38,39,310,311,312,313}, pypy{38,39,310}
[testenv]
description = run unit tests
extras = dev
commands =
pytest tests
[testenv:format]
description = run formatters
extras = dev
commands =
black src tests
[testenv:lint]
description = run linters
extras = dev
commands =
validate-pyproject pyproject.toml
black --check --diff src tests
pylint src
mypy src tests