pax_global_header 0000666 0000000 0000000 00000000064 15071204466 0014517 g ustar 00root root 0000000 0000000 52 comment=0f5c59bec3f6d19720a3322efb1df9e328ff4733
python-tiered-debug-1.4.0/ 0000775 0000000 0000000 00000000000 15071204466 0015400 5 ustar 00root root 0000000 0000000 python-tiered-debug-1.4.0/.gitignore 0000664 0000000 0000000 00000006707 15071204466 0017402 0 ustar 00root root 0000000 0000000 # Google Drive
.tmp.driveupload/
.tmp.drivedownload/
# macOS
.DS_Store
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
cov_html/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# UV
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
#uv.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
# Ruff stuff:
.ruff_cache/
# PyPI configuration file
.pypirc
python-tiered-debug-1.4.0/.readthedocs.yaml 0000664 0000000 0000000 00000000273 15071204466 0020631 0 ustar 00root root 0000000 0000000 ---
version: 2
build:
os: "ubuntu-22.04"
tools:
python: "3.12"
python:
install:
- path: .
- requirements: docs/requirements.txt
sphinx:
configuration: docs/conf.py
python-tiered-debug-1.4.0/.vscode/ 0000775 0000000 0000000 00000000000 15071204466 0016741 5 ustar 00root root 0000000 0000000 python-tiered-debug-1.4.0/.vscode/settings.json 0000664 0000000 0000000 00000000324 15071204466 0021473 0 ustar 00root root 0000000 0000000 {
"mypy-type-checker.importStrategy": "useBundled",
"black-formatter.importStrategy": "useBundled",
"pylint.importStrategy": "useBundled",
"pylint.args": [
"--disable=E1205,W1203"
]
}
python-tiered-debug-1.4.0/LICENSE 0000664 0000000 0000000 00000026123 15071204466 0016411 0 ustar 00root root 0000000 0000000 Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2025 Aaron Mildenstein
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
python-tiered-debug-1.4.0/README.md 0000664 0000000 0000000 00000002031 15071204466 0016653 0 ustar 00root root 0000000 0000000 # tiered-debug
[](https://pypi.org/project/tiered-debug)
[](https://pypi.org/project/tiered-debug)
[](https://tiered-debug.readthedocs.io/en/latest/?badge=latest)
-----
## Table of Contents
- [tiered-debug](#tiered-debug)
- [Table of Contents](#table-of-contents)
- [Documentation](#documentation)
- [License](#license)
## Documentation
Visit the [tiered-debug documentation](https://tiered-debug.readthedocs.io/en/latest/) for:
- [Installation Guide](https://tiered-debug.readthedocs.io/en/latest/installation.html)
- [Usage Examples](https://tiered-debug.readthedocs.io/en/latest/usage.html)
- [API Reference](https://tiered-debug.readthedocs.io/en/latest/api.html)
## License
`tiered-debug` is distributed under the terms of the [Apache](LICENSE) license.
© Copyright 2025 Aaron Mildenstein
python-tiered-debug-1.4.0/docs/ 0000775 0000000 0000000 00000000000 15071204466 0016330 5 ustar 00root root 0000000 0000000 python-tiered-debug-1.4.0/docs/CHANGELOG.rst 0000664 0000000 0000000 00000020411 15071204466 0020347 0 ustar 00root root 0000000 0000000 Changelog
=========
All notable changes to ``tiered-debug`` will be documented in this file.
The format is based on `Keep a Changelog <https://keepachangelog.com/en/1.1.0/>`_,
and this project adheres to `Semantic Versioning <https://semver.org/spec/v2.0.0.html>`_.
[1.4.0] - 2025-10-07
--------------------
Changes
~~~~~~~
A bit of a reversion from 1.3.1 with regards to default logger kwargs, but with a better approach.
- Updated version to 1.4.0 for release.
- Configure ``log``, ``lv1``, ``lv2``, ``lv3``, ``lv4``, and ``lv5`` methods in ``_base.py`` to use default ``None`` value for ``exc_info``, ``stack_info``, ``stacklevel``, and ``extra``. If ``None`` is provided, the logging module will apply its own defaults (``exc_info=False``, ``stack_info=False``, ``extra={}``, and then set ``stacklevel`` to the `effective` stack level).
- Pruned unused import in ``docs/conf.py``.
- Updated ``debug.py`` to use ``Literal[1, 2, 3, 4, 5]`` for ``begin`` and ``end`` parameters in ``begin_end`` decorator in order to match the typing in ``_base.py``. This cleans up MyPy and other linter warnings.
- Updated ``test_base.py`` tests ``test_log_with_default_stacklevel`` and ``test_log_levels`` to collect the logger name from the fixture rather than hardcoding an expected value.
All tests passing. Tested pre-release in a sample project and everything looks good.
[1.3.1] - 2025-10-03
--------------------
Changes
~~~~~~~
- Updated version to 1.3.1 for patch release.
- Updated ``log``, ``lv1``, ``lv2``, ``lv3``, ``lv4``, and ``lv5`` methods in ``_base.py`` to use proper default values for ``exc_info`` (``False``), ``stack_info`` (``False``), and ``stacklevel`` (``1``) instead of ``None``.
- Corrected tests ``test_log_with_default_stacklevel`` and ``test_log_levels`` to verify that the record logger name matches the logger name assigned in the fixture, rather than a hardcoded expected value.
- Corrected ``test_add_handler`` and ``test_log_with_default_stacklevel`` in ``test_base.py`` to add ``caplog.set_level(logging.DEBUG)`` for proper DEBUG log capture. Kudos to @schoekek for identifying the issue and supplying the fix in #6. The nice thing is that it's not a code change, but an update to how testing is done.
- Discovered that ReadTheDocs builds were failing due to conditional resulting in ``html_theme`` being set to ``None`` when building on ReadTheDocs. Updated ``docs/conf.py`` to always set ``html_theme`` to ``sphinx_rtd_theme``, at least for now.
All tests passing.
[1.3.0] - 2025-04-21
--------------------
Added
~~~~~
- Added ``exc_info``, ``stack_info``, and ``extra`` keyword arguments to ``log``, ``lv1``, ``lv2``, ``lv3``, ``lv4``, and ``lv5`` methods in ``TieredDebug``, following Python ``logging`` module specifications.
- ``log`` method now ensures ``extra`` is an empty dictionary if ``None`` is provided.
- Standardized all docstrings in ``_base.py`` to Google-style format with line length limits (code and docstrings: soft 80, hard 88; Args/Returns/Raises definitions: soft 72, hard 80; Examples: soft 68, hard 76).
- Added doctests to ``_base.py`` for key methods to demonstrate usage and validate behavior.
- Standardized all docstrings in ``debug.py`` to Google-style format with line length limits (code and docstrings: soft 80, hard 88; Args/Returns/Raises definitions: soft 72, hard 80; Examples: soft 68, hard 76).
- Added doctests to ``debug.py`` with line length limits (soft 68, hard 76) for decorator and global instance.
- Standardized module docstring in ``__init__.py`` to Google-style format with doctests and line length limits (code and docstrings: soft 80, hard 88; Args/Returns/Raises definitions: soft 72, hard 80; Examples: soft 68, hard 76).
- Updated ``docs/conf.py`` for tiered-debug with Google-style docstring, doctests, direct metadata imports enabled by module installation, and line length limits (code and docstrings: soft 80, hard 88; Args/Returns/Raises definitions: soft 72, hard 80; Examples: soft 68, hard 76).
- Updated ``.readthedocs.yaml`` to configure ReadTheDocs build with module installation, Sphinx configuration, and dependency installation via ``docs/requirements.txt``.
- Added ``docs/requirements.txt`` with Sphinx dependencies for ReadTheDocs documentation builds.
- Enhanced tests in ``test_base.py`` to cover ``exc_info``, ``stack_info``, and ``extra`` parameters in ``TieredDebug`` logging methods, including edge cases and performance.
- Added ``__version__``, ``__author__``, and ``__copyright__`` to ``__all__`` in ``__init__.py`` to export metadata.
- Added ``W0622`` to pylint disable in ``docs/conf.py`` to suppress redefined built-in warnings for ``copyright``.
- Fixed ``test_log_with_extra`` and ``test_log_all_parameters_combined`` in ``test_base.py`` to check log record attributes for ``extra`` metadata due to ``pytest.ini`` log format.
- Fixed ``test_log_with_stack_info`` and ``test_log_all_parameters_combined`` in ``test_base.py`` to check for correct stack trace prefix across Python 3.8-3.13.
- Updated ``test_log_with_invalid_extra_type`` in ``test_base.py`` to expect TypeError for invalid ``extra`` types, aligning with ``_base.py`` validation.
- Renamed ``stklvl`` to ``stacklevel`` and reordered keyword arguments (``exc_info``, ``stack_info``, ``stacklevel``, ``extra``) in ``_base.py`` methods to match ``logging.Logger.debug``.
- Updated ``debug.py`` to use ``stacklevel`` and enhanced ``begin_end`` decorator to accept ``stacklevel`` and ``extra``, defaulting to updating only ``stacklevel``.
- Updated ``test_base.py`` to use ``stacklevel``, reordered keyword arguments, and added tests for ``*args`` message formatting support in ``_base.py`` methods.
- Fixed ``test_select_frame_getter_non_cpython`` in ``test_base.py`` to correctly call ``inspect.currentframe()`` without arguments.
- Updated ``debug.py`` to restore ``begin`` and ``end`` arguments for ``begin_end`` decorator, retaining ``stacklevel`` and ``extra``.
- Updated ``test_debug.py`` to test ``begin``, ``end``, ``stacklevel``, and ``extra`` in ``begin_end`` decorator, restoring original test structure.
- Corrected ``test_debug.py`` to ensure all tests pass, as updated by user.
- Updated ``index.rst`` to highlight ``stacklevel`` and ``extra`` and clarify ``debug.py``’s role.
- Updated ``usage.rst`` to include ``stacklevel``, ``extra``, ``*args``, correct ``set_level``, align log output with ``pytest.ini``, and enhance Elasticsearch handler example.
- Updated ``usage.rst`` formatters to include ``extra`` fields (``%(context)s``, ``%(module)s``) in log output for ``TieredDebug``, ``debug.py``, Elasticsearch, and pytest examples.
- Re-rendered ``usage.rst`` Python code blocks to fit within a 90-character hard limit to avoid side-scrolling.
- Corrected spacing in ``usage.rst`` bash code block to improve visibility in rendered documentation, as updated by user.
[1.2.1] - 2025-04-17
--------------------
Added
~~~~~
- Unit tests for ``debug.py`` in ``test_debug.py``.
Changed
~~~~~~~
- Fixed unit tests in ``test_base.py`` where setting the log level for caplog was required.
[1.2.0] - 2025-04-17
--------------------
Added
~~~~~
- Instance-level logger (``self._logger``) in ``TieredDebug`` for flexible configuration.
- ``add_handler`` method to attach handlers at ``logging.DEBUG`` level, with info message for duplicates.
- Cached ``_select_frame_getter`` to use ``sys._getframe`` in CPython and ``inspect.currentframe`` elsewhere.
- Sphinx autodoc docstrings for all classes and methods.
- Support for custom logger names via ``logger_name`` parameter in ``TieredDebug.__init__``.
Changed
~~~~~~~
- Removed environment variable support (``TIERED_DEBUG_LEVEL``, ``TIERED_STACK_LEVEL``).
- Updated ``check_val`` to handle ``TypeError`` and ``ValueError`` with specific error logging.
- Improved error handling and validation throughout ``TieredDebug``.
[1.1.0] - 2025-04-15
--------------------
Added
~~~~~
- Initial ``TieredDebug`` class with tiered logging levels (1-5).
- ``begin_end`` decorator in ``debug.py`` for logging function call boundaries.
- Environment variable support for setting debug and stack levels.
- Basic unit tests in ``test_base.py``.
[1.0.0] - 2025-03-31
--------------------
Added
~~~~~
- Initial release of ``tiered_debug`` module.
- ``TieredDebug`` class with module-level logger.
- Support for debug levels 1-5 and stack levels 1-9.
- ``debug.py`` sample module with global ``debug`` instance.
python-tiered-debug-1.4.0/docs/Makefile 0000664 0000000 0000000 00000001141 15071204466 0017765 0 ustar 00root root 0000000 0000000 # Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
SPHINXPROJ = tiered-debug
SOURCEDIR = .
BUILDDIR = _build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) python-tiered-debug-1.4.0/docs/api.rst 0000664 0000000 0000000 00000000573 15071204466 0017640 0 ustar 00root root 0000000 0000000 .. _api:
API Reference
=============
This section provides detailed documentation for the ``tiered-debug`` module's
classes, functions, and types, generated from their docstrings.
Modules
-------
.. automodule:: tiered_debug._base
:members:
:undoc-members:
:show-inheritance:
.. automodule:: tiered_debug.debug
:members:
:undoc-members:
:show-inheritance:
python-tiered-debug-1.4.0/docs/conf.py 0000664 0000000 0000000 00000005150 15071204466 0017630 0 ustar 00root root 0000000 0000000 """Sphinx configuration for tiered-debug documentation.
Configures Sphinx to generate documentation for the tiered-debug package,
using autodoc, Napoleon, doctest, viewcode, and intersphinx extensions.
Imports metadata (__version__, __author__, __copyright__) from
tiered_debug, leveraging module installation for ReadTheDocs. Sets up
GitHub integration for "Edit Source" links and supports Python 3.8-3.13.
Attributes:
    project: Project name ("tiered-debug"). (str)
    author: Author name from tiered_debug.__author__. (str)
    version: Major.minor version (e.g., "1.3"). (str)
    release: Full version (e.g., "1.3.0"). (str)
    html_theme: Theme for HTML output, defaults to "sphinx_rtd_theme". (str)
Examples:
    >>> project
    'tiered-debug'
    >>> author
    'Aaron Mildenstein'
    >>> version
    '1.4'
    >>> 'autodoc' in [ext.split('.')[-1] for ext in extensions]
    True
"""
# C0103: Sphinx config names (project, author, ...) are lowercase by
# convention; E0401: tiered_debug resolves only once the package is
# installed (as on ReadTheDocs); W0622: `copyright` intentionally shadows
# the built-in, as Sphinx requires.
# pylint: disable=C0103,E0401,W0622
# -- Imports and setup -----------------------------------------------------
# Requires the package to be installed (see .readthedocs.yaml `path: .`).
from tiered_debug import __author__, __copyright__, __version__
# -- Project information -----------------------------------------------------
project = "tiered-debug"
github_user = "untergeek"
github_repo = "tiered-debug"
github_branch = "main"
author = __author__
copyright = __copyright__
release = __version__
# Short X.Y form derived from the full release string (e.g. "1.4.0" -> "1.4").
version = ".".join(release.split(".")[:2])
# -- General configuration ---------------------------------------------------
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.napoleon",
    "sphinx.ext.viewcode",
    "sphinx.ext.doctest",
    "sphinx.ext.intersphinx",
]
# The project uses Google-style docstrings exclusively (see CHANGELOG 1.3.0).
napoleon_google_docstring = True
napoleon_numpy_docstring = False
templates_path = ["_templates"]
exclude_patterns = ["_build"]
source_suffix = ".rst"
master_doc = "index"
# -- Options for HTML output -------------------------------------------------
pygments_style = "sphinx"
# Always use sphinx_rtd_theme: a conditional here previously left html_theme
# unset on ReadTheDocs and broke the build (see CHANGELOG 1.3.1).
html_theme = "sphinx_rtd_theme"
# html_theme = (
#     "sphinx_rtd_theme" if environ.get("READTHEDOCS") != "True" else "sphinx_rtd_theme"
# )
# Add "Edit Source" links into the template
html_context = {
    "display_github": True,
    "github_user": github_user,
    "github_repo": github_repo,
    "github_version": github_branch,
    "conf_py_path": "/docs/",
}
# -- Autodoc configuration ---------------------------------------------------
# "both": concatenate class docstring and __init__ docstring in class docs.
autoclass_content = "both"
autodoc_member_order = "bysource"
autodoc_default_options = {
    "members": True,
    "undoc-members": True,
    "show-inheritance": True,
}
# -- Intersphinx configuration -----------------------------------------------
# Enables cross-references into the CPython 3.12 standard-library docs.
intersphinx_mapping = {
    "python": ("https://docs.python.org/3.12", None),
}
python-tiered-debug-1.4.0/docs/contributing.rst 0000664 0000000 0000000 00000003466 15071204466 0021602 0 ustar 00root root 0000000 0000000 .. _contributing:
Contributing
============
Contributions to ``tiered_debug`` are welcome! This guide outlines how to contribute to the project.
Getting Started
---------------
1. **Fork the repository** (if hosted on a platform like GitHub).
2. **Clone your fork**:
.. code-block:: bash
git clone https://github.com/your-username/tiered_debug.git
cd tiered_debug
3. **Install dependencies**:
.. code-block:: bash
pip install pytest
4. **Run tests** to ensure the project is working:
.. code-block:: bash
pytest test_base.py -v
Submitting Changes
------------------
1. **Create a branch** for your changes:
.. code-block:: bash
git checkout -b my-feature
2. **Make changes** and commit with clear messages:
.. code-block:: bash
git commit -m "Add feature X to TieredDebug"
3. **Update tests** in ``test_base.py`` to cover your changes.
4. **Run tests** to verify:
.. code-block:: bash
pytest
5. **Push to your fork** and create a pull request:
.. code-block:: bash
git push origin my-feature
Code Style
----------
- Follow PEP 8 for Python code style.
- Use Sphinx autodoc docstrings (reStructuredText) for documentation.
- Ensure all public methods and classes are documented.
Documentation
-------------
Update documentation in the ``docs`` folder when adding features:
- Edit RST files (``usage.rst``, ``api.rst``, etc.).
- Add entries to ``CHANGELOG.rst`` under the appropriate version.
Run Sphinx to build docs locally:
.. code-block:: bash
cd docs
pip install sphinx
make html
Open ``docs/_build/html/index.html`` to view the generated documentation.
Issues and Feedback
-------------------
Report bugs or suggest features by opening an issue on the project's repository
(if applicable) or contacting the maintainer directly.
python-tiered-debug-1.4.0/docs/debug.rst 0000664 0000000 0000000 00000000341 15071204466 0020146 0 ustar 00root root 0000000 0000000 .. _debug:
The sample ``debug.py`` module
==============================
This file is part of the source code and is at ``./src/tiered_debug/debug.py``.
.. literalinclude:: ../src/tiered_debug/debug.py
:language: python
python-tiered-debug-1.4.0/docs/index.rst 0000664 0000000 0000000 00000001373 15071204466 0020175 0 ustar 00root root 0000000 0000000
tiered_debug documentation
==========================
**tiered-debug** is a Python module for tiered debug logging at levels 1-5, with
configurable stack levels for accurate caller reporting and extra metadata for
enhanced log context. It is designed for projects requiring detailed debugging,
such as Elasticsearch workflows.
The module provides a ``TieredDebug`` class for logging, and the ``debug.py``
module offers a global ``TieredDebug`` instance and a ``begin_end`` decorator to
log function entry and exit at customizable debug levels.
.. toctree::
:maxdepth: 2
:caption: Contents:
installation
usage
debug
api
contributing
CHANGELOG
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
python-tiered-debug-1.4.0/docs/installation.rst 0000664 0000000 0000000 00000002636 15071204466 0021572 0 ustar 00root root 0000000 0000000 .. _installation:
Installation
============
Requirements
------------
- Python 3.8 or higher
Install tiered-debug
--------------------
``tiered-debug`` is typically installed as a dependency.
1. **pyproject.toml**:
Add ``tiered-debug`` to your ``pyproject.toml``:
.. code-block:: toml
dependencies = [
'tiered-debug==1.2.0'
]
2. **setup.py**:
If you are using ``setup.py``, add ``tiered-debug`` to your ``install_requires``:
.. code-block:: python
from setuptools import setup, find_packages
setup(
name='your_project',
version='0.1.0',
packages=find_packages(),
install_requires=[
'tiered-debug==1.4.0'
],
)
3. **requirements.txt**:
This is no longer common, but you can add ``tiered-debug`` to your ``requirements.txt``:
4. **Verify installation**:
Test the module by running a simple script:
.. code-block:: python
from tiered_debug import TieredDebug
debug = TieredDebug(level=2)
debug.lv1("Test message")
Configuration
-------------
You can optionally configure the logger by adding a handler:
.. code-block:: python
import logging
debug = TieredDebug()
debug.add_handler(logging.StreamHandler(), formatter=logging.Formatter(
"%(asctime)s %(funcName)s:%(lineno)d %(message)s"))
For Elasticsearch logging, add a custom handler (see :ref:`usage`).
python-tiered-debug-1.4.0/docs/requirements.txt 0000664 0000000 0000000 00000000042 15071204466 0021610 0 ustar 00root root 0000000 0000000 sphinx>=4.0
sphinx-rtd-theme>=1.0
python-tiered-debug-1.4.0/docs/usage.rst 0000664 0000000 0000000 00000011135 15071204466 0020167 0 ustar 00root root 0000000 0000000 .. _usage:
Usage
=====
The ``tiered-debug`` module provides flexible debug logging for Python projects.
This guide shows how to use the ``TieredDebug`` class directly or via the
``debug.py`` module for project-wide debugging with customizable log levels and
metadata.
Using TieredDebug Directly
--------------------------
Create a ``TieredDebug`` instance and add handlers as needed:
.. code-block:: python
from tiered_debug import TieredDebug
import logging
debug = TieredDebug(level=3)
debug.add_handler(
logging.StreamHandler(),
formatter=logging.Formatter(
"%(asctime)s %(funcName)s:%(lineno)d %(context)s %(message)s"
)
)
debug.lv1("Level 1: %s", "always logs") # Always logs
debug.lv3("Level 3: %s", "logs", extra={"context": "test"}) # Logs (level <= 3)
debug.lv5("Level 5 message") # Ignored (level > 3)
Use the ``change_level`` context manager for temporary level changes:
.. code-block:: python
with debug.change_level(5):
debug.lv5("Temporary high-level log") # Logs
debug.lv5("Ignored again") # Ignored
Using ``debug.py`` for Project-Wide Debugging
---------------------------------------------
Copy the contents of :doc:`debug.py <debug>` into your project to use a global
``TieredDebug`` instance:
1. **Configure the logger** in ``debug.py``:
.. code-block:: python
# debug.py
from tiered_debug.debug import debug
import logging
debug.add_handler(
logging.StreamHandler(),
formatter=logging.Formatter(
"%(asctime)s %(levelname)-9s %(name)22s "
"%(funcName)22s:%(lineno)-4d %(module)s %(message)s"
)
)
2. **Use in other modules**:
.. code-block:: python
# my_module.py
from .debug import debug, begin_end
@begin_end(begin=2, end=3, stacklevel=2, extra={"module": "my_module"})
def process_task(task_id: str):
debug.lv1("Task %s started", task_id)
process_task("123") # Logs BEGIN at 2, message at 1, END at 3
Regarding the ``begin_end`` decorator
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The decorator wraps functions to log entry and exit at specified debug levels:
.. code-block:: python
from .debug import debug, begin_end
debug.level = 3
@begin_end(begin=2, end=3, extra={"module": "my_module"})
def my_function():
debug.lv1("My function executed")
def run():
my_function()
This produces log output like:
.. code-block:: bash
2025-05-20 10:00:00,000 DEBUG my_module run:12 my_module DEBUG2 BEGIN CALL: my_function()
2025-05-20 10:00:00,001 DEBUG my_module my_function:8 my_module DEBUG1 My function executed
2025-05-20 10:00:00,002 DEBUG my_module run:12 my_module DEBUG3 END CALL: my_function()
The ``BEGIN`` and ``END`` messages appear at the call site (``run:12``) due to
the decorator’s wrapping logic, which logs at the point of function invocation.
Elasticsearch Logging
---------------------
Add an Elasticsearch handler to log to an index (requires ``elasticsearch``):
.. code-block:: python
from logging.handlers import BufferingHandler
from elasticsearch import Elasticsearch
class ESHandler(BufferingHandler):
def __init__(self, es_host: str, index: str):
super().__init__(capacity=1000)
self.es = Elasticsearch([es_host])
self.index = index
def flush(self):
for record in self.buffer:
body = {
"message": record.getMessage(),
"level": record.levelname,
"context": getattr(record, "context", None)
}
self.es.index(index=self.index, body=body)
self.buffer.clear()
debug = TieredDebug()
debug.add_handler(
ESHandler("localhost:9200", "debug-logs"),
formatter=logging.Formatter("%(context)s %(message)s")
)
debug.lv1("Logged to ES", extra={"context": "test"})
Testing with pytest
-------------------
Use pytest’s ``caplog`` fixture to test logging:
.. code-block:: python
from tiered_debug import TieredDebug
import logging
def test_logging(caplog):
debug = TieredDebug(level=2)
debug.add_handler(
logging.StreamHandler(),
formatter=logging.Formatter(
"%(funcName)s:%(lineno)d %(context)s %(message)s"
)
)
with caplog.at_level(logging.DEBUG, logger=debug.logger.name):
debug.lv2("Test: %s", "value", extra={"context": "test"})
assert "DEBUG2 Test: value" in caplog.text
assert caplog.records[0].context == "test"
python-tiered-debug-1.4.0/mypy.ini 0000664 0000000 0000000 00000000140 15071204466 0017072 0 ustar 00root root 0000000 0000000 [mypy]
plugins = returns.contrib.mypy.returns_plugin
cache_dir = /tmp/mypy_cache/tiered-debug
python-tiered-debug-1.4.0/pylintrc.toml 0000664 0000000 0000000 00000051336 15071204466 0020151 0 ustar 00root root 0000000 0000000 [tool.pylint.main]
# Analyse import fallback blocks. This can be used to support both Python 2 and 3
# compatible code, which means that the block might have code that exists only in
# one or another interpreter, leading to false positives when analysed.
# analyse-fallback-blocks =
# Clear in-memory caches upon conclusion of linting. Useful if running pylint in
# a server-like mode.
# clear-cache-post-run =
# Always return a 0 (non-error) status code, even if lint errors are found. This
# is primarily useful in continuous integration scripts.
# exit-zero =
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code.
# extension-pkg-allow-list =
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
# for backward compatibility.)
# extension-pkg-whitelist =
# Return non-zero exit code if any of these messages/categories are detected,
# even if score is above --fail-under value. Syntax same as enable. Messages
# specified are enabled, while categories only check already-enabled messages.
# fail-on =
# Specify a score threshold under which the program will exit with error.
fail-under = 10.0
# Interpret the stdin as a python script, whose filename needs to be passed as
# the module_or_package argument.
# from-stdin =
# Files or directories to be skipped. They should be base names, not paths.
ignore = ["CVS"]
# Add files or directories matching the regular expressions patterns to the
# ignore-list. The regex matches against paths and can be in Posix or Windows
# format. Because '\\' represents the directory delimiter on Windows systems, it
# can't be used as an escape character.
# ignore-paths =
# Files or directories matching the regular expression patterns are skipped. The
# regex matches against base names, not paths. The default value ignores Emacs
# file locks
ignore-patterns = ["^\\.#"]
# List of module names for which member attributes should not be checked (useful
# for modules/projects where namespaces are manipulated during runtime and thus
# existing member attributes cannot be deduced by static analysis). It supports
# qualified module names, as well as Unix pattern matching.
# ignored-modules =
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
# init-hook =
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
# number of processors available to use, and will cap the count on Windows to
# avoid hangs.
jobs = 1
# Control the amount of potential inferred values when inferring a single object.
# This can help the performance when dealing with large functions or complex,
# nested conditions.
limit-inference-results = 100
# List of plugins (as comma separated values of python module names) to load,
# usually to register additional checkers.
# load-plugins =
# Pickle collected data for later comparisons.
persistent = true
# Minimum Python version to use for version dependent checks. Will default to the
# version used to run pylint.
py-version = "3.12"
# Discover python modules and packages in the file system subtree.
# recursive =
# Add paths to the list of the source roots. Supports globbing patterns. The
# source root is an absolute path or a path relative to the current working
# directory used to determine a package namespace for modules located under the
# source root.
# source-roots =
# When enabled, pylint would attempt to guess common misconfiguration and emit
# user-friendly hints instead of false-positive error messages.
suggestion-mode = true
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
# unsafe-load-any-extension =
[tool.pylint.basic]
# Naming style matching correct argument names.
argument-naming-style = "snake_case"
# Regular expression matching correct argument names. Overrides argument-naming-
# style. If left empty, argument names will be checked with the set naming style.
# argument-rgx =
# Naming style matching correct attribute names.
attr-naming-style = "snake_case"
# Regular expression matching correct attribute names. Overrides attr-naming-
# style. If left empty, attribute names will be checked with the set naming
# style.
# attr-rgx =
# Bad variable names which should always be refused, separated by a comma.
bad-names = ["foo", "bar", "baz", "toto", "tutu", "tata"]
# Bad variable names regexes, separated by a comma. If names match any regex,
# they will always be refused
# bad-names-rgxs =
# Naming style matching correct class attribute names.
class-attribute-naming-style = "any"
# Regular expression matching correct class attribute names. Overrides class-
# attribute-naming-style. If left empty, class attribute names will be checked
# with the set naming style.
# class-attribute-rgx =
# Naming style matching correct class constant names.
class-const-naming-style = "UPPER_CASE"
# Regular expression matching correct class constant names. Overrides class-
# const-naming-style. If left empty, class constant names will be checked with
# the set naming style.
# class-const-rgx =
# Naming style matching correct class names.
class-naming-style = "PascalCase"
# Regular expression matching correct class names. Overrides class-naming-style.
# If left empty, class names will be checked with the set naming style.
# class-rgx =
# Naming style matching correct constant names.
const-naming-style = "UPPER_CASE"
# Regular expression matching correct constant names. Overrides const-naming-
# style. If left empty, constant names will be checked with the set naming style.
# const-rgx =
# Minimum line length for functions/classes that require docstrings, shorter ones
# are exempt.
docstring-min-length = -1
# Naming style matching correct function names.
function-naming-style = "snake_case"
# Regular expression matching correct function names. Overrides function-naming-
# style. If left empty, function names will be checked with the set naming style.
# function-rgx =
# Good variable names which should always be accepted, separated by a comma.
good-names = ["i", "j", "k", "ex", "Run", "_"]
# Good variable names regexes, separated by a comma. If names match any regex,
# they will always be accepted
# good-names-rgxs =
# Include a hint for the correct naming format with invalid-name.
# include-naming-hint =
# Naming style matching correct inline iteration names.
inlinevar-naming-style = "any"
# Regular expression matching correct inline iteration names. Overrides
# inlinevar-naming-style. If left empty, inline iteration names will be checked
# with the set naming style.
# inlinevar-rgx =
# Naming style matching correct method names.
method-naming-style = "snake_case"
# Regular expression matching correct method names. Overrides method-naming-
# style. If left empty, method names will be checked with the set naming style.
# method-rgx =
# Naming style matching correct module names.
module-naming-style = "snake_case"
# Regular expression matching correct module names. Overrides module-naming-
# style. If left empty, module names will be checked with the set naming style.
# module-rgx =
# Colon-delimited sets of names that determine each other's naming style when the
# name regexes allow several styles.
# name-group =
# Regular expression which should only match function or class names that do not
# require a docstring.
no-docstring-rgx = "^_"
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties. These
# decorators are taken in consideration only for invalid-name.
property-classes = ["abc.abstractproperty"]
# Regular expression matching correct type alias names. If left empty, type alias
# names will be checked with the set naming style.
# typealias-rgx =
# Regular expression matching correct type variable names. If left empty, type
# variable names will be checked with the set naming style.
# typevar-rgx =
# Naming style matching correct variable names.
variable-naming-style = "snake_case"
# Regular expression matching correct variable names. Overrides variable-naming-
# style. If left empty, variable names will be checked with the set naming style.
# variable-rgx =
[tool.pylint.classes]
# Warn about protected attribute access inside special methods
# check-protected-access-in-special-methods =
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods = ["__init__", "__new__", "setUp", "asyncSetUp", "__post_init__"]
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected = ["_asdict", "_fields", "_replace", "_source", "_make", "os._exit"]
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg = ["cls"]
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg = ["mcs"]
[tool.pylint.design]
# List of regular expressions of class ancestor names to ignore when counting
# public methods (see R0903)
# exclude-too-few-public-methods =
# List of qualified class names to ignore when counting class parents (see R0901)
# ignored-parents =
# Maximum number of arguments for function / method.
max-args = 5
# Maximum number of attributes for a class (see R0902).
max-attributes = 7
# Maximum number of boolean expressions in an if statement (see R0916).
max-bool-expr = 5
# Maximum number of branch for function / method body.
max-branches = 12
# Maximum number of locals for function / method body.
max-locals = 15
# Maximum number of parents for a class (see R0901).
max-parents = 7
# Maximum number of public methods for a class (see R0904).
max-public-methods = 20
# Maximum number of return / yield for function / method body.
max-returns = 6
# Maximum number of statements in function / method body.
max-statements = 50
# Minimum number of public methods for a class (see R0903).
min-public-methods = 2
[tool.pylint.exceptions]
# Exceptions that will emit a warning when caught.
overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"]
[tool.pylint.format]
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
# expected-line-ending-format =
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines = "^\\s*(# )?<?https?://\\S+>?$"
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren = 4
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string = " "
# Maximum number of characters on a single line.
max-line-length = 88
# Maximum number of lines in a module.
max-module-lines = 1000
# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
# single-line-class-stmt =
# Allow the body of an if to be on the same line as the test if there is no else.
# single-line-if-stmt =
[tool.pylint.imports]
# List of modules that can be imported at any level, not just the top level one.
# allow-any-import-level =
# Allow explicit reexports by alias from a package __init__.
# allow-reexport-from-package =
# Allow wildcard imports from modules that define __all__.
# allow-wildcard-with-all =
# Deprecated modules which should not be used, separated by a comma.
# deprecated-modules =
# Output a graph (.gv or any supported image format) of external dependencies to
# the given file (report RP0402 must not be disabled).
# ext-import-graph =
# Output a graph (.gv or any supported image format) of all (i.e. internal and
# external) dependencies to the given file (report RP0402 must not be disabled).
# import-graph =
# Output a graph (.gv or any supported image format) of internal dependencies to
# the given file (report RP0402 must not be disabled).
# int-import-graph =
# Force import order to recognize a module as part of the standard compatibility
# libraries.
# known-standard-library =
# Force import order to recognize a module as part of a third party library.
known-third-party = ["enchant"]
# Couples of modules and preferred modules, separated by a comma.
# preferred-modules =
[tool.pylint.logging]
# The type of string formatting that logging methods do. `old` means using %
# formatting, `new` is for `{}` formatting.
logging-format-style = "old"
# Logging modules to check that the string format arguments are in logging
# function parameter format.
logging-modules = ["logging"]
[tool.pylint."messages control"]
# Only show warnings with the listed confidence levels. Leave empty to show all.
# Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
confidence = ["HIGH", "CONTROL_FLOW", "INFERENCE", "INFERENCE_FAILURE", "UNDEFINED"]
# Disable the message, report, category or checker with the given id(s). You can
# either give multiple identifiers separated by comma (,) or put this option
# multiple times (only on the command line, not in the configuration file where
# it should appear only once). You can also use "--disable=all" to disable
# everything first and then re-enable specific checks. For example, if you want
# to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W".
disable = ["raw-checker-failed", "bad-inline-option", "locally-disabled", "file-ignored", "suppressed-message", "useless-suppression", "deprecated-pragma", "use-symbolic-message-instead", "use-implicit-booleaness-not-comparison-to-string", "use-implicit-booleaness-not-comparison-to-zero"]
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where it
# should appear only once). See also the "--disable" option for examples.
# enable =
[tool.pylint.method_args]
# List of qualified names (i.e., library.method) which require a timeout
# parameter e.g. 'requests.api.get,requests.api.post'
timeout-methods = ["requests.api.delete", "requests.api.get", "requests.api.head", "requests.api.options", "requests.api.patch", "requests.api.post", "requests.api.put", "requests.api.request"]
[tool.pylint.miscellaneous]
# List of note tags to take in consideration, separated by a comma.
notes = ["FIXME", "XXX", "TODO"]
# Regular expression of note tags to take in consideration.
# notes-rgx =
[tool.pylint.refactoring]
# Maximum number of nested blocks for function / method body
max-nested-blocks = 5
# Complete name of functions that never returns. When checking for inconsistent-
# return-statements if a never returning function is called then it will be
# considered as an explicit return statement and no message will be printed.
never-returning-functions = ["sys.exit", "argparse.parse_error"]
# Let 'consider-using-join' be raised when the separator to join on would be non-
# empty (resulting in expected fixes of the type: ``"- " + " - ".join(items)``)
suggest-join-with-non-empty-separator = true
[tool.pylint.reports]
# Python expression which should return a score less than or equal to 10. You
# have access to the variables 'fatal', 'error', 'warning', 'refactor',
# 'convention', and 'info' which contain the number of messages in each category,
# as well as 'statement' which is the total number of statements analyzed. This
# score is used by the global evaluation report (RP0004).
evaluation = "max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))"
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details.
# msg-template =
# Set the output format. Available formats are: text, parseable, colorized, json2
# (improved json format), json (old json format) and msvs (visual studio). You
# can also give a reporter class, e.g. mypackage.mymodule.MyReporterClass.
# output-format =
# Tells whether to display a full report or only the messages.
# reports =
# Activate the evaluation score.
score = true
[tool.pylint.similarities]
# Comments are removed from the similarity computation
ignore-comments = true
# Docstrings are removed from the similarity computation
ignore-docstrings = true
# Imports are removed from the similarity computation
ignore-imports = true
# Signatures are removed from the similarity computation
ignore-signatures = true
# Minimum lines number of a similarity.
min-similarity-lines = 4
[tool.pylint.spelling]
# Limits count of emitted suggestions for spelling mistakes.
max-spelling-suggestions = 4
# Spelling dictionary name. No available dictionaries : You need to install both
# the python package and the system dependency for enchant to work.
# spelling-dict =
# List of comma separated words that should be considered directives if they
# appear at the beginning of a comment and should not be checked.
spelling-ignore-comment-directives = "fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:"
# List of comma separated words that should not be checked.
# spelling-ignore-words =
# A path to a file that contains the private dictionary; one word per line.
# spelling-private-dict-file =
# Tells whether to store unknown words to the private dictionary (see the
# --spelling-private-dict-file option) instead of raising a message.
# spelling-store-unknown-words =
[tool.pylint.typecheck]
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators = ["contextlib.contextmanager"]
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
# generated-members =
# Tells whether missing members accessed in mixin class should be ignored. A
# class is considered mixin if its name matches the mixin-class-rgx option.
# Tells whether to warn about missing members when the owner of the attribute is
# inferred to be None.
ignore-none = true
# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference can
# return multiple potential results while evaluating a Python object, but some
# branches might not be evaluated, which results in partial inference. In that
# case, it might be useful to still emit no-member and other checks for the rest
# of the inferred objects.
ignore-on-opaque-inference = true
# List of symbolic message names to ignore for Mixin members.
ignored-checks-for-mixins = ["no-member", "not-async-context-manager", "not-context-manager", "attribute-defined-outside-init"]
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes = ["optparse.Values", "thread._local", "_thread._local", "argparse.Namespace"]
# Show a hint with possible names when a member name was not found. The aspect of
# finding the hint is based on edit distance.
missing-member-hint = true
# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance = 1
# The total number of similar names that should be taken in consideration when
# showing a hint for a missing member.
missing-member-max-choices = 1
# Regex pattern to define which classes are considered mixins.
mixin-class-rgx = ".*[Mm]ixin"
# List of decorators that change the signature of a decorated function.
# signature-mutators =
[tool.pylint.variables]
# List of additional names supposed to be defined in builtins. Remember that you
# should avoid defining new builtins when possible.
# additional-builtins =
# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables = true
# List of names allowed to shadow builtins
# allowed-redefined-builtins =
# List of strings which can identify a callback function by name. A callback name
# must start or end with one of those strings.
callbacks = ["cb_", "_cb"]
# A regular expression matching the name of dummy variables (i.e. expected to not
# be used).
dummy-variables-rgx = "_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_"
# Argument names that match this expression will be ignored.
ignored-argument-names = "_.*|^ignored_|^unused_"
# Tells whether we should check for unused import in __init__ files.
# init-import =
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules = ["six.moves", "past.builtins", "future.builtins", "builtins", "io"]
python-tiered-debug-1.4.0/pyproject.toml 0000664 0000000 0000000 00000005337 15071204466 0020324 0 ustar 00root root 0000000 0000000 [build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
[project]
name = "tiered-debug"
dynamic = ["version"]
description = 'A Python logging helper module that allows multiple levels of debug logging'
readme = "README.md"
requires-python = ">=3.8"
license = { text='Apache-2.0' }
keywords = ['debug', 'logging', 'tiered-debug']
authors = [
{ name = "Aaron Mildenstein", email = "aaron@mildensteins.com" },
]
classifiers = [
"Development Status :: 4 - Beta",
"Programming Language :: Python",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
]
dependencies = []
[project.optional-dependencies]
test = [
'pytest>=7.2.1',
'pytest-cov',
]
doc = ['sphinx', 'sphinx_rtd_theme']
[project.urls]
Homepage = "https://github.com/untergeek/tiered-debug"
"Bug Tracker" = "https://github.com/untergeek/tiered-debug/issues"
Issues = "https://github.com/untergeek/tiered-debug/issues"
Documentation = "https://tiered-debug.readthedocs.io/"
Source = "https://github.com/untergeek/tiered-debug"
"Release Notes" = "https://github.com/untergeek/tiered-debug/releases"
[tool.hatch.build.targets.sdist]
exclude = [
'dist',
'docs',
'tests',
'pytest.ini',
]
[tool.hatch.version]
path = "src/tiered_debug/__init__.py"
[tool.hatch.envs.test]
dependencies = [
'pytest >=7.2.1',
'pytest-cov',
]
# Test environment
[[tool.hatch.envs.test.matrix]]
python = ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13']
[tool.hatch.envs.test.scripts]
test = 'pytest'
test-cov = 'pytest --cov=tiered_debug'
cov-report = 'pytest --cov=tiered_debug --cov-report html:cov_html'
[tool.hatch.envs.types]
extra-dependencies = [
"mypy>=1.0.0",
]
[tool.hatch.envs.types.scripts]
check = "mypy --install-types --non-interactive {args:src/tiered_debug tests}"
[tool.coverage.run]
source_pkgs = ["tiered_debug"]
branch = true
parallel = true
omit = [
"src/tiered_debug/__init__.py",
]
[tool.coverage.paths]
tiered_debug = ["src/tiered_debug", "*/tiered-debug/src/tiered_debug"]
tests = ["tests", "*/tiered-debug/tests"]
[tool.coverage.report]
exclude_lines = [
"no cov",
"if __name__ == .__main__.:",
"if TYPE_CHECKING:",
]
[tool.black]
target-version = ['py38']
line-length = 88
skip-string-normalization = false
include = '\.pyi?$'
[tool.pylint.format]
max-line-length = "88"
[tool.pytest.ini_options]
pythonpath = ['.', 'src/tiered_debug']
minversion = '7.2'
addopts = '-ra -q'
testpaths = ['tests']
# Lint environment
[tool.hatch.envs.lint.scripts]
run-black = 'black --quiet --check --diff {args:.}'
python = ['run-black']
all = ['python'] python-tiered-debug-1.4.0/pytest.ini 0000664 0000000 0000000 00000000261 15071204466 0017430 0 ustar 00root root 0000000 0000000 [pytest]
log_cli=false
log_format = %(asctime)s %(levelname)-9s %(name)22s %(funcName)22s:%(lineno)-4d %(message)s
filterwarnings =
error
ignore:Unknown\ config\ option
python-tiered-debug-1.4.0/src/ 0000775 0000000 0000000 00000000000 15071204466 0016167 5 ustar 00root root 0000000 0000000 python-tiered-debug-1.4.0/src/tiered_debug/ 0000775 0000000 0000000 00000000000 15071204466 0020611 5 ustar 00root root 0000000 0000000 python-tiered-debug-1.4.0/src/tiered_debug/__init__.py 0000664 0000000 0000000 00000004311 15071204466 0022721 0 ustar 00root root 0000000 0000000 """Tiered Debugging Module.
The `tiered_debug` package provides tools for multi-level debug logging
with configurable stack tracing. It is designed for projects requiring
detailed debugging, such as Elasticsearch workflows. The main class,
`TieredDebug`, supports logging at levels 1-5 with adjustable stack
levels for accurate caller reporting. The `DebugLevel` type hint defines
valid debug levels (1-5).
Examples:
>>> from tiered_debug import TieredDebug, DebugLevel
>>> debug = TieredDebug(level=2)
>>> debug.level
2
>>> isinstance(DebugLevel(1), int)
True
>>> __version__
'1.4.0'
>>> __author__
'Aaron Mildenstein'
Note:
Configure a logger with `TieredDebug.add_handler` to enable logging
output. See `_base.py` for implementation details and `debug.py` for
a sample usage with a global debug instance and decorator.
"""
from datetime import datetime

from ._base import TieredDebug, DebugLevel

# First year this package was published; anchors the copyright range.
FIRST_YEAR = 2025
now = datetime.now()
# Render a single year during the first year of publication and a range
# afterwards, so the notice stays current without manual edits. FIRST_YEAR
# is used in both branches so the constant remains the single source of
# truth (the original hard-coded "2025" alongside the constant).
if now.year == FIRST_YEAR:
    COPYRIGHT_YEARS = str(FIRST_YEAR)
else:
    COPYRIGHT_YEARS = f"{FIRST_YEAR}-{now.year}"

# Package metadata. __version__ is the value hatch reads for the build
# (see [tool.hatch.version] in pyproject.toml).
__version__ = "1.4.0"
__author__ = "Aaron Mildenstein"
__copyright__ = f"{COPYRIGHT_YEARS}, Aaron Mildenstein"
__license__ = "Apache 2.0"
__status__ = "Development"
__description__ = "Tiered debug logging for multiple levels with stack tracing."
__url__ = "https://github.com/untergeek/tiered-debug"
__email__ = "aaron@mildensteins.com"
__maintainer__ = "Aaron Mildenstein"
__maintainer_email__ = __email__
__keywords__ = ["debug", "logging", "tiered-debug"]
__classifiers__ = [
    "Development Status :: 4 - Beta",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: Apache Software License",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Programming Language :: Python :: 3.13",
    "Operating System :: OS Independent",
    "Programming Language :: Python :: Implementation :: CPython",
    "Programming Language :: Python :: Implementation :: PyPy",
]
__all__ = ["TieredDebug", "DebugLevel", "__author__", "__copyright__", "__version__"]
python-tiered-debug-1.4.0/src/tiered_debug/_base.py 0000664 0000000 0000000 00000037700 15071204466 0022243 0 ustar 00root root 0000000 0000000 """Base implementation for tiered debug logging.
The `TieredDebug` class provides multi-level debug logging with
configurable stack tracing for accurate caller reporting. It supports
logging at levels 1-5, with level 1 always logged and levels 2-5
conditional on the configured debug level. Designed for projects like
ElasticKeeper and ElasticCheckpoint, it allows flexible logger
configuration and stack level adjustments.
Examples:
>>> from tiered_debug._base import TieredDebug
>>> debug = TieredDebug(level=2)
>>> debug.level
2
>>> import logging
>>> handler = logging.StreamHandler()
>>> debug.add_handler(
... handler, logging.Formatter("%(message)s")
... )
>>> debug.lv1("Always logged")
>>> debug.lv3("Not logged") # Ignored (level 3 > 2)
"""
# pylint: disable=R0913,R0917,W0212
import inspect
import logging
import platform
import sys
from contextlib import contextmanager
from functools import lru_cache
from typing import Any, Dict, Iterator, Literal, Optional
DebugLevel = Literal[1, 2, 3, 4, 5]
"""Type hint for debug level (1-5)."""
DEFAULTS = {"debug": 1, "stack": 3}
"""Default values for debug level (1) and stack level (3)."""


class TieredDebug:
    """Tiered debug logging with configurable levels and stack tracing.

    Supports debug logging at levels 1-5, with level 1 always logged and
    levels 2-5 conditional on the configured debug level. Allows custom
    stack levels for accurate caller reporting and flexible logger
    configuration via handlers.

    Args:
        level: Debug level (1-5, default 1). (int)
        stacklevel: Stack level for caller reporting (1-9, default 3). (int)
        logger_name: Name for the logger (default "tiered_debug._base"). (str)

    Attributes:
        level: Current debug level (1-5). (int)
        stacklevel: Current stack level for caller reporting (1-9). (int)
        logger: Configured logger instance. (logging.Logger)

    Examples:
        >>> debug = TieredDebug(level=2)
        >>> debug.level
        2
        >>> import logging
        >>> handler = logging.StreamHandler()
        >>> debug.add_handler(
        ...     handler, logging.Formatter("%(message)s")
        ... )
        >>> debug.lv1("Level 1 message")
        >>> debug.lv3("Level 3 message")  # Not logged
    """

    def __init__(
        self,
        level: int = DEFAULTS["debug"],
        stacklevel: int = DEFAULTS["stack"],
        logger_name: str = "tiered_debug._base",
    ) -> None:
        """Initialize a TieredDebug instance with specified settings."""
        self._logger = logging.getLogger(logger_name)
        # check_val falls back to DEFAULTS (with a warning) on bad input,
        # so construction never raises for out-of-range levels.
        self._level = self.check_val(level, "debug")
        self._stacklevel = self.check_val(stacklevel, "stack")

    @property
    def level(self) -> int:
        """Get the current debug level (1-5).

        Returns:
            int: Current debug level.
        """
        return self._level

    @level.setter
    def level(self, value: int) -> None:
        """Set the debug level, validating it is between 1 and 5.

        Args:
            value: Debug level to set (1-5). (int)
        """
        self._level = self.check_val(value, "debug")

    @property
    def stacklevel(self) -> int:
        """Get the current stack level for caller reporting (1-9).

        Returns:
            int: Current stack level.
        """
        return self._stacklevel

    @stacklevel.setter
    def stacklevel(self, value: int) -> None:
        """Set the stack level, validating it is between 1 and 9.

        Args:
            value: Stack level to set (1-9). (int)
        """
        self._stacklevel = self.check_val(value, "stack")

    @property
    def logger(self) -> logging.Logger:
        """Get the configured logger instance.

        Returns:
            logging.Logger: Logger instance for this TieredDebug object.
        """
        return self._logger

    def check_val(self, val: int, kind: str) -> int:
        """Validate and return a debug or stack level, or default if invalid.

        Args:
            val: Value to validate. (int)
            kind: Type of value ("debug" or "stack"). (str)

        Returns:
            int: Validated value or default if invalid.

        Raises:
            ValueError: If kind is neither "debug" nor "stack".

        Examples:
            >>> debug = TieredDebug()
            >>> debug.check_val(3, "debug")
            3
            >>> debug.check_val(0, "debug")  # Invalid, returns default
            1
        """
        if kind == "debug":
            valid = 1 <= val <= 5
        elif kind == "stack":
            valid = 1 <= val <= 9
        else:
            raise ValueError(f"Invalid kind: {kind}. Must be 'debug' or 'stack'")
        if not valid:
            # Lazy %-style args avoid string formatting when the warning is
            # filtered out by the logging configuration.
            self.logger.warning(
                "Invalid %s level: %s. Using default: %s", kind, val, DEFAULTS[kind]
            )
            return DEFAULTS[kind]
        return val

    def add_handler(
        self,
        handler: logging.Handler,
        formatter: Optional[logging.Formatter] = None,
    ) -> None:
        """Add a handler to the logger if not already present.

        Args:
            handler: Handler to add to the logger. (logging.Handler)
            formatter: Optional formatter for the handler. (logging.Formatter)

        Examples:
            >>> debug = TieredDebug()
            >>> import logging
            >>> handler = logging.StreamHandler()
            >>> debug.add_handler(handler)
            >>> handler in debug.logger.handlers
            True
        """
        if handler not in self.logger.handlers:
            if formatter:
                handler.setFormatter(formatter)
            # Handlers are pinned to DEBUG so tiered messages always pass;
            # filtering is done by this class's level checks, not logging's.
            handler.setLevel(logging.DEBUG)
            self.logger.addHandler(handler)
        else:
            self.logger.info("Handler already attached to logger, skipping")

    @lru_cache(maxsize=1)
    def _select_frame_getter(self) -> Any:
        """Select the appropriate frame getter based on Python implementation.

        Returns:
            Callable: ``sys._getframe`` for CPython (accepts a depth
            argument), ``inspect.currentframe`` otherwise (accepts no
            arguments; callers must walk ``f_back`` themselves).
        """
        # NOTE: lru_cache on a method keys on ``self`` (ruff B019); with
        # maxsize=1 at most one instance is retained, which is acceptable.
        if platform.python_implementation() == "CPython":
            return sys._getframe
        # Fix: the previous ``sys.modules["inspect"]`` lookup raised an
        # uncaught KeyError when inspect had not been imported anywhere yet.
        return inspect.currentframe

    def _get_logger_name(self, stack_level: int) -> str:
        """Get the module name from the call stack at the specified level.

        Args:
            stack_level: Stack level to inspect (1-9). (int)

        Returns:
            str: Module name or "unknown" if not found.

        Examples:
            >>> debug = TieredDebug()
            >>> isinstance(debug._get_logger_name(1), str)
            True
        """
        try:
            getter = self._select_frame_getter()
            if getter is getattr(sys, "_getframe", None):
                frame = getter(stack_level)
            else:
                # inspect.currentframe() takes no depth argument: start at
                # this frame and walk up ``stack_level`` callers manually.
                frame = getter()
                for _ in range(stack_level):
                    frame = frame.f_back
            return frame.f_globals.get("__name__", "unknown")
        except (ValueError, AttributeError, TypeError):
            # ValueError: depth beyond the stack (sys._getframe);
            # AttributeError/TypeError: frame walk fell off the top
            # (frame became None) or the getter rejected the call.
            return "unknown"

    @contextmanager
    def change_level(self, level: int) -> Iterator[None]:
        """Temporarily change the debug level within a context.

        Args:
            level: Debug level to set temporarily (1-5). (int)

        Examples:
            >>> debug = TieredDebug(level=2)
            >>> with debug.change_level(4):
            ...     assert debug.level == 4
            >>> debug.level
            2
        """
        original_level = self.level
        self.level = level
        try:
            yield
        finally:
            # Always restore, even if the body raised.
            self.level = original_level

    def log(
        self,
        level: DebugLevel,
        msg: str,
        *args,
        exc_info: Optional[bool] = None,
        stack_info: Optional[bool] = None,
        stacklevel: Optional[int] = None,
        extra: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Log a message at the specified debug level.

        Args:
            level: Debug level for the message (1-5). (DebugLevel)
            msg: Message to log, optionally with format specifiers. (str)
            *args: Arguments for message formatting.
            exc_info: Include exception info if True. (bool)
            stack_info: Include stack trace if True. (bool)
            stacklevel: Stack level for caller reporting (1-9). (int)
            extra: Extra metadata dictionary. (Dict[str, Any])

        Raises:
            ValueError: If level is not between 1 and 5.
            TypeError: If extra is not a dictionary or None.

        Examples:
            >>> debug = TieredDebug(level=2)
            >>> import logging
            >>> debug.add_handler(logging.StreamHandler())
            >>> debug.log(1, "Level 1 message: %s", "test")
            >>> debug.log(3, "Level 3 message")  # Not logged
        """
        if not 1 <= level <= 5:
            raise ValueError("Debug level must be 1-5")
        # Messages above the configured level are dropped silently; level 1
        # therefore always passes.
        if level > self.level:
            return
        if exc_info is None:
            exc_info = False
        if stack_info is None:
            stack_info = False
        if extra is not None and not isinstance(extra, dict):
            raise TypeError("extra must be a dictionary or None")
        if extra is None:
            extra = {}
        effective_stacklevel = self.stacklevel if stacklevel is None else stacklevel
        effective_stacklevel = self.check_val(effective_stacklevel, "stack")
        # The record is emitted through a logger named after the *caller's*
        # module, resolved from the requested stack depth.
        logger_name = self._get_logger_name(effective_stacklevel)
        logger = logging.getLogger(logger_name)
        logger.debug(
            f"DEBUG{level} {msg}",
            *args,
            exc_info=exc_info,
            stack_info=stack_info,
            stacklevel=effective_stacklevel,
            extra=extra,
        )

    def lv1(
        self,
        msg: str,
        *args,
        exc_info: Optional[bool] = None,
        stack_info: Optional[bool] = None,
        stacklevel: Optional[int] = None,
        extra: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Log a message at debug level 1 (always logged).

        Args:
            msg: Message to log, optionally with format specifiers. (str)
            *args: Arguments for message formatting.
            exc_info: Include exception info if True. (bool)
            stack_info: Include stack trace if True. (bool)
            stacklevel: Stack level for caller reporting (1-9). (int)
            extra: Extra metadata dictionary. (Dict[str, Any])

        Raises:
            TypeError: If extra is not a dictionary or None.
        """
        self.log(
            1,
            msg,
            *args,
            exc_info=exc_info,
            stack_info=stack_info,
            stacklevel=stacklevel,
            extra=extra,
        )

    def lv2(
        self,
        msg: str,
        *args,
        exc_info: Optional[bool] = None,
        stack_info: Optional[bool] = None,
        stacklevel: Optional[int] = None,
        extra: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Log a message at debug level 2 (logged if level >= 2).

        Args:
            msg: Message to log, optionally with format specifiers. (str)
            *args: Arguments for message formatting.
            exc_info: Include exception info if True. (bool)
            stack_info: Include stack trace if True. (bool)
            stacklevel: Stack level for caller reporting (1-9). (int)
            extra: Extra metadata dictionary. (Dict[str, Any])

        Raises:
            TypeError: If extra is not a dictionary or None.
        """
        self.log(
            2,
            msg,
            *args,
            exc_info=exc_info,
            stack_info=stack_info,
            stacklevel=stacklevel,
            extra=extra,
        )

    def lv3(
        self,
        msg: str,
        *args,
        exc_info: Optional[bool] = None,
        stack_info: Optional[bool] = None,
        stacklevel: Optional[int] = None,
        extra: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Log a message at debug level 3 (logged if level >= 3).

        Args:
            msg: Message to log, optionally with format specifiers. (str)
            *args: Arguments for message formatting.
            exc_info: Include exception info if True. (bool)
            stack_info: Include stack trace if True. (bool)
            stacklevel: Stack level for caller reporting (1-9). (int)
            extra: Extra metadata dictionary. (Dict[str, Any])

        Raises:
            TypeError: If extra is not a dictionary or None.
        """
        self.log(
            3,
            msg,
            *args,
            exc_info=exc_info,
            stack_info=stack_info,
            stacklevel=stacklevel,
            extra=extra,
        )

    def lv4(
        self,
        msg: str,
        *args,
        exc_info: Optional[bool] = None,
        stack_info: Optional[bool] = None,
        stacklevel: Optional[int] = None,
        extra: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Log a message at debug level 4 (logged if level >= 4).

        Args:
            msg: Message to log, optionally with format specifiers. (str)
            *args: Arguments for message formatting.
            exc_info: Include exception info if True. (bool)
            stack_info: Include stack trace if True. (bool)
            stacklevel: Stack level for caller reporting (1-9). (int)
            extra: Extra metadata dictionary. (Dict[str, Any])

        Raises:
            TypeError: If extra is not a dictionary or None.
        """
        self.log(
            4,
            msg,
            *args,
            exc_info=exc_info,
            stack_info=stack_info,
            stacklevel=stacklevel,
            extra=extra,
        )

    def lv5(
        self,
        msg: str,
        *args,
        exc_info: Optional[bool] = None,
        stack_info: Optional[bool] = None,
        stacklevel: Optional[int] = None,
        extra: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Log a message at debug level 5 (logged if level >= 5).

        Args:
            msg: Message to log, optionally with format specifiers. (str)
            *args: Arguments for message formatting.
            exc_info: Include exception info if True. (bool)
            stack_info: Include stack trace if True. (bool)
            stacklevel: Stack level for caller reporting (1-9). (int)
            extra: Extra metadata dictionary. (Dict[str, Any])

        Raises:
            TypeError: If extra is not a dictionary or None.
        """
        self.log(
            5,
            msg,
            *args,
            exc_info=exc_info,
            stack_info=stack_info,
            stacklevel=stacklevel,
            extra=extra,
        )
python-tiered-debug-1.4.0/src/tiered_debug/debug.py 0000664 0000000 0000000 00000005555 15071204466 0022263 0 ustar 00root root 0000000 0000000 """Sample usage of tiered debug logging with a global instance and decorator.
Provides a global `TieredDebug` instance and a `begin_end` decorator to
log function entry and exit at specified debug levels. Designed for use
in projects like ElasticKeeper and ElasticCheckpoint to trace function
execution with configurable stack levels.
Examples:
>>> from tiered_debug.debug import debug, begin_end
>>> debug.level = 3
>>> import logging
>>> debug.add_handler(logging.StreamHandler())
>>> @begin_end(debug, begin=2, end=3, stacklevel=2, extra={"func": "test"})
... def example():
... return "Test"
>>> example()
'Test'
"""
from functools import wraps
from typing import Any, Dict, Literal, Optional
from ._base import TieredDebug
# Level at which begin_end() emits its "BEGIN CALL" marker.
DEFAULT_BEGIN = 2
"""Default debug level for BEGIN messages."""
# Level at which begin_end() emits its "END CALL" marker.
DEFAULT_END = 3
"""Default debug level for END messages."""
# Module-wide shared instance; stacklevel 3 is intended to attribute records
# to the original caller of the lvX()/log() helpers (see TieredDebug).
debug = TieredDebug(level=1, stacklevel=3)
"""Global TieredDebug instance with default level 1 and stacklevel 3."""
def begin_end(
    debug_obj: Optional[TieredDebug] = None,
    begin: Literal[1, 2, 3, 4, 5] = DEFAULT_BEGIN,
    end: Literal[1, 2, 3, 4, 5] = DEFAULT_END,
    stacklevel: int = 2,
    extra: Optional[Dict[str, Any]] = None,
):
    """Decorate a function to log entry and exit markers.

    Entry is logged as "BEGIN CALL" at the ``begin`` level and exit as
    "END CALL" at the ``end`` level, through ``debug_obj`` when supplied,
    otherwise through the module-global ``debug`` instance. The stack
    level is bumped by one so records point at the decorated function's
    caller rather than at the wrapper itself.

    Args:
        debug_obj: TieredDebug instance to use (default: global debug).
        begin: Debug level for BEGIN message (1-5, default 2). (int)
        end: Debug level for END message (1-5, default 3). (int)
        stacklevel: Stack level for reporting (1-9, default 2). (int)
        extra: Extra metadata dictionary (default None). (Dict[str, Any])

    Returns:
        Callable: Decorated function with logging.
    """
    active = debug if debug_obj is None else debug_obj

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # One extra frame (this wrapper) sits between log() and the
            # real caller, hence the +1.
            depth = stacklevel + 1
            active.log(
                begin,
                f"BEGIN CALL: {func.__name__}()",
                stacklevel=depth,
                extra=extra,
            )
            result = func(*args, **kwargs)
            active.log(
                end,
                f"END CALL: {func.__name__}()",
                stacklevel=depth,
                extra=extra,
            )
            return result

        return wrapper

    return decorator
python-tiered-debug-1.4.0/src/tiered_debug/py.typed 0000664 0000000 0000000 00000000000 15071204466 0022276 0 ustar 00root root 0000000 0000000 python-tiered-debug-1.4.0/tests/ 0000775 0000000 0000000 00000000000 15071204466 0016542 5 ustar 00root root 0000000 0000000 python-tiered-debug-1.4.0/tests/__init__.py 0000664 0000000 0000000 00000000163 15071204466 0020653 0 ustar 00root root 0000000 0000000 # SPDX-FileCopyrightText: 2025-present Aaron Mildenstein
#
# SPDX-License-Identifier: MIT
python-tiered-debug-1.4.0/tests/test_base.py 0000664 0000000 0000000 00000075630 15071204466 0021100 0 ustar 00root root 0000000 0000000 """Unit tests for the tiered_debug._base module.
Tests the `TieredDebug` class, which provides tiered debug logging at
levels 1-5 with configurable stack levels for caller reporting. Covers
initialization, level and stacklevel properties, logger configuration,
logging behavior, and parameters (`exc_info`, `stack_info`, `stacklevel`,
`extra`). Designed for use in projects like ElasticKeeper and
ElasticCheckpoint.
Examples:
>>> from tiered_debug._base import TieredDebug
>>> import logging
>>> debug = TieredDebug(level=2)
>>> handler = logging.StreamHandler()
>>> debug.add_handler(handler, logging.Formatter("%(message)s"))
>>> debug.lv1("Test message: %s", "value") # Logs at level 1
>>> debug.lv3("Not logged") # Ignored (level 3 > 2)
"""
# pylint: disable=W0212,W0621
import logging
import sys
import platform
import pytest
from tiered_debug._base import DEFAULTS, TieredDebug
# Logger name used by default-constructed TieredDebug instances in these tests.
BASENAME = "tiered_debug._base"
"""Module name for debug.logger"""
@pytest.fixture
def debug():
    """Yield a brand-new TieredDebug instance with default settings.

    Returns:
        TieredDebug: Instance with default level, stacklevel, and logger.
    """
    instance = TieredDebug()
    return instance
# Tests for initialization
def test_default_initialization(debug):
    """A fresh instance should carry the default debug/stack levels and name.

    Args:
        debug: TieredDebug instance. (TieredDebug)
    """
    observed = (debug.level, debug.stacklevel, debug.logger.name)
    assert observed == (DEFAULTS["debug"], DEFAULTS["stack"], BASENAME)
def test_custom_initialization():
    """Custom level, stacklevel, and logger_name should all be honored."""
    dbg = TieredDebug(level=3, stacklevel=4, logger_name="custom")
    assert (dbg.level, dbg.stacklevel, dbg.logger.name) == (3, 4, "custom")
# Tests for level property and setter
def test_level_property(debug):
    """The level property should expose the underlying _level attribute.

    Args:
        debug: TieredDebug instance. (TieredDebug)
    """
    # Set the private attribute directly to isolate the property getter.
    debug._level = 3
    assert debug.level == 3
def test_level_setter_valid(debug):
    """In-range levels (1, 3, 5) should be stored verbatim.

    Args:
        debug: TieredDebug instance. (TieredDebug)
    """
    for wanted in (1, 3, 5):
        debug.level = wanted
        assert debug._level == wanted
def test_level_setter_invalid(debug, caplog):
    """Out-of-range levels should warn and fall back to the default.

    Args:
        debug: TieredDebug instance. (TieredDebug)
        caplog: Pytest caplog fixture for capturing logs.
    """
    for bogus in (0, 6):
        caplog.clear()
        with caplog.at_level(logging.WARNING, logger=debug.logger.name):
            debug.level = bogus
        assert f"Invalid debug level: {bogus}" in caplog.text
        assert debug.level == DEFAULTS["debug"]
# Tests for stacklevel property and setter
def test_stacklevel_property(debug):
    """The stacklevel property should expose the underlying _stacklevel.

    Args:
        debug: TieredDebug instance. (TieredDebug)
    """
    # Set the private attribute directly to isolate the property getter.
    debug._stacklevel = 3
    assert debug.stacklevel == 3
def test_stacklevel_setter_valid(debug):
    """In-range stack levels (1, 3, 9) should be stored verbatim.

    Args:
        debug: TieredDebug instance. (TieredDebug)
    """
    for wanted in (1, 3, 9):
        debug.stacklevel = wanted
        assert debug._stacklevel == wanted
def test_stacklevel_setter_invalid(debug, caplog):
    """Out-of-range stack levels should warn and fall back to the default.

    Args:
        debug: TieredDebug instance. (TieredDebug)
        caplog: Pytest caplog fixture for capturing logs.
    """
    for bogus in (0, 10):
        caplog.clear()
        with caplog.at_level(logging.WARNING, logger=debug.logger.name):
            debug.stacklevel = bogus
        assert f"Invalid stack level: {bogus}" in caplog.text
        assert debug.stacklevel == DEFAULTS["stack"]
# Tests for logger property
def test_logger_property(debug):
    """logger should be a logging.Logger named after the base module.

    Args:
        debug: TieredDebug instance. (TieredDebug)
    """
    instance_logger = debug.logger
    assert isinstance(instance_logger, logging.Logger)
    assert instance_logger.name == BASENAME
def test_logger_custom_name():
    """A custom logger_name given at construction should be reflected."""
    dbg = TieredDebug(logger_name="test.logger")
    assert dbg.logger.name == "test.logger"
# Tests for add_handler method
def test_add_handler(debug, caplog):
    """add_handler should attach the handler and route lv1 output through it.

    Args:
        debug: TieredDebug instance. (TieredDebug)
        caplog: Pytest caplog fixture for capturing logs.
    """
    caplog.set_level(logging.DEBUG)
    stream_handler = logging.StreamHandler()
    line_formatter = logging.Formatter("%(funcName)s:%(lineno)d %(message)s")
    debug.add_handler(stream_handler, formatter=line_formatter)
    with caplog.at_level(logging.DEBUG, logger=debug.logger.name):
        debug.lv1("Test message")
    assert "DEBUG1 Test message" in caplog.text
    assert stream_handler in debug.logger.handlers
def test_add_handler_duplicate(debug, caplog):
    """Re-adding the same handler should be skipped with an info message.

    Args:
        debug: TieredDebug instance. (TieredDebug)
        caplog: Pytest caplog fixture for capturing logs.
    """
    caplog.set_level(logging.DEBUG)
    stream_handler = logging.StreamHandler()
    debug.add_handler(stream_handler)
    count_after_first = len(debug.logger.handlers)
    with caplog.at_level(logging.INFO, logger=debug.logger.name):
        debug.add_handler(stream_handler)
    assert "Handler already attached to logger, skipping" in caplog.text
    # The handler list must be unchanged by the duplicate add.
    assert len(debug.logger.handlers) == count_after_first
# Tests for check_val method
def test_check_val_valid(debug):
    """check_val should pass through in-range values unchanged.

    Args:
        debug: TieredDebug instance. (TieredDebug)
    """
    for value, kind in ((3, "debug"), (5, "debug"), (3, "stack"), (9, "stack")):
        assert debug.check_val(value, kind) == value
def test_check_val_invalid(debug, caplog):
    """Out-of-range values should warn and return the matching default.

    Args:
        debug: TieredDebug instance. (TieredDebug)
        caplog: Pytest caplog fixture for capturing logs.
    """
    for value, kind in ((0, "debug"), (10, "stack")):
        caplog.clear()
        with caplog.at_level(logging.WARNING, logger=debug.logger.name):
            assert debug.check_val(value, kind) == DEFAULTS[kind]
        assert f"Invalid {kind} level: {value}" in caplog.text
def test_check_val_invalid_kind(debug):
    """An unrecognized kind should raise ValueError.

    Args:
        debug: TieredDebug instance. (TieredDebug)
    """
    with pytest.raises(ValueError, match="Invalid kind: invalid"):
        debug.check_val(3, "invalid")
# Tests for _select_frame_getter and _get_logger_name
def test_get_logger_name_valid(debug):
    """Depth 1 should resolve to this test module's name.

    Args:
        debug: TieredDebug instance. (TieredDebug)

    Note: the call must stay a direct call from this function body; any
    extra wrapper frame would change the resolved module.
    """
    assert debug._get_logger_name(1) == __name__
def test_get_logger_name_invalid_stack(debug):
    """A depth far beyond the real stack should resolve to 'unknown'.

    Args:
        debug: TieredDebug instance. (TieredDebug)
    """
    assert debug._get_logger_name(100) == "unknown"
def test_select_frame_getter_cpython(debug, monkeypatch):
    """On CPython the frame getter should be sys._getframe itself.

    Args:
        debug: TieredDebug instance. (TieredDebug)
        monkeypatch: Pytest monkeypatch fixture.
    """
    monkeypatch.setattr(platform, "python_implementation", lambda: "CPython")
    frame_getter = debug._select_frame_getter()
    assert frame_getter is sys._getframe
def test_select_frame_getter_non_cpython(debug, monkeypatch):
    """On non-CPython the getter should return a usable frame object.

    Args:
        debug: TieredDebug instance. (TieredDebug)
        monkeypatch: Pytest monkeypatch fixture.
    """
    monkeypatch.setattr(platform, "python_implementation", lambda: "PyPy")
    frame_getter = debug._select_frame_getter()
    current = frame_getter()
    # A real frame with an accessible parent proves the getter works.
    assert current is not None
    assert current.f_back is not None
# Tests for change_level context manager
def test_change_level(debug):
    """change_level should apply inside the context and restore afterwards.

    Args:
        debug: TieredDebug instance. (TieredDebug)
    """
    debug.level = 2
    assert debug.level == 2
    with debug.change_level(4):
        assert debug.level == 4
    # Leaving the context must restore the previous level.
    assert debug.level == 2
def test_change_level_with_exception(debug):
    """change_level must restore the prior level even when the body raises.

    Args:
        debug: TieredDebug instance. (TieredDebug)
    """
    debug.level = 2
    with pytest.raises(RuntimeError):
        with debug.change_level(4):
            assert debug.level == 4
            raise RuntimeError("Test exception")
    # The finally clause must have restored the original level.
    assert debug.level == 2
# Tests for log method
def test_log_valid_level(debug, caplog):
    """log() should emit a %-formatted message at an enabled level.

    Args:
        debug: TieredDebug instance. (TieredDebug)
        caplog: Pytest caplog fixture for capturing logs.
    """
    caplog.set_level(logging.DEBUG)
    debug.level = 3
    with caplog.at_level(logging.DEBUG, logger=debug.logger.name):
        debug.log(2, "Test message: %s", "value", stacklevel=1)
    assert "DEBUG2 Test message: value" in caplog.text
def test_log_invalid_level(debug):
    """log() should reject levels outside 1-5 with ValueError.

    Args:
        debug: TieredDebug instance. (TieredDebug)
    """
    with pytest.raises(ValueError, match="Debug level must be 1-5"):
        debug.log(6, "Invalid level")
def test_log_with_default_stacklevel(debug, caplog):
    """Test that log uses the instance's default stacklevel when none is given.

    Args:
        debug: TieredDebug instance. (TieredDebug)
        caplog: Pytest caplog fixture for capturing logs.
    """
    caplog.set_level(logging.DEBUG)
    debug.stacklevel = 3
    expected = debug._get_logger_name(2)
    # Why depth 2 here when log() will use stacklevel 3?
    # _get_logger_name() counts frames from its own call site. When log()
    # invokes it, log() itself adds one frame between _get_logger_name()
    # and this test, so log() needs depth 3 to land on this module.
    # Calling _get_logger_name() directly from this test removes that
    # extra frame, so depth 2 resolves to the same caller:
    #   (this test) -> log() -> _get_logger_name()  => depth 3
    #   (this test) -> _get_logger_name()           => depth 2
    # Both paths therefore name this module (tests.test_base), which is
    # also the logger name caplog records below.
    with caplog.at_level(logging.DEBUG, logger=debug.logger.name):
        debug.log(1, "Test message: %s", "value")
    # The record's logger name should match the name resolved two frames up.
    assert caplog.records[0].name == expected
def test_log_with_custom_stacklevel(debug, caplog):
    """Test that log honors an explicitly supplied stacklevel.

    Args:
        debug: TieredDebug instance. (TieredDebug)
        caplog: Pytest caplog fixture for capturing logs.
    """
    caplog.set_level(logging.DEBUG)
    # NOTE(review): environment-dependent expectation — when run under
    # pytest, four frames above the name lookup sits pluggy's call
    # machinery. A pytest/pluggy internals change could move this frame;
    # confirm against the runner if this assertion starts failing.
    expected = "pluggy._callers"
    with caplog.at_level(logging.DEBUG, logger=debug.logger.name):
        debug.log(1, "Test message: %s", "value", stacklevel=4)
    assert (
        caplog.records[0].name == expected
    )  # In testing, stacklevel 4 points to "pluggy._callers"
# Tests for logging functions
@pytest.mark.parametrize(
    "debug_level,log_level,should_log",
    [
        (1, 1, True),  # lv1 always logs
        (1, 2, False),  # lv2 shouldn't log at debug level 1
        (1, 3, False),  # lv3 shouldn't log at debug level 1
        (3, 1, True),  # lv1 always logs
        (3, 2, True),  # lv2 should log at debug level 3
        (3, 3, True),  # lv3 should log at debug level 3
        (3, 4, False),  # lv4 shouldn't log at debug level 3
        (5, 1, True),  # lv1 always logs
        (5, 5, True),  # lv5 should log at debug level 5
    ],
)
def test_log_levels(debug, caplog, debug_level, log_level, should_log):
    """Test that the lvX helpers respect the configured debug level.

    Args:
        debug: TieredDebug instance. (TieredDebug)
        caplog: Pytest caplog fixture for capturing logs.
        debug_level: Debug level to set (1-5). (int)
        log_level: Log level to test (1-5). (int)
        should_log: Whether the message should be logged. (bool)
    """
    caplog.set_level(logging.DEBUG)
    debug.level = debug_level
    debug.add_handler(
        logging.StreamHandler(),
        formatter=logging.Formatter("%(funcName)s:%(lineno)d %(message)s"),
    )
    # Dispatch table mapping each tier to its bound helper method.
    log_methods = {
        1: debug.lv1,
        2: debug.lv2,
        3: debug.lv3,
        4: debug.lv4,
        5: debug.lv5,
    }
    expected = debug._get_logger_name(1)
    # Why depth 1 here, when test_log_with_default_stacklevel used 2?
    # There, log() was called directly, adding one frame between
    # _get_logger_name() and the test. Here the chain is
    #   (this test) -> lvX() -> log() -> _get_logger_name()
    # and lvX() passes stacklevel through so the default (3) resolves to
    # lvX()'s caller — this test. Calling _get_logger_name() directly from
    # this test needs only depth 1 to land on the same frame.
    with caplog.at_level(logging.DEBUG, logger=debug.logger.name):
        log_methods[log_level](f"Test message level {log_level}: %s", "value")
    msg = f"DEBUG{log_level} Test message level {log_level}: value"
    assert (msg in caplog.text) == should_log
    if should_log:
        # Should match the logger name assigned in the fixture
        assert caplog.records[0].name == expected
def test_lv1_logs_unconditionally(debug, caplog):
    """lv1 should always log while lv2 is suppressed at level 1.

    Args:
        debug: TieredDebug instance. (TieredDebug)
        caplog: Pytest caplog fixture for capturing logs.
    """
    caplog.set_level(logging.DEBUG)
    debug.level = 1
    line_formatter = logging.Formatter("%(funcName)s:%(lineno)d %(message)s")
    debug.add_handler(logging.StreamHandler(), formatter=line_formatter)
    with caplog.at_level(logging.DEBUG, logger=debug.logger.name):
        debug.lv1("Unconditional message: %s", "value")
        debug.lv2("Conditional message")
    assert "DEBUG1 Unconditional message: value" in caplog.text
    assert "DEBUG2 Conditional message" not in caplog.text
# Tests for exc_info, stack_info, and extra parameters
def test_log_with_exc_info(debug, caplog):
    """Verify that a traceback is attached to the record when exc_info=True.

    Args:
        debug: TieredDebug instance. (TieredDebug)
        caplog: Pytest caplog fixture for capturing logs.

    Examples:
        >>> debug = TieredDebug(level=1)
        >>> handler = logging.StreamHandler()
        >>> debug.add_handler(handler)
        >>> try:
        ...     raise ValueError("Test")
        ... except ValueError:
        ...     debug.lv1("Error: %s", "info", exc_info=True)
    """
    caplog.set_level(logging.DEBUG)
    debug.level = 1
    fmt = logging.Formatter("%(funcName)s:%(lineno)d %(message)s")
    debug.add_handler(logging.StreamHandler(), formatter=fmt)
    with caplog.at_level(logging.DEBUG, logger=debug.logger.name):
        try:
            raise ValueError("Test error")
        except ValueError:
            debug.lv1("Error occurred: %s", "info", exc_info=True)
    captured = caplog.text
    assert "DEBUG1 Error occurred: info" in captured
    assert "ValueError: Test error" in captured
def test_log_without_exc_info(debug, caplog):
    """Verify that no traceback is attached when exc_info=False.

    Args:
        debug: TieredDebug instance. (TieredDebug)
        caplog: Pytest caplog fixture for capturing logs.

    Examples:
        >>> debug = TieredDebug(level=1)
        >>> handler = logging.StreamHandler()
        >>> debug.add_handler(handler)
        >>> try:
        ...     raise ValueError("Test")
        ... except ValueError:
        ...     debug.lv1("Error: %s", "info", exc_info=False)
    """
    caplog.set_level(logging.DEBUG)
    debug.level = 1
    fmt = logging.Formatter("%(funcName)s:%(lineno)d %(message)s")
    debug.add_handler(logging.StreamHandler(), formatter=fmt)
    with caplog.at_level(logging.DEBUG, logger=debug.logger.name):
        try:
            raise ValueError("Test error")
        except ValueError:
            debug.lv1("Error occurred: %s", "info", exc_info=False)
    captured = caplog.text
    assert "DEBUG1 Error occurred: info" in captured
    assert "ValueError: Test error" not in captured
def test_log_with_stack_info(debug, caplog):
    """Verify a stack trace is appended to the message when stack_info=True.

    Args:
        debug: TieredDebug instance. (TieredDebug)
        caplog: Pytest caplog fixture for capturing logs.

    Examples:
        >>> debug = TieredDebug(level=1)
        >>> handler = logging.StreamHandler()
        >>> debug.add_handler(handler)
        >>> debug.lv1("Test: %s", "value", stack_info=True)
    """
    caplog.set_level(logging.DEBUG)
    debug.level = 1
    fmt = logging.Formatter("%(funcName)s:%(lineno)d %(message)s")
    debug.add_handler(logging.StreamHandler(), formatter=fmt)
    with caplog.at_level(logging.DEBUG, logger=debug.logger.name):
        debug.lv1("Stack info test: %s", "value", stack_info=True)
    captured = caplog.text
    assert "DEBUG1 Stack info test: value" in captured
    assert "Stack (most recent call last):" in captured
def test_log_without_stack_info(debug, caplog):
    """Verify no stack trace is appended when stack_info=False.

    Args:
        debug: TieredDebug instance. (TieredDebug)
        caplog: Pytest caplog fixture for capturing logs.

    Examples:
        >>> debug = TieredDebug(level=1)
        >>> handler = logging.StreamHandler()
        >>> debug.add_handler(handler)
        >>> debug.lv1("Test: %s", "value", stack_info=False)
    """
    caplog.set_level(logging.DEBUG)
    debug.level = 1
    fmt = logging.Formatter("%(funcName)s:%(lineno)d %(message)s")
    debug.add_handler(logging.StreamHandler(), formatter=fmt)
    with caplog.at_level(logging.DEBUG, logger=debug.logger.name):
        debug.lv1("No stack info test: %s", "value", stack_info=False)
    captured = caplog.text
    assert "DEBUG1 No stack info test: value" in captured
    assert "Stack (most recent call last):" not in captured
def test_log_with_extra(debug, caplog):
    """Verify custom attributes from extra land on the emitted record.

    Args:
        debug: TieredDebug instance. (TieredDebug)
        caplog: Pytest caplog fixture for capturing logs.

    Examples:
        >>> debug = TieredDebug(level=1)
        >>> handler = logging.StreamHandler()
        >>> debug.add_handler(handler)
        >>> debug.lv1("Test: %s", "value", extra={"custom": "value"})
    """
    caplog.set_level(logging.DEBUG)
    debug.level = 1
    fmt = logging.Formatter("%(funcName)s:%(lineno)d %(message)s")
    debug.add_handler(logging.StreamHandler(), formatter=fmt)
    with caplog.at_level(logging.DEBUG, logger=debug.logger.name):
        debug.lv1("Extra test: %s", "value", extra={"custom": "custom_value"})
    assert "DEBUG1 Extra test: value" in caplog.text
    record = caplog.records[0]
    assert record.custom == "custom_value"
def test_log_with_extra_none(debug, caplog):
    """Verify extra=None is tolerated and the message still logs.

    Args:
        debug: TieredDebug instance. (TieredDebug)
        caplog: Pytest caplog fixture for capturing logs.

    Examples:
        >>> debug = TieredDebug(level=1)
        >>> handler = logging.StreamHandler()
        >>> debug.add_handler(handler)
        >>> debug.lv1("Test: %s", "value", extra=None)
    """
    caplog.set_level(logging.DEBUG)
    debug.level = 1
    fmt = logging.Formatter("%(funcName)s:%(lineno)d %(message)s")
    debug.add_handler(logging.StreamHandler(), formatter=fmt)
    with caplog.at_level(logging.DEBUG, logger=debug.logger.name):
        debug.lv1("Extra none test: %s", "value", extra=None)
    # No exception raised; message logged with an empty extra mapping.
    assert "DEBUG1 Extra none test: value" in caplog.text
def test_log_all_parameters_combined(debug, caplog):
    """Exercise exc_info, stack_info, stacklevel, and extra in a single call.

    Args:
        debug: TieredDebug instance. (TieredDebug)
        caplog: Pytest caplog fixture for capturing logs.

    Examples:
        >>> debug = TieredDebug(level=1)
        >>> handler = logging.StreamHandler()
        >>> debug.add_handler(handler)
        >>> try:
        ...     raise ValueError("Test")
        ... except ValueError:
        ...     debug.lv1("Test: %s", "value", exc_info=True, stack_info=True,
        ...               extra={"custom": "value"})
    """
    caplog.set_level(logging.DEBUG)
    debug.level = 1
    fmt = logging.Formatter("%(funcName)s:%(lineno)d %(message)s")
    debug.add_handler(logging.StreamHandler(), formatter=fmt)
    with caplog.at_level(logging.DEBUG, logger=debug.logger.name):
        try:
            raise ValueError("Combined test error")
        except ValueError:
            debug.lv1(
                "Combined test: %s",
                "value",
                exc_info=True,
                stack_info=True,
                stacklevel=4,
                extra={"custom": "combined_value"},
            )
    captured = caplog.text
    assert "DEBUG1 Combined test: value" in captured
    assert "ValueError: Combined test error" in captured
    assert "Stack (most recent call last):" in captured
    assert caplog.records[0].custom == "combined_value"
def test_log_with_invalid_extra_type(debug, caplog):
    """Verify a non-dict extra raises TypeError and nothing is logged.

    Args:
        debug: TieredDebug instance. (TieredDebug)
        caplog: Pytest caplog fixture for capturing logs.

    Examples:
        >>> debug = TieredDebug(level=1)
        >>> handler = logging.StreamHandler()
        >>> debug.add_handler(handler)
        >>> debug.lv1("Test: %s", "value", extra="invalid")  # Raises TypeError
    """
    caplog.set_level(logging.DEBUG)
    debug.level = 1
    fmt = logging.Formatter("%(funcName)s:%(lineno)d %(message)s")
    debug.add_handler(logging.StreamHandler(), formatter=fmt)
    expected = "Invalid extra test"
    with caplog.at_level(logging.DEBUG, logger=debug.logger.name):
        with pytest.raises(TypeError):
            debug.lv1("Invalid extra test: %s", "value", extra="not_a_dict")
    assert expected not in caplog.text
def test_log_with_empty_message(debug, caplog):
    """Verify an empty format string is logged without error.

    Args:
        debug: TieredDebug instance. (TieredDebug)
        caplog: Pytest caplog fixture for capturing logs.

    Examples:
        >>> debug = TieredDebug(level=1)
        >>> handler = logging.StreamHandler()
        >>> debug.add_handler(handler)
        >>> debug.lv1("")  # Should log empty message
    """
    caplog.set_level(logging.DEBUG)
    debug.level = 1
    fmt = logging.Formatter("%(funcName)s:%(lineno)d %(message)s")
    debug.add_handler(logging.StreamHandler(), formatter=fmt)
    with caplog.at_level(logging.DEBUG, logger=debug.logger.name):
        debug.lv1("")
    # The tier prefix still appears even though the message body is empty.
    assert "DEBUG1 " in caplog.text
def test_log_with_multiple_handlers(debug, caplog):
    """Verify logging works after attaching two handlers to the logger.

    Args:
        debug: TieredDebug instance. (TieredDebug)
        caplog: Pytest caplog fixture for capturing logs.

    Examples:
        >>> debug = TieredDebug(level=1)
        >>> handler1 = logging.StreamHandler()
        >>> handler2 = logging.StreamHandler()
        >>> debug.add_handler(handler1)
        >>> debug.add_handler(handler2)
        >>> debug.lv1("Test: %s", "value")
    """
    caplog.set_level(logging.DEBUG)
    debug.level = 1
    fmt = logging.Formatter("%(funcName)s:%(lineno)d %(message)s")
    first = logging.StreamHandler()
    second = logging.StreamHandler()
    initial_count = len(debug.logger.handlers)
    debug.add_handler(first, formatter=fmt)
    debug.add_handler(second, formatter=fmt)
    with caplog.at_level(logging.DEBUG, logger=debug.logger.name):
        debug.lv1("Multi-handler test: %s", "value")
    assert "DEBUG1 Multi-handler test: value" in caplog.text
    # Both handlers should now be attached on top of any pre-existing ones.
    assert len(debug.logger.handlers) == initial_count + 2
def test_log_performance(debug, caplog):
    """Smoke-test that 100 consecutive lv1 calls with extra metadata all land.

    The loop variable was previously named ``_``, which conventionally marks
    an unused value — but it is used both as the message argument and in the
    ``extra`` mapping. It is renamed ``count`` for clarity.

    Args:
        debug: TieredDebug instance. (TieredDebug)
        caplog: Pytest caplog fixture for capturing logs.

    Examples:
        >>> debug = TieredDebug(level=1)
        >>> handler = logging.StreamHandler()
        >>> debug.add_handler(handler)
        >>> for _ in range(10):
        ...     debug.lv1("Test: %s", "value")
    """
    caplog.set_level(logging.DEBUG)
    debug.level = 1
    debug.add_handler(
        logging.StreamHandler(),
        formatter=logging.Formatter("%(funcName)s:%(lineno)d %(message)s"),
    )
    with caplog.at_level(logging.DEBUG, logger=debug.logger.name):
        for count in range(100):  # Test 100 log calls
            debug.lv1(
                "Performance test: %s",
                str(count),
                extra={"count": count},
            )
    assert "DEBUG1 Performance test:" in caplog.text
    assert len(caplog.records) == 100  # All calls logged
python-tiered-debug-1.4.0/tests/test_debug.py 0000664 0000000 0000000 00000026260 15071204466 0021247 0 ustar 00root root 0000000 0000000 """Unit tests for the tiered_debug.debug module.
Tests the global `debug` instance and `begin_end` decorator in `debug.py`,
which provide project-wide debugging for tiered logging at levels 1-5.
Tests cover initialization, decorator behavior, and logging integration,
designed for projects like ElasticKeeper and ElasticCheckpoint.
Examples:
>>> from tiered_debug.debug import debug, begin_end
>>> import logging
>>> debug.level = 3
>>> debug.add_handler(logging.StreamHandler(), logging.Formatter(
... "%(funcName)s:%(lineno)d %(message)s"))
>>> @begin_end(debug, begin=2, end=3, extra={"func": "test"})
... def test_func():
... debug.lv1("Inside")
>>> test_func() # Logs BEGIN at 2, Inside at 1, END at 3
"""
# pylint: disable=W0107,W0212,W0621
import logging
import pytest
from tiered_debug import TieredDebug
from tiered_debug.debug import begin_end, DEFAULT_BEGIN, DEFAULT_END
from tiered_debug.debug import debug as sample_debug
BASENAME = "tiered_debug.debug"
"""Module name for debug.logger"""
@pytest.fixture
def debug():
    """Provide the module-level debug instance with its logger renamed here.

    Returns:
        TieredDebug: Instance with default settings.

    Examples:
        >>> debug = TieredDebug()
        >>> isinstance(debug, TieredDebug)
        True
    """
    instance = sample_debug
    # Point the underlying logger at this test module so record names match.
    instance._logger.name = __name__
    return instance
@pytest.fixture
def reset_debug(monkeypatch):
    """Swap a brand-new TieredDebug in as the module-global debug instance.

    Args:
        monkeypatch: Pytest monkeypatch fixture.

    Returns:
        TieredDebug: Fresh TieredDebug instance as global debug.

    Examples:
        >>> from tiered_debug import debug
        >>> isinstance(debug, TieredDebug)
        True
    """
    fresh = TieredDebug()
    monkeypatch.setattr("tiered_debug.debug.debug", fresh)
    return fresh
# Tests for global debug instance
def test_debug_instance(debug):
    """Confirm the shared debug object is a TieredDebug with default settings.

    Args:
        debug: Global TieredDebug instance.

    Examples:
        >>> from tiered_debug import debug
        >>> isinstance(debug, TieredDebug)
        True
    """
    assert isinstance(debug, TieredDebug)
    assert debug.logger.name == __name__
    assert debug.level == 1  # Default debug level
    assert debug.stacklevel == 3  # Default stack level
def test_debug_add_handler(debug, caplog):
    """Confirm the global debug instance accepts a handler and emits through it.

    Args:
        debug: Global TieredDebug instance.
        caplog: Pytest caplog fixture for capturing logs.

    Examples:
        >>> from tiered_debug import debug
        >>> import logging
        >>> handler = logging.StreamHandler()
        >>> debug.add_handler(handler)
        >>> debug.lv1("Test")
    """
    caplog.set_level(logging.DEBUG)
    fmt = logging.Formatter("%(funcName)s:%(lineno)d %(message)s")
    stream_handler = logging.StreamHandler()
    debug.add_handler(stream_handler, formatter=fmt)
    with caplog.at_level(logging.DEBUG, logger=debug.logger.name):
        debug.lv1("Test message")
    assert "DEBUG1 Test message" in caplog.text
    assert stream_handler in debug.logger.handlers
# Tests for constants
def test_default_begin():
    """Confirm DEFAULT_BEGIN is the integer 2.

    Examples:
        >>> from tiered_debug.debug import DEFAULT_BEGIN
        >>> DEFAULT_BEGIN
        2
    """
    assert isinstance(DEFAULT_BEGIN, int)
    assert DEFAULT_BEGIN == 2
def test_default_end():
    """Confirm DEFAULT_END is the integer 3.

    Examples:
        >>> from tiered_debug.debug import DEFAULT_END
        >>> DEFAULT_END
        3
    """
    assert isinstance(DEFAULT_END, int)
    assert DEFAULT_END == 3
# Tests for begin_end decorator
def test_begin_end_default_levels(debug, caplog):
    """Verify begin_end logs BEGIN at level 2 and END at level 3 by default.

    Args:
        debug: Global TieredDebug instance.
        caplog: Pytest caplog fixture for capturing logs.

    Examples:
        >>> from tiered_debug.debug import begin_end
        >>> import logging
        >>> debug = TieredDebug(level=3)
        >>> debug.add_handler(logging.StreamHandler())
        >>> @begin_end()
        ... def test_func():
        ...     debug.lv1("Inside")
        >>> test_func()
    """
    caplog.set_level(logging.DEBUG)
    debug.level = 3
    fmt = logging.Formatter("%(funcName)s:%(lineno)d %(message)s")
    debug.add_handler(logging.StreamHandler(), formatter=fmt)

    @begin_end()
    def test_func():
        debug.lv1("Inside")

    with caplog.at_level(logging.DEBUG, logger=debug.logger.name):
        test_func()
    captured = caplog.text
    assert "DEBUG2 BEGIN CALL: test_func()" in captured
    assert "DEBUG1 Inside" in captured
    assert "DEBUG3 END CALL: test_func()" in captured
@pytest.mark.parametrize(
    "begin,end,should_log_begin,should_log_end",
    [
        (1, 1, True, True),  # Both log at level 1
        (2, 3, True, False),  # Begin logs, end doesn't (level=2)
        (3, 2, False, True),  # End logs, begin doesn't (level=2)
        (4, 4, False, False),  # Neither logs (level=2)
    ],
)
def test_begin_end_custom_levels(
    debug, caplog, begin, end, should_log_begin, should_log_end
):
    """Verify begin_end honors per-decorator begin/end tier levels.

    Args:
        debug: Global TieredDebug instance.
        caplog: Pytest caplog fixture for capturing logs.
        begin: Debug level for BEGIN message. (int)
        end: Debug level for END message. (int)
        should_log_begin: Whether BEGIN should be logged. (bool)
        should_log_end: Whether END should be logged. (bool)

    Examples:
        >>> from tiered_debug.debug import begin_end
        >>> import logging
        >>> debug = TieredDebug(level=2)
        >>> debug.add_handler(logging.StreamHandler())
        >>> @begin_end(begin=1, end=2)
        ... def test_func():
        ...     pass
        >>> test_func()
    """
    caplog.set_level(logging.DEBUG)
    debug.level = 2
    fmt = logging.Formatter("%(funcName)s:%(lineno)d %(message)s")
    debug.add_handler(logging.StreamHandler(), formatter=fmt)

    @begin_end(begin=begin, end=end)
    def test_func():
        pass

    with caplog.at_level(logging.DEBUG, logger=debug.logger.name):
        test_func()
    expected_begin = f"DEBUG{begin} BEGIN CALL: test_func()"
    expected_end = f"DEBUG{end} END CALL: test_func()"
    assert (expected_begin in caplog.text) == should_log_begin
    assert (expected_end in caplog.text) == should_log_end
def test_begin_end_invalid_levels(debug, caplog):
    """Verify out-of-range begin/end levels raise ValueError and log nothing.

    Args:
        debug: Global TieredDebug instance.
        caplog: Pytest caplog fixture for capturing logs.

    Examples:
        >>> from tiered_debug.debug import begin_end
        >>> import logging
        >>> debug = TieredDebug(level=3)
        >>> debug.add_handler(logging.StreamHandler())
        >>> @begin_end(begin=6, end=7)
        ... def test_func():
        ...     pass
        >>> test_func()
    """
    caplog.set_level(logging.DEBUG)
    debug.level = 3
    fmt = logging.Formatter("%(funcName)s:%(lineno)d %(message)s")
    debug.add_handler(logging.StreamHandler(), formatter=fmt)

    @begin_end(begin=6, end=7)
    def test_func():
        pass

    with caplog.at_level(logging.DEBUG):
        with pytest.raises(ValueError):
            test_func()
    assert len(caplog.text) == 0
def test_begin_end_custom_debug_instance(caplog):
    """Verify begin_end can target a caller-supplied TieredDebug instance.

    Args:
        caplog: Pytest caplog fixture for capturing logs.

    Examples:
        >>> from tiered_debug.debug import begin_end
        >>> import logging
        >>> debug = TieredDebug(level=3)
        >>> debug.add_handler(logging.StreamHandler())
        >>> @begin_end(debug, begin=2, end=3)
        ... def test_func():
        ...     debug.lv1("Inside")
        >>> test_func()
    """
    caplog.set_level(logging.DEBUG)
    local_debug = TieredDebug(level=3)
    fmt = logging.Formatter("%(funcName)s:%(lineno)d %(message)s")
    local_debug.add_handler(logging.StreamHandler(), formatter=fmt)

    @begin_end(debug_obj=local_debug, begin=2, end=3)
    def test_func():
        local_debug.lv1("Inside")

    with caplog.at_level(logging.DEBUG, logger=local_debug.logger.name):
        test_func()
    captured = caplog.text
    assert "DEBUG2 BEGIN CALL: test_func()" in captured
    assert "DEBUG1 Inside" in captured
    assert "DEBUG3 END CALL: test_func()" in captured
    assert len(caplog.records) == 3
def test_begin_end_custom_stacklevel(debug, caplog):
    """Test that begin_end uses custom stacklevel correctly.

    Args:
        debug: Global TieredDebug instance.
        caplog: Pytest caplog fixture for capturing logs.

    Examples:
        >>> from tiered_debug.debug import begin_end
        >>> import logging
        >>> debug = TieredDebug(level=3)
        >>> debug.add_handler(logging.StreamHandler())
        >>> @begin_end(begin=2, end=3, stacklevel=3)
        ... def test_func():
        ...     pass
        >>> test_func()
    """
    caplog.set_level(logging.DEBUG)
    debug.level = 3
    debug.add_handler(
        logging.StreamHandler(),
        formatter=logging.Formatter("%(funcName)s:%(lineno)d %(message)s"),
    )
    # NOTE(review): with stacklevel=3 the logger name resolves to pytest's own
    # call machinery ("_pytest.python") — presumably two frames above the
    # decorated function. Confirm this stays stable across pytest versions.
    expected = "_pytest.python"
    @begin_end(begin=2, end=3, stacklevel=3)
    def test_func():
        pass
    with caplog.at_level(logging.DEBUG, logger=debug.logger.name):
        test_func()
    # First record is the BEGIN message; its logger name reflects stacklevel.
    assert caplog.records[0].name == expected
def test_begin_end_with_extra(debug, caplog):
    """Verify extra metadata is attached to both BEGIN and END records.

    Args:
        debug: Global TieredDebug instance.
        caplog: Pytest caplog fixture for capturing logs.

    Examples:
        >>> from tiered_debug.debug import begin_end
        >>> import logging
        >>> debug = TieredDebug(level=3)
        >>> debug.add_handler(logging.StreamHandler())
        >>> @begin_end(begin=2, end=3, extra={"func": "test"})
        ... def test_func():
        ...     pass
        >>> test_func()
    """
    caplog.set_level(logging.DEBUG)
    debug.level = 3
    fmt = logging.Formatter("%(funcName)s:%(lineno)d %(message)s")
    debug.add_handler(logging.StreamHandler(), formatter=fmt)

    @begin_end(begin=2, end=3, extra={"func": "test"})
    def test_func():
        pass

    with caplog.at_level(logging.DEBUG, logger=debug.logger.name):
        test_func()
    assert "DEBUG2 BEGIN CALL: test_func()" in caplog.text
    assert "DEBUG3 END CALL: test_func()" in caplog.text
    begin_record, end_record = caplog.records[0], caplog.records[1]
    assert begin_record.func == "test"
    assert end_record.func == "test"
def test_begin_end_preserves_function_metadata():
    """Verify the decorator keeps __name__ and __doc__ via functools.wraps.

    Examples:
        >>> from tiered_debug.debug import begin_end
        >>> def test_func():
        ...     '''Test docstring.'''
        ...     pass
        >>> decorated = begin_end()(test_func)
        >>> decorated.__name__
        'test_func'
    """

    def test_func():
        """Test function docstring."""
        pass

    wrapped = begin_end()(test_func)
    assert wrapped.__name__ == "test_func"
    assert wrapped.__doc__ == "Test function docstring."