python-utils-3.9.1/.coveragerc
[run]
branch = True
source =
    python_utils
    _python_utils_tests
omit =
    */mock/*
    */nose/*

[paths]
source =
    python_utils

[report]
fail_under = 100
exclude_lines =
    pragma: no cover
    @abc.abstractmethod
    def __repr__
    if self.debug:
    if settings.DEBUG
    raise AssertionError
    raise NotImplementedError
    if 0:
    if __name__ == .__main__.:
    if typing.TYPE_CHECKING:
    if types.TYPE_CHECKING:
    @overload
    @types.overload
    @typing.overload
    types.Protocol

python-utils-3.9.1/.github/workflows/codeql.yml
name: "CodeQL"

on:
  push:
    branches: [ "develop" ]
  pull_request:
    branches: [ "develop" ]
  schedule:
    - cron: "46 1 * * 3"

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ python ]

    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Initialize CodeQL
        uses: github/codeql-action/init@v2
        with:
          languages: ${{ matrix.language }}
          queries: +security-and-quality

      - name: Autobuild
        uses: github/codeql-action/autobuild@v2

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v2
        with:
          category: "/language:${{ matrix.language }}"

python-utils-3.9.1/.github/workflows/main.yml
name: pytest

on:
  push:
  pull_request:
  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest
    timeout-minutes: 4
    strategy:
      matrix:
        python-version: ['pypy3.9', 'pypy3.10', '3.9', '3.10', '3.11', '3.12']  # Maybe soon?, '3.13'

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 1
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip setuptools ruff
          pip install -e '.[tests]'
      - name: Get versions
        run: |
          python -V
          pip freeze
      - name: ruff
        run: ruff check --output-format=github
      - name: pytest
        run: py.test

  docs_and_lint:
    runs-on: ubuntu-latest
    timeout-minutes: 2
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.10'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip setuptools
          pip install -e '.[docs,tests]' pyright ruff mypy
      - name: build docs
        run: make html
        working-directory: docs/
      - name: ruff
        run: ruff check --output-format=github
      - name: mypy
        run: mypy python_utils setup.py
      - name: pyright
        run: pyright

python-utils-3.9.1/.github/workflows/stale.yml
name: Close stale issues and pull requests

on:
  workflow_dispatch:
  schedule:
    - cron: '0 0 * * *'  # Run every day at midnight

jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/stale@v8
        with:
          days-before-stale: 30
          exempt-issue-labels: in-progress,help-wanted,pinned,security,enhancement
          exempt-all-pr-assignees: true
python-utils-3.9.1/.gitignore
/build
/dist
/*.egg-info
/docs/_build
/cover
/.eggs
/.*

python-utils-3.9.1/.readthedocs.yaml
# Read the Docs configuration file for Sphinx projects
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details

# Required
version: 2

# Set the OS, Python version and other tools you might need
build:
  os: ubuntu-22.04
  tools:
    python: "3.12"
    # You can also specify other tool versions:
    # nodejs: "20"
    # rust: "1.70"
    # golang: "1.20"

# Build documentation in the "docs/" directory with Sphinx
sphinx:
  configuration: docs/conf.py
  # You can configure Sphinx to use a different builder, for instance use the
  # dirhtml builder for simpler URLs
  # builder: "dirhtml"
  # Fail on all warnings to avoid broken references
  # fail_on_warning: true

# Optionally build your docs in additional formats such as PDF and ePub
formats:
  - pdf
  - epub

# Optional but recommended, declare the Python requirements required
# to build your documentation
# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
python:
  install:
    - requirements: docs/requirements.txt

python-utils-3.9.1/CONTRIBUTING.md
# Contributing to python-utils

Bug reports, code and documentation contributions are welcome. You can also
help this project by using the development version and reporting any bugs you
encounter.

## 1. Reporting bugs

It's important to provide the following details when submitting a bug:

- Python version
- python-utils version
- OS details

If possible, also provide a minimal reproducible example.

## 2. Contributing Code and Docs

Before working on a new feature or a bug, please browse
[existing issues](https://github.com/WoLpH/python-utils/issues) to see
whether it has previously been discussed.

If your change alters python-utils' behaviour or interface, it's a good idea
to discuss it before you start working on it.

If you are fixing an issue, the first step should be to create a test case
that reproduces the incorrect behaviour. That will also help you to build an
understanding of the issue at hand.

Make sure to add relevant tests and update documentation in order to get your
PRs merged. We strictly adhere to 100% code coverage.

### Development Environment

#### Getting the code

Go to <https://github.com/WoLpH/python-utils> and fork the project
repository.

```bash
# Clone your fork
$ git clone git@github.com:<username>/python-utils.git

# Enter the project directory
$ cd python-utils

# Create a branch for your changes
$ git checkout -b my_awesome_branch
```

#### Testing

Before submitting any PR make sure your code passes all the tests. To run the
full test-suite, make sure you have `tox` installed and run the following
command:

```bash
$ tox
```

Or to speed it up (replace 8 with your number of cores), run:

```bash
$ tox -p8
```

During development I recommend using pytest directly and installing the
package in development mode.

Create a virtual environment and activate it:

```bash
$ python3 -m venv venv
$ source venv/bin/activate
```

Install the test requirements:

```bash
$ cd python-utils
$ pip install -e ".[tests]"
```

Run the tests:

```bash
$ py.test
```

Note that this won't run `ruff` yet, so once all the tests succeed you can
run `ruff check` to check for code style errors:
```bash
$ ruff check
```

Lastly we test the types using `pyright`:

```bash
$ pyright
```

python-utils-3.9.1/LICENSE
Copyright (c) 2016, Rick van Hattem
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice,
   this list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.

3. Neither the name of the copyright holder nor the names of its contributors
   may be used to endorse or promote products derived from this software
   without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.

python-utils-3.9.1/MANIFEST.in
include coverage.rc
include LICENSE
include MANIFEST.in
include pytest.ini
include README.rst
include requirements.txt
include setup.cfg
include setup.py
include tox.ini
include python_utils/py.typed
recursive-include _python_utils_tests *.py *.txt
recursive-exclude __pycache__ *

python-utils-3.9.1/README.rst
Useful Python Utils
==============================================================================

.. image:: https://github.com/WoLpH/python-utils/actions/workflows/main.yml/badge.svg?branch=master
    :target: https://github.com/WoLpH/python-utils/actions/workflows/main.yml
.. image:: https://coveralls.io/repos/WoLpH/python-utils/badge.svg?branch=master
    :target: https://coveralls.io/r/WoLpH/python-utils?branch=master

Python Utils is a collection of small Python functions and classes which
make common patterns shorter and easier. It is by no means a complete
collection but it has served me quite a bit in the past and I will keep
extending it.

One of the libraries using Python Utils is Django Utils.

Documentation is available at: https://python-utils.readthedocs.org/en/latest/

Links
-----

- The source: https://github.com/WoLpH/python-utils
- Project page: https://pypi.python.org/pypi/python-utils
- Reporting bugs: https://github.com/WoLpH/python-utils/issues
- Documentation: https://python-utils.readthedocs.io/en/latest/
- My blog: https://wol.ph/

Security contact information
------------------------------------------------------------------------------

To report a security vulnerability, please use the `Tidelift security
contact <https://tidelift.com/security>`_. Tidelift will coordinate the fix
and disclosure.
Requirements for installing:
------------------------------------------------------------------------------

For the Python 3+ release (i.e. v3.0.0 or higher) there are no requirements.
For the Python 2 compatible version (v2.x.x) the `six` package is needed.

Installation:
------------------------------------------------------------------------------

The package can be installed through `pip` (this is the recommended method):

.. code-block:: bash

    pip install python-utils

Or if `pip` is not available, `easy_install` should work as well:

.. code-block:: bash

    easy_install python-utils

Or download the latest release from PyPI
(https://pypi.python.org/pypi/python-utils) or Github.

Note that the releases on PyPI are signed with my GPG key
(https://pgp.mit.edu/pks/lookup?op=vindex&search=0xE81444E9CE1F695D) and can
be checked using GPG:

.. code-block:: bash

    gpg --verify python-utils-<version>.tar.gz.asc python-utils-<version>.tar.gz

Quickstart
------------------------------------------------------------------------------

This module makes it easy to execute common tasks in Python scripts such as
converting text to numbers and making sure a string is in unicode or bytes
format.

Examples
------------------------------------------------------------------------------

Automatically converting a generator to a list, dict or other collections
using a decorator:

.. code-block:: pycon

    >>> @decorators.listify()
    ... def generate_list():
    ...     yield 1
    ...     yield 2
    ...     yield 3
    ...
    >>> generate_list()
    [1, 2, 3]

    >>> @listify(collection=dict)
    ... def dict_generator():
    ...     yield 'a', 1
    ...     yield 'b', 2

    >>> dict_generator()
    {'a': 1, 'b': 2}

Retrying until timeout
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

To easily retry a block of code with a configurable timeout, you can use the
`time.timeout_generator`:

.. code-block:: pycon

    >>> for i in time.timeout_generator(10):
    ...     try:
    ...         ...  # Run your code here
    ...     except Exception as e:
    ...         ...  # Handle the exception
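As a slightly more complete sketch of the same pattern, where `fetch()` is a
placeholder for your own flaky operation and `IOError` stands in for whatever
exception you expect it to raise:

.. code-block:: python

    from python_utils import time


    def fetch():
        """Hypothetical operation that may fail a few times before passing."""
        ...


    result = None
    for _ in time.timeout_generator(10):
        try:
            result = fetch()
            break  # Success, stop retrying
        except IOError:
            continue  # Retry until the 10 second timeout expires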
Formatting of timestamps, dates and times
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Easy formatting of timestamps and calculating the time since:

.. code-block:: pycon

    >>> time.format_time('1')
    '0:00:01'
    >>> time.format_time(1.234)
    '0:00:01'
    >>> time.format_time(1)
    '0:00:01'
    >>> time.format_time(datetime.datetime(2000, 1, 2, 3, 4, 5, 6))
    '2000-01-02 03:04:05'
    >>> time.format_time(datetime.date(2000, 1, 2))
    '2000-01-02'
    >>> time.format_time(datetime.timedelta(seconds=3661))
    '1:01:01'
    >>> time.format_time(None)
    '--:--:--'

    >>> formatters.timesince(now)
    'just now'
    >>> formatters.timesince(now - datetime.timedelta(seconds=1))
    '1 second ago'
    >>> formatters.timesince(now - datetime.timedelta(seconds=2))
    '2 seconds ago'
    >>> formatters.timesince(now - datetime.timedelta(seconds=60))
    '1 minute ago'

Converting your text from camel-case to underscores:
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

.. code-block:: pycon

    >>> camel_to_underscore('SpamEggsAndBacon')
    'spam_eggs_and_bacon'

Attribute setting decorator. Very useful for the Django admin
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

A convenient decorator to set function attributes using a decorator:

.. code-block:: pycon

    You can use:

    >>> @decorators.set_attributes(short_description='Name')
    ... def upper_case_name(self, obj):
    ...     return ("%s %s" % (obj.first_name, obj.last_name)).upper()

    Instead of:

    >>> def upper_case_name(obj):
    ...     return ("%s %s" % (obj.first_name, obj.last_name)).upper()
    >>> upper_case_name.short_description = 'Name'

This can be very useful for the Django admin as it allows you to have all
metadata in one place.

Scaling numbers between ranges
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

.. code-block:: pycon

    >>> converters.remap(500, old_min=0, old_max=1000, new_min=0, new_max=100)
    50

    # Or with decimals:
    >>> remap(decimal.Decimal('250.0'), 0.0, 1000.0, 0.0, 100.0)
    Decimal('25.0')

Get the screen/window/terminal size in characters:
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

.. code-block:: pycon

    >>> terminal.get_terminal_size()
    (80, 24)

That method supports IPython and Jupyter as well as regular shells, using
`blessings` and other modules depending on what is available.

Extracting numbers from nearly every string:
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

.. code-block:: pycon

    >>> converters.to_int('spam15eggs')
    15
    >>> converters.to_int('spam')
    0
    >>> converters.to_int('spam', default=1)
    1

Doing a global import of all the modules in a package programmatically:
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

To do a global import programmatically you can use the `import_global`
function. This effectively emulates a `from ... import *`:

.. code-block:: python

    from python_utils.import_ import import_global

    # The following is the equivalent of `from some_module import *`
    import_global('some_module')
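You can also restrict the import to specific names, mirroring how the test
suite uses it:

.. code-block:: python

    from python_utils.import_ import import_global

    # Equivalent to `from python_utils.formatters import camel_to_underscore`
    import_global('python_utils.formatters', ['camel_to_underscore'])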
Automatically named logger for classes:
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Or add a correctly named logger to your classes which can be easily accessed:

.. code-block:: python

    class MyClass(Logged):
        def __init__(self):
            Logged.__init__(self)

    my_class = MyClass()

    # Accessing the logging method:
    my_class.error('error')

    # With formatting:
    my_class.error('The logger supports %(formatting)s',
                   formatting='named parameters')

    # Or to access the actual log function (overwriting the log formatting
    # can be done in the log method)
    import logging
    my_class.log(logging.ERROR, 'log')

Alternatively loguru is also supported. It is largely a drop-in replacement
for the logging module which is a bit more convenient to configure:

First install the extra loguru package:

.. code-block:: bash

    pip install 'python-utils[loguru]'

.. code-block:: python

    class MyClass(Logurud):
        ...

Now you can use the `Logurud` class to make functions such as `self.info()`
available. The benefit of this approach is that you can add extra context or
options to your specific loguru instance (i.e. `self.logger`):

Convenient type aliases and some commonly used types:

.. code-block:: python

    # For type hinting scopes such as locals/globals/vars
    Scope = Dict[str, Any]
    OptionalScope = O[Scope]

    # Note that Number is only useful for extra clarity since float
    # will work for both int and float in practice.
    Number = U[int, float]
    DecimalNumber = U[Number, decimal.Decimal]

    # To accept an exception or list of exceptions
    ExceptionType = Type[Exception]
    ExceptionsType = U[Tuple[ExceptionType, ...], ExceptionType]

    # Matching string/bytes types:
    StringTypes = U[str, bytes]
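A hedged sketch of these aliases in annotations, assuming they are exposed
via `python_utils.types` as they are used throughout this package:

.. code-block:: python

    import decimal

    from python_utils import types


    def halve(value: types.DecimalNumber) -> types.DecimalNumber:
        # Accepts int, float and decimal.Decimal alike
        return value / 2


    halve(3)
    halve(3.0)
    halve(decimal.Decimal('3'))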
python-utils-3.9.1/_python_utils_tests/__init__.py

python-utils-3.9.1/_python_utils_tests/requirements.txt
-e .[tests]

python-utils-3.9.1/_python_utils_tests/test_aio.py
import asyncio

import pytest

from python_utils import types
from python_utils.aio import acontainer, acount, adict


@pytest.mark.asyncio
async def test_acount(monkeypatch: pytest.MonkeyPatch) -> None:
    sleeps: types.List[float] = []

    async def mock_sleep(delay: float) -> None:
        sleeps.append(delay)

    monkeypatch.setattr(asyncio, 'sleep', mock_sleep)

    async for _i in acount(delay=1, stop=3.5):
        pass

    assert len(sleeps) == 4
    assert sum(sleeps) == 4


@pytest.mark.asyncio
async def test_acontainer() -> None:
    async def async_gen() -> types.AsyncIterable[int]:
        yield 1
        yield 2
        yield 3

    async def empty_gen() -> types.AsyncIterable[int]:
        if False:
            yield 1

    assert await acontainer(async_gen) == [1, 2, 3]
    assert await acontainer(async_gen()) == [1, 2, 3]
    assert await acontainer(async_gen, set) == {1, 2, 3}
    assert await acontainer(async_gen(), set) == {1, 2, 3}
    assert await acontainer(async_gen, list) == [1, 2, 3]
    assert await acontainer(async_gen(), list) == [1, 2, 3]
    assert await acontainer(async_gen, tuple) == (1, 2, 3)
    assert await acontainer(async_gen(), tuple) == (1, 2, 3)
    assert await acontainer(empty_gen) == []
    assert await acontainer(empty_gen()) == []
    assert await acontainer(empty_gen, set) == set()
    assert await acontainer(empty_gen(), set) == set()
    assert await acontainer(empty_gen, list) == list()
    assert await acontainer(empty_gen(), list) == list()
    assert await acontainer(empty_gen, tuple) == tuple()
    assert await acontainer(empty_gen(), tuple) == tuple()


@pytest.mark.asyncio
async def test_adict() -> None:
    async def async_gen() -> types.AsyncIterable[types.Tuple[int, int]]:
        yield 1, 2
        yield 3, 4
        yield 5, 6

    async def empty_gen() -> types.AsyncIterable[types.Tuple[int, int]]:
        if False:
            yield 1, 2

    assert await adict(async_gen) == {1: 2, 3: 4, 5: 6}
    assert await adict(async_gen()) == {1: 2, 3: 4, 5: 6}
    assert await adict(empty_gen) == {}
    assert await adict(empty_gen()) == {}

python-utils-3.9.1/_python_utils_tests/test_containers.py
import pytest

from python_utils import containers


def test_unique_list_ignore() -> None:
    a: containers.UniqueList[int] = containers.UniqueList()
    a.append(1)
    a.append(1)
    assert a == [1]

    a = containers.UniqueList(*range(20))
    with pytest.raises(RuntimeError):
        a[10:20:2] = [1, 2, 3, 4, 5]

    a[3] = 5


def test_unique_list_raise() -> None:
    a: containers.UniqueList[int] = containers.UniqueList(
        *range(20), on_duplicate='raise'
    )
    with pytest.raises(ValueError):
        a[10:20:2] = [1, 2, 3, 4, 5]

    a[10:20:2] = [21, 22, 23, 24, 25]
    with pytest.raises(ValueError):
        a[3] = 5

    del a[10]
    del a[5:15]


def test_sliceable_deque() -> None:
    d: containers.SliceableDeque[int] = containers.SliceableDeque(range(10))
    assert d[0] == 0
    assert d[-1] == 9
    assert d[1:3] == [1, 2]
    assert d[1:3:2] == [1]
    assert d[1:3:-1] == []
    assert d[3:1] == []
    assert d[3:1:-1] == [3, 2]
    assert d[3:1:-2] == [3]

    with pytest.raises(ValueError):
        assert d[1:3:0]

    assert d[1:3:1] == [1, 2]
    assert d[1:3:2] == [1]
    assert d[1:3:-1] == []


def test_sliceable_deque_pop() -> None:
    d: containers.SliceableDeque[int] = containers.SliceableDeque(range(10))
    assert d.pop() == 9
    assert d.pop(0) == 0

    with pytest.raises(IndexError):
        assert d.pop(100)

    with pytest.raises(IndexError):
        assert d.pop(2)

    with pytest.raises(IndexError):
        assert d.pop(-2)


def test_sliceable_deque_eq() -> None:
    d: containers.SliceableDeque[int] = containers.SliceableDeque([1, 2, 3])
    assert d == [1, 2, 3]
    assert d == (1, 2, 3)
    assert d == {1, 2, 3}
    assert d == d
    assert d == containers.SliceableDeque([1, 2, 3])

python-utils-3.9.1/_python_utils_tests/test_decorators.py
import typing
from unittest.mock import MagicMock

import pytest

from python_utils.decorators import sample, wraps_classmethod

T = typing.TypeVar('T')


@pytest.fixture
def random(monkeypatch: pytest.MonkeyPatch) -> MagicMock:
    mock = MagicMock()
    monkeypatch.setattr(
        'python_utils.decorators.random.random', mock, raising=True
    )
    return mock


def test_sample_called(random: MagicMock) -> None:
    demo_function = MagicMock()
    decorated = sample(0.5)(demo_function)
    random.return_value = 0.4
    decorated()
    random.return_value = 0.0
    decorated()
    args = [1, 2]
    kwargs = {'1': 1, '2': 2}
    decorated(*args, **kwargs)
    demo_function.assert_called_with(*args, **kwargs)
    assert demo_function.call_count == 3


def test_sample_not_called(random: MagicMock) -> None:
    demo_function = MagicMock()
    decorated = sample(0.5)(demo_function)
    random.return_value = 0.5
    decorated()
    random.return_value = 1.0
    decorated()
    assert demo_function.call_count == 0


class SomeClass:
    @classmethod
    def some_classmethod(cls, arg: T) -> T:
        return arg

    @classmethod
    def some_annotated_classmethod(cls, arg: int) -> int:
        return arg


def test_wraps_classmethod() -> None:
    some_class = SomeClass()
    some_class.some_classmethod = MagicMock()  # type: ignore[method-assign]
    wrapped_method = wraps_classmethod(SomeClass.some_classmethod)(
        some_class.some_classmethod
    )
    wrapped_method(123)
    some_class.some_classmethod.assert_called_with(123)


def test_wraps_annotated_classmethod() -> None:
    some_class = SomeClass()
    some_class.some_annotated_classmethod = MagicMock()  # type: ignore[method-assign]
    wrapped_method = wraps_classmethod(SomeClass.some_annotated_classmethod)(
        some_class.some_annotated_classmethod
    )
    wrapped_method(123)
    some_class.some_annotated_classmethod.assert_called_with(123)
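The `sample` decorator exercised above only runs the wrapped function for
(roughly) the given fraction of calls. A minimal usage sketch, where the
logging call is purely illustrative:

import logging

from python_utils.decorators import sample

logger = logging.getLogger(__name__)


@sample(0.1)
def log_progress(item: int) -> None:
    # Executed for roughly 10% of calls, silently skipped otherwise
    logger.info('processing %s', item)


for i in range(1000):
    log_progress(i)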
python-utils-3.9.1/_python_utils_tests/test_generators.py
import asyncio

import pytest

import python_utils
from python_utils import types


@pytest.mark.asyncio
async def test_abatcher() -> None:
    async for batch in python_utils.abatcher(python_utils.acount(stop=9), 3):
        assert len(batch) == 3

    async for batch in python_utils.abatcher(python_utils.acount(stop=2), 3):
        assert len(batch) == 2


@pytest.mark.asyncio
async def test_abatcher_timed() -> None:
    batches: types.List[types.List[int]] = []
    async for batch in python_utils.abatcher(
        python_utils.acount(stop=10, delay=0.08), interval=0.1
    ):
        batches.append(batch)

    assert batches == [[0, 1, 2], [3, 4], [5, 6], [7, 8], [9]]
    assert len(batches) == 5


@pytest.mark.asyncio
async def test_abatcher_timed_with_timeout() -> None:
    async def generator() -> types.AsyncIterator[int]:
        # Test if the timeout is respected
        yield 0
        yield 1
        await asyncio.sleep(0.11)

        # Test if the timeout is respected
        yield 2
        yield 3
        await asyncio.sleep(0.11)

        # Test if exceptions are handled correctly
        await asyncio.wait_for(asyncio.sleep(1), timeout=0.05)

        # Test if StopAsyncIteration is handled correctly
        yield 4

    batcher = python_utils.abatcher(generator(), interval=0.1)
    assert await batcher.__anext__() == [0, 1]
    assert await batcher.__anext__() == [2, 3]

    with pytest.raises(asyncio.TimeoutError):
        await batcher.__anext__()

    with pytest.raises(StopAsyncIteration):
        await batcher.__anext__()


def test_batcher() -> None:
    batch = []
    for batch in python_utils.batcher(range(9), 3):
        assert len(batch) == 3

    for batch in python_utils.batcher(range(4), 3):
        assert batch is not None

    assert len(batch) == 1

python-utils-3.9.1/_python_utils_tests/test_import.py
from python_utils import import_, types


def test_import_globals_relative_import() -> None:
    for i in range(-1, 5):
        relative_import(i)


def relative_import(level: int) -> None:
    locals_: types.Dict[str, types.Any] = {}
    globals_ = {'__name__': 'python_utils.import_'}
    import_.import_global('.formatters', locals_=locals_, globals_=globals_)
    assert 'camel_to_underscore' in globals_


def test_import_globals_without_inspection() -> None:
    locals_: types.Dict[str, types.Any] = {}
    globals_: types.Dict[str, types.Any] = {'__name__': __name__}
    import_.import_global(
        'python_utils.formatters', locals_=locals_, globals_=globals_
    )
    assert 'camel_to_underscore' in globals_


def test_import_globals_single_method() -> None:
    locals_: types.Dict[str, types.Any] = {}
    globals_: types.Dict[str, types.Any] = {'__name__': __name__}
    import_.import_global(
        'python_utils.formatters',
        ['camel_to_underscore'],
        locals_=locals_,
        globals_=globals_,
    )
    assert 'camel_to_underscore' in globals_


def test_import_globals_with_inspection() -> None:
    import_.import_global('python_utils.formatters')
    assert 'camel_to_underscore' in globals()


def test_import_globals_missing_module() -> None:
    import_.import_global(
        'python_utils.spam', exceptions=ImportError, locals_=locals()
    )
    assert 'camel_to_underscore' in globals()


def test_import_locals_missing_module() -> None:
    import_.import_global(
        'python_utils.spam', exceptions=ImportError, globals_=globals()
    )
    assert 'camel_to_underscore' in globals()

python-utils-3.9.1/_python_utils_tests/test_logger.py
# mypy: disable-error-code=misc
import pytest

from python_utils.loguru import Logurud

loguru = pytest.importorskip('loguru')


def test_logurud() -> None:
    class MyClass(Logurud):
        pass

    my_class = MyClass()
    my_class.debug('debug')
    my_class.info('info')
    my_class.warning('warning')
    my_class.error('error')
    my_class.critical('critical')
    my_class.exception('exception')
    my_class.log(0, 'log')

python-utils-3.9.1/_python_utils_tests/test_python_utils.py
from python_utils import __about__


def test_definitions() -> None:
    # The setup.py requires this so we better make sure they exist :)
    assert __about__.__version__
    assert __about__.__author__
    assert __about__.__author_email__
    assert __about__.__description__
python-utils-3.9.1/_python_utils_tests/test_time.py
import asyncio
import itertools
from datetime import timedelta

import pytest

import python_utils
from python_utils import types


@pytest.mark.parametrize(
    'timeout,interval,interval_multiplier,maximum_interval,iterable,result',
    [
        (0.2, 0.1, 0.4, 0.2, python_utils.acount, 2),
        (0.3, 0.1, 0.4, 0.2, python_utils.acount(), 3),
        (0.3, 0.06, 1.0, None, python_utils.acount, 5),
        (
            timedelta(seconds=0.1),
            timedelta(seconds=0.06),
            2.0,
            timedelta(seconds=0.1),
            python_utils.acount,
            2,
        ),
    ],
)
@pytest.mark.asyncio
async def test_aio_timeout_generator(
    timeout: float,
    interval: float,
    interval_multiplier: float,
    maximum_interval: float,
    iterable: types.AsyncIterable[types.Any],
    result: int,
) -> None:
    i = None
    async for i in python_utils.aio_timeout_generator(
        timeout, interval, iterable, maximum_interval=maximum_interval
    ):
        pass

    assert i == result


@pytest.mark.parametrize(
    'timeout,interval,interval_multiplier,maximum_interval,iterable,result',
    [
        (0.1, 0.06, 0.5, 0.1, 'abc', 'c'),
        (0.1, 0.07, 0.5, 0.1, itertools.count, 2),
        (0.1, 0.07, 0.5, 0.1, itertools.count(), 2),
        (0.1, 0.06, 1.0, None, 'abc', 'c'),
        (
            timedelta(seconds=0.1),
            timedelta(seconds=0.06),
            2.0,
            timedelta(seconds=0.1),
            itertools.count,
            2,
        ),
    ],
)
def test_timeout_generator(
    timeout: float,
    interval: float,
    interval_multiplier: float,
    maximum_interval: float,
    iterable: types.Union[
        str,
        types.Iterable[types.Any],
        types.Callable[..., types.Iterable[types.Any]],
    ],
    result: int,
) -> None:
    i = None
    for i in python_utils.timeout_generator(
        timeout=timeout,
        interval=interval,
        interval_multiplier=interval_multiplier,
        iterable=iterable,
        maximum_interval=maximum_interval,
    ):
        assert i is not None

    assert i == result


@pytest.mark.asyncio
async def test_aio_generator_timeout_detector() -> None:
    # Make pyright happy
    i = None

    async def generator() -> types.AsyncGenerator[int, None]:
        for i in range(10):
            await asyncio.sleep(i / 20.0)
            yield i

    detector = python_utils.aio_generator_timeout_detector

    # Test regular timeout with reraise
    with pytest.raises(asyncio.TimeoutError):
        async for i in detector(generator(), 0.25):
            pass

    # Test regular timeout with clean exit
    async for i in detector(generator(), 0.25, on_timeout=None):
        pass

    assert i == 4

    # Test total timeout with reraise
    with pytest.raises(asyncio.TimeoutError):
        async for i in detector(generator(), total_timeout=0.5):
            pass

    # Test total timeout with clean exit
    async for i in detector(generator(), total_timeout=0.5, on_timeout=None):
        pass

    assert i == 4

    # Test stop iteration
    async for i in detector(generator(), on_timeout=None):
        pass


@pytest.mark.asyncio
async def test_aio_generator_timeout_detector_decorator_reraise() -> None:
    # Test regular timeout with reraise
    @python_utils.aio_generator_timeout_detector_decorator(timeout=0.05)
    async def generator_timeout() -> types.AsyncGenerator[int, None]:
        for i in range(10):
            await asyncio.sleep(i / 100.0)
            yield i

    with pytest.raises(asyncio.TimeoutError):
        async for _ in generator_timeout():
            pass


@pytest.mark.asyncio
async def test_aio_generator_timeout_detector_decorator_clean_exit() -> None:
    # Make pyright happy
    i = None

    # Test regular timeout with clean exit
    @python_utils.aio_generator_timeout_detector_decorator(
        timeout=0.05, on_timeout=None
    )
    async def generator_clean() -> types.AsyncGenerator[int, None]:
        for i in range(10):
            await asyncio.sleep(i / 100.0)
            yield i

    async for i in generator_clean():
        pass

    assert i == 4
@pytest.mark.asyncio
async def test_aio_generator_timeout_detector_decorator_reraise_total() -> (
    None
):
    # Test total timeout with reraise
    @python_utils.aio_generator_timeout_detector_decorator(total_timeout=0.1)
    async def generator_reraise() -> types.AsyncGenerator[int, None]:
        for i in range(10):
            await asyncio.sleep(i / 100.0)
            yield i

    with pytest.raises(asyncio.TimeoutError):
        async for _ in generator_reraise():
            pass


@pytest.mark.asyncio
async def test_aio_generator_timeout_detector_decorator_clean_total() -> None:
    # Make pyright happy
    i = None

    # Test total timeout with clean exit
    @python_utils.aio_generator_timeout_detector_decorator(
        total_timeout=0.1, on_timeout=None
    )
    async def generator_clean_total() -> types.AsyncGenerator[int, None]:
        for i in range(10):
            await asyncio.sleep(i / 100.0)
            yield i

    async for i in generator_clean_total():
        pass

    assert i == 4

python-utils-3.9.1/codecov.yml
codecov:
  token: 046054bc-5013-4e26-b93e-f2720c0e7b84

python-utils-3.9.1/coverage.rc
[run]
source = python_utils,tests
omit = */nose/*

python-utils-3.9.1/docs/Makefile
# Minimal makefile for Sphinx documentation
#

# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS    ?=
SPHINXBUILD   ?= sphinx-build
SOURCEDIR     = .
BUILDDIR      = _build

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

.PHONY: help Makefile

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

python-utils-3.9.1/docs/conf.py
"""
Configuration file for the Sphinx documentation builder.

This file only contains a selection of the most common options. For a full
list see the documentation:
https://www.sphinx-doc.org/en/master/usage/configuration.html

-- Path setup --------------------------------------------------------------

If extensions (or modules to document with autodoc) are in another directory,
add these directories to sys.path here. If the directory is relative to the
documentation root, use os.path.abspath to make it absolute, like shown here.
"""

import os
import sys
from datetime import date

sys.path.insert(0, os.path.abspath('..'))

from python_utils import __about__

# -- Project information -----------------------------------------------------

project = 'Python Utils'
author = __about__.__author__
copyright = f'{date.today().year}, {author}'

# The full version, including alpha/beta/rc tags
release = __about__.__version__

# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.doctest',
    'sphinx.ext.intersphinx',
    'sphinx.ext.todo',
    'sphinx.ext.coverage',
    'sphinx.ext.viewcode',
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']

# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#
html_static_path = ['_static']

intersphinx_mapping = {'python': ('https://docs.python.org/3', None)}

python-utils-3.9.1/docs/index.rst
Welcome to Python Utils's documentation!
========================================

.. image:: https://github.com/WoLpH/python-utils/actions/workflows/main.yml/badge.svg?branch=master
    :target: https://github.com/WoLpH/python-utils/actions/workflows/main.yml/badge.svg?branch=master
.. image:: https://coveralls.io/repos/WoLpH/python-utils/badge.svg?branch=master
    :target: https://coveralls.io/r/WoLpH/python-utils?branch=master

Contents:

.. toctree::
   :maxdepth: 4

   usage
   python_utils

Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

python-utils-3.9.1/docs/make.bat
@ECHO OFF

pushd %~dp0

REM Command file for Sphinx documentation

if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build

if "%1" == "" goto help

%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.https://www.sphinx-doc.org/
	exit /b 1
)

%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end

:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%

:end
popd

python-utils-3.9.1/docs/python_utils.rst
python\_utils package
=====================

Submodules
----------

python\_utils\.decorators module
--------------------------------

.. automodule:: python_utils.decorators
    :members:
    :undoc-members:
    :show-inheritance:

python\_utils\.converters module
--------------------------------

.. automodule:: python_utils.converters
    :members:
    :undoc-members:
    :show-inheritance:

python\_utils\.formatters module
--------------------------------

.. automodule:: python_utils.formatters
    :members:
    :undoc-members:
    :show-inheritance:

python\_utils\.import\_ module
------------------------------

.. automodule:: python_utils.import_
    :members:
    :undoc-members:
    :show-inheritance:

python\_utils\.logger module
----------------------------

.. automodule:: python_utils.logger
    :members:
    :undoc-members:
    :show-inheritance:

python\_utils\.terminal module
------------------------------

.. automodule:: python_utils.terminal
    :members:
    :undoc-members:
    :show-inheritance:

python\_utils\.time module
--------------------------

.. automodule:: python_utils.time
    :members:
    :undoc-members:
    :show-inheritance:

Module contents
---------------
.. automodule:: python_utils
    :members:
    :undoc-members:
    :show-inheritance:

python-utils-3.9.1/docs/requirements.txt
-e .[docs]

python-utils-3.9.1/docs/usage.rst
.. include:: ../README.rst

python-utils-3.9.1/pyproject.toml
[tool.black]
line-length = 79
target-version = ['py37', 'py38', 'py39', 'py310', 'py311']
skip-string-normalization = true

[tool.pyright]
# include = ['python_utils']
include = ['python_utils', '_python_utils_tests', 'setup.py']
strict = ['python_utils', '_python_utils_tests', 'setup.py']
# The terminal file is very OS specific and dependent on imports so we're
# skipping it from type checking
ignore = ['python_utils/terminal.py']
pythonVersion = '3.9'

[tool.mypy]
strict = true
check_untyped_defs = true
files = ['python_utils', '_python_utils_tests', 'setup.py']

[[tool.mypy.overrides]]
module = '_python_utils_tests.*'

python-utils-3.9.1/pytest.ini
[pytest]
python_files =
    python_utils/*.py
    _python_utils_tests/*.py

addopts =
    --doctest-modules
    --cov python_utils
    --cov-report term-missing
;    --mypy

doctest_optionflags =
    ALLOW_UNICODE
    ALLOW_BYTES

asyncio_mode = strict

python-utils-3.9.1/python_utils/__about__.py
"""
This module contains metadata about the `python-utils` package.

Attributes:
    __package_name__ (str): The name of the package.
    __author__ (str): The author of the package.
    __author_email__ (str): The email of the author.
    __description__ (str): A brief description of the package.
    __url__ (str): The URL of the package's repository.
    __version__ (str): The current version of the package.
"""

__package_name__: str = 'python-utils'
__author__: str = 'Rick van Hattem'
__author_email__: str = 'Wolph@wol.ph'
__description__: str = (
    'Python Utils is a module with some convenient utilities not included '
    'with the standard Python install'
)
__url__: str = 'https://github.com/WoLpH/python-utils'
# Omit type info due to automatic versioning script
__version__ = '3.9.1'
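These attributes are consumed by `setup.py` and checked in
`_python_utils_tests/test_python_utils.py`; reading them at runtime is
straightforward:

from python_utils import __about__

print(__about__.__package_name__, __about__.__version__)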
python-utils-3.9.1/python_utils/__init__.py
"""
This module initializes the `python_utils` package by importing various
submodules and functions.

Submodules:
    aio
    converters
    decorators
    formatters
    generators
    import_
    logger
    terminal
    time
    types

Functions:
    acount
    remap
    scale_1024
    to_float
    to_int
    to_str
    to_unicode
    listify
    set_attributes
    raise_exception
    reraise
    camel_to_underscore
    timesince
    abatcher
    batcher
    import_global
    get_terminal_size
    aio_generator_timeout_detector
    aio_generator_timeout_detector_decorator
    aio_timeout_generator
    delta_to_seconds
    delta_to_seconds_or_none
    format_time
    timedelta_to_seconds
    timeout_generator

Classes:
    CastedDict
    LazyCastedDict
    UniqueList
    Logged
    LoggerBase
"""

from . import (
    aio,
    converters,
    decorators,
    formatters,
    generators,
    import_,
    logger,
    terminal,
    time,
    types,
)
from .aio import acount
from .containers import CastedDict, LazyCastedDict, UniqueList
from .converters import (
    remap,
    scale_1024,
    to_float,
    to_int,
    to_str,
    to_unicode,
)
from .decorators import listify, set_attributes
from .exceptions import raise_exception, reraise
from .formatters import camel_to_underscore, timesince
from .generators import abatcher, batcher
from .import_ import import_global
from .logger import Logged, LoggerBase
from .terminal import get_terminal_size
from .time import (
    aio_generator_timeout_detector,
    aio_generator_timeout_detector_decorator,
    aio_timeout_generator,
    delta_to_seconds,
    delta_to_seconds_or_none,
    format_time,
    timedelta_to_seconds,
    timeout_generator,
)

__all__ = [
    'CastedDict',
    'LazyCastedDict',
    'Logged',
    'LoggerBase',
    'UniqueList',
    'abatcher',
    'acount',
    'aio',
    'aio_generator_timeout_detector',
    'aio_generator_timeout_detector_decorator',
    'aio_timeout_generator',
    'batcher',
    'camel_to_underscore',
    'converters',
    'decorators',
    'delta_to_seconds',
    'delta_to_seconds_or_none',
    'format_time',
    'formatters',
    'generators',
    'get_terminal_size',
    'import_',
    'import_global',
    'listify',
    'logger',
    'raise_exception',
    'remap',
    'reraise',
    'scale_1024',
    'set_attributes',
    'terminal',
    'time',
    'timedelta_to_seconds',
    'timeout_generator',
    'timesince',
    'to_float',
    'to_int',
    'to_str',
    'to_unicode',
    'types',
]

python-utils-3.9.1/python_utils/aio.py
"""Asyncio equivalents to regular Python functions."""

import asyncio
import itertools
import typing

from . import types

_N = types.TypeVar('_N', int, float)
_T = types.TypeVar('_T')
_K = types.TypeVar('_K')
_V = types.TypeVar('_V')


async def acount(
    start: _N = 0,
    step: _N = 1,
    delay: float = 0,
    stop: types.Optional[_N] = None,
) -> types.AsyncIterator[_N]:
    """Asyncio version of itertools.count()."""
    for item in itertools.count(start, step):  # pragma: no branch
        if stop is not None and item >= stop:
            break

        yield item
        await asyncio.sleep(delay)


@typing.overload
async def acontainer(
    iterable: types.Union[
        types.AsyncIterable[_T],
        types.Callable[..., types.AsyncIterable[_T]],
    ],
    container: types.Type[types.Tuple[_T, ...]],
) -> types.Tuple[_T, ...]: ...


@typing.overload
async def acontainer(
    iterable: types.Union[
        types.AsyncIterable[_T],
        types.Callable[..., types.AsyncIterable[_T]],
    ],
    container: types.Type[types.List[_T]] = list,
) -> types.List[_T]: ...


@typing.overload
async def acontainer(
    iterable: types.Union[
        types.AsyncIterable[_T],
        types.Callable[..., types.AsyncIterable[_T]],
    ],
    container: types.Type[types.Set[_T]],
) -> types.Set[_T]: ...


async def acontainer(
    iterable: types.Union[
        types.AsyncIterable[_T],
        types.Callable[..., types.AsyncIterable[_T]],
    ],
    container: types.Callable[
        [types.Iterable[_T]], types.Collection[_T]
    ] = list,
) -> types.Collection[_T]:
    """
    Asyncio version of list()/set()/tuple()/etc() using an async for loop.

    So instead of doing `[item async for item in iterable]` you can do
    `await acontainer(iterable)`.
""" iterable_: types.AsyncIterable[_T] if callable(iterable): iterable_ = iterable() else: iterable_ = iterable item: _T items: types.List[_T] = [] async for item in iterable_: # pragma: no branch items.append(item) return container(items) async def adict( iterable: types.Union[ types.AsyncIterable[types.Tuple[_K, _V]], types.Callable[..., types.AsyncIterable[types.Tuple[_K, _V]]], ], container: types.Callable[ [types.Iterable[types.Tuple[_K, _V]]], types.Mapping[_K, _V] ] = dict, ) -> types.Mapping[_K, _V]: """ Asyncio version of dict() using an async for loop. So instead of doing `{key: value async for key, value in iterable}` you can do `await adict(iterable)`. """ iterable_: types.AsyncIterable[types.Tuple[_K, _V]] if callable(iterable): iterable_ = iterable() else: iterable_ = iterable item: types.Tuple[_K, _V] items: types.List[types.Tuple[_K, _V]] = [] async for item in iterable_: # pragma: no branch items.append(item) return container(items) python-utils-3.9.1/python_utils/containers.py000066400000000000000000000452331472121421700214730ustar00rootroot00000000000000""" This module provides custom container classes with enhanced functionality. Classes: CastedDictBase: Abstract base class for dictionaries that cast keys and values. CastedDict: Dictionary that casts keys and values to specified types. LazyCastedDict: Dictionary that lazily casts values to specified types upon access. UniqueList: List that only allows unique values, with configurable behavior on duplicates. SliceableDeque: Deque that supports slicing and enhanced equality checks. Type Aliases: KT: Type variable for dictionary keys. VT: Type variable for dictionary values. DT: Type alias for a dictionary with keys of type KT and values of type VT. KT_cast: Type alias for a callable that casts dictionary keys. VT_cast: Type alias for a callable that casts dictionary values. HT: Type variable for hashable values in UniqueList. T: Type variable for generic types. DictUpdateArgs: Union type for arguments that can be used to update a dictionary. OnDuplicate: Literal type for handling duplicate values in UniqueList. Usage: - CastedDict and LazyCastedDict can be used to create dictionaries with automatic type casting. - UniqueList ensures all elements are unique and can raise an error on duplicates. - SliceableDeque extends deque with slicing support and enhanced equality checks. Examples: >>> d = CastedDict(int, int) >>> d[1] = 2 >>> d['3'] = '4' >>> d.update({'5': '6'}) >>> d.update([('7', '8')]) >>> d {1: 2, 3: 4, 5: 6, 7: 8} >>> l = UniqueList(1, 2, 3) >>> l.append(4) >>> l.append(4) >>> l.insert(0, 4) >>> l.insert(0, 5) >>> l[1] = 10 >>> l [5, 10, 2, 3, 4] >>> d = SliceableDeque([1, 2, 3, 4, 5]) >>> d[1:4] SliceableDeque([2, 3, 4]) """ # pyright: reportIncompatibleMethodOverride=false import abc import collections import typing from . import types if typing.TYPE_CHECKING: import _typeshed # noqa: F401 #: A type alias for a type that can be used as a key in a dictionary. KT = types.TypeVar('KT') #: A type alias for a type that can be used as a value in a dictionary. VT = types.TypeVar('VT') #: A type alias for a dictionary with keys of type KT and values of type VT. DT = types.Dict[KT, VT] #: A type alias for the casted type of a dictionary key. KT_cast = types.Optional[types.Callable[..., KT]] #: A type alias for the casted type of a dictionary value. 
python-utils-3.9.1/python_utils/containers.py
"""
This module provides custom container classes with enhanced functionality.

Classes:
    CastedDictBase: Abstract base class for dictionaries that cast keys and
        values.
    CastedDict: Dictionary that casts keys and values to specified types.
    LazyCastedDict: Dictionary that lazily casts values to specified types
        upon access.
    UniqueList: List that only allows unique values, with configurable
        behavior on duplicates.
    SliceableDeque: Deque that supports slicing and enhanced equality checks.

Type Aliases:
    KT: Type variable for dictionary keys.
    VT: Type variable for dictionary values.
    DT: Type alias for a dictionary with keys of type KT and values of
        type VT.
    KT_cast: Type alias for a callable that casts dictionary keys.
    VT_cast: Type alias for a callable that casts dictionary values.
    HT: Type variable for hashable values in UniqueList.
    T: Type variable for generic types.
    DictUpdateArgs: Union type for arguments that can be used to update a
        dictionary.
    OnDuplicate: Literal type for handling duplicate values in UniqueList.

Usage:
    - CastedDict and LazyCastedDict can be used to create dictionaries with
      automatic type casting.
    - UniqueList ensures all elements are unique and can raise an error on
      duplicates.
    - SliceableDeque extends deque with slicing support and enhanced
      equality checks.

Examples:
    >>> d = CastedDict(int, int)
    >>> d[1] = 2
    >>> d['3'] = '4'
    >>> d.update({'5': '6'})
    >>> d.update([('7', '8')])
    >>> d
    {1: 2, 3: 4, 5: 6, 7: 8}

    >>> l = UniqueList(1, 2, 3)
    >>> l.append(4)
    >>> l.append(4)
    >>> l.insert(0, 4)
    >>> l.insert(0, 5)
    >>> l[1] = 10
    >>> l
    [5, 10, 2, 3, 4]

    >>> d = SliceableDeque([1, 2, 3, 4, 5])
    >>> d[1:4]
    SliceableDeque([2, 3, 4])
"""

# pyright: reportIncompatibleMethodOverride=false
import abc
import collections
import typing

from . import types

if typing.TYPE_CHECKING:
    import _typeshed  # noqa: F401

#: A type alias for a type that can be used as a key in a dictionary.
KT = types.TypeVar('KT')
#: A type alias for a type that can be used as a value in a dictionary.
VT = types.TypeVar('VT')
#: A type alias for a dictionary with keys of type KT and values of type VT.
DT = types.Dict[KT, VT]
#: A type alias for the casted type of a dictionary key.
KT_cast = types.Optional[types.Callable[..., KT]]
#: A type alias for the casted type of a dictionary value.
VT_cast = types.Optional[types.Callable[..., VT]]
#: A type alias for the hashable values of the `UniqueList`
HT = types.TypeVar('HT', bound=types.Hashable)
#: A type alias for a regular generic type
T = types.TypeVar('T')

# Using types.Union instead of | since Python 3.7 doesn't fully support it
DictUpdateArgs = types.Union[
    types.Mapping[KT, VT],
    types.Iterable[types.Tuple[KT, VT]],
    types.Iterable[types.Mapping[KT, VT]],
    '_typeshed.SupportsKeysAndGetItem[KT, VT]',
]

OnDuplicate = types.Literal['ignore', 'raise']


class CastedDictBase(types.Dict[KT, VT], abc.ABC):
    """
    Abstract base class for dictionaries that cast keys and values.

    Attributes:
        _key_cast (KT_cast[KT]): Callable to cast dictionary keys.
        _value_cast (VT_cast[VT]): Callable to cast dictionary values.

    Methods:
        __init__(key_cast: KT_cast[KT] = None,
            value_cast: VT_cast[VT] = None,
            *args: DictUpdateArgs[KT, VT], **kwargs: VT) -> None:
            Initializes the dictionary with optional key and value casting
            callables.
        update(*args: DictUpdateArgs[types.Any, types.Any],
            **kwargs: types.Any) -> None:
            Updates the dictionary with the given arguments.
        __setitem__(key: types.Any, value: types.Any) -> None:
            Sets the item in the dictionary, casting the key if a key cast
            callable is provided.
    """

    _key_cast: KT_cast[KT]
    _value_cast: VT_cast[VT]

    def __init__(
        self,
        key_cast: KT_cast[KT] = None,
        value_cast: VT_cast[VT] = None,
        *args: DictUpdateArgs[KT, VT],
        **kwargs: VT,
    ) -> None:
        """
        Initializes the CastedDictBase with optional key and value casting
        callables.

        Args:
            key_cast (KT_cast[KT], optional): Callable to cast dictionary
                keys. Defaults to None.
            value_cast (VT_cast[VT], optional): Callable to cast dictionary
                values. Defaults to None.
            *args (DictUpdateArgs[KT, VT]): Arguments to initialize the
                dictionary.
            **kwargs (VT): Keyword arguments to initialize the dictionary.
        """
        self._value_cast = value_cast
        self._key_cast = key_cast
        self.update(*args, **kwargs)

    def update(
        self, *args: DictUpdateArgs[types.Any, types.Any], **kwargs: types.Any
    ) -> None:
        """
        Updates the dictionary with the given arguments.

        Args:
            *args (DictUpdateArgs[types.Any, types.Any]): Arguments to
                update the dictionary.
            **kwargs (types.Any): Keyword arguments to update the
                dictionary.
        """
        if args:
            kwargs.update(*args)

        if kwargs:
            for key, value in kwargs.items():
                self[key] = value

    def __setitem__(self, key: types.Any, value: types.Any) -> None:
        """
        Sets the item in the dictionary, casting the key if a key cast
        callable is provided.

        Args:
            key (types.Any): The key to set in the dictionary.
            value (types.Any): The value to set in the dictionary.
        """
        if self._key_cast is not None:
            key = self._key_cast(key)

        return super().__setitem__(key, value)


class CastedDict(CastedDictBase[KT, VT]):
    """
    Custom dictionary that casts keys and values to the specified typing.
    Note that you can specify the types for mypy and type hinting with:
    CastedDict[int, int](int, int)

    >>> d: CastedDict[int, int] = CastedDict(int, int)
    >>> d[1] = 2
    >>> d['3'] = '4'
    >>> d.update({'5': '6'})
    >>> d.update([('7', '8')])
    >>> d
    {1: 2, 3: 4, 5: 6, 7: 8}
    >>> list(d.keys())
    [1, 3, 5, 7]
    >>> list(d)
    [1, 3, 5, 7]
    >>> list(d.values())
    [2, 4, 6, 8]
    >>> list(d.items())
    [(1, 2), (3, 4), (5, 6), (7, 8)]
    >>> d[3]
    4

    # Casts are optional and can be disabled by passing None as the cast
    >>> d = CastedDict()
    >>> d[1] = 2
    >>> d['3'] = '4'
    >>> d.update({'5': '6'})
    >>> d.update([('7', '8')])
    >>> d
    {1: 2, '3': '4', '5': '6', '7': '8'}
    """

    def __setitem__(self, key: typing.Any, value: typing.Any) -> None:
        """Sets `key` to `cast(value)` in the dictionary."""
        if self._value_cast is not None:
            value = self._value_cast(value)

        super().__setitem__(key, value)


class LazyCastedDict(CastedDictBase[KT, VT]):
    """
    Custom dictionary that casts keys and lazily casts values to the
    specified typing. Note that the values are cast only when they are
    accessed and are not cached between executions.

    Note that you can specify the types for mypy and type hinting with:
    LazyCastedDict[int, int](int, int)

    >>> d: LazyCastedDict[int, int] = LazyCastedDict(int, int)
    >>> d[1] = 2
    >>> d['3'] = '4'
    >>> d.update({'5': '6'})
    >>> d.update([('7', '8')])
    >>> d
    {1: 2, 3: '4', 5: '6', 7: '8'}
    >>> list(d.keys())
    [1, 3, 5, 7]
    >>> list(d)
    [1, 3, 5, 7]
    >>> list(d.values())
    [2, 4, 6, 8]
    >>> list(d.items())
    [(1, 2), (3, 4), (5, 6), (7, 8)]
    >>> d[3]
    4

    # Casts are optional and can be disabled by passing None as the cast
    >>> d = LazyCastedDict()
    >>> d[1] = 2
    >>> d['3'] = '4'
    >>> d.update({'5': '6'})
    >>> d.update([('7', '8')])
    >>> d
    {1: 2, '3': '4', '5': '6', '7': '8'}
    >>> list(d.keys())
    [1, '3', '5', '7']
    >>> list(d.values())
    [2, '4', '6', '8']
    >>> list(d.items())
    [(1, 2), ('3', '4'), ('5', '6'), ('7', '8')]
    >>> d['3']
    '4'
    """

    def __setitem__(self, key: types.Any, value: types.Any) -> None:
        """
        Sets the item in the dictionary, casting the key if a key cast
        callable is provided.

        Args:
            key (types.Any): The key to set in the dictionary.
            value (types.Any): The value to set in the dictionary.
        """
        if self._key_cast is not None:
            key = self._key_cast(key)

        super().__setitem__(key, value)

    def __getitem__(self, key: types.Any) -> VT:
        """
        Gets the item from the dictionary, casting the value if a value cast
        callable is provided.

        Args:
            key (types.Any): The key to get from the dictionary.

        Returns:
            VT: The value from the dictionary.
        """
        if self._key_cast is not None:
            key = self._key_cast(key)

        value = super().__getitem__(key)

        if self._value_cast is not None:
            value = self._value_cast(value)

        return value

    def items(  # type: ignore[override]
        self,
    ) -> types.Generator[types.Tuple[KT, VT], None, None]:
        """
        Returns a generator of the dictionary's items, casting the values if
        a value cast callable is provided.

        Yields:
            types.Generator[types.Tuple[KT, VT], None, None]: A generator of
                the dictionary's items.
        """
        if self._value_cast is None:
            yield from super().items()
        else:
            for key, value in super().items():
                yield key, self._value_cast(value)

    def values(self) -> types.Generator[VT, None, None]:  # type: ignore[override]
        """
        Returns a generator of the dictionary's values, casting the values
        if a value cast callable is provided.

        Yields:
            types.Generator[VT, None, None]: A generator of the dictionary's
                values.
""" if self._value_cast is None: yield from super().values() else: for value in super().values(): yield self._value_cast(value) class UniqueList(types.List[HT]): """ A list that only allows unique values. Duplicate values are ignored by default, but can be configured to raise an exception instead. >>> l = UniqueList(1, 2, 3) >>> l.append(4) >>> l.append(4) >>> l.insert(0, 4) >>> l.insert(0, 5) >>> l[1] = 10 >>> l [5, 10, 2, 3, 4] >>> l = UniqueList(1, 2, 3, on_duplicate='raise') >>> l.append(4) >>> l.append(4) Traceback (most recent call last): ... ValueError: Duplicate value: 4 >>> l.insert(0, 4) Traceback (most recent call last): ... ValueError: Duplicate value: 4 >>> 4 in l True >>> l[0] 1 >>> l[1] = 4 Traceback (most recent call last): ... ValueError: Duplicate value: 4 """ _set: types.Set[HT] def __init__( self, *args: HT, on_duplicate: OnDuplicate = 'ignore', ): """ Initializes the UniqueList with optional duplicate handling behavior. Args: *args (HT): Initial values for the list. on_duplicate (OnDuplicate, optional): Behavior on duplicates. Defaults to 'ignore'. """ self.on_duplicate = on_duplicate self._set = set() super().__init__() for arg in args: self.append(arg) def insert(self, index: types.SupportsIndex, value: HT) -> None: """ Inserts a value at the specified index, ensuring uniqueness. Args: index (types.SupportsIndex): The index to insert the value at. value (HT): The value to insert. Raises: ValueError: If the value is a duplicate and `on_duplicate` is set to 'raise'. """ if value in self._set: if self.on_duplicate == 'raise': raise ValueError(f'Duplicate value: {value}') else: return self._set.add(value) super().insert(index, value) def append(self, value: HT) -> None: """ Appends a value to the list, ensuring uniqueness. Args: value (HT): The value to append. Raises: ValueError: If the value is a duplicate and `on_duplicate` is set to 'raise'. """ if value in self._set: if self.on_duplicate == 'raise': raise ValueError(f'Duplicate value: {value}') else: return self._set.add(value) super().append(value) def __contains__(self, item: HT) -> bool: # type: ignore[override] """ Checks if the list contains the specified item. Args: item (HT): The item to check for. Returns: bool: True if the item is in the list, False otherwise. """ return item in self._set @typing.overload def __setitem__( self, indices: types.SupportsIndex, values: HT ) -> None: ... @typing.overload def __setitem__( self, indices: slice, values: types.Iterable[HT] ) -> None: ... def __setitem__( self, indices: types.Union[slice, types.SupportsIndex], values: types.Union[types.Iterable[HT], HT], ) -> None: """ Sets the item(s) at the specified index/indices, ensuring uniqueness. Args: indices (types.Union[slice, types.SupportsIndex]): The index or slice to set the value(s) at. values (types.Union[types.Iterable[HT], HT]): The value(s) to set. Raises: RuntimeError: If `on_duplicate` is 'ignore' and setting slices. ValueError: If the value(s) are duplicates and `on_duplicate` is set to 'raise'. 
""" if isinstance(indices, slice): values = types.cast(types.Iterable[HT], values) if self.on_duplicate == 'ignore': raise RuntimeError( 'ignore mode while setting slices introduces ambiguous ' 'behaviour and is therefore not supported' ) duplicates: types.Set[HT] = set(values) & self._set if duplicates and values != list(self[indices]): raise ValueError(f'Duplicate values: {duplicates}') self._set.update(values) else: values = types.cast(HT, values) if values in self._set and values != self[indices]: if self.on_duplicate == 'raise': raise ValueError(f'Duplicate value: {values}') else: return self._set.add(values) super().__setitem__( types.cast(slice, indices), types.cast(types.List[HT], values) ) def __delitem__( self, index: types.Union[types.SupportsIndex, slice] ) -> None: """ Deletes the item(s) at the specified index/indices. Args: index (types.Union[types.SupportsIndex, slice]): The index or slice to delete the item(s) at. """ if isinstance(index, slice): for value in self[index]: self._set.remove(value) else: self._set.remove(self[index]) super().__delitem__(index) # Type hinting `collections.deque` does not work consistently between Python # runtime, mypy and pyright currently so we have to ignore the errors class SliceableDeque(types.Generic[T], collections.deque[T]): """ A deque that supports slicing and enhanced equality checks. Methods: __getitem__(index: types.Union[types.SupportsIndex, slice]) -> types.Union[T, 'SliceableDeque[T]']: Returns the item or slice at the given index. __eq__(other: types.Any) -> bool: Checks equality with another object, allowing for comparison with lists, tuples, and sets. pop(index: int = -1) -> T: Removes and returns the item at the given index. Only supports index 0 and the last index. """ @typing.overload def __getitem__(self, index: types.SupportsIndex) -> T: ... @typing.overload def __getitem__(self, index: slice) -> 'SliceableDeque[T]': ... def __getitem__( self, index: types.Union[types.SupportsIndex, slice] ) -> types.Union[T, 'SliceableDeque[T]']: """ Return the item or slice at the given index. Args: index (types.Union[types.SupportsIndex, slice]): The index or slice to retrieve. Returns: types.Union[T, 'SliceableDeque[T]']: The item or slice at the given index. Examples: >>> d = SliceableDeque[int]([1, 2, 3, 4, 5]) >>> d[1:4] SliceableDeque([2, 3, 4]) >>> d = SliceableDeque[str](['a', 'b', 'c']) >>> d[-2:] SliceableDeque(['b', 'c']) """ if isinstance(index, slice): start, stop, step = index.indices(len(self)) return self.__class__(self[i] for i in range(start, stop, step)) else: return super().__getitem__(index) def __eq__(self, other: types.Any) -> bool: """ Checks equality with another object, allowing for comparison with lists, tuples, and sets. Args: other (types.Any): The object to compare with. Returns: bool: True if the objects are equal, False otherwise. """ if isinstance(other, list): return list(self) == other elif isinstance(other, tuple): return tuple(self) == other elif isinstance(other, set): return set(self) == other else: return super().__eq__(other) def pop(self, index: int = -1) -> T: """ Removes and returns the item at the given index. Only supports index 0 and the last index. Args: index (int, optional): The index of the item to remove. Defaults to -1. Returns: T: The removed item. Raises: IndexError: If the index is not 0 or the last index. 
        Examples:
            >>> d = SliceableDeque([1, 2, 3])
            >>> d.pop(0)
            1
            >>> d.pop()
            3
        """
        if index == 0:
            return super().popleft()
        elif index in {-1, len(self) - 1}:
            return super().pop()
        else:
            raise IndexError(
                'Only index 0 and the last index (`N-1` or `-1`) '
                'are supported'
            )


if __name__ == '__main__':
    import doctest

    doctest.testmod()
python-utils-3.9.1/python_utils/converters.py000066400000000000000000000331221472121421700215120ustar00rootroot00000000000000
"""
This module provides utility functions for type conversion.

Functions:
 - to_int: Convert a string to an integer with optional regular expression
   matching.
 - to_float: Convert a string to a float with optional regular expression
   matching.
 - to_unicode: Convert objects to Unicode strings.
 - to_str: Convert objects to byte strings.
 - scale_1024: Scale a number down to a suitable size based on powers of
   1024.
 - remap: Remap a value from one range to another.
"""

# Ignoring all mypy errors because mypy doesn't understand many modern typing
# constructs... please, use pyright instead if you can.
from __future__ import annotations

import decimal
import math
import re
import typing
from typing import Union

from . import types

_TN = types.TypeVar('_TN', bound=types.DecimalNumber)

_RegexpType: types.TypeAlias = Union[
    types.Pattern[str], str, types.Literal[True], None
]


def to_int(
    input_: str | None = None,
    default: int = 0,
    exception: types.ExceptionsType = (ValueError, TypeError),
    regexp: _RegexpType = None,
) -> int:
    r"""
    Convert the given input to an integer or return default.

    When trying to convert, the exceptions given in the exception parameter
    are automatically caught and the default will be returned.

    The regexp parameter allows for a regular expression to find the digits
    in a string. When True it will automatically match any digit in the
    string. When a (regexp) object (has a search method) is given, that will
    be used. When a string is given, re.compile will be run over it first.

    The last group of the regexp will be used as value.

    >>> to_int('abc')
    0
    >>> to_int('1')
    1
    >>> to_int('')
    0
    >>> to_int()
    0
    >>> to_int('abc123')
    0
    >>> to_int('123abc')
    0
    >>> to_int('abc123', regexp=True)
    123
    >>> to_int('123abc', regexp=True)
    123
    >>> to_int('abc123abc', regexp=True)
    123
    >>> to_int('abc123abc456', regexp=True)
    123
    >>> to_int('abc123', regexp=re.compile(r'(\d+)'))
    123
    >>> to_int('123abc', regexp=re.compile(r'(\d+)'))
    123
    >>> to_int('abc123abc', regexp=re.compile(r'(\d+)'))
    123
    >>> to_int('abc123abc456', regexp=re.compile(r'(\d+)'))
    123
    >>> to_int('abc123', regexp=r'(\d+)')
    123
    >>> to_int('123abc', regexp=r'(\d+)')
    123
    >>> to_int('abc', regexp=r'(\d+)')
    0
    >>> to_int('abc123abc', regexp=r'(\d+)')
    123
    >>> to_int('abc123abc456', regexp=r'(\d+)')
    123
    >>> to_int('1234', default=1)
    1234
    >>> to_int('abc', default=1)
    1
    >>> to_int('abc', regexp=123)
    Traceback (most recent call last):
    ...
    TypeError: unknown argument for regexp parameter: 123
    """
    if regexp is True:
        regexp = re.compile(r'(\d+)')
    elif isinstance(regexp, str):
        regexp = re.compile(regexp)
    elif hasattr(regexp, 'search'):
        pass
    elif regexp is not None:
        raise TypeError(f'unknown argument for regexp parameter: {regexp!r}')

    try:
        if regexp and input_ and (match := regexp.search(input_)):
            input_ = match.groups()[-1]

        if input_ is None:
            return default
        else:
            return int(input_)
    except exception:
        return default


def to_float(
    input_: str,
    default: int = 0,
    exception: types.ExceptionsType = (ValueError, TypeError),
    regexp: _RegexpType = None,
) -> types.Number:
    r"""
    Convert the given `input_` to a float or return default.

    When trying to convert, the exceptions given in the exception parameter
    are automatically caught and the default will be returned.

    The regexp parameter allows for a regular expression to find the digits
    in a string. When True it will automatically match any digit in the
    string. When a (regexp) object (has a search method) is given, that will
    be used. When a string is given, re.compile will be run over it first.

    The last group of the regexp will be used as value.

    >>> '%.2f' % to_float('abc')
    '0.00'
    >>> '%.2f' % to_float('1')
    '1.00'
    >>> '%.2f' % to_float('abc123.456', regexp=True)
    '123.46'
    >>> '%.2f' % to_float('abc123', regexp=True)
    '123.00'
    >>> '%.2f' % to_float('abc0.456', regexp=True)
    '0.46'
    >>> '%.2f' % to_float('abc123.456', regexp=re.compile(r'(\d+\.\d+)'))
    '123.46'
    >>> '%.2f' % to_float('123.456abc', regexp=re.compile(r'(\d+\.\d+)'))
    '123.46'
    >>> '%.2f' % to_float('abc123.46abc', regexp=re.compile(r'(\d+\.\d+)'))
    '123.46'
    >>> '%.2f' % to_float('abc123abc456', regexp=re.compile(r'(\d+(\.\d+|))'))
    '123.00'
    >>> '%.2f' % to_float('abc', regexp=r'(\d+)')
    '0.00'
    >>> '%.2f' % to_float('abc123', regexp=r'(\d+)')
    '123.00'
    >>> '%.2f' % to_float('123abc', regexp=r'(\d+)')
    '123.00'
    >>> '%.2f' % to_float('abc123abc', regexp=r'(\d+)')
    '123.00'
    >>> '%.2f' % to_float('abc123abc456', regexp=r'(\d+)')
    '123.00'
    >>> '%.2f' % to_float('1234', default=1)
    '1234.00'
    >>> '%.2f' % to_float('abc', default=1)
    '1.00'
    >>> '%.2f' % to_float('abc', regexp=123)
    Traceback (most recent call last):
    ...
    TypeError: unknown argument for regexp parameter
    """
    if regexp is True:
        regexp = re.compile(r'(\d+(\.\d+|))')
    elif isinstance(regexp, str):
        regexp = re.compile(regexp)
    elif hasattr(regexp, 'search'):
        pass
    elif regexp is not None:
        raise TypeError('unknown argument for regexp parameter')

    try:
        if regexp and (match := regexp.search(input_)):
            input_ = match.group(1)
        return float(input_)
    except exception:
        return default


def to_unicode(
    input_: types.StringTypes,
    encoding: str = 'utf-8',
    errors: str = 'replace',
) -> str:
    """Convert objects to unicode; if needed, decodes the string with the
    given encoding and errors settings.

    :rtype: str

    >>> to_unicode(b'a')
    'a'
    >>> to_unicode('a')
    'a'
    >>> to_unicode('a')
    'a'
    >>> class Foo(object):
    ...     __str__ = lambda s: 'a'
    >>> to_unicode(Foo())
    'a'
    >>> to_unicode(Foo)
    "<class 'python_utils.converters.Foo'>"
    """
    if isinstance(input_, bytes):
        input_ = input_.decode(encoding, errors)
    else:
        input_ = str(input_)
    return input_


def to_str(
    input_: types.StringTypes,
    encoding: str = 'utf-8',
    errors: str = 'replace',
) -> bytes:
    """Convert objects to a byte string, encoded with the given encoding.

    :rtype: bytes

    >>> to_str('a')
    b'a'
    >>> to_str('a')
    b'a'
    >>> to_str(b'a')
    b'a'
    >>> class Foo(object):
    ...     __str__ = lambda s: 'a'
    >>> to_str(Foo())
    b'a'
    >>> to_str(Foo)
    b"<class 'python_utils.converters.Foo'>"
    """
    if not isinstance(input_, bytes):
        if not hasattr(input_, 'encode'):
            input_ = str(input_)

        input_ = input_.encode(encoding, errors)
    return input_


def scale_1024(
    x: types.Number,
    n_prefixes: int,
) -> types.Tuple[types.Number, types.Number]:
    """Scale a number down to a suitable size, based on powers of 1024.

    Returns the scaled number and the power of 1024 used.

    Use to format numbers of bytes to KiB, MiB, etc.

    >>> scale_1024(310, 3)
    (310.0, 0)
    >>> scale_1024(2048, 3)
    (2.0, 1)
    >>> scale_1024(0, 2)
    (0.0, 0)
    >>> scale_1024(0.5, 2)
    (0.5, 0)
    >>> scale_1024(1, 2)
    (1.0, 0)
    """
    if x <= 0:
        power = 0
    else:
        power = min(int(math.log(x, 2) / 10), n_prefixes - 1)
    scaled = float(x) / (2 ** (10 * power))
    return scaled, power


@typing.overload
def remap(
    value: decimal.Decimal,
    old_min: decimal.Decimal | float,
    old_max: decimal.Decimal | float,
    new_min: decimal.Decimal | float,
    new_max: decimal.Decimal | float,
) -> decimal.Decimal: ...


@typing.overload
def remap(
    value: decimal.Decimal | float,
    old_min: decimal.Decimal,
    old_max: decimal.Decimal | float,
    new_min: decimal.Decimal | float,
    new_max: decimal.Decimal | float,
) -> decimal.Decimal: ...


@typing.overload
def remap(
    value: decimal.Decimal | float,
    old_min: decimal.Decimal | float,
    old_max: decimal.Decimal,
    new_min: decimal.Decimal | float,
    new_max: decimal.Decimal | float,
) -> decimal.Decimal: ...


@typing.overload
def remap(
    value: decimal.Decimal | float,
    old_min: decimal.Decimal | float,
    old_max: decimal.Decimal | float,
    new_min: decimal.Decimal,
    new_max: decimal.Decimal | float,
) -> decimal.Decimal: ...


@typing.overload
def remap(
    value: decimal.Decimal | float,
    old_min: decimal.Decimal | float,
    old_max: decimal.Decimal | float,
    new_min: decimal.Decimal | float,
    new_max: decimal.Decimal,
) -> decimal.Decimal: ...


# Note that float captures both int and float types so we don't need to
# specify them separately
@typing.overload
def remap(
    value: float,
    old_min: float,
    old_max: float,
    new_min: float,
    new_max: float,
) -> float: ...


def remap(  # pyright: ignore[reportInconsistentOverload]
    value: _TN,
    old_min: _TN,
    old_max: _TN,
    new_min: _TN,
    new_max: _TN,
) -> _TN:
    """
    Remap a value from one range into another.

    >>> remap(500, 0, 1000, 0, 100)
    50
    >>> remap(250.0, 0.0, 1000.0, 0.0, 100.0)
    25.0
    >>> remap(-75, -100, 0, -1000, 0)
    -750
    >>> remap(33, 0, 100, -500, 500)
    -170
    >>> remap(decimal.Decimal('250.0'), 0.0, 1000.0, 0.0, 100.0)
    Decimal('25.0')

    A practical use case: take an AVR whose dB values range from a minimum
    of -80dB to a maximum of 10dB, and convert a volume percentage to the
    equivalent value in that dB range.

    >>> remap(46.0, 0.0, 100.0, -80.0, 10.0)
    -38.6

    I added `decimal.Decimal` support so floating point math errors can be
    avoided. Here is an example of a floating point math error:

    >>> 0.1 + 0.1 + 0.1
    0.30000000000000004

    If floating point remaps need to be done, my suggestion is to pass at
    least one parameter as a `decimal.Decimal`. This will ensure that the
    output from this function is accurate. I left passing `floats` for
    backwards compatibility and there is no conversion done from float to
    `decimal.Decimal` unless one of the passed parameters has a type of
    `decimal.Decimal`. This will ensure that any existing code that uses
    this function will work exactly how it has in the past.

    Some edge cases to test:

    >>> remap(1, 0, 0, 1, 2)
    Traceback (most recent call last):
    ...
    ValueError: Input range (0-0) is empty
    >>> remap(1, 1, 2, 0, 0)
    Traceback (most recent call last):
    ...
ValueError: Output range (0-0) is empty Args: value (int, float, decimal.Decimal): Value to be converted. old_min (int, float, decimal.Decimal): Minimum of the range for the value that has been passed. old_max (int, float, decimal.Decimal): Maximum of the range for the value that has been passed. new_min (int, float, decimal.Decimal): The minimum of the new range. new_max (int, float, decimal.Decimal): The maximum of the new range. Returns: int, float, decimal.Decimal: Value that has been re-ranged. If any of the parameters passed is a `decimal.Decimal`, all of the parameters will be converted to `decimal.Decimal`. The same thing also happens if one of the parameters is a `float`. Otherwise, all parameters will get converted into an `int`. Technically, you can pass a `str` of an integer and it will get converted. The returned value type will be `decimal.Decimal` if any of the passed parameters are `decimal.Decimal`, the return type will be `float` if any of the passed parameters are a `float`, otherwise the returned type will be `int`. """ type_: types.Type[types.DecimalNumber] if ( isinstance(value, decimal.Decimal) or isinstance(old_min, decimal.Decimal) or isinstance(old_max, decimal.Decimal) or isinstance(new_min, decimal.Decimal) or isinstance(new_max, decimal.Decimal) ): type_ = decimal.Decimal elif ( isinstance(value, float) or isinstance(old_min, float) or isinstance(old_max, float) or isinstance(new_min, float) or isinstance(new_max, float) ): type_ = float else: type_ = int value = types.cast(_TN, type_(value)) old_min = types.cast(_TN, type_(old_min)) old_max = types.cast(_TN, type_(old_max)) new_max = types.cast(_TN, type_(new_max)) new_min = types.cast(_TN, type_(new_min)) # These might not be floats but the Python type system doesn't understand # the generic type system in this case old_range = types.cast(float, old_max) - types.cast(float, old_min) new_range = types.cast(float, new_max) - types.cast(float, new_min) if old_range == 0: raise ValueError(f'Input range ({old_min}-{old_max}) is empty') if new_range == 0: raise ValueError(f'Output range ({new_min}-{new_max}) is empty') # The current state of Python typing makes it impossible to use the # generic type system in this case. Or so extremely verbose that it's not # worth it. new_value = (value - old_min) * new_range # type: ignore[operator] # pyright: ignore[reportOperatorIssue, reportUnknownVariableType] if type_ is int: new_value //= old_range # pyright: ignore[reportUnknownVariableType] else: new_value /= old_range # pyright: ignore[reportUnknownVariableType] new_value += new_min # type: ignore[operator] # pyright: ignore[reportOperatorIssue, reportUnknownVariableType] return types.cast(_TN, new_value) python-utils-3.9.1/python_utils/decorators.py000066400000000000000000000135471472121421700214760ustar00rootroot00000000000000""" This module provides various utility decorators for Python functions and methods. The decorators include: 1. `set_attributes`: Sets attributes on functions and classes. 2. `listify`: Converts any generator to a list or other collection. 3. `sample`: Limits calls to a function based on a sample rate. 4. `wraps_classmethod`: Wraps classmethods with type info from a regular method. Each decorator is designed to enhance the functionality of Python functions and methods in a simple and reusable manner. """ import contextlib import functools import logging import random from . 
import types

_T = types.TypeVar('_T')
_P = types.ParamSpec('_P')
_S = types.TypeVar('_S', covariant=True)


def set_attributes(**kwargs: types.Any) -> types.Callable[..., types.Any]:
    """Decorator to set attributes on functions and classes.

    A common usage for this pattern is the Django Admin where
    functions can get an optional short_description. To illustrate:

    Example from the Django admin using this decorator:
    https://docs.djangoproject.com/en/3.0/ref/contrib/admin/#django.contrib.admin.ModelAdmin.list_display

    Our simplified version:

    >>> @set_attributes(short_description='Name')
    ... def upper_case_name(self, obj):
    ...     return ('%s %s' % (obj.first_name, obj.last_name)).upper()

    The standard Django version:

    >>> def upper_case_name(obj):
    ...     return ('%s %s' % (obj.first_name, obj.last_name)).upper()
    >>> upper_case_name.short_description = 'Name'
    """

    def _set_attributes(
        function: types.Callable[_P, _T],
    ) -> types.Callable[_P, _T]:
        for key, value in kwargs.items():
            setattr(function, key, value)
        return function

    return _set_attributes


def listify(
    collection: types.Callable[
        [types.Iterable[_T]], types.Collection[_T]
    ] = list,
    allow_empty: bool = True,
) -> types.Callable[
    [types.Callable[..., types.Optional[types.Iterable[_T]]]],
    types.Callable[..., types.Collection[_T]],
]:
    """
    Convert any generator to a list or other type of collection.

    >>> @listify()
    ... def generator():
    ...     yield 1
    ...     yield 2
    ...     yield 3
    >>> generator()
    [1, 2, 3]

    >>> @listify()
    ... def empty_generator():
    ...     pass
    >>> empty_generator()
    []

    >>> @listify(allow_empty=False)
    ... def empty_generator_not_allowed():
    ...     pass
    >>> empty_generator_not_allowed()  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    TypeError: ... `allow_empty` is `False`

    >>> @listify(collection=set)
    ... def set_generator():
    ...     yield 1
    ...     yield 1
    ...     yield 2
    >>> set_generator()
    {1, 2}

    >>> @listify(collection=dict)
    ... def dict_generator():
    ...     yield 'a', 1
    ...     yield 'b', 2
    >>> dict_generator()
    {'a': 1, 'b': 2}
    """

    def _listify(
        function: types.Callable[..., types.Optional[types.Iterable[_T]]],
    ) -> types.Callable[..., types.Collection[_T]]:
        def __listify(
            *args: types.Any, **kwargs: types.Any
        ) -> types.Collection[_T]:
            result: types.Optional[types.Iterable[_T]] = function(
                *args, **kwargs
            )
            if result is None:
                if allow_empty:
                    return collection(iter(()))
                else:
                    raise TypeError(
                        f'{function} returned `None` and `allow_empty` '
                        'is `False`'
                    )
            else:
                return collection(result)

        return __listify

    return _listify


def sample(
    sample_rate: float,
) -> types.Callable[
    [types.Callable[_P, _T]],
    types.Callable[_P, types.Optional[_T]],
]:
    """
    Limit calls to a function based on given sample rate.
    Number of calls to the function will be roughly equal to
    sample_rate percentage.

    Usage:

    >>> @sample(0.5)
    ... def demo_function(*args, **kwargs):
    ...     return 1

    Calls to *demo_function* will be limited to approximately 50%.
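
    Note that sampling is driven by ``random.random()``, so seed the
    module-level RNG (e.g. ``random.seed(0)``) if you need reproducible
    behaviour in tests.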
""" def _sample( function: types.Callable[_P, _T], ) -> types.Callable[_P, types.Optional[_T]]: @functools.wraps(function) def __sample( *args: _P.args, **kwargs: _P.kwargs ) -> types.Optional[_T]: if random.random() < sample_rate: return function(*args, **kwargs) else: logging.debug( 'Skipped execution of %r(%r, %r) due to sampling', function, args, kwargs, ) return None return __sample return _sample def wraps_classmethod( wrapped: types.Callable[types.Concatenate[_S, _P], _T], ) -> types.Callable[ [ types.Callable[types.Concatenate[types.Any, _P], _T], ], types.Callable[types.Concatenate[_S, _P], _T], ]: """ Like `functools.wraps`, but for wrapping classmethods with the type info from a regular method. """ def _wraps_classmethod( wrapper: types.Callable[types.Concatenate[types.Any, _P], _T], ) -> types.Callable[types.Concatenate[_S, _P], _T]: # For some reason `functools.update_wrapper` fails on some test # runs but not while running actual code with contextlib.suppress(AttributeError): wrapper = functools.update_wrapper( wrapper, wrapped, assigned=tuple( a for a in functools.WRAPPER_ASSIGNMENTS if a != '__annotations__' ), ) if annotations := getattr(wrapped, '__annotations__', {}): annotations.pop('self', None) wrapper.__annotations__ = annotations return wrapper return _wraps_classmethod python-utils-3.9.1/python_utils/exceptions.py000066400000000000000000000021571472121421700215050ustar00rootroot00000000000000""" This module provides utility functions for raising and reraising exceptions. Functions: raise_exception(exception_class, *args, **kwargs): Returns a function that raises an exception of the given type with the given arguments. reraise(*args, **kwargs): Reraises the current exception. """ from . import types def raise_exception( exception_class: types.Type[Exception], *args: types.Any, **kwargs: types.Any, ) -> types.Callable[..., None]: """ Returns a function that raises an exception of the given type with the given arguments. >>> raise_exception(ValueError, 'spam')('eggs') Traceback (most recent call last): ... ValueError: spam """ def raise_(*args_: types.Any, **kwargs_: types.Any) -> types.Any: raise exception_class(*args, **kwargs) return raise_ def reraise(*args: types.Any, **kwargs: types.Any) -> types.Any: """ Reraises the current exception. This function seems useless, but it can be useful when you need to pass a callable to another function that raises an exception. """ raise python-utils-3.9.1/python_utils/formatters.py000066400000000000000000000131051472121421700215050ustar00rootroot00000000000000""" This module provides utility functions for formatting strings and dates. Functions: camel_to_underscore(name: str) -> str: Convert camel case style naming to underscore/snake case style naming. apply_recursive(function: Callable[[str], str], data: OptionalScope = None, **kwargs: Any) -> OptionalScope: Apply a function to all keys in a scope recursively. timesince(dt: Union[datetime.datetime, datetime.timedelta], default: str = 'just now') -> str: Returns string representing 'time since' e.g. 3 days ago, 5 hours ago. """ # pyright: reportUnnecessaryIsInstance=false import datetime from python_utils import types def camel_to_underscore(name: str) -> str: """Convert camel case style naming to underscore/snake case style naming. If there are existing underscores they will be collapsed with the to-be-added underscores. Multiple consecutive capital letters will not be split except for the last one. 
>>> camel_to_underscore('SpamEggsAndBacon') 'spam_eggs_and_bacon' >>> camel_to_underscore('Spam_and_bacon') 'spam_and_bacon' >>> camel_to_underscore('Spam_And_Bacon') 'spam_and_bacon' >>> camel_to_underscore('__SpamAndBacon__') '__spam_and_bacon__' >>> camel_to_underscore('__SpamANDBacon__') '__spam_and_bacon__' """ output: types.List[str] = [] for i, c in enumerate(name): if i > 0: pc = name[i - 1] if c.isupper() and not pc.isupper() and pc != '_': # Uppercase and the previous character isn't upper/underscore? # Add the underscore output.append('_') elif i > 3 and not c.isupper(): # Will return the last 3 letters to check if we are changing # case previous = name[i - 3 : i] if previous.isalpha() and previous.isupper(): output.insert(len(output) - 1, '_') output.append(c.lower()) return ''.join(output) def apply_recursive( function: types.Callable[[str], str], data: types.OptionalScope = None, **kwargs: types.Any, ) -> types.OptionalScope: """ Apply a function to all keys in a scope recursively. >>> apply_recursive(camel_to_underscore, {'SpamEggsAndBacon': 'spam'}) {'spam_eggs_and_bacon': 'spam'} >>> apply_recursive( ... camel_to_underscore, ... { ... 'SpamEggsAndBacon': { ... 'SpamEggsAndBacon': 'spam', ... } ... }, ... ) {'spam_eggs_and_bacon': {'spam_eggs_and_bacon': 'spam'}} >>> a = {'a_b_c': 123, 'def': {'DeF': 456}} >>> b = apply_recursive(camel_to_underscore, a) >>> b {'a_b_c': 123, 'def': {'de_f': 456}} >>> apply_recursive(camel_to_underscore, None) """ if data is None: return None elif isinstance(data, dict): return { function(key): apply_recursive(function, value, **kwargs) for key, value in data.items() } else: return data def timesince( dt: types.Union[datetime.datetime, datetime.timedelta], default: str = 'just now', ) -> str: """ Returns string representing 'time since' e.g. 3 days ago, 5 hours ago etc. 
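    Note that at most the two largest units are shown (the result joins
    only ``output[:2]``), so e.g. hours and minutes are reported but any
    remaining seconds are dropped.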
>>> now = datetime.datetime.now() >>> timesince(now) 'just now' >>> timesince(now - datetime.timedelta(seconds=1)) '1 second ago' >>> timesince(now - datetime.timedelta(seconds=2)) '2 seconds ago' >>> timesince(now - datetime.timedelta(seconds=60)) '1 minute ago' >>> timesince(now - datetime.timedelta(seconds=61)) '1 minute and 1 second ago' >>> timesince(now - datetime.timedelta(seconds=62)) '1 minute and 2 seconds ago' >>> timesince(now - datetime.timedelta(seconds=120)) '2 minutes ago' >>> timesince(now - datetime.timedelta(seconds=121)) '2 minutes and 1 second ago' >>> timesince(now - datetime.timedelta(seconds=122)) '2 minutes and 2 seconds ago' >>> timesince(now - datetime.timedelta(seconds=3599)) '59 minutes and 59 seconds ago' >>> timesince(now - datetime.timedelta(seconds=3600)) '1 hour ago' >>> timesince(now - datetime.timedelta(seconds=3601)) '1 hour and 1 second ago' >>> timesince(now - datetime.timedelta(seconds=3602)) '1 hour and 2 seconds ago' >>> timesince(now - datetime.timedelta(seconds=3660)) '1 hour and 1 minute ago' >>> timesince(now - datetime.timedelta(seconds=3661)) '1 hour and 1 minute ago' >>> timesince(now - datetime.timedelta(seconds=3720)) '1 hour and 2 minutes ago' >>> timesince(now - datetime.timedelta(seconds=3721)) '1 hour and 2 minutes ago' >>> timesince(datetime.timedelta(seconds=3721)) '1 hour and 2 minutes ago' """ if isinstance(dt, datetime.timedelta): diff = dt else: now = datetime.datetime.now() diff = abs(now - dt) periods = ( (diff.days / 365, 'year', 'years'), (diff.days % 365 / 30, 'month', 'months'), (diff.days % 30 / 7, 'week', 'weeks'), (diff.days % 7, 'day', 'days'), (diff.seconds / 3600, 'hour', 'hours'), (diff.seconds % 3600 / 60, 'minute', 'minutes'), (diff.seconds % 60, 'second', 'seconds'), ) output: types.List[str] = [] for period, singular, plural in periods: int_period = int(period) if int_period == 1: output.append(f'{int_period} {singular}') elif int_period: output.append(f'{int_period} {plural}') if output: return f'{" and ".join(output[:2])} ago' return default python-utils-3.9.1/python_utils/generators.py000066400000000000000000000073401472121421700214740ustar00rootroot00000000000000""" This module provides generator utilities for batching items from iterables and async iterables. Functions: abatcher(generator, batch_size=None, interval=None): Asyncio generator wrapper that returns items with a given batch size or interval (whichever is reached first). batcher(iterable, batch_size=10): Generator wrapper that returns items with a given batch size. """ import asyncio import time import python_utils from python_utils import types _T = types.TypeVar('_T') async def abatcher( generator: types.Union[ types.AsyncGenerator[_T, None], types.AsyncIterator[_T], ], batch_size: types.Optional[int] = None, interval: types.Optional[types.delta_type] = None, ) -> types.AsyncGenerator[types.List[_T], None]: """ Asyncio generator wrapper that returns items with a given batch size or interval (whichever is reached first). Args: generator: The async generator or iterator to batch. batch_size (types.Optional[int], optional): The number of items per batch. Defaults to None. interval (types.Optional[types.delta_type], optional): The time interval to wait before yielding a batch. Defaults to None. Yields: types.AsyncGenerator[types.List[_T], None]: A generator that yields batches of items. 
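    Usage sketch (not a doctest, since doctests and asyncio do not mix
    well; ``source()`` and ``handle()`` are hypothetical):

        async for batch in abatcher(source(), batch_size=100):
            await handle(batch)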
""" batch: types.List[_T] = [] assert batch_size or interval, 'Must specify either batch_size or interval' # If interval is specified, use it to determine when to yield the batch # Alternatively set a really long timeout to keep the code simpler if interval: interval_s = python_utils.delta_to_seconds(interval) else: # Set the timeout to 10 years interval_s = 60 * 60 * 24 * 365 * 10.0 next_yield: float = time.perf_counter() + interval_s done: types.Set[asyncio.Task[_T]] pending: types.Set[asyncio.Task[_T]] = set() while True: try: done, pending = await asyncio.wait( pending or [ asyncio.create_task( types.cast( types.Coroutine[None, None, _T], generator.__anext__(), ) ), ], timeout=interval_s, return_when=asyncio.FIRST_COMPLETED, ) if done: for result in done: batch.append(result.result()) except StopAsyncIteration: if batch: yield batch break if batch_size is not None and len(batch) == batch_size: yield batch batch = [] if interval and batch and time.perf_counter() > next_yield: yield batch batch = [] # Always set the next yield time to the current time. If the # loop is running slow due to blocking functions we do not # want to burst too much next_yield = time.perf_counter() + interval_s def batcher( iterable: types.Iterable[_T], batch_size: int = 10, ) -> types.Generator[types.List[_T], None, None]: """ Generator wrapper that returns items with a given batch size. Args: iterable (types.Iterable[_T]): The iterable to batch. batch_size (int, optional): The number of items per batch. Defaults to 10. Yields: types.Generator[types.List[_T], None, None]: A generator that yields batches of items. """ batch: types.List[_T] = [] for item in iterable: batch.append(item) if len(batch) == batch_size: yield batch batch = [] if batch: yield batch python-utils-3.9.1/python_utils/import_.py000066400000000000000000000073761472121421700210050ustar00rootroot00000000000000""" This module provides utilities for importing modules and handling exceptions. Classes: DummyError(Exception): A custom exception class used as a default for exception handling. Functions: import_global(name, modules=None, exceptions=DummyError, locals_=None, globals_=None, level=-1): Imports the requested items into the global scope, with support for relative imports and custom exception handling. """ from . import types class DummyError(Exception): """A custom exception class used as a default for exception handling.""" # Legacy alias for DummyError DummyException = DummyError def import_global( # noqa: C901 name: str, modules: types.Optional[types.List[str]] = None, exceptions: types.ExceptionsType = DummyError, locals_: types.OptionalScope = None, globals_: types.OptionalScope = None, level: int = -1, ) -> types.Any: # sourcery skip: hoist-if-from-if """Import the requested items into the global scope. WARNING! this method _will_ overwrite your global scope If you have a variable named `path` and you call `import_global('sys')` it will be overwritten with `sys.path` Args: name (str): the name of the module to import, e.g. sys modules (str): the modules to import, use None for everything exceptions (Exception): the exception to catch, e.g. 
ImportError locals_: the `locals()` method (in case you need a different scope) globals_: the `globals()` method (in case you need a different scope) level (int): the level to import from, this can be used for relative imports """ frame = None name_parts: types.List[str] = name.split('.') modules_set: types.Set[str] = set() try: # If locals_ or globals_ are not given, autodetect them by inspecting # the current stack if locals_ is None or globals_ is None: import inspect frame = inspect.stack()[1][0] if locals_ is None: locals_ = frame.f_locals if globals_ is None: globals_ = frame.f_globals try: # Relative imports are supported (from .spam import eggs) if not name_parts[0]: name_parts = name_parts[1:] level = 1 # raise IOError((name, level)) module = __import__( name=name_parts[0] or '.', globals=globals_, locals=locals_, fromlist=name_parts[1:], level=max(level, 0), ) # Make sure we get the right part of a dotted import (i.e. # spam.eggs should return eggs, not spam) try: for attr in name_parts[1:]: module = getattr(module, attr) except AttributeError as e: raise ImportError( 'No module named ' + '.'.join(name_parts) ) from e # If no list of modules is given, autodetect from either __all__ # or a dir() of the module if not modules: modules_set = set(getattr(module, '__all__', dir(module))) else: modules_set = set(modules).intersection(dir(module)) # Add all items in modules to the global scope for k in set(dir(module)).intersection(modules_set): if k and k[0] != '_': globals_[k] = getattr(module, k) except exceptions as e: return e finally: # Clean up, just to be sure del ( name, name_parts, modules, modules_set, exceptions, locals_, globals_, frame, ) python-utils-3.9.1/python_utils/logger.py000066400000000000000000000234001472121421700205750ustar00rootroot00000000000000""" This module provides a base class `LoggerBase` and a derived class `Logged` for adding logging capabilities to classes. The `LoggerBase` class expects a `logger` attribute to be a `logging.Logger` or compatible instance and provides methods for logging at various levels. The `Logged` class automatically adds a named logger to the class. Classes: LoggerBase: A base class that adds logging utilities to a class. Logged: A derived class that automatically adds a named logger to a class. Example: >>> class MyClass(Logged): ... def __init__(self): ... Logged.__init__(self) >>> my_class = MyClass() >>> my_class.debug('debug') >>> my_class.info('info') >>> my_class.warning('warning') >>> my_class.error('error') >>> my_class.exception('exception') >>> my_class.log(0, 'log') """ import abc import logging from . import decorators __all__ = ['Logged'] from . import types # From the logging typeshed, converted to be compatible with Python 3.8 # https://github.com/python/typeshed/blob/main/stdlib/logging/__init__.pyi _ExcInfoType: types.TypeAlias = types.Union[ bool, types.Tuple[ types.Type[BaseException], BaseException, types.Union[types.TracebackType, None], ], types.Tuple[None, None, None], BaseException, None, ] _P = types.ParamSpec('_P') _T = types.TypeVar('_T', covariant=True) class LoggerProtocol(types.Protocol): def debug( self, msg: object, *args: object, exc_info: _ExcInfoType = None, stack_info: bool = False, stacklevel: int = 1, extra: types.Union[types.Mapping[str, object], None] = None, ) -> None: ... def info( self, msg: object, *args: object, exc_info: _ExcInfoType = None, stack_info: bool = False, stacklevel: int = 1, extra: types.Union[types.Mapping[str, object], None] = None, ) -> None: ... 
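    # The remaining protocol methods mirror logging.Logger's optional
    # keyword parameters (exc_info, stack_info, stacklevel, extra).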
def warning( self, msg: object, *args: object, exc_info: _ExcInfoType = None, stack_info: bool = False, stacklevel: int = 1, extra: types.Union[types.Mapping[str, object], None] = None, ) -> None: ... def error( self, msg: object, *args: object, exc_info: _ExcInfoType = None, stack_info: bool = False, stacklevel: int = 1, extra: types.Union[types.Mapping[str, object], None] = None, ) -> None: ... def critical( self, msg: object, *args: object, exc_info: _ExcInfoType = None, stack_info: bool = False, stacklevel: int = 1, extra: types.Union[types.Mapping[str, object], None] = None, ) -> None: ... def exception( self, msg: object, *args: object, exc_info: _ExcInfoType = None, stack_info: bool = False, stacklevel: int = 1, extra: types.Union[types.Mapping[str, object], None] = None, ) -> None: ... def log( self, level: int, msg: object, *args: object, exc_info: _ExcInfoType = None, stack_info: bool = False, stacklevel: int = 1, extra: types.Union[types.Mapping[str, object], None] = None, ) -> None: ... class LoggerBase(abc.ABC): """Class which automatically adds logging utilities to your class when interiting. Expects `logger` to be a logging.Logger or compatible instance. Adds easy access to debug, info, warning, error, exception and log methods >>> class MyClass(LoggerBase): ... logger = logging.getLogger(__name__) ... ... def __init__(self): ... Logged.__init__(self) >>> my_class = MyClass() >>> my_class.debug('debug') >>> my_class.info('info') >>> my_class.warning('warning') >>> my_class.error('error') >>> my_class.exception('exception') >>> my_class.log(0, 'log') """ # I've tried using a protocol to properly type the logger but it gave all # sorts of issues with mypy so we're using the lazy solution for now. The # actual classes define the correct type anyway logger: types.Any # logger: LoggerProtocol @classmethod def __get_name( # pyright: ignore[reportUnusedFunction] cls, *name_parts: str ) -> str: return '.'.join(n.strip() for n in name_parts if n.strip()) @decorators.wraps_classmethod(logging.Logger.debug) @classmethod def debug( cls, msg: object, *args: object, exc_info: _ExcInfoType = None, stack_info: bool = False, stacklevel: int = 1, extra: types.Union[types.Mapping[str, object], None] = None, ) -> None: return cls.logger.debug( # type: ignore[no-any-return] msg, *args, exc_info=exc_info, stack_info=stack_info, stacklevel=stacklevel, extra=extra, ) @decorators.wraps_classmethod(logging.Logger.info) @classmethod def info( cls, msg: object, *args: object, exc_info: _ExcInfoType = None, stack_info: bool = False, stacklevel: int = 1, extra: types.Union[types.Mapping[str, object], None] = None, ) -> None: return cls.logger.info( # type: ignore[no-any-return] msg, *args, exc_info=exc_info, stack_info=stack_info, stacklevel=stacklevel, extra=extra, ) @decorators.wraps_classmethod(logging.Logger.warning) @classmethod def warning( cls, msg: object, *args: object, exc_info: _ExcInfoType = None, stack_info: bool = False, stacklevel: int = 1, extra: types.Union[types.Mapping[str, object], None] = None, ) -> None: return cls.logger.warning( # type: ignore[no-any-return] msg, *args, exc_info=exc_info, stack_info=stack_info, stacklevel=stacklevel, extra=extra, ) @decorators.wraps_classmethod(logging.Logger.error) @classmethod def error( cls, msg: object, *args: object, exc_info: _ExcInfoType = None, stack_info: bool = False, stacklevel: int = 1, extra: types.Union[types.Mapping[str, object], None] = None, ) -> None: return cls.logger.error( # type: ignore[no-any-return] msg, *args, 
exc_info=exc_info, stack_info=stack_info, stacklevel=stacklevel, extra=extra, ) @decorators.wraps_classmethod(logging.Logger.critical) @classmethod def critical( cls, msg: object, *args: object, exc_info: _ExcInfoType = None, stack_info: bool = False, stacklevel: int = 1, extra: types.Union[types.Mapping[str, object], None] = None, ) -> None: return cls.logger.critical( # type: ignore[no-any-return] msg, *args, exc_info=exc_info, stack_info=stack_info, stacklevel=stacklevel, extra=extra, ) @decorators.wraps_classmethod(logging.Logger.exception) @classmethod def exception( cls, msg: object, *args: object, exc_info: _ExcInfoType = None, stack_info: bool = False, stacklevel: int = 1, extra: types.Union[types.Mapping[str, object], None] = None, ) -> None: return cls.logger.exception( # type: ignore[no-any-return] msg, *args, exc_info=exc_info, stack_info=stack_info, stacklevel=stacklevel, extra=extra, ) @decorators.wraps_classmethod(logging.Logger.log) @classmethod def log( cls, level: int, msg: object, *args: object, exc_info: _ExcInfoType = None, stack_info: bool = False, stacklevel: int = 1, extra: types.Union[types.Mapping[str, object], None] = None, ) -> None: return cls.logger.log( # type: ignore[no-any-return] level, msg, *args, exc_info=exc_info, stack_info=stack_info, stacklevel=stacklevel, extra=extra, ) class Logged(LoggerBase): """Class which automatically adds a named logger to your class when interiting. Adds easy access to debug, info, warning, error, exception and log methods >>> class MyClass(Logged): ... def __init__(self): ... Logged.__init__(self) >>> my_class = MyClass() >>> my_class.debug('debug') >>> my_class.info('info') >>> my_class.warning('warning') >>> my_class.error('error') >>> my_class.exception('exception') >>> my_class.log(0, 'log') >>> my_class._Logged__get_name('spam') 'spam' """ logger: logging.Logger # pragma: no cover @classmethod def __get_name(cls, *name_parts: str) -> str: return types.cast( str, LoggerBase._LoggerBase__get_name(*name_parts), # type: ignore[attr-defined] # pyright: ignore[reportUnknownMemberType, reportUnknownVariableType, reportAttributeAccessIssue] ) def __new__(cls, *args: types.Any, **kwargs: types.Any) -> 'Logged': """ Create a new instance of the class and initialize the logger. The logger is named using the module and class name. Args: *args: Variable length argument list. **kwargs: Arbitrary keyword arguments. Returns: An instance of the class. """ cls.logger = logging.getLogger( cls.__get_name(cls.__module__, cls.__name__) ) return super().__new__(cls) python-utils-3.9.1/python_utils/loguru.py000066400000000000000000000025151472121421700206370ustar00rootroot00000000000000""" This module provides a `Logurud` class that integrates the `loguru` logger with the base logging functionality defined in `logger_module.LoggerBase`. Classes: Logurud: A class that extends `LoggerBase` and uses `loguru` for logging. Usage example: >>> from python_utils.loguru import Logurud >>> class MyClass(Logurud): ... def __init__(self): ... Logurud.__init__(self) >>> my_class = MyClass() >>> my_class.logger.info('This is an info message') """ from __future__ import annotations import typing import loguru from . import logger as logger_module __all__ = ['Logurud'] class Logurud(logger_module.LoggerBase): """ A class that extends `LoggerBase` and uses `loguru` for logging. Attributes: logger (loguru.Logger): The `loguru` logger instance. 
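    Note:
        The logger is created with ``loguru.logger.opt(depth=1)`` so that
        log records are attributed to the caller's location rather than to
        this wrapper class.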
""" logger: loguru.Logger def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Logurud: """ Creates a new instance of `Logurud` and initializes the `loguru` logger. Args: *args (typing.Any): Variable length argument list. **kwargs (typing.Any): Arbitrary keyword arguments. Returns: Logurud: A new instance of `Logurud`. """ cls.logger: loguru.Logger = loguru.logger.opt(depth=1) return super().__new__(cls) python-utils-3.9.1/python_utils/py.typed000066400000000000000000000000001472121421700204320ustar00rootroot00000000000000python-utils-3.9.1/python_utils/terminal.py000066400000000000000000000126171472121421700211410ustar00rootroot00000000000000""" This module provides functions to get the terminal size across different platforms. Functions: get_terminal_size: Get the current size of the terminal. _get_terminal_size_windows: Get terminal size on Windows. _get_terminal_size_tput: Get terminal size using `tput`. _get_terminal_size_linux: Get terminal size on Linux. Usage example: >>> width, height = get_terminal_size() """ from __future__ import annotations import contextlib import os import typing from . import converters Dimensions = tuple[int, int] OptionalDimensions = typing.Optional[Dimensions] _StrDimensions = tuple[str, str] _OptionalStrDimensions = typing.Optional[_StrDimensions] def get_terminal_size() -> Dimensions: # pragma: no cover """Get the current size of your terminal. Multiple returns are not always a good idea, but in this case it greatly simplifies the code so I believe it's justified. It's not the prettiest function but that's never really possible with cross-platform code. Returns: width, height: Two integers containing width and height """ w: int | None h: int | None with contextlib.suppress(Exception): # Default to 79 characters for IPython notebooks from IPython import get_ipython # type: ignore[attr-defined] ipython = get_ipython() # type: ignore[no-untyped-call] from ipykernel import zmqshell # type: ignore[import-not-found] if isinstance(ipython, zmqshell.ZMQInteractiveShell): return 79, 24 with contextlib.suppress(Exception): # This works for Python 3, but not Pypy3. Probably the best method if # it's supported so let's always try import shutil w, h = shutil.get_terminal_size() if w and h: # The off by one is needed due to progressbars in some cases, for # safety we'll always substract it. return w - 1, h with contextlib.suppress(Exception): w = converters.to_int(os.environ.get('COLUMNS')) h = converters.to_int(os.environ.get('LINES')) if w and h: return w, h with contextlib.suppress(Exception): import blessings # type: ignore[import-untyped] terminal = blessings.Terminal() w = terminal.width h = terminal.height if w and h: return w, h with contextlib.suppress(Exception): # The method can return None so we don't unpack it wh = _get_terminal_size_linux() if wh is not None and all(wh): return wh with contextlib.suppress(Exception): # Windows detection doesn't always work, let's try anyhow wh = _get_terminal_size_windows() if wh is not None and all(wh): return wh with contextlib.suppress(Exception): # needed for window's python in cygwin's xterm! 
wh = _get_terminal_size_tput() if wh is not None and all(wh): return wh return 79, 24 def _get_terminal_size_windows() -> OptionalDimensions: # pragma: no cover res = None try: from ctypes import ( # type: ignore[attr-defined] create_string_buffer, windll, ) # stdin handle is -10 # stdout handle is -11 # stderr handle is -12 h = windll.kernel32.GetStdHandle(-12) csbi = create_string_buffer(22) res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi) except Exception: return None if res: import struct (_, _, _, _, _, left, top, right, bottom, _, _) = struct.unpack( 'hhhhHhhhhhh', csbi.raw ) w = right - left h = bottom - top return w, h else: return None def _get_terminal_size_tput() -> OptionalDimensions: # pragma: no cover # get terminal width src: http://stackoverflow.com/questions/263890/ try: import subprocess proc = subprocess.Popen( ['tput', 'cols'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) output = proc.communicate(input=None) w = int(output[0]) proc = subprocess.Popen( ['tput', 'lines'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) output = proc.communicate(input=None) h = int(output[0]) except Exception: return None else: return w, h def _get_terminal_size_linux() -> OptionalDimensions: # pragma: no cover def ioctl_gwinsz(fd: int) -> tuple[str, str] | None: try: import fcntl import struct import termios return typing.cast( _OptionalStrDimensions, struct.unpack( 'hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'), # type: ignore[call-overload] ), ) except Exception: return None size: _OptionalStrDimensions size = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2) if not size: with contextlib.suppress(Exception): fd = os.open(os.ctermid(), os.O_RDONLY) size = ioctl_gwinsz(fd) os.close(fd) if not size: try: size = os.environ['LINES'], os.environ['COLUMNS'] except Exception: return None return int(size[1]), int(size[0]) python-utils-3.9.1/python_utils/time.py000066400000000000000000000315171472121421700202640ustar00rootroot00000000000000""" This module provides utility functions for handling time-related operations. Functions: - timedelta_to_seconds: Convert a timedelta to seconds with microseconds as fraction. - delta_to_seconds: Convert a timedelta or numeric interval to seconds. - delta_to_seconds_or_none: Convert a timedelta to seconds or return None. - format_time: Format a timestamp (timedelta, datetime, or seconds) to a string. - timeout_generator: Generate items from an iterable until a timeout is reached. - aio_timeout_generator: Asynchronously generate items from an iterable until a timeout is reached. - aio_generator_timeout_detector: Detect if an async generator has not yielded an element for a set amount of time. - aio_generator_timeout_detector_decorator: Decorator for aio_generator_timeout_detector. """ # pyright: reportUnnecessaryIsInstance=false import asyncio import datetime import functools import itertools import time import python_utils from python_utils import aio, exceptions, types _T = types.TypeVar('_T') _P = types.ParamSpec('_P') # There might be a better way to get the epoch with tzinfo, please create # a pull request if you know a better way that functions for Python 2 and 3 epoch = datetime.datetime(year=1970, month=1, day=1) def timedelta_to_seconds(delta: datetime.timedelta) -> types.Number: """Convert a timedelta to seconds with the microseconds as fraction. Note that this method has become largely obsolete with the `timedelta.total_seconds()` method introduced in Python 2.7. 
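    For example, whole hours simply fold into the seconds count:

    >>> import datetime
    >>> '%d' % timedelta_to_seconds(datetime.timedelta(hours=1))
    '3600'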
>>> from datetime import timedelta >>> '%d' % timedelta_to_seconds(timedelta(days=1)) '86400' >>> '%d' % timedelta_to_seconds(timedelta(seconds=1)) '1' >>> '%.6f' % timedelta_to_seconds(timedelta(seconds=1, microseconds=1)) '1.000001' >>> '%.6f' % timedelta_to_seconds(timedelta(microseconds=1)) '0.000001' """ # Only convert to float if needed if delta.microseconds: total = delta.microseconds * 1e-6 else: total = 0 total += delta.seconds total += delta.days * 60 * 60 * 24 return total def delta_to_seconds(interval: types.delta_type) -> types.Number: """ Convert a timedelta to seconds. >>> delta_to_seconds(datetime.timedelta(seconds=1)) 1 >>> delta_to_seconds(datetime.timedelta(seconds=1, microseconds=1)) 1.000001 >>> delta_to_seconds(1) 1 >>> delta_to_seconds('whatever') # doctest: +ELLIPSIS Traceback (most recent call last): ... TypeError: Unknown type ... """ if isinstance(interval, datetime.timedelta): return timedelta_to_seconds(interval) elif isinstance(interval, (int, float)): return interval else: raise TypeError(f'Unknown type {type(interval)}: {interval!r}') def delta_to_seconds_or_none( interval: types.Optional[types.delta_type], ) -> types.Optional[types.Number]: """Convert a timedelta to seconds or return None.""" if interval is None: return None else: return delta_to_seconds(interval) def format_time( timestamp: types.timestamp_type, precision: datetime.timedelta = datetime.timedelta(seconds=1), ) -> str: """Formats timedelta/datetime/seconds. >>> format_time('1') '0:00:01' >>> format_time(1.234) '0:00:01' >>> format_time(1) '0:00:01' >>> format_time(datetime.datetime(2000, 1, 2, 3, 4, 5, 6)) '2000-01-02 03:04:05' >>> format_time(datetime.date(2000, 1, 2)) '2000-01-02' >>> format_time(datetime.timedelta(seconds=3661)) '1:01:01' >>> format_time(None) '--:--:--' >>> format_time(format_time) # doctest: +ELLIPSIS Traceback (most recent call last): ... TypeError: Unknown type ... """ precision_seconds = precision.total_seconds() if isinstance(timestamp, str): timestamp = float(timestamp) if isinstance(timestamp, (int, float)): try: timestamp = datetime.timedelta(seconds=timestamp) except OverflowError: # pragma: no cover timestamp = None if isinstance(timestamp, datetime.timedelta): seconds = timestamp.total_seconds() # Truncate the number to the given precision seconds = seconds - (seconds % precision_seconds) return str(datetime.timedelta(seconds=seconds)) elif isinstance(timestamp, datetime.datetime): # pragma: no cover # Python 2 doesn't have the timestamp method if hasattr(timestamp, 'timestamp'): seconds = timestamp.timestamp() else: seconds = timedelta_to_seconds(timestamp - epoch) # Truncate the number to the given precision seconds = seconds - (seconds % precision_seconds) try: # pragma: no cover dt = datetime.datetime.fromtimestamp(seconds) except (ValueError, OSError): # pragma: no cover dt = datetime.datetime.max return str(dt) elif isinstance(timestamp, datetime.date): return str(timestamp) elif timestamp is None: return '--:--:--' else: raise TypeError(f'Unknown type {type(timestamp)}: {timestamp!r}') @types.overload def _to_iterable( iterable: types.Union[ types.Callable[[], types.AsyncIterable[_T]], types.AsyncIterable[_T], ], ) -> types.AsyncIterable[_T]: ... @types.overload def _to_iterable( iterable: types.Union[ types.Callable[[], types.Iterable[_T]], types.Iterable[_T] ], ) -> types.Iterable[_T]: ... 
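# Implementation for the overloads above: factories are called, plain
# iterables are passed through unchanged.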
def _to_iterable(
    iterable: types.Union[
        types.Iterable[_T],
        types.Callable[[], types.Iterable[_T]],
        types.AsyncIterable[_T],
        types.Callable[[], types.AsyncIterable[_T]],
    ],
) -> types.Union[types.Iterable[_T], types.AsyncIterable[_T]]:
    if callable(iterable):
        return iterable()
    else:
        return iterable


def timeout_generator(
    timeout: types.delta_type,
    interval: types.delta_type = datetime.timedelta(seconds=1),
    iterable: types.Union[
        types.Iterable[_T], types.Callable[[], types.Iterable[_T]]
    ] = itertools.count,  # type: ignore[assignment]
    interval_multiplier: float = 1.0,
    maximum_interval: types.Optional[types.delta_type] = None,
) -> types.Iterable[_T]:
    """
    Generator that walks through the given iterable (a counter by default)
    until the timeout is reached, with a configurable interval between
    items.

    This can be used to limit the time spent on a slow operation. This can
    be useful for testing slow APIs so you get a small sample of the data
    in a reasonable amount of time.

    >>> for i in timeout_generator(0.1, 0.06):
    ...     # Put your slow code here
    ...     print(i)
    0
    1
    2

    >>> timeout = datetime.timedelta(seconds=0.1)
    >>> interval = datetime.timedelta(seconds=0.06)
    >>> for i in timeout_generator(timeout, interval, itertools.count()):
    ...     print(i)
    0
    1
    2

    >>> for i in timeout_generator(1, interval=0.1, iterable='ab'):
    ...     print(i)
    a
    b

    >>> timeout = datetime.timedelta(seconds=0.1)
    >>> interval = datetime.timedelta(seconds=0.06)
    >>> for i in timeout_generator(timeout, interval, interval_multiplier=2):
    ...     print(i)
    0
    1
    2
    """
    float_interval: float = delta_to_seconds(interval)
    float_maximum_interval: types.Optional[float] = delta_to_seconds_or_none(
        maximum_interval
    )
    iterable_ = _to_iterable(iterable)

    end = delta_to_seconds(timeout) + time.perf_counter()
    for item in iterable_:
        yield item

        if time.perf_counter() >= end:
            break

        time.sleep(float_interval)

        float_interval *= interval_multiplier
        if float_maximum_interval:
            float_interval = min(float_interval, float_maximum_interval)


async def aio_timeout_generator(
    timeout: types.delta_type,  # noqa: ASYNC109
    interval: types.delta_type = datetime.timedelta(seconds=1),
    iterable: types.Union[
        types.AsyncIterable[_T], types.Callable[..., types.AsyncIterable[_T]]
    ] = aio.acount,
    interval_multiplier: float = 1.0,
    maximum_interval: types.Optional[types.delta_type] = None,
) -> types.AsyncGenerator[_T, None]:
    """
    Async generator that walks through the given async iterable (a counter
    by default) until the timeout is reached, with a configurable interval
    between items.

    The interval_multiplier grows the interval after every item; to double
    the interval with each run, specify 2. When maximum_interval is given,
    the interval will never grow beyond it.

    Doctests and asyncio are not friends, so no examples. But this function
    is effectively the same as the `timeout_generator` but it uses
    `async for` instead.
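    Usage sketch (not a doctest; ``poll`` is a hypothetical factory
    returning an async iterable):

        async for item in aio_timeout_generator(timeout=60, iterable=poll):
            ...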
""" float_interval: float = delta_to_seconds(interval) float_maximum_interval: types.Optional[float] = delta_to_seconds_or_none( maximum_interval ) iterable_ = _to_iterable(iterable) end = delta_to_seconds(timeout) + time.perf_counter() async for item in iterable_: # pragma: no branch yield item if time.perf_counter() >= end: break await asyncio.sleep(float_interval) float_interval *= interval_multiplier if float_maximum_interval: # pragma: no branch float_interval = min(float_interval, float_maximum_interval) async def aio_generator_timeout_detector( generator: types.AsyncGenerator[_T, None], timeout: types.Optional[types.delta_type] = None, # noqa: ASYNC109 total_timeout: types.Optional[types.delta_type] = None, on_timeout: types.Optional[ types.Callable[ [ types.AsyncGenerator[_T, None], types.Optional[types.delta_type], types.Optional[types.delta_type], BaseException, ], types.Any, ] ] = exceptions.reraise, **on_timeout_kwargs: types.Mapping[types.Text, types.Any], ) -> types.AsyncGenerator[_T, None]: """ This function is used to detect if an asyncio generator has not yielded an element for a set amount of time. The `on_timeout` argument is called with the `generator`, `timeout`, `total_timeout`, `exception` and the extra `**kwargs` to this function as arguments. If `on_timeout` is not specified, the exception is reraised. If `on_timeout` is `None`, the exception is silently ignored and the generator will finish as normal. """ if total_timeout is None: total_timeout_end = None else: total_timeout_end = time.perf_counter() + delta_to_seconds( total_timeout ) timeout_s = python_utils.delta_to_seconds_or_none(timeout) while True: try: if total_timeout_end and time.perf_counter() >= total_timeout_end: raise asyncio.TimeoutError( # noqa: TRY301 'Total timeout reached' ) if timeout_s: yield await asyncio.wait_for(generator.__anext__(), timeout_s) else: yield await generator.__anext__() except asyncio.TimeoutError as exception: # noqa: PERF203 if on_timeout is not None: await on_timeout( generator, timeout, total_timeout, exception, **on_timeout_kwargs, ) break except StopAsyncIteration: break def aio_generator_timeout_detector_decorator( timeout: types.Optional[types.delta_type] = None, total_timeout: types.Optional[types.delta_type] = None, on_timeout: types.Optional[ types.Callable[ [ types.AsyncGenerator[types.Any, None], types.Optional[types.delta_type], types.Optional[types.delta_type], BaseException, ], types.Any, ] ] = exceptions.reraise, **on_timeout_kwargs: types.Mapping[types.Text, types.Any], ) -> types.Callable[ [types.Callable[_P, types.AsyncGenerator[_T, None]]], types.Callable[_P, types.AsyncGenerator[_T, None]], ]: """A decorator wrapper for aio_generator_timeout_detector.""" def _timeout_detector_decorator( generator: types.Callable[_P, types.AsyncGenerator[_T, None]], ) -> types.Callable[_P, types.AsyncGenerator[_T, None]]: """The decorator itself.""" @functools.wraps(generator) def wrapper( *args: _P.args, **kwargs: _P.kwargs, ) -> types.AsyncGenerator[_T, None]: return aio_generator_timeout_detector( generator(*args, **kwargs), timeout, total_timeout, on_timeout, **on_timeout_kwargs, ) return wrapper return _timeout_detector_decorator python-utils-3.9.1/python_utils/types.py000066400000000000000000000100001472121421700204520ustar00rootroot00000000000000""" This module provides type definitions and utility functions for type hinting. It includes: - Shorthand for commonly used types such as Optional and Union. - Type aliases for various data structures and common types. 
- Importing all types from the `typing` and `typing_extensions` modules. - Importing specific types from the `types` module. The module also configures Pyright to ignore wildcard import warnings. """ # pyright: reportWildcardImportFromLibrary=false # ruff: noqa: F405 import datetime import decimal from re import Match, Pattern from types import * # pragma: no cover # noqa: F403 from typing import * # pragma: no cover # noqa: F403 # import * does not import these in all Python versions # Quickhand for optional because it gets so much use. If only Python had # support for an optional type shorthand such as `SomeType?` instead of # `Optional[SomeType]`. # Since the Union operator is only supported for Python 3.10, we'll create a # shorthand for it. from typing import ( IO, BinaryIO, Optional as O, # noqa: N817 TextIO, Union as U, # noqa: N817 ) from typing_extensions import * # type: ignore[no-redef,assignment] # noqa: F403 Scope = Dict[str, Any] OptionalScope = O[Scope] Number = U[int, float] DecimalNumber = U[Number, decimal.Decimal] ExceptionType = Type[Exception] ExceptionsType = U[Tuple[ExceptionType, ...], ExceptionType] StringTypes = U[str, bytes] delta_type = U[datetime.timedelta, int, float] timestamp_type = U[ datetime.timedelta, datetime.date, datetime.datetime, str, int, float, None, ] __all__ = [ 'IO', 'TYPE_CHECKING', # ABCs (from collections.abc). 'AbstractSet', # The types from the typing module. # Super-special typing primitives. 'Annotated', 'Any', # One-off things. 'AnyStr', 'AsyncContextManager', 'AsyncGenerator', 'AsyncGeneratorType', 'AsyncIterable', 'AsyncIterator', 'Awaitable', # Other concrete types. 'BinaryIO', 'BuiltinFunctionType', 'BuiltinMethodType', 'ByteString', 'Callable', # Concrete collection types. 'ChainMap', 'ClassMethodDescriptorType', 'ClassVar', 'CodeType', 'Collection', 'Concatenate', 'Container', 'ContextManager', 'Coroutine', 'CoroutineType', 'Counter', 'DecimalNumber', 'DefaultDict', 'Deque', 'Dict', 'DynamicClassAttribute', 'Final', 'ForwardRef', 'FrameType', 'FrozenSet', # Types from the `types` module. 'FunctionType', 'Generator', 'GeneratorType', 'Generic', 'GetSetDescriptorType', 'Hashable', 'ItemsView', 'Iterable', 'Iterator', 'KeysView', 'LambdaType', 'List', 'Literal', 'Mapping', 'MappingProxyType', 'MappingView', 'Match', 'MemberDescriptorType', 'MethodDescriptorType', 'MethodType', 'MethodWrapperType', 'ModuleType', 'MutableMapping', 'MutableSequence', 'MutableSet', 'NamedTuple', # Not really a type. 'NewType', 'NoReturn', 'Number', 'Optional', 'OptionalScope', 'OrderedDict', 'ParamSpec', 'ParamSpecArgs', 'ParamSpecKwargs', 'Pattern', 'Protocol', # Structural checks, a.k.a. protocols. 'Reversible', 'Sequence', 'Set', 'SimpleNamespace', 'Sized', 'SupportsAbs', 'SupportsBytes', 'SupportsComplex', 'SupportsFloat', 'SupportsIndex', 'SupportsIndex', 'SupportsInt', 'SupportsRound', 'Text', 'TextIO', 'TracebackType', 'TracebackType', 'Tuple', 'Type', 'TypeAlias', 'TypeGuard', 'TypeVar', 'TypedDict', # Not really a type. 'Union', 'ValuesView', 'WrapperDescriptorType', 'cast', 'coroutine', 'delta_type', 'final', 'get_args', 'get_origin', 'get_type_hints', 'is_typeddict', 'new_class', 'no_type_check', 'no_type_check_decorator', 'overload', 'prepare_class', 'resolve_bases', 'runtime_checkable', 'timestamp_type', ] python-utils-3.9.1/requirements.txt000066400000000000000000000000021472121421700174600ustar00rootroot00000000000000. 
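# The lone '.' above tells pip to install the package itself from the
# repository root.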
python-utils-3.9.1/ruff.toml000066400000000000000000000063301472121421700160450ustar00rootroot00000000000000# We keep the ruff configuration separate so it can easily be shared across
# all projects

target-version = 'py39'

exclude = [
    '.venv',
    '.tox',
    # Ignore local test files/directories/old-stuff
    'test.py',
    '*_old.py',
]

line-length = 79

[lint]
ignore = [
    'A001', # Variable {name} is shadowing a Python builtin
    'A002', # Argument {name} is shadowing a Python builtin
    'A003', # Class attribute {name} is shadowing a Python builtin
    'B023', # function-uses-loop-variable
    'B024', # `FormatWidgetMixin` is an abstract base class, but it has no abstract methods
    'D205', # blank-line-after-summary
    'D212', # multi-line-summary-first-line
    'RET505', # Unnecessary `else` after `return` statement
    'TRY003', # Avoid specifying long messages outside the exception class
    'RET507', # Unnecessary `elif` after `continue` statement
    'C405', # Unnecessary {obj_type} literal (rewrite as a set literal)
    'C406', # Unnecessary {obj_type} literal (rewrite as a dict literal)
    'C408', # Unnecessary {obj_type} call (rewrite as a literal)
    'SIM114', # Combine `if` branches using logical `or` operator
    'RET506', # Unnecessary `else` after `raise` statement
    'Q001', # Bad quotes in multi-line string
    'Q002', # Bad quotes in docstring
    'FA100', # Missing `from __future__ import annotations`, but uses `typing.Optional`
    'COM812', # Missing trailing comma
    'ISC001', # Implicitly concatenated string literals on one line
    'SIM108', # Ternary operators are not always more readable
    'RUF100', # Unused noqa directives. Due to multiple Python versions, we need to keep them
]

select = [
    'A', # flake8-builtins
    'ASYNC', # flake8 async checker
    'B', # flake8-bugbear
    'C4', # flake8-comprehensions
    'C90', # mccabe
    'COM', # flake8-commas
    # Requires docstrings for all public methods; a few rules are ignored above
    'D', # pydocstyle
    'E', # pycodestyle error ('W' for warning)
    'F', # pyflakes
    'FA', # flake8-future-annotations
    'I', # isort
    'ICN', # flake8-import-conventions
    'INP', # flake8-no-pep420
    'ISC', # flake8-implicit-str-concat
    'N', # pep8-naming
    'NPY', # NumPy-specific rules
    'PERF', # perflint
    'PIE', # flake8-pie
    'Q', # flake8-quotes
    'RET', # flake8-return
    'RUF', # Ruff-specific rules
    'SIM', # flake8-simplify
    'T20', # flake8-print
    'TD', # flake8-todos
    'TRY', # tryceratops
    'UP', # pyupgrade
]

[lint.per-file-ignores]
'*tests/*' = ['INP001', 'T201', 'T203', 'ASYNC109', 'B007']
'examples.py' = ['T201', 'N806']
'docs/conf.py' = ['E501', 'INP001']
'docs/_theme/flask_theme_support.py' = ['RUF012', 'INP001']
'*/types.py' = ['F405']

[lint.pydocstyle]
convention = 'google'
ignore-decorators = [
    'typing.overload',
    'typing.override',
]

[lint.isort]
case-sensitive = true
combine-as-imports = true
force-wrap-aliases = true

[lint.flake8-quotes]
docstring-quotes = 'single'
inline-quotes = 'single'
multiline-quotes = 'single'

[format]
line-ending = 'lf'
indent-style = 'space'
quote-style = 'single'
docstring-code-format = true
skip-magic-trailing-comma = false
exclude = [
    '__init__.py',
]

[lint.pycodestyle]
max-line-length = 79

[lint.flake8-pytest-style]
mark-parentheses = true
python-utils-3.9.1/setup.cfg000066400000000000000000000011061472121421700160230ustar00rootroot00000000000000[aliases]
test=pytest

[metadata]
description_file = README.rst

[nosetests]
verbosity=3
with-doctest=1
with-coverage=1
cover-package=python_utils
cover-min-percentage=100
detailed-errors=1
debug=nose.loader
pdb=1
# pdb-failures=1

[build_sphinx]
source-dir = docs/
build-dir = docs/_build
all_files = 1

[upload_sphinx]
upload-dir = docs/_build/html

[upload]
sign = 1

[flake8]
per-file-ignores =
    python_utils/types.py: F403,F405
ignore = W391, W503, E741, E203, F811
exclude = docs

[mypy]
files = python_utils, _python_utils_tests
python-utils-3.9.1/setup.py000066400000000000000000000042011472121421700157130ustar00rootroot00000000000000"""
Setup script for the python-utils package.

This script uses setuptools to package the python-utils library. It reads
metadata from the `python_utils/__about__.py` file and the `README.rst` file
to populate the package information. The script also defines the package
requirements and optional dependencies for different use cases such as
logging, documentation, and testing.
"""

import pathlib

import setuptools

# pyright: reportUnknownMemberType=false

# To prevent importing `__about__` (and thereby breaking the coverage info)
# we use this exec hack.
about: dict[str, str] = {}
with open('python_utils/__about__.py') as fp:
    exec(fp.read(), about)

_readme_path = pathlib.Path(__file__).parent / 'README.rst'
if _readme_path.exists() and _readme_path.is_file():
    long_description = _readme_path.read_text()
else:
    long_description = 'See http://pypi.python.org/pypi/python-utils/'

if __name__ == '__main__':
    setuptools.setup(
        python_requires='>=3.9.0',
        name='python-utils',
        version=about['__version__'],
        author=about['__author__'],
        author_email=about['__author_email__'],
        description=about['__description__'],
        url=about['__url__'],
        license='BSD',
        packages=setuptools.find_packages(
            exclude=['_python_utils_tests', '*.__pycache__'],
        ),
        package_data={'python_utils': ['py.typed']},
        long_description=long_description,
        install_requires=['typing_extensions>3.10.0.2'],
        extras_require={
            'loguru': [
                'loguru',
            ],
            'docs': [
                'mock',
                'sphinx',
                'python-utils',
            ],
            'tests': [
                'ruff',
                'pyright',
                'pytest',
                'pytest-cov',
                'pytest-mypy',
                'pytest-asyncio',
                'sphinx',
                'types-setuptools',
                'loguru',
                'loguru-mypy',
                'mypy-ipython',
                'blessings',
            ],
        },
        classifiers=['License :: OSI Approved :: BSD License'],
    )
python-utils-3.9.1/tox.ini000066400000000000000000000030151472121421700155160ustar00rootroot00000000000000[tox]
envlist = ruff, black, pypy3, py39, py310, py311, py312, py313, docs, mypy, pyright
skip_missing_interpreters = True

[testenv]
basepython =
    py39: python3.9
    py310: python3.10
    py311: python3.11
    py312: python3.12
    py313: python3.13
    pypy3: pypy3

setenv =
    PY_IGNORE_IMPORTMISMATCH=1

deps =
    mypy
    pyright
    -r{toxinidir}/_python_utils_tests/requirements.txt

commands =
    mypy
    pyright
    py.test --basetemp="{envtmpdir}" --confcutdir=..
        {posargs} python_utils _python_utils_tests

[testenv:ruff]
basepython = python3
deps = ruff
commands = ruff check {toxinidir}/setup.py {toxinidir}/_python_utils_tests {toxinidir}/python_utils

[testenv:black]
basepython = python3
deps = black
commands = black --skip-string-normalization --line-length 79 {toxinidir}/setup.py {toxinidir}/_python_utils_tests {toxinidir}/python_utils

[testenv:pyright]
basepython = python3
deps =
    pyright
    -r{toxinidir}/_python_utils_tests/requirements.txt
commands = pyright {posargs}

[testenv:mypy]
basepython = python3
deps =
    -r{toxinidir}/_python_utils_tests/requirements.txt
commands = mypy {posargs}

[testenv:docs]
changedir =
basepython = python3
deps = -r{toxinidir}/docs/requirements.txt
allowlist_externals =
    rm
    mkdir
commands =
    rm -f docs/project_name.rst
    rm -f docs/modules.rst
    mkdir -p docs/_static
    sphinx-apidoc -o docs/ python_utils
    rm -f docs/modules.rst
    sphinx-build -W -b html -d docs/_build/doctrees docs docs/_build/html {posargs}
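A minimal usage sketch of the `aio_generator_timeout_detector_decorator`
defined earlier in this archive. It assumes the decorator is re-exported at
the `python_utils` package level (otherwise import it from its defining
module); the `slow_numbers` generator and the chosen timeouts are purely
illustrative:

import asyncio

from python_utils import aio_generator_timeout_detector_decorator


@aio_generator_timeout_detector_decorator(timeout=0.5, on_timeout=None)
async def slow_numbers():
    for i in range(3):
        # The second item takes longer than the 0.5s per-item timeout, so
        # with `on_timeout=None` the wrapped generator silently stops after
        # the first item instead of raising asyncio.TimeoutError.
        await asyncio.sleep(0.1 if i == 0 else 1)
        yield i


async def main():
    async for number in slow_numbers():
        print(number)  # only prints 0


asyncio.run(main())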