pax_global_header00006660000000000000000000000064150424257300014514gustar00rootroot0000000000000052 comment=8cb467ea54c68368eaf34deef1a6cc38c41218a2 PyrateLimiter-3.9.0/000077500000000000000000000000001504242573000143175ustar00rootroot00000000000000PyrateLimiter-3.9.0/.github/000077500000000000000000000000001504242573000156575ustar00rootroot00000000000000PyrateLimiter-3.9.0/.github/workflows/000077500000000000000000000000001504242573000177145ustar00rootroot00000000000000PyrateLimiter-3.9.0/.github/workflows/poetry-package.yml000066400000000000000000000056751504242573000233670ustar00rootroot00000000000000name: Python package on: push: branches: - master pull_request: branches: - master env: LATEST_PY_VERSION: '3.13' REDIS: 'redis://localhost:6379' jobs: check: runs-on: ubuntu-latest services: pyrate_redis: image: bitnami/redis:latest env: ALLOW_EMPTY_PASSWORD: yes ports: - 6379:6379 # Set health checks to wait until redis has started options: >- --health-cmd "redis-cli ping" --health-interval 10s --health-timeout 5s --health-retries 5 pyrate_postgres: image: bitnami/postgresql env: ALLOW_EMPTY_PASSWORD: yes POSTGRESQL_PASSWORD: postgres POSTGRESQL_MAX_CONNECTIONS: 1000 ports: - 5432:5432 strategy: fail-fast: true matrix: python-version: ["3.8", "3.13"] steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} allow-prereleases: true - uses: snok/install-poetry@v1.4.1 with: virtualenvs-in-project: true version: 1.8.5 # Cache packages per python version, and reuse until lockfile changes # TODO: For some strange reason, virtualenvs restored from the cache will sometimes be # missing a python interpreter. Just disabling the cache for now. 
# - name: Cache python packages # id: cache # uses: actions/cache@v3 # with: # path: .venv # key: venv-${{ matrix.python-version }}-latest-${{ hashFiles('poetry.lock') }} - name: Install dependencies # if: steps.cache.outputs.cache-hit != 'true' run: poetry install -v -E all # Run linting (latest python version only) - name: Lint if: ${{ matrix.python-version == env.LATEST_PY_VERSION }} run: | source $VENV nox -e lint # Run tests and coverage report (all python versions) - name: Test and Coverage run: | source $VENV nox -e cover # Latest python version: send coverage report to codecov - name: "Upload coverage report to Codecov" if: ${{ matrix.python-version == env.LATEST_PY_VERSION }} uses: codecov/codecov-action@v2 publish: needs: check runs-on: ubuntu-latest if: github.ref == 'refs/heads/master' steps: - uses: actions/checkout@v4 - name: Set up Python ${{ env.LATEST_PY_VERSION }} uses: actions/setup-python@v5 with: python-version: ${{ env.LATEST_PY_VERSION }} - uses: snok/install-poetry@v1.4.1 with: virtualenvs-in-project: true version: 1.8.5 - name: Install dependencies run: poetry install -v -E all - name: Publish run: | poetry config http-basic.pypi ${{ secrets.PYPI_USERNAME }} ${{ secrets.PYPI_PASSWORD }} poetry build poetry publish PyrateLimiter-3.9.0/.gitignore000066400000000000000000000003041504242573000163040ustar00rootroot00000000000000.coverage .coverage.* pyrate_limiter.egg-info/ __pycache__/ dist/ docs/_build/ docs/modules/ htmlcov/ env/ test-reports/ .vim/ .vscode/ .idea/ .coveralls.yml .DS_Store .venv xdg_cache *\.sqlite* PyrateLimiter-3.9.0/.pre-commit-config.yaml000066400000000000000000000012701504242573000206000ustar00rootroot00000000000000repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.5.0 hooks: - id: check-yaml - id: end-of-file-fixer - id: trailing-whitespace - repo: https://github.com/PyCQA/flake8 rev: 7.0.0 hooks: - id: flake8 - repo: https://github.com/pre-commit/mirrors-mypy rev: v1.14.0 hooks: - id: mypy 
additional_dependencies: [types-filelock, types-redis] - repo: https://github.com/pre-commit/pygrep-hooks rev: v1.10.0 hooks: - id: python-no-eval - id: python-use-type-annotations - repo: https://github.com/asottile/reorder_python_imports rev: v3.14.0 hooks: - id: reorder-python-imports PyrateLimiter-3.9.0/.python-version000066400000000000000000000000041504242573000173160ustar00rootroot000000000000003.9 PyrateLimiter-3.9.0/.readthedocs.yml000066400000000000000000000003701504242573000174050ustar00rootroot00000000000000# Read the Docs configuration file version: 2 build: os: ubuntu-22.04 tools: python: "3.8" sphinx: builder: html configuration: docs/conf.py python: install: - method: pip path: . extra_requirements: - docs PyrateLimiter-3.9.0/CHANGELOG.md000066400000000000000000000122131504242573000161270ustar00rootroot00000000000000# Change Log All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). 
## [3.9.0] * Introduce MultiProcessBucket * Updte documentation to include MultiProcessBucket * Add delay configure * Simplify lock interface for SQLFileLock & MultiProcessBucket ## [3.8.1] * Keep Retrying until Max Delay Has Expired * Postgres performance turning * Fix cursor leaks on SQLiteBucket ## [3.8.0] * Add FileLock option for SQLiteBucket ## [3.7.1] * Update package metadata and local dev config to support python 3.13 ## [3.7.0] * Add method to remove bucket ## [3.6.2] * Fix table creation for SQLiteBucket ## [3.6.1] * Support creating/getting bucket asynchronously ## [3.6.0] * Use psycopg3 for PostgresBucket ## [3.5.1] * Fix dependencies for "all" package extra ## [3.5.0] * Add PostgresBucket backend ## [3.4.1] * Fix: unnecessary warning during async check ## [3.4.0] * Improved in-memory-bucket performance ## [3.3.0] * Fix background task for leaking ## [3.2.1] - 2024-02-13 * Fix Redis CROSSSLOT Keys following issue [#126](https://github.com/vutran1710/PyrateLimiter/issues/126) ## [3.1.1] - 2024-01-02 * Fix broken SqliteBucket following issue [#132](https://github.com/vutran1710/PyrateLimiter/issues/132) ## [3.1.0] - 2023-08-28 * Allow to pass rates directly to Limiter to use default ImMemoryBucket with Limiter * Allow to pass *Duration* to `max_delay` argument of Limiter ## [3.0.2] - 2023-08-28 * Critical bug fix: importing redis fail crashing apps ## [3.0.0] - 2023-08-28 Third major release with API breaking changes: - Drop python 3.7 (only python ^3.8) - Bucket must be initialized before passing to Limiter - Auto leaking (provided by BucketFactory) - Decorator API changes - Limiter workable with both async/sync out-of-the-box - Async RedisBucket built-in - Contextmanager not available yet ## [2.10.0] - 2023-02-26 ### Updates * Add change log to sdist * Improve test coverage * Force check some bucket-keyword arguments ## [2.9.1] - 2023-02-26 ### Fixed * Fix unit test to make test results stable * Fix remaining-time calculation using exact 3 decimals 
only * Increase test intesity to ensure correctness ## [2.8.5] - TBD ### Fixed * Fix SQLite OperationalError when getting more items than SQLite variable limit ## [2.8.4] - 2022-11-23 ### Fixed * Build both `wheel` and `sdist` on publish ## [2.8.3] - 2022-10-17 ### Added * Add option to expire redis key when using RedisBucket ## [2.8.2] - 2022-09-24 ### Removed * Python 3.6 support ## [2.8.1] - 2022-04-11 ### Added * Add Sphinx config * Add documentation site: https://pyrate-limiter.readthedocs.io * Add some missing type hints * Add package metadata to indicate PEP-561 compliance ## [2.8.0] - 2022-04-10 ### Added * Add `flush()` method to all bucket classes ## [2.7.0] - 2022-04-06 ### Added * Add `FileLockSQliteBucket` for a SQLite backend with file-based locking * Add optional backend dependencies to package metadata ## [2.6.3] - 2022-04-05 ### Fixed * Make SQLite bucket thread-safe and multiprocess-safe ## [2.6.2] - 2022-03-30 ### Fixed * Remove development scripts from package published on PyPI ### Added * Add `nox` to run development scripts ## [2.6.1] - 2022-03-30 ### Updated * Replace all formatting/linting tools with *pre-commit* ## [2.6.0] - 2021-12-08 ### Added * Add `SQliteBucket` to persist rate limit data in a SQLite database ## [2.5.0] - 2021-12-08 ### Added * Custom time source ## [2.4.6] - 2021-09-30 * Add `RedisClusterBucket` to support using `PyrateLimiter` with `redis-py-cluster` * Update README, add Table of Content ## [2.3.6] - 2021-09-23 * Run CI tests for all supported python versions * Fix issue with deployments on Travis CI ## [2.3.5] - 2021-09-22 ### Added * Use `time.monotonic()` instead of `time.time()` * Support for floating point rate-limiting delays (more granular than 1 second) ## [2.3.4] - 2021-06-01 ### Fixed * Bucket group initialization ## [2.3.3] - 2021-05-08 ### Added * Support for python 3.6 ## [2.3.2] - 2021-05-06 ### Fixed * Incorrect type hint ## [2.3.1] - 2021-04-26 ### Added * LICENSE file to be included in PyPI package 
### Fixed * Incorrect delay time when using using `Limiter.ratelimit()` with `delay=True` ## [2.3.0] - 2021-03-01 ### Added * Support for using `Limiter.ratelimit()` as a contextmanager or async contextmanager * Separate `LimitContextDecorator` class to handle `Limiter.ratelimit()` behavior * Package published on conda-forge ## [2.2.2] - 2021-03-03 ### Changed * Internal: Reduce cognitive complexity ## [2.2.1] - 2021-03-02 ### Fixed * Incorrect check log against time-window ## [2.2.0] - 2021-02-26 ### Added * `Limiter.ratelimit()` method, an async-compatible decorator that optionally adds rate-limiting delays ## [2.1.0] - 2021-02-21 ## [2.0.3] - 2020-06-01 ## [2.0.2] - 2020-06-01 ## [2.0.1] - 2020-06-01 ## [2.0.0] - 2019-12-29 ## [1.1.0] - 2019-12-17 ### Removed - Code duplication ### Added - Thread lock for Bucket's state modification in case of Multi-threading - Html Cover Report ### Fixed - LocalBucket's default init value being mutated - Typos. A lot of friggin' typos. PyrateLimiter-3.9.0/CONTRIBUTING.md000066400000000000000000000035331504242573000165540ustar00rootroot00000000000000# Contributing Guide Here are some basic instructions for local development setup and contributing to the project. ## Setup & Commands To setup local development, *Poetry* and *Python 3.7+* are required. Python can be installed using *Pyenv* or normal installation from binary source. To install *poetry*, follow the official guideline (https://python-poetry.org/docs/#installation). 
Then, in the repository directory, run the following to install all optional backend dependencies and dev dependencies: ```shell $ poetry install -E all ``` Some shortcuts are included for some common development tasks, using [nox](https://nox.thea.codes): - Run tests with: `nox -e test` - To run tests with coverage: `nox -e cover` - Format & check for lint error: `nox -e lint` - To run linting for every commit, run: `pre-commit install` ## Documentation Documentation is generated using [Sphinx](https://www.sphinx-doc.org) and published on readthedocs.io. To build this documentation locally: ``` poetry install -E docs nox -e docs ``` ## Guideline & Notes We have GitHub Action CICD to do the checking, testing and publishing work. So, there are few small notes when making Pull Request: - All existing tests must pass (Of course!) - Reduction in *Coverage* shall result in failure. (below 98% is not accepted) - When you are making bug fixes, or adding more features, remember to bump the version number in **pyproject.toml**. The number should follow *semantic-versioning* rules ## TODO Planned features: * A rate limit may reset on a fixed schedule, eg: every first-day of a month * Sometimes, we may need to apply specific rate-limiting strategies based on schedules/region or some other metrics. It requires the capability to switch the strategies instantly without re-deploying the whole service. 
* Reference: https://www.keycdn.com/support/rate-limiting#types-of-rate-limits PyrateLimiter-3.9.0/LICENSE000066400000000000000000000020531504242573000153240ustar00rootroot00000000000000MIT License Copyright (c) 2021 vutran1710 Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. PyrateLimiter-3.9.0/README.md000066400000000000000000000641071504242573000156060ustar00rootroot00000000000000 # PyrateLimiter The request rate limiter using Leaky-bucket Algorithm. Full project documentation can be found at [pyratelimiter.readthedocs.io](https://pyratelimiter.readthedocs.io). 
[![PyPI version](https://badge.fury.io/py/pyrate-limiter.svg)](https://badge.fury.io/py/pyrate-limiter) [![PyPI - Python Versions](https://img.shields.io/pypi/pyversions/pyrate-limiter)](https://pypi.org/project/pyrate-limiter) [![codecov](https://codecov.io/gh/vutran1710/PyrateLimiter/branch/master/graph/badge.svg?token=E0Q0YBSINS)](https://codecov.io/gh/vutran1710/PyrateLimiter) [![Maintenance](https://img.shields.io/badge/Maintained%3F-yes-green.svg)](https://github.com/vutran1710/PyrateLimiter/graphs/commit-activity) [![PyPI license](https://img.shields.io/pypi/l/ansicolortags.svg)](https://pypi.python.org/pypi/pyrate-limiter/)
## Contents - [Features](#features) - [Installation](#installation) - [Quickstart](#quickstart) - [limiter_factory](#limiter_factory) - [Examples](#examples) - [Basic usage](#basic-usage) - [Key concepts](#key-concepts) - [Defining rate limits & buckets](#defining-rate-limits-and-buckets) - [Defining clock & routing logic](#defining-clock--routing-logic-with-bucketfactory) - [Wrapping all up with Limiter](#wrapping-all-up-with-limiter) - [asyncio and event loops](#asyncio-and-event-loops) - [Decorators](#as_decorator-use-limiter-as-decorator) - [Limiter API](#limiter-api) - [Weight](#weight) - [Handling exceeded limits](#handling-exceeded-limits) - [Bucket analogy](#bucket-analogy) - [Rate limit exceptions](#rate-limit-exceptions) - [Rate limit delays](#rate-limit-delays) - [Backends](#backends) - [InMemoryBucket](#inmemorybucket) - [MultiprocessBucket](#multiprocessbucket) - [SQLiteBucket](#sqlitebucket) - [RedisBucket](#redisbucket) - [PostgresBucket](#postgresbucket) - [BucketAsyncWrapper](#bucketasyncwrapper) - [Async or Sync?](#async-or-sync) - [Advanced Usage](#advanced-usage) - [Component-level Diagram](#component-level-diagram) - [Time sources](#time-sources) - [Leaking](#leaking) - [Concurrency](#concurrency) - [Custom backend](#custom-backend) ## Features - Supports unlimited rate limits and custom intervals. - Separately tracks limits for different services or resources. - Manages limit breaches by raising exceptions or applying delays. - Offers multiple usage modes: direct calls or decorators. - Fully compatible with both synchronous and asynchronous workflows. - Provides SQLite and Redis backends for persistent limit tracking across threads or restarts. - Includes MultiprocessBucket and SQLite File Lock backends for multiprocessing environments. 
## Installation **PyrateLimiter** supports **python ^3.8** Install using pip: ``` pip install pyrate-limiter ``` Or using conda: ``` conda install --channel conda-forge pyrate-limiter ``` ## Quickstart To limit 5 requests within 2 seconds and raise an exception when the limit is exceeded: ```python from pyrate_limiter import Duration, Rate, Limiter, BucketFullException limiter = Limiter(Rate(5, Duration.SECOND * 2)) for i in range(6): try: limiter.try_acquire(i) except BucketFullException as err: print(err, err.meta_info) ``` ## limiter_factory [limiter_factory.py](pyrate_limiter.limiter_factory.py) provides several functions to simplify common cases: - create_sqlite_limiter(rate_per_duration: int, duration: Duration, ...) - create_inmemory_limiter(rate_per_duration: int, duration: Duration, ...) - + more to be added... ## Examples - Rate limiting asyncio tasks: [asyncio_ratelimit.py](examples/asyncio_ratelimit.py) - Rate limiting asyncio tasks w/ a decorator: [asyncio_decorator.py](examples/asyncio_decorator.py) - HTTPX rate limiting - asyncio, single process and multiprocess examples [httpx_ratelimiter.py](examples/httpx_ratelimiter.py) - Multiprocessing using an in-memory rate limiter - [in_memory_multiprocess.py](examples/in_memory_multiprocess.py) - Multiprocessing using SQLite and a file lock - this can be used for distributed processes not created within a multiprocessing [sql_filelock_multiprocess.py](examples/sql_filelock_multiprocess.py) ## Basic Usage ### Key concepts #### Clock - Timestamps incoming items #### Bucket - Stores items with timestamps. - Functions as a FIFO queue. - Can `leak` to remove outdated items. #### BucketFactory - Manages buckets and clocks, routing items to their appropriate buckets. - Schedules periodic `leak` operations to prevent overflow. - Allows custom logic for routing, conditions, and timing. #### Limiter - Provides a simple, intuitive API by abstracting underlying logic. - Seamlessly supports both sync and async contexts. 
- Offers multiple interaction modes: direct calls, decorators, and (future) context managers. - Ensures thread-safety via RLock, and if needed, asyncio concurrency via asyncio.Lock ### Defining rate limits and buckets For example, an API (like LinkedIn or GitHub) might have these rate limits: ``` - 500 requests per hour - 1000 requests per day - 10000 requests per month ``` You can define these rates using the `Rate` class. `Rate` class has 2 properties only: **limit** and **interval** ```python from pyrate_limiter import Duration, Rate hourly_rate = Rate(500, Duration.HOUR) # 500 requests per hour daily_rate = Rate(1000, Duration.DAY) # 1000 requests per day monthly_rate = Rate(10000, Duration.WEEK * 4) # 10000 requests per month rates = [hourly_rate, daily_rate, monthly_rate] ``` Rates must be properly ordered: - Rates' intervals & limits must be ordered from least to greatest - Rates' ratio of **limit/interval** must be ordered from greatest to least Buckets validate rates during initialization. If using a custom implementation, use the built-in validator: ```python from pyrate_limiter import validate_rate_list assert validate_rate_list(my_rates) ``` Then, add the rates to the bucket of your choices ```python from pyrate_limiter import InMemoryBucket, RedisBucket basic_bucket = InMemoryBucket(rates) # Or, using redis from redis import Redis redis_connection = Redis(host='localhost') redis_bucket = RedisBucket.init(rates, redis_connection, "my-bucket-name") # Async Redis would work too! from redis.asyncio import Redis redis_connection = Redis(host='localhost') redis_bucket = await RedisBucket.init(rates, redis_connection, "my-bucket-name") ``` If you only need a single Bucket for everything, and python's built-in `time()` is enough for you, then pass the bucket to Limiter then ready to roll! 
```python from pyrate_limiter import Limiter # Limiter constructor accepts single bucket as the only parameter, # the rest are 3 optional parameters with default values as following # Limiter(bucket, clock=TimeClock(), raise_when_fail=True, max_delay=None) limiter = Limiter(bucket) # Limiter is now ready to work! limiter.try_acquire("hello world") ``` If you want to have finer grain control with routing & clocks etc, then you should use `BucketFactory`. ### Defining Clock & routing logic with BucketFactory When multiple bucket types are needed and items must be routed based on certain conditions, use `BucketFactory`. First, define your clock (time source). Most use cases work with the built-in clocks: ```python from pyrate_limiter.clock import TimeClock, MonotonicClock, SQLiteClock base_clock = TimeClock() ``` PyrateLimiter does not assume routing logic, so you implement a custom BucketFactory. At a minimum, these two methods must be defined: ```python from pyrate_limiter import BucketFactory from pyrate_limiter import AbstractBucket class MyBucketFactory(BucketFactory): # You can use constructor here, # nor it requires to make bucket-factory work! def wrap_item(self, name: str, weight: int = 1) -> RateItem: """Time-stamping item, return a RateItem""" now = clock.now() return RateItem(name, now, weight=weight) def get(self, _item: RateItem) -> AbstractBucket: """For simplicity's sake, all items route to the same, single bucket""" return bucket ``` ### Creating buckets dynamically If more than one bucket is needed, the bucket-routing logic should go to BucketFactory `get(..)` method. When creating buckets dynamically, it is needed to schedule leak for each newly created buckets. To support this, BucketFactory comes with a predefined method call `self.create(..)`. 
It is meant to create the bucket and schedule that bucket for leaking using the Factory's clock ```python def create( self, clock: AbstractClock, bucket_class: Type[AbstractBucket], *args, **kwargs, ) -> AbstractBucket: """Creating a bucket dynamically""" bucket = bucket_class(*args, **kwargs) self.schedule_leak(bucket, clock) return bucket ``` By utilizing this, we can modify the code as following: ```python class MultiBucketFactory(BucketFactory): def __init__(self, clock): self.clock = clock self.buckets = {} def wrap_item(self, name: str, weight: int = 1) -> RateItem: """Time-stamping item, return a RateItem""" now = clock.now() return RateItem(name, now, weight=weight) def get(self, item: RateItem) -> AbstractBucket: if item.name not in self.buckets: # Use `self.create(..)` method to both initialize new bucket and calling `schedule_leak` on that bucket # We can create different buckets with different types/classes here as well new_bucket = self.create(YourBucketClass, *your-arguments, **your-keyword-arguments) self.buckets.update({item.name: new_bucket}) return self.buckets[item.name] ``` ### Wrapping all up with Limiter Pass your bucket-factory to Limiter, and ready to roll! ```python from pyrate_limiter import Limiter limiter = Limiter( bucket_factory, raise_when_fail=False, # Default = True max_delay=1000, # Default = None ) item = "the-earth" limiter.try_acquire(item) heavy_item = "the-sun" limiter.try_acquire(heavy_item, weight=10000) ``` ### asyncio and event loops To ensure the event loop isn't blocked, use `try_acquire_async` with an **async bucket**, which leverages `asyncio.Lock` for concurrency control. If your bucket isn't async, wrap it with `BucketAsyncWrapper`. 
This ensures `asyncio.sleep` is used instead of `time.sleep`, preventing event loop blocking: ```python await limiter.try_acquire_async(item) ``` Example: [asyncio_ratelimit.py](examples/asyncio_ratelimit.py) #### `as_decorator()`: use limiter as decorator `Limiter` can be used as a decorator, but you must provide a `mapping` function that maps the wrapped function's arguments to `limiter.try_acquire` arguments (either a `str` or a `(str, int)` tuple). The decorator works with both synchronous and asynchronous functions: ```python decorator = limiter.as_decorator() def mapping(*args, **kwargs): return "demo", 1 @decorator(mapping) def handle_something(*args, **kwargs): """function logic""" @decorator(mapping) async def handle_something_async(*args, **kwargs): """function logic""" ``` Async Example: ```python my_beautiful_decorator = limiter.as_decorator() def mapping(some_number: int): return str(some_number) @my_beautiful_decorator(mapping) def request_function(some_number: int): requests.get('https://example.com') # Async would work too! @my_beautiful_decorator(mapping) async def async_request_function(some_number: int): requests.get('https://example.com') ``` For full example see [asyncio_decorator.py](examples/asyncio_decorator.py) ### Limiter API #### `bucket()`: get list of all active buckets Return list of all active buckets with `limiter.buckets()` #### `dispose(bucket: int | BucketObject)`: dispose/remove/delete the given bucket Method signature: ```python def dispose(self, bucket: Union[int, AbstractBucket]) -> bool: """Dispose/Remove a specific bucket, using bucket-id or bucket object as param """ ``` Example of usage: ```python active_buckets = limiter.buckets() assert len(active_buckets) > 0 bucket_to_remove = active_buckets[0] assert limiter.dispose(bucket_to_remove) ``` If a bucket is found and get deleted, calling this method will return **True**, otherwise **False**. 
If there is no more buckets in the limiter's bucket-factory, all the leaking tasks will be stopped. ### Weight Item can have weight. By default item's weight = 1, but you can modify the weight before passing to `limiter.try_acquire`. Item with weight W > 1 when consumed will be multiplied to (W) items with the same timestamp and weight = 1. Example with a big item with weight W=5, when put to bucket, it will be divided to 5 items with weight=1 + following names ``` BigItem(weight=5, name="item", timestamp=100) => [ item(weight=1, name="item", timestamp=100), item(weight=1, name="item", timestamp=100), item(weight=1, name="item", timestamp=100), item(weight=1, name="item", timestamp=100), item(weight=1, name="item", timestamp=100), ] ``` Yet, putting this big, heavy item into bucket is expected to be transactional & atomic - meaning either all 5 items will be consumed or none of them will. This is made possible as bucket `put(item)` always check for available space before ingesting. All of the Bucket's implementations provided by **PyrateLimiter** follows this rule. Any additional, custom implementation of Bucket are expected to behave alike - as we have unit tests to cover the case. See [Advanced usage options](#advanced-usage) below for more details. ### Handling exceeded limits When a rate limit is exceeded, you have two options: raise an exception, or add delays. #### Bucket analogy At this point it's useful to introduce the analogy of "buckets" used for rate-limiting. Here is a quick summary: - This library implements the [Leaky Bucket algorithm](https://en.wikipedia.org/wiki/Leaky_bucket). - It is named after the idea of representing some kind of fixed capacity -- like a network or service -- as a bucket. - The bucket "leaks" at a constant rate. For web services, this represents the **ideal or permitted request rate**. - The bucket is "filled" at an intermittent, unpredicatble rate, representing the **actual rate of requests**. 
- When the bucket is "full", it will overflow, representing **canceled or delayed requests**. - Item can have weight. Consuming a single item with weight W > 1 is the same as consuming W smaller, unit items - each with weight=1, with the same timestamp and maybe same name (depending on however user choose to implement it) #### Rate limit exceptions By default, a `BucketFullException` will be raised when a rate limit is exceeded. The error contains a `meta_info` attribute with the following information: - `name`: The name of item it received - `weight`: The weight of item it received - `rate`: The specific rate that has been exceeded Here's an example that will raise an exception on the 4th request: ```python rate = Rate(3, Duration.SECOND) bucket = InMemoryBucket([rate]) clock = TimeClock() class MyBucketFactory(BucketFactory): def wrap_item(self, name: str, weight: int = 1) -> RateItem: """Time-stamping item, return a RateItem""" now = clock.now() return RateItem(name, now, weight=weight) def get(self, _item: RateItem) -> AbstractBucket: """For simplicity's sake, all items route to the same, single bucket""" return bucket limiter = Limiter(MyBucketFactory()) for _ in range(4): try: limiter.try_acquire('item', weight=2) except BucketFullException as err: print(err) # Output: Bucket with Rate 3/1.0s is already full print(err.meta_info) # Output: {'name': 'item', 'weight': 2, 'rate': '3/1.0s', 'error': 'Bucket with Rate 3/1.0s is already full'} ``` The rate part of the output is constructed as: `limit / interval`. On the above example, the limit is 3 and the interval is 1, hence the `Rate 3/1`. #### Rate limit delays You may want to simply slow down your requests to stay within the rate limits instead of canceling them. In that case you pass the `max_delay` argument the maximum value of delay (typically in _ms_ when use human-clock). ```python limiter = Limiter(factory, max_delay=500) # Allow to delay up to 500ms ``` Limiter has a default buffer_ms of 50ms. 
This means that when waiting, an additional 50ms will be added per step. As `max_delay` has been passed as a numeric value, when ingesting item, limiter will: - First, try to ingest such item using the routed bucket - If it fails to put item into the bucket, it will call `wait(item)` on the bucket to see how much time remains until the bucket can consume the item again? - Comparing the `wait` value to the `max_delay`. - if `max_delay` >= `wait`: delay (wait + buffer_ms as latency-tolerance) using either `asyncio.sleep` or `time.sleep` until the bucket can consume again - if `max_delay` < `wait`: it raises `LimiterDelayException` if Limiter's `raise_when_fail=True`, otherwise silently fail and return False Example: ```python from pyrate_limiter import LimiterDelayException for _ in range(4): try: limiter.try_acquire('item', weight=2, max_delay=200) except LimiterDelayException as err: print(err) # Output: # Actual delay exceeded allowance: actual=500, allowed=200 # Bucket for 'item' with Rate 3/1.0s is already full print(err.meta_info) # Output: {'name': 'item', 'weight': 2, 'rate': '3/1.0s', 'max_delay': 200, 'actual_delay': 500} ``` ### Backends A few different bucket backends are available: - **InMemoryBucket**: using python built-in list as bucket - **MultiprocessBucket**: uses a multiprocessing lock for distributed concurrency with a ListProxy as the bucket - **RedisBucket**, using err... redis, with both async/sync support - **PostgresBucket**, using `psycopg2` - **SQLiteBucket**, using sqlite3 - **BucketAsyncWrapper**: wraps an existing bucket with async interfaces, to avoid blocking the event loop #### InMemoryBucket The default bucket is stored in memory, using python `list` ```python from pyrate_limiter import InMemoryBucket, Rate, Duration rates = [Rate(5, Duration.MINUTE * 2)] bucket = InMemoryBucket(rates) ``` This bucket only availabe in `sync` mode. The only constructor argument is `List[Rate]`. 
#### MultiprocessBucket MultiprocessBucket uses a ListProxy to store items within a python multiprocessing pool or ProcessPoolExecutor. Concurrency is enforced via a multiprocessing Lock. The bucket is shared across instances. An example is provided in [in_memory_multiprocess](examples/in_memory_multiprocess.py) Whenever multiprocessing, bucket.waiting calculations will be often wrong because of the concurrency involved. Set Limiter.retry_until_max_delay=True so that the item keeps retrying rather than returning False when contention causes an extra delay. #### RedisBucket RedisBucket uses `Sorted-Set` to store items with key being item's name and score item's timestamp Because it is intended to work with both async & sync, we provide a classmethod `init` for it ```python from pyrate_limiter import RedisBucket, Rate, Duration # Using synchronous redis from redis import ConnectionPool from redis import Redis rates = [Rate(5, Duration.MINUTE * 2)] pool = ConnectionPool.from_url("redis://localhost:6379") redis_db = Redis(connection_pool=pool) bucket_key = "bucket-key" bucket = RedisBucket.init(rates, redis_db, bucket_key) # Using asynchronous redis from redis.asyncio import ConnectionPool as AsyncConnectionPool from redis.asyncio import Redis as AsyncRedis pool = AsyncConnectionPool.from_url("redis://localhost:6379") redis_db = AsyncRedis(connection_pool=pool) bucket_key = "bucket-key" bucket = await RedisBucket.init(rates, redis_db, bucket_key) ``` The API are the same, regardless of sync/async. If AsyncRedis is being used, calling `await bucket.method_name(args)` would just work! #### SQLiteBucket If you need to persist the bucket state, a SQLite backend is available. The SQLite bucket works in sync manner. 
Manully create a connection to Sqlite and pass it along with the table name to the bucket class: ```python from pyrate_limiter import SQLiteBucket, Rate, Duration import sqlite3 rates = [Rate(5, Duration.MINUTE * 2)] bucket = SQLiteBucket.init_from_file(rates) ``` ```py from pyrate_limiter import Rate, Limiter, Duration, SQLiteBucket requests_per_minute = 5 rate = Rate(requests_per_minute, Duration.MINUTE) bucket = SQLiteBucket.init_from_file([rate], use_file_lock=False) # set use_file_lock to True if using across multiple processes limiter = Limiter(bucket, raise_when_fail=False, max_delay=max_delay) ``` You can also pass custom arguments to the `init_from_file` following its signature: ```python class SQLiteBucket(AbstractBucket): @classmethod def init_from_file( cls, rates: List[Rate], table: str = "rate_bucket", db_path: Optional[str] = None, create_new_table = True, use_file_lock: bool = False ) -> "SQLiteBucket": ... ``` Options: - `db_path`: If not provided, uses `tempdir / "pyrate-limiter.sqlite"` - `use_file_lock`: Should be False for single process workloads. For multi process, uses a [filelock](https://pypi.org/project/filelock/) to ensure single access to the SQLite bucket across multiple processes, allowing multi process rate limiting on a single host. Example: [limiter_factory.py::create_sqlite_limiter()](pyrate_limiter/limiter_factory.py) #### PostgresBucket Postgres is supported, but you have to install `psycopg[pool]` either as an extra or as a separate package. The PostgresBucket currently does not support async. You can use Postgres's built-in **CURRENT_TIMESTAMP** as the time source with `PostgresClock`, or use an external custom time source. 
```python from pyrate_limiter import PostgresBucket, Rate, PostgresClock from psycopg_pool import ConnectionPool connection_pool = ConnectionPool('postgresql://postgres:postgres@localhost:5432') clock = PostgresClock(connection_pool) rates = [Rate(3, 1000), Rate(4, 1500)] bucket = PostgresBucket(connection_pool, "my-bucket-table", rates) ``` #### BucketAsyncWrapper The BucketAsyncWrapper wraps a sync bucket to ensure all its methods return awaitables. This allows the Limiter to detect asynchronous behavior and use asyncio.sleep() instead of time.sleep() during delay handling, preventing blocking of the asyncio event loop. Example: [limiter_factory.py::create_inmemory_limiter()](pyrate_limiter/limiter_factory.py) ### Async or Sync or Multiprocessing The Limiter is basically made of a Clock backend and a Bucket backend. The backends may be async or sync, which determines the Limiters internal behavior, regardless of whether the caller enters via a sync or async function. try_acquire_async: When calling from an async context, use try_acquire_async. This uses a thread-local asyncio lock to ensure only one asyncio task is acquiring, followed by a global RLock so that only one thread is acquiring. try_acquire: When called directly, the global RLock enforces only one thread at a time. Multiprocessing: If using MultiprocessBucket, two locks are used in Limiter: a top level multiprocessing lock, then a thread level RLock ## Advanced Usage ### Component level diagram ![](https://raw.githubusercontent.com/vutran1710/PyrateLimiter/master/docs/_static/components.jpg) ### Time sources Time source can be anything from anywhere: be it python's built-in time, or monotonic clock, sqliteclock, or crawling from world time server(well we don't have that, but you can!). 
```python from pyrate_limiter import TimeClock # use python' time.time() from pyrate_limiter import MonotonicClock # use python time.monotonic() ``` Clock's abstract interface only requires implementing a method `now() -> int`. And it can be both sync or async. ### Leaking Typically bucket should not hold items forever. Bucket's abstract interface requires its implementation must be provided with `leak(current_timestamp: Optional[int] = None)`. The `leak` method when called is expected to remove any items considered outdated at that moment. During Limiter lifetime, all the buckets' `leak` should be called periodically. **BucketFactory** provide a method called `schedule_leak` to help deal with this matter. Basically, it will run as a background task for all the buckets currently in use, with interval between `leak` call by **default is 10 seconds**. ```python # Runnning a background task (whether it is sync/async - doesnt matter) # calling the bucket's leak factory.schedule_leak(bucket, clock) ``` You can change this calling interval by overriding BucketFactory's `leak_interval` property. This interval is in **miliseconds**. ```python class MyBucketFactory(BucketFactory): def __init__(self, *args): self.leak_interval = 300 ``` When dealing with leak using BucketFactory, the author's suggestion is, we can be pythonic about this by implementing a constructor ```python class MyBucketFactory(BucketFactory): def constructor(self, clock, buckets): self.clock = clock self.buckets = buckets for bucket in buckets: self.schedule_leak(bucket, clock) ``` ### Concurrency Generally, Lock is provided at Limiter's level, except SQLiteBucket case. ### Custom backends If these don't suit your needs, you can also create your own bucket backend by implementing `pyrate_limiter.AbstractBucket` class. One of **PyrateLimiter** design goals is powerful extensibility and maximum ease of development. 
It must be not only be a ready-to-use tool, but also a guide-line, or a framework that help implementing new features/bucket free of the most hassles. Due to the composition nature of the library, it is possbile to write minimum code and validate the result: - Fork the repo - Implement your bucket with `pyrate_limiter.AbstractBucket` - Add your own `create_bucket` method in `tests/conftest.py` and pass it to the `create_bucket` fixture - Run the test suite to validate the result If the tests pass through, then you are just good to go with your new, fancy bucket! PyrateLimiter-3.9.0/benchmarks/000077500000000000000000000000001504242573000164345ustar00rootroot00000000000000PyrateLimiter-3.9.0/benchmarks/stress_limiters.py000066400000000000000000000174241504242573000222510ustar00rootroot00000000000000import logging from concurrent.futures import ProcessPoolExecutor from concurrent.futures import ThreadPoolExecutor from concurrent.futures import wait from dataclasses import dataclass from functools import partial from time import perf_counter from typing import Callable from typing import cast from typing import Literal from pyrate_limiter import Duration from pyrate_limiter import Limiter from pyrate_limiter import limiter_factory from pyrate_limiter import MonotonicClock from pyrate_limiter import MultiprocessBucket from pyrate_limiter import Rate logger = logging.getLogger(__name__) BUFFER_MS: int = 1 # reduce the buffer to improve measurement TEST_DURATION_SEC: int = 1 # time per test PREFILL: bool = True @dataclass class TestResult: label: str requests_per_second: int test_duration_seconds: int duration: float num_requests: int percent_from_expected_duration: float def create_mp_limiter(max_delay: int, bucket: MultiprocessBucket): limiter = Limiter(bucket, raise_when_fail=False, clock=MonotonicClock(), retry_until_max_delay=True, max_delay=max_delay, buffer_ms=BUFFER_MS) return limiter def create_rate_limiter_factory( requests_per_second: int, max_delay_seconds: 
int, backend: Literal["default", "sqlite", "sqlite_filelock", "mp_limiter"], ) -> Callable[[], Limiter]: """Returns a callable, so it can be used with multiprocessing""" max_delay = max_delay_seconds * 1000 # should never wait for more than 60 seconds rate = Rate(requests_per_second, Duration.SECOND) if backend == "default": limiter = limiter_factory.create_inmemory_limiter(rate_per_duration=requests_per_second, duration=Duration.SECOND, max_delay=max_delay, buffer_ms=BUFFER_MS) return lambda: limiter elif backend == "sqlite": limiter = limiter_factory.create_sqlite_limiter(rate_per_duration=requests_per_second, use_file_lock=False, max_delay=max_delay, buffer_ms=BUFFER_MS, db_path="pyrate_limiter.sqlite") return lambda: limiter elif backend == "sqlite_filelock": return partial( limiter_factory.create_sqlite_limiter, rate_per_duration=requests_per_second, duration=Duration.SECOND, use_file_lock=True, max_delay=max_delay, buffer_ms=BUFFER_MS, db_path="pyrate_limiter.sqlite" ) elif backend == "mp_limiter": bucket = MultiprocessBucket.init([rate]) return partial( create_mp_limiter, max_delay=max_delay, bucket=bucket ) else: raise ValueError(f"Unexpected backend option: {backend}") def task(): assert limiter_factory.LIMITER is not None, "Limiter not initialized" try: while not limiter_factory.LIMITER.try_acquire("task"): # Keep trying pass except Exception as e: logger.exception(e) def limiter_init(limiter_creator: Callable[[], Limiter]): limiter_factory.LIMITER = limiter_creator() def test_rate_limiter( limiter_creator: Callable[[], Limiter], num_requests: int, use_process_pool: bool, ): start = perf_counter() if use_process_pool: logger.info("Using ProcessPoolExecutor") with ProcessPoolExecutor( initializer=partial(limiter_init, limiter_creator) if limiter_creator is not None else None ) as executor: if PREFILL: # Pre-load the buckets, after processes created limiter = limiter_creator() [limiter.try_acquire("task") for i in range(requests_per_second)] futures = 
[executor.submit(task) for _ in range(num_requests)] wait(futures) else: with ThreadPoolExecutor() as executor: if PREFILL: # Pre-load the buckets, after threads created limiter = limiter_creator() [limiter.try_acquire("task") for i in range(requests_per_second)] limiter = limiter_creator() limiter_factory.LIMITER = limiter futures = [executor.submit(task) for _ in range(num_requests)] wait(futures) for f in futures: try: f.result() except Exception as e: logger.exception(f"Task raised: {e}") end = perf_counter() return end - start def run_test_limiter( limiter_creator: Callable, label: str, requests_per_second: int, test_duration_seconds: int, use_process_pool: bool = False, ): num_requests = ( test_duration_seconds * requests_per_second ) # should finish in around 20 seconds duration = test_rate_limiter( limiter_creator=limiter_creator, num_requests=num_requests, use_process_pool=use_process_pool ) percent_from_expected_duration = ( abs(duration) - test_duration_seconds ) / test_duration_seconds return TestResult( label=label, requests_per_second=requests_per_second, test_duration_seconds=test_duration_seconds, duration=duration, num_requests=num_requests, percent_from_expected_duration=percent_from_expected_duration, ) if __name__ == "__main__": import pandas as pd import plotly.express as px requests_per_second_list = [10, 100, 1000, 2500, 5000] test_duration_seconds = TEST_DURATION_SEC test_results = [] logging.basicConfig( format="%(asctime)s %(name)s %(levelname)-8s %(message)s", level=logging.INFO, datefmt="%Y-%m-%d %H:%M:%S", ) for backend in ["default", "sqlite", "mp_limiter"]: backend = cast(Literal["default", "sqlite", "sqlite_filelock", "mp_limiter"], backend) for requests_per_second in requests_per_second_list: logger.info(f"Testing with {backend=}, {requests_per_second=}") limiter_creator = create_rate_limiter_factory( requests_per_second, max_delay_seconds=60, backend=backend ) result = run_test_limiter( limiter_creator=limiter_creator, 
label="Threads: " + backend, requests_per_second=requests_per_second, test_duration_seconds=test_duration_seconds, ) test_results.append(result) logger.info("Testing Multiprocessing") for backend in ["sqlite_filelock", "mp_limiter"]: backend = cast(Literal["default", "sqlite", "sqlite_filelock", "mp_limiter"], backend) for requests_per_second in requests_per_second_list: logger.info(f"Testing with {backend=}, {requests_per_second=}") limiter_creator = create_rate_limiter_factory( requests_per_second, max_delay_seconds=60, backend=backend ) result = run_test_limiter( limiter_creator=limiter_creator, label="Processes: " + backend, requests_per_second=requests_per_second, test_duration_seconds=test_duration_seconds, use_process_pool=True, ) test_results.append(result) results_df = pd.DataFrame(test_results).sort_values(by="requests_per_second") results_df["requests_per_second"] = results_df["requests_per_second"].astype(str) fig = px.line( results_df, x="requests_per_second", y="duration", color="label", markers=True ) fig.write_html("chart.html") logger.info("Output written to chart.html") PyrateLimiter-3.9.0/docker-compose.yaml000066400000000000000000000013701504242573000201160ustar00rootroot00000000000000services: redis-master: image: bitnami/redis:latest ports: - "6379:6379" environment: - ALLOW_EMPTY_PASSWORD=yes - REDIS_REPLICATION_MODE=master - REDIS_REPLICA_PASSWORD="" networks: - pyrate-bay redis-slave: image: bitnami/redis:latest ports: - "6380:6379" environment: - ALLOW_EMPTY_PASSWORD=yes - REDIS_MASTER_HOST=redis-master - REDIS_REPLICATION_MODE=slave - REDIS_MASTER_PASSWORD="" networks: - pyrate-bay postgres: image: bitnami/postgresql ports: - "5432:5432" environment: - POSTGRESQL_PASSWORD=postgres - POSTGRESQL_MAX_CONNECTIONS=1000 networks: - pyrate-bay networks: pyrate-bay: driver: bridge 
PyrateLimiter-3.9.0/docs/000077500000000000000000000000001504242573000152475ustar00rootroot00000000000000PyrateLimiter-3.9.0/docs/_static/000077500000000000000000000000001504242573000166755ustar00rootroot00000000000000PyrateLimiter-3.9.0/docs/_static/components.jpg000066400000000000000000007757231504242573000216110ustar00rootroot00000000000000JFIFICC_PROFILE0mntrRGB XYZ acsp- desc$rXYZgXYZ(bXYZ<wtptPrTRCd(gTRCd(bTRCd(cprt4z5VV5}1[,2f%G&SRC%JLb&=Np;!l1S7-?e~;EXы)SE>F/览o[?L4} g@>;EX!l1S7-?ecS4bxETGOi_Oы)SE>F/览o[?L4} g@>;EX!l1S7-?ecS4bxETGOi_Oы)SE>F/览o[?L4} g@>;EX!l1S7-?ecS4bxETGOi_Oы)SE>F/览o[?L4} g@>;EX!l1S7-?ecS4bxETGOi_Oы)SE>F/览o[?L4} g@>;EX!l1S7-?ecS4bxETGOi_Oы)SE>F/览o[?L4} g@>;EX!l1S7-?ecS4bxETGOi_Oы)SE>F/览o[?L4} g@>;EX!l1S7-?ecS4bxETGOi_Oы)SE>F/览o[?L4} g@>;EX!l1fBmWj KV4W#Z{7*?q h.ߢƓ1&Dդy+kE|IiF5+DD6?'=@>y/)Q"kćK)/#?ܙ$.$>YOoĩ!|/}VO@?ٸɘ̣ٻQV{`K:H?aO{IĀ}_;SE׉S_G?*3H\H}2=SɟC_>?`Ğ{7*?q1Gw£7Xʭ(t7o8@~x%4E`rvgsN{3~XN5Z™.׊uD$oD{MQS9z=ɵz11|H @c2fGn2f3(nxTf+U΀?'oq dy@5C%JLba,xhTrg>F+q'~jޣiM Or,*U:Fؕ^gw{K`R)}BY-bs=TլLDtO%Y>.%~nePMMщ<=M2TL5cZE:+rϓQH?~2${QV{V:̎ڍkXdWd۸gr|vS{3NNoFφ^tK`+uW:^o o|zՊe^OrVOXs<~\-W8V]dbi쨨Vy4 +{mSȭX<7E_j l̋^35DkDiÎrɑSvnwDΜۤEӻ~WM:h:7x oRYYU/iSW#SdtoZjn"* {\阢tלBTj+UWů rurɺ."OMM.lMQ\nTvEk$Dm[;IU4[HyGw+ɬ9&.pmuY)ߒV*{S2Ղ}'̰౷NEUS&`?ٸɘ̣ٻQV{`K:H?aO{IĀ}_;SE׉S_G?*3H\H}2=SɟC_>?`ĞyO-?|S O WT ӟ'/Vec>N_{G"xP]?j5jեYW--e?q'+_{U:*/ u9ћxV:lK}QϚ^T/V\QrKս֚V4+WStRg-o@}5YvITqc^]0!E|z&sQaYOSiY\%lIF97]n96!߁eN%ͻ.Fp>L(OY1|Yri殤4SӤڱs9|Rqs^k4CiZH2&'kYw,*&dkTk(cfT%]\yX]"9Sf9sLuEbb"q:q:˒~zTKUgW9\"*H؉<}NĪ!K +[+y\m>LK=ujȜT2mѽLjo~U.ȱ)}c9 v)*ިrɺt^ފPsl>8 ;p#Eׅ9kr>UeU V偓ȝR*#M^Mux+^1-eG+}lʝs"QʈTD]۾0%<SMv6M+h.^ډI:ȉU\FU9SM6>LyvM5uh;f}E4ܥ{Ҫr,_X[ \܎/dP/#X'u2t%{h*hڇ'^Vڽ6mR:udwSK+'ΎFਨo2% UQnSO 4j=(/HJjmo{rt#foU"0!Z՝im"}ON׀^Yf<"b9LDTNsUWWUj3,q/g LNiTUl13ힻ/UUSp9=~ְ\mul9Tljv_5횝ig6_>h{mWvϚY`{mWvϚ=kU|X=kUw~GlVw~Gl6_>ig6_>h{mWvϚY`{mWvϚ=kU|X=kUw~GlVw~Gl6_>ig6_>h{mWvϚY?U8u gѷy۬mGI?*3mBoEfC?؇0}X~= Z .H<q 
IP>N_{G"N|EY\~!i/AOs<7Od7#M֟yFnfeT`qP>O|Tkg-o@}KXxQwMxwYS;d<;,۩mwW2*duQ8#&Or]%dOZv3b,OTixfX壝ߍjeUZ'­d[~Uȉ} Bg9cxUSնVʛs?bUW:s3Qi1[\ӞGM{ieWiQ:b1SOw[SѨ,WW۴}Sj;?Q(>-+44z2xU^t ?jk $,9+aV6NyU_Tɞ9}:x&c8ODڟUT{Քw+{m=wsiWڽN?k?o|/~/ _T/g{.Oa޽}'[h%xl*+Ҩ/̼#*ÝSo._S2yMȸYqLݰ/t.pyj>I8<`xIĤ|ߟby_Jj@OrZ+. ߻xa{N!=G+UswU_jfнCW~-uDD哧g1\sU1戝= Ϭ]*.t6Yu-;=#N ? |f]䖜^֍ZtY܇њ~Wu_ڸܸeL/$EO@߿8\󦉟E:ťZ]hAT,4D}9_sdM +bnzSUUN3f^d fQLe e`*Aoi2g؏}qGm{j#|π+m{j#|^گ4 ^گ4{׶;g,{׶;gqK>qGm{j#|π+m{j#|^گ4 ^گ4{׶;g,{׶;gqK>qGm{j#|π+m{j#|^گ4 ^گ4{׶;g,{׶;gqK>qGm{j#|π+m{j#|^گ4 ^گ4{׶;g,{׶;gqK>qGm{j#|π+m{j#|^گ4 ^گ4{׶;g,{׶;gqK>qGm{j#|π+m{j#|^گ4 ^گ4{׶;g,{׶;gqK>qGm{j#|π+m{j#|^گ4 ^گ4{׶;g,{׶;gqK>qGm{j#|π+m{j#|^گ4 ^گ4{׶;g,{׶;gqK>qGm{j#|π+m{j#|^گ4 ^گ4{׶;g,{׶;gqK>qGm{j#|π+m{j#|^گ4 ^گ4{׶;g,{׶;gqK>qGm{j#|π+m{j#|^گ4 ^گ4{׶;g,{׶;gqK>qGm{j#|π+m{j#|^گ4 ^گ4{׶;g,{׶;gqK>qGm{j#|π+m{j#|^گ4 ^گ4{׶;g,{׶;gqK>qGm{j#|π+m{j#|^گ4 ^گ4UWff(p_AB$NY襸*f<34߭8 ~yOG@M'W -Sp~I@#!~y7"3RSɟCm7}/5*?ܙ8υI)B?wA>ﻏ@LJJ5tTE 6;膳@s(j2*']5FR8;tAtC?zs5wf:y]{kTu9ћx{b [֟yFnA3ZEVn󫖚WZB<}P~ Y'*bwWD96!߁eN}n ka_ԩמ8'2)c<{pOebSY s3Qi1ӣQ|zI/U+*v@>:sDDq|}t?T\ ؤ7ӓT35iA0\$+S!^f7q4b"+j33Ӯ'Mz]i/Z{ynTLV?TQܱ^!tU֊ɨFj-ӧvm_yA.n鵿OVN܂>iV15עuTx5l9&)D-uӻX ʡf;G_?5uL6y,]%]Ήha.qIu8Ɠ3<ԉs𮯡RKpVAIKU<q;vȉɚ6x`Tk\*k *Iړ6mޯjckd_XoD OQM?Tuvk ϰDꊊ3Tѿ$kwswju3Ok/?JB>˨s++s3ML~O~^k_ jt ߇?Mse!3(nxTf&c2fGn2?[Q"hg؏h#žo)d[sdž}ƛ|N}ϸ~-E=_;P=7d-_4EM'W ˀ phJ%O& _MԨTrg>F+q'88~~V5Dmڵݬ|r|{M NWv::DLr=Bw/3c [/p%|MgMG [] {m/?ÿu7Z)o5XLKY{U[h?.ͷ6nކd:f.o\4"9qDߗM4]Ef}wz?4wmZm{sW_֊I#Xئ'h߉܆~`-}^w?~>^K [|DbU:D<&&=J?[Ӵm4^=Cov_[m܁Ͷ6mSc0LN&>m1Z5Ji{{eO?y9칽T}l80W13O <(?^KU [-tvԗJJxnnVw}yQti5_ƃU뾛{7wv];9mO~_g N/nQ33b#_*Q S꼊oG_v=Ȼ '6Ŷ;@5eͱ5bo\MgHQpCۨ.U֊ *ȟE<7&cQQJgM|jr(r˄ S j{Z#.21ȫfy <9af곧Vϟbl~bԹ"0&E P56TQ_YU C~SLOOa򣥊 (7lLw^VɿCLβj-s^(1[Yf)޻9G*DǵFWKNO'΄Wm{[Vos>mTZ&n9U؜l߾5z,MiaLzvϞju}$VD_Y&K G]S㢞i鯠Ѫ鮗E7lmvΩ{;WfsZH=UUʌj{6DDDCj^srnުjx3=^x\.}WUKS rs]˿&o!'iơc>W]CmȻ˹w\pJm]6+kLƓru䦀*̣ٻQ?ٸUoGb<[Ѣg؏}oϳo֜ZB9c>MӀy@T?߁|7d-_4 .;7}/5*?ܙ6!~y7"3RSɟC_>?`Ğ{7*?q1Gw£7Xʭ(43G}Sz4] x7ޏ^9c>MӋHU'>xgiZpQ"/( 2O[" eG|BoEfC?؆/o&_FjT?*3q 
#_8c2fGn2f3(nxTf+U&}oJoFb<[KU'>xgiZqi ُ 7N߀_;SESp~IKdT?߁|,_MԨTrg5s^AjYYQ i\JTDND{ CJȳ 4`-M{^LFcU [;ʿ'_>3̫u*_ϦWV [;ʿ'_>3̫u~>մV$2Ls*p_Ϧ5m վ3̫u;ʿ'_>![H5oLs*p$2{EicV [;ʿ'_>3̫u~>մV$2Ls*p_Ϧ5m վ3̫u;ʿ'_>![H5oLs*p$2{EicV [;ʿ'_>3̫u~>մV$2Ls*p_Ϧ5m վ3̫u;ʿ'_>![H5oLs*p$2{EicV [;ʿ'_>3̫u~>մV$2Ls*p_Ϧ5m վ3̫u;ʿ'_>![H5oLs*p$2{EicV [;ʿ'_>3̫u~>մV$2Ls*p_Ϧ5m վ3̫u;ʿ'_>![H5oLs*p$2{EicV [;ʿ'_>3̫u~>մV$2Ls*p_Ϧ5m վ3̫u;ʿ'_>![H5oLs*p$2{EicV [;ʿ'_>3̫u~>մV$2Ls*p_Ϧ5m վ3̫u;ʿ'_>![H5oLs*p$2{EicV [;ʿ'_>3̫u~>մV$2Ls*p_Ϧ5m&3(nxTfIe_>[=Z- UO$(UFwda6h-(4ΞTɡb<[ѧhf,vu[;u]<Ne("|H㍪˳Zꪪkv떢ܲzxюm.:Im}ӈױU 9N/EGI\p\Cqq[I\p\C̕/9vIMvOٳ螉Y7߯vہ$Xꎙ.1prZKwUQ7T^Q:MUMY jh"$oG5v]eN~[sdž}ƛ|N}ϸ~-E=_;P=7d-_4EM'W ˀqqdE_sjb}myo3۫\Y1DF^ՎUQNs1QM\v;0e~24QTG*;F薣kތi"#uR69jSOTUF#hYߗoIM^m[{+>SʏVXhIN棶'T/^UF{i^Y[|SZX\4sәUSwruU]b.xrӏ7ҫ4tme5]wNrwmdUC^,8XFj}>Yz5i ]FI=Rv{b̬HVԁ6*nt]LtٷwS+eԌviڭG.:*;u,t6Hiio,p1Ob5:"|C1 E?Jff*uOs(^6FfZujѻ;b7GnoʽԌ޾I"57(gz//TEXtjoTO :* K{\#̊rR'tέh=tu%eW'3v^y{V*lݗoJU<||Tٮ#~yuK9/NUzx"8_ڭs\{So?u۲󤂭ʼ'*/[ɿq$Yylw;F˅3Sw/*&Q^DoTr+6%OXYoT7tjO嚙;vr#{CU_k_>mxOewzc|:NtT*=TjYVU/w]U^vKO&FG1]TTTBbz*f<34߭8[sdž}ƛo/)Q"3-Yҽ:L1Zxo7j']ljznrf"]#Ǧsujn6JƱ\)͸i(}3zUO4?;9G ? 
Z?duEi2_bL]s Fӿv7nYNUSƇ'g?܏4?;9G )LpP:mA}S8~u+_ޣr?x,1Ç@]LpP:mA*zxc{YjsF6h'f)ޮ4byk<}i]Y[O$ÿO1C|ʩi,9+k`_6݈d0*2EwvܽP%f;:8}ɮގȎQ۵Ewh }#g;[c89V&Ɋb'37g]5Qc=f?EtMcI=KUM[MeDS=ݮky#UWJgUjzzbz6lXt|*f<34߭8[sdž}ƛo/)Q"O-`'CRs;r6':7=ߺު/F{A ݤ:ul`HgK:1Xw3܋6|3qM]&%=\TΗf4F+y6]Wb;EOWL{er7+4]vX8i*Tj< &O/WJ]g*<#syQ<6WTmfC\_S{5[U(^eՍ\Ǿ玾ZF*$JEFU˴ͦeyQޅ<\QEBT6@ZOE,v%m}k#NuTo${ukT^r5UpXD4i(t=MWP?fYʲȨU\]7vqow՘WS1ko}_wTN\鹮vtb(jc+ڹ_ɦg̶`>;_1p݂DzteHUޒlr?v-5m3w>ם:FWdُnwߢFd5Α|-*dWh&F؟WmS<.MՈF*nq9ӭW]jr2:%ZjQU)FJ\8wWG\LLzji&xs7ʁhFto-~Ќ5YUSXfC,:fm[&ju68_LR:Ri7?#D]CuoQd"0VcGo=Ⓢ:Zbdb#Ƶj헵oIUv]+vifyK5KC"|*Us'*2f/-D덏z~sĵHZ4ꡥ |#5k\ԗЍUWfXy-L'鰤;)վ*m(.IiW3w3'|}ڻU7r:WYLb|MʢDγ^p9x*nMms=G$sFɡG5]Ƚʋ⇣EݦNj($#{"Zt٩2&EjrƎE.oGegxm5jed߭x|lfaQ:^GNkۯ xk-]gvxU)DY@aKhHڊ ldS='sصy=-vۣ\d0x}رkyE13>yhj!'qAc&>S-9jS]z̬MX_9ӈ~-:,X*ZUTRȈuTߛ7s*tG;~G×9 W2MsnYDvTG;dD/]p،MuQM113L1N}}bmMb'Y腤h1JZB4}蘿?%-!cBoϳo֜ZB9c>MӀy@Yb'48ڮ{ܻ5N܀{" lKtE].߳ƪ[q;$kYuXhF=eB8-}ɮ:WM 6j8Mj=dk~nTIDw6NY Fjjvde5HܲkMʶw ""ΪQw^~k_[dGnsr?__&Ih^1]mvI[]~s~sQ)Q_ꋚ?,&x.alɯ:KK*&\֢r1!3nצ?6iϧTwgf(zSSIIpAQFkƦ֢tDDDDD?@|μefu3Si}\Ϩ#Z#jucyꪊJ`cq8 nTULOjJUza\ZEպEiLc;{n蜔 wDG򱨞to-,4!:2}JO/:.˲Qu7CE՝mlP1G] <$fLT]nUDks;TY):4zZFYE|mpR‚cɫ\iyܳ&4ܬ,G/,w&ʭnEvG*r,W6Ofȱۥ-rTOvNwܳ4gjqx*'={%nUnKuDxHYSo{)/څ<[mEv29ڙb˾r"f/V'\QE +<ӯbx3}&<"%ݏWOyIc]Y4s?6.ݎJ.D ;[r;tI(UڮEDN̉bSNԌ^D+Y7lT۞)ݷU7]ӡuY/='2IX7Dk*U96Ncwy/ X&.yfv'*dV5wvߪd Wv "y})Ӌ7 :ڝ u-MB:\#sGR" ZUU۳2O?upɯnIrF"$rJ\$mDjQRl]4QkLygS-ݜ\ץ}EV׵UZcQUW/MvBQNW]31>yk*UѧY)[GsgHG+5]щ}CGM\1>2 >yxck6WwjuN`ս3Z}%s.6jG͎HU9TCy1$規qU($J]SצTd+wtn^zU`-MEI#i妓7ک$nk{z4KSOj|Fښjd*#މ&zŕəTʛGMM|=q=N]UThxLѴG0mqLW\35T|s9bgz zOܮ}>ܠi䬻T}mjJ#w]U~ewE0 pQE]QIAUyzGNG3sUSg/E4zU[7U2Yr. 
c,Il{;m]  O'9]s\+H*onAÍeUj%YB]CJ{B5#cEU_EW񐘁>ƒ^q{.=\j;}\9),lG*M?F*ntk7q11 kMZ0k>'t󌁋-tI$+Ucٮ_AʨM 4cTu95CA&hҦU+.V6^Usymzj9%gG{ -W4鮓I[UrcQnƹcEkX Eu%UldoY)#UXQG"**omSZv_$PE͎U~}mU۱p`zj0 m%El,z#Z1?uESQ}CimUՖl@_G|@F)$ͭӦ= *WFG|2J[}tp~dUurlU^DM?j/BK>)]46nz9vbrv鿰|A99۫9Srˤ5O{헪s5zQ~E>ϪOEjޞg>9blOrj:s?i d]k~^7u^_EFyRttUeͳبQۢȎDw*=}qqxSWрs+L8o/hVJ>TjlJע)f6ZYr9* bUZ1v|HZ4GѮjDAʧ۫ѣI1ik#cUsdj'z!c_S؝kw&r"켱sQzlO&nj6dQj]zNol;}ӯ.ty(֛(<% jz>g+ynTd;+&jaOӾRn_X(E4-UVZ-=|ݛ;!,ʝuT%HVjKSz5뷚ӷw""5ʩ/r9:tq>b~OV/u vk.ʈvmsbUߓ}z$ߌVOndʭiY\mDۙU~UU+GcrɰqGMQ;&{_kRyJxc+_YfalQ+_.G7ezl6Towze­6o_U,OGIKW72TG+xc>[E[sjgzƏTrfa3lpcXV9nICNc]rSr˺UU!/(<iҖ>P/z.yeӥ45rA޷.qb2^Mz&/KHU&䥤>D9c>MӋHU'>xgiZpQ"/( S~UƆc[e+ C[2 *JW*zs˾{Eȵk3G2(֠^ݺmŹ}Elv'6gm5$w+zʪUUU{[n'gU\q̧OGTqe{V:kmv[OIINlqF舟Ȁ|]u\ku)F U~5Tw*9Hj*tgVboMx*nKES=ѹ:9ފ)j18wS55xi4.)lIe]U{G->sL6uUƽq=<%n5slVT|5B3G3|mk`ti2ڭ:rDŽ_4OgV0 /Grsp*Lɶ<2 J˝X^^./vٍ&iKW#V vWt{\9~U:cx=0+%nCkJΐ6f#؜.j+SU[mְrx݇bi@jJJf##6FޫU2@ 6a9]*KŞj&ʊuFiɖ4gez[N΂2DFs*w56joO>eֆdVRAxbSv̚7bwW9U)/tLĥcqۖv/iop3Wl]ڪݮNT*# zǮT{c1Ja9]梪UDE7ܟ)i Ki7f71xʪT]יݕd8-ԺtrH@iw;y9{TUDD]3I1Ydkƫ.NUCg̊Qg-ξ&QKG_Cfcjrl:Z SlS]l55;kg:hCrD} ,k+fshc[-rf 1 H=g3U]s4월F5[XY P㸤c==NUkڊr9Q^۳^eK]rz{3u=,tny5M+wED^c|7I.yE"Y1O}gZlwvT\r+79lP"c^kdb[::Zg|':#r!?{וvĞJlɣGEt6(_Je\5]##^"o"]ì-1t1.5D5R1wz"& ץ5׬K.uYIJW;UDwwSe ``tSYW#Y6lȭG"#r qRr~Wɭ`3p6W-L'鰅| 7i˼.*lN>cH@@Ny{s/)o+>KɣD)i b\\U'>xgiZqi ُ 7N߀_;SE 7ZH e* R"s;QllI9 nj ?FZgH/csw[Ѣ[vm_ڧz+d6k|Om3ߧ(#W-[w3«[ks&CUmΦ_ƎTۻ_EBd2/渻L]s9GbYnmSSK(L,O*-ȗu%禝"5WnqmXҘupgh@Fj5a,\ 7jFI*fr"{֢oE+ zsYavgڣVWuJ(3tEFVF?]nGNk[Wb2wTSU{+Nh-v'EukKs[j:XްOrh䤻S>(tHNT4^G'ɲUCnazU3I4NeޞS vi c{3eu-Rrz<U}ROh/86Ng5t8c9Uފev*zR\#e6*Mj*8U10bl͋@$ 릭Y43IMR+] ev̂#U| nnݮ(/iEcM:]Hzw2)#>D {'"s}&3\qε:;!,nTE`.4}kl["98DG+UQvTsS~Zor[/ڡ]pDGGzi\L%IT7hQݓrIcz訫>-pMn_5o. 
#XҥUEU#r$|U6$"&ՂՔ^i#3Gq]psB{?29@h_#@,xE3Md!~i<"~?j5L۬zW錗liRٝyَTEޝSblTؾ7jƨOi%Fȣk'D膩U}k~:nߞfGi]Wo-i.gPG7&5m{Pu TDUUDDꪠfXM4Ug Q2)ƪcdLa<>z+e{keC#Ws*t6lfQUU"əb4yU,-Gs+]2 ^W3L^d~WDxG7ej\.e gTo_L#cX^#٢{ت討jtP%ljPnI+&##j]Neφ * .cjIk";aTiu_LkjL2Pڻs^{m=jdF1^HvƹUP6 6KeP{iDwI$V4mEr$czz.FP,)s,Cj2e+5S7ev9mLV% pt.HۺcY6Nej*wdUU؏4O9̝Zwm0zhՑ/XSm8^y- YP_;UtNs#]D:MYՊC&SXp%tr{e=4^dW"\رӌح䦡e1آ ["C*Y dz֫7]z'S W2kYTH٢dj'FH"߲laf8<-wӽ4ƺ{}źܶoYnxnNk:zgH#r9DVtT9j6 4٩G/kNcޥ6[Cfm&6=ZOa 5?nwR3?ZpBï:md_o."sv^h~ʙj˅ߦ֫9kck_K誣yz*.Md gx*qi(괝cĻfʵS~tabtvl %H|ltFwDc\^w"t]1βLvv5bc%z/ӿDt3֭]Ÿv!^KlmnK?,@'frszuNgod\&*qTb(߈xF:ObUT|slSKUdߪ%fF&*_v?30~'4ӄ5ܫGNOdHhY"M4j*r쩷10˂p͒K#ls]溶FR#QeNY>m ݂iNHYl:,p_Izܝ"Ř)mzuim2{f{~evkY ecdeEtjzVDzo3M5=eG`f]'4_[?'rIg[U՛gX\#ǓO!,[Vi[7V#1mU|*> 3eEG9FnTTr*d:Y8~":jbW:'\gTߪo,O]ۮۯC>O UtjgEMj Ζ5JܒDֵ߲lHy14b꧄i\y4[`q_5džRTz{{|wK: IYOUQbrsmho:I .G\u]R*UwWjn\b*Yb'48ڮ{ܻ5N܅,}AQS2eX'>FOHj9SVy۫)=kT :IYMAKQ/<5z*>V/4f&* &kUD芲9뺮3dt_xw[<ٯ=|֐QN>9ފ۝%mVR*TDtܮtQI\X]̿okQTU:LWL=/NYtK ݼT2zlw-[7f'DowVwjHeRxVէ?K@cG;%X.j&/UU-i-SvUDLwO>cvt]P doiԸՒ-9ՌV-Kr#_kvTjt-&ce:}_-&uU7䑋~z5s7ʁhFto-~Ќ5YUSXfC,:f:JuFm,iPHVVΰ̋NbKTW'NmjTZ9USu~̯,˦?x?)/t|z%ƦDZ%nzh`nm|lxܩ+Ȫʛu8̫]09c+k DiQJ+[LTDݪ&KZL͡X>1m5d.ZW6=OUwDE<q^nz̲|V5%Cy^F+g5ʈnKxNܟm/ݻ9;7[:TTsѼtMCeӲ0'aPԍMM.Kۻt{U:jΔ{;>y8cʨ+&wJ,0eWnv#UVl5|PVndy4Fl6b=?I4x5n{}heu5Q;Db+bUEED{&O:;Ŷq뵏Mo-VTQUۦtg?2T{Sdr^xp)OAd@gCYICXs'6"ޜbJG~N&L}ȯgPcwVE\FƮVF#w&*KHOf} .W?Ǫ_IvK_x|-KjVũf$jy:3ʜ]=fVWPܳ |V.ȭWl٨Xʞ%%&TŚ+oW<K<-\NK%B>^TT)@u8M]*ϖ,xyYזDEcU;ڱn5V&yA>>JlLUKMNxfs#E9UQw*tWԻ&^ii2 &KI#`XkeNFn^Mqͩ5]۽ΊjԲ|q9zGlNgeD{U:8ϱb0Ng^mks$#Yg4v7Dopף|%iVozGSevL.z稝cs剪A5eQJM1j5hES䙴ך'rUZiVeco註(G45_rk(UӢ9=ܫۿMsɏ:ej;ITEOMyXTvgWgݛQS^*I M;jk*c$sڔuu͍e9vXRiN<Z# t-mgETDbNՒwd߯q!]z k殮trT+cyEۢ-]TO+E OS4M^#k|U'M/;Jm$~ExI-K(|mK9$NdݽO;+Tv{j8k㦷PK<*nۢ6'cr6z_Ņ_xc?*+D3l.|YtTT"LI ]+MUUQk+٦틮_9$wjZyӳ|SSʉHވʊʝQU:ެqÆe`ne<ךW5)*WW9QUrt:%xM#Z:Bo$O'4'VaBkf_,M\UV+Sd~ۿz.ۢ M؉_~yG4=׋!奩t};ױȎc+۪u9Sh甇0:PYfLz|mUTo:9ȞDuA_BqǕI;|eZFT#UUzym #Whw M/VzfUV6{|TTWz!*bɎW h-4(UԭKeSz:3EG QUYnlO} <תIm;Y:$zwxԚ#MW;SxJj>U\nV 
m42TVcsUTTT]i;5.\fAu4SSNGr+F*/+VER4ֻ_Z-fq:ۼ[O4$TTdDә]95+D Ӹ[̮#׬nlO=$DF/6o2+S³y!>ə*\jp}&-})%-=[혒1λWs*rw]vbř[I]Cr-[$G"j:f_/et{fEbs*zJoCWiSj\n4I,mCe#5z/jM JG$1\ټm'zTsU}-<`ekTk:go;yvnnm@4_tZ.g:"BsyG"Ȉ,jSUul^]A*;MTPvߺr/2r(gb29uYak-3ON:iQTTMػtEMΠk9~ ݆Ƒum}(^E\99QTj"Y̆CVYnEYIQɡ=QS#N|+r\~pQ=u?TJ{ލ\JDoڢ!%Q۩cCMO ycьb{Urܭֽ"[nM\NqGnnErw9Wx٤phn:WXEOt4fcoIUD]TU!5DӬCU*;{e|/Ib7gƨLMUm` k3*3Uq1MM'~mvt߽UMFr] YUS4ܞ)"3*ŏP/n"xv|dOoȸ8uȉ׏yloi8<5uD?ZtZ{LN%9O46SHHsQUoUjuEOط[j$D߻y۷f ZlS)x?VlvUr&Dk[ +|+S_Y4($#&.zV~d/nNM_|1HʜS| D|b˷[(w[OU*9xʿP|;64ST{!O'̪=1nj-Ɖ[wI3xig~5/"̢DD'm z+FJi;RZ{jw&vtu]J3n*o>4q7'm(jz1=^TZjtE_{u_iK`ǫM%NǾzjW9:*.6zON|zombzCkzAWrTNڥۤmTFDUW/"mE~"uuf`1{){nzUO4\ш$nUnʪ*Ɲv?JJ (jeDZܨ:Y+rvDN!!i~gzi##.Ujs5{"mzXS3WcL~; a:5OzzC,,;Ӽš** 3Е6J̛.oU7o&lDhF\)j]eZG%ƭҪ+ٺ1X"oEU5&eE4ckޫۤiW ˙;(3d]=5kƨƜ8rHi WU?*Jה= 1^J;ܳJx/ͶvBU~8*JL#VxInXuu)ֶiJެENةuY8j&bbiQ5Dy^O#;E<9(~Ԯ&WR7-Y+.lv=TӲF5]܈o]l Z[/u.-F eU~Fz QExg }E2Mկڽ5S}WEU$O*ULAYk$RRj(J3{gXy҉ֈ<TGE\ۈnz"S+{]/TT[Kz]5.jԊ;:ƕNMֳ}vdx3ssmY"Si檅򵭕&bnr7u~Ȫǒj4mT\᳷vʛgqϰYf#M7)z4uw)ovu>Eڃzƿri۬TAjmSं$^jswͲk.;Ϫ4O}E]eM4rò5NNmܻ9:LÏ}ȶjo>ǮƻQʏz"*{HѪUt6;&-{i jXG:I5{ݺr/]{gr8),͞3oDꦭ8LGuU%6~ZZsq~>sq⫊,׉-B$-rSE*Z_ΊU.ݣ>mu71Z7F hdUF䥁QdSrUݍT&85%kR軫UvߪȈ褛b09ejS|(fg\f9k]5܈No6dQdt)[s{SXO2[$n뻜RqY>q GS>%k sdVXlFT\#\x8Z%I[qAo3KS''7ꈩM [nOwkn\Kd_KIIUڲO$kMɲ.˴kdj26U4"UE"7xE5'ᑊZi]"|_cԍfɳ86ddoT=UUG1oMȨj*h>P,/=5y.Jf[vD[+ZkX"*u[0pKdl6 CAtkC,{ԫe|7h]+ګܝU ʳM3uss"XkU*?=m2 =x:&Y4QƺNsIE TU<tL#y:ac:zv]rD5*TS yáugBF WıDV_le:u[2:V>45vWmɘS̱)8F13 #^:m[ޖz@dy{s/)a_:S{/[:V1?}GމsrRoɣD)i0|N}ϸ~oϳo֜"l}7Nmuo:]% ¯4qOIo-KU7Mȝ:qXJz"s)&mώ~C46~ڴGޡU%jC.Ζ$W}SS%ipk}I˦XwGs%rNrIܭ^"*/ThZSް,NµW#ݍDv9VUcec+s4E4q:LDŽLΓC[rzquM3q%F䘮3]nET:v*F9W=juۡ`s< ӻR_3`s&pd-{ߕʜvMסC2hiX*+>O'2h7lżmr5vzNCf5.R_sq*kc(o4cXƎsZOt]U󍤿eWEF:i1M֥Ekחmz/Vn,;mWl7 )!oWf(B:G׸CQSnX^ȟ̭zQt.PUoo *s|pD(6zo}-O9b#I۹w^S"gUV*Ԭ$\究}]ڝUd}PlUD5_U_; =?#W&SA^t<㞪Y#lDۗdHQ8׋}J }_\qTOs噈T:4vE_Q/)ߔFbδzRmt<ל"eY7/IeM^9s*zhRUGKJp۽\WsZJu?d|,k5ʉ]MYئ58aVj9&*r\F<@ 
{9v\%IjetRs\ިoYCbEC%d.{#$N&*;Nm<}mENtYՖL2Xֶ)yQ[oQ[ȫ5GpaFMȉ,,86۵/͆\2kK[:"GTV5緙9tDS'\W>=Q+9W7KeWw,j!r;9w|_.ʛn~}ayfa,֪yJfݧktt="B߶^MnG~To{QTFWM6ij/&~9_e-2w![Vڈ9^Wg(1j'vb)^4|xO"']'Hɦ{)L1ˈj)QY9'NElѺDlW*I$oE,v_hu7+s^©F^梹Sw.˲'^>2ivg6 ݗ*}= XȠcrNGv$W >j Qlyjoj{ڎUN~3;Mw,jmU\"tv"|x3vSk.i3<.h\͂jeqFGZ$k}g67l"{Q uq-~fy'HETY˿~CɝWN5dEMVj6tBW#;Q:7d]Y)LNK%QV9m?#)̰67Qk1r:kL1HN\U8Ќ/ v+5mFIifg<}tvT_MS2= c8T:J9j]&U3zMEN?3<8'6o%}]{,(V5z޲WeWzK~5"=S+fْ1,&Iڵ>y]G^VYX.ɵ_NUquׯb&k}f.QmەP6Vl. %-D?̪l־.ebki/"[ӳY̾&&y1^V-Es6B(WKbMxҷ͝P/UTNx*[2È:OaE7k-8kiY=rK&@Ց5YQwG#Wa(_U ndx:6"Xq>DFtn8q]JrLv1m хޣiWK 1D_Ր쎊qǮE_QO ,L*1tt(o *s|pD 8H#"k\nytMڋʈES[i%g2W+ލDUzUZGv+u]w6',h+SmA/1P˟/X3/9E2]%ln4jzo6@JV\G~CJZ셍RXގ]ӿ@ ԗ(kӄ;֝4Ӌ]s)[qVQIdʝ~P)'E)Hj(ذ\"jr\ kS$\ӎG9wzoV4WFr}6jծj2dgEP]%Ȭ9nz +჊^x6a %憝6253 y_#eFNDTTT̹\눎2r HjpȎnvi)ѐ-3WB?g"59λd /WSکxlHTݻʊH~qY$C]-v eMItt8Dl3G?'2\vU蛮T5#?g[fBǺ ZGvJ5Tȑ\lQZX)aPլKTpiJ*Rѫrn xڙ7*ii*w$R5ZĭUO}`3\(qV<2ICabZ5t#bmM=KvϏt9ZI< SgGzW$Ij*h:[3Uֵz26*Ɦ1o)\˞npu.\U\UQWR#[핬uw3WT^Ʋ2IgVn* wTWOQvF"/TЊ^}Ӂ,FK35*o:d_"DWt^nI i~s+X4;{Etl^s ܋5eDr'5ݒ1z9veQPSztC|\1~eٝV][Wj9A+6END"\y 6nvjM 쫤ٔRQU=t^FvSh뫘Sk^R3M{_rK-}$,5L911~jeMק`&T9fԘo6.+<T$nbiUơ̈̄omok6gzmE-yŖdtB{ױ7oCS~7roN9q +hZJu2Log0$Yv#WnMyw$D~8/͖rvUm{ֲrB4|̍T{SeEMצ \S0*2їkjXkGӿtuɲ1WۺCݕeJ[KG5,LW94U^+QDj2lۨop>x64wkR/b*ԙI趂Ww"OJ_◄%+=Mi|킩y}U̍ts7^WEQ9`hʆg7?)!~/l1޺[*{3agQ95jrɺ&uCƐ-Y]]A= %h{]4|U6F1TEh W6c^ʉ`[mhnTL% r ̼+U]ͻN϶at/.o/pU 83ǴrMQU3$N| >9t> #u.!t,"yeòWޔ/Nݶʲ^릕}=ƫKv854ݓG+ibtcxd& ?3.Y_W%5IU4pW1{Xj*B"ĸ9ˬN1*wʔG[&4ު9#Q7^U>U\;VQtĮxs5uz M٢ѽCWZ`lߟa|IYKG{+#k'n9ܨ|MSW5RpI-iWgή--*$t|"Mλm ƔfǗV\٪:"(VDUzD^-00{Cدvevݗ˶P31[K+lMiP-u.ts}yNUkwF}SsC/L'd}~պ*ZٖFE07~ͱ;EO Jۆ*[\KU*z+ޜ̾Ӣ"']}.uXqΎt)sL{EzI5J>[k~1$Ufw*;}TUES4N Wa\$QGTR^NfH׳g]62,G8ŦЎo+*tT}uUS{Ʈ KKemۢI*6+(s=lUTDz=_4<)_v#7ZWPӺF$:"̂mNso9}8VrӯM8ip' jkݍUSx ҍVեL )K5݅]3)yƳ|~ʑO۶ď%k. 
SʚzG47HsWv+\1M굱/D=xq|P07SݭF[-YnTAMX8#V[\*5G:?T_ޱ: $2^HdV-82-m>oc^:"^r;aIڪi hօtzcw{%JI5p,>'c6"GDtSsz׈%= [%+cj+湮nsWEM*qsDŽvqY"źhqO V} #Jt+=ԋ&K]/LV@'DV2&#܉01&4zY/VQj7h >P/z.yeӥ,1^|^\Jov[wJ'5Oh1JZB4}蘿?%-!&oϳo֜ZB9c>MӀy@yH"IJk-fm[a|UgD}&F75wG"*/x<öau[ʎ稉,-6& @+l49y,HgDUDD7l'ӄ1q&g12e5q8v>R#3g!'ZkUr$j̉TnjnD>~YڗIWAMƑ۫u4S JN^rƍOI7:{;Ql#|m5q1&:ӆW34s# }=pv㪫kXLcej:kN?a!{秮꣍"s|ݾƦSNqson}Ӝ&jV/TΜNŇ9ՎX8Uzb,qE̝Ҥqƻo:U?/)}[.,#m WT[Ezā^ɮߕormv3~+{ɉwi33<5Vk6gUa2. 1^sbgtRMU"vLjv\tw?f!YݬO>?̮4RSTr^F_K: l*7#v7^:릚w.E7^fi`YkWKUj n֚6JUZŕG*']0nm&c^cvj#ppcn- _vjq::U$յ/kUآbꈝUyZ4]QM1ʜGne4舄>ꚺThmmg%^@^.9Q)!brbXڅNK_XXWjH}#USjz^4_^NOMe}5c$UVl?eMq%cqU5mFGmNz@4!z -.U{V9;Xs̆G6ym#_6ElUwŠ̩M ڌGmY#Ml-Q\NcUI'_ WS"o,Z(+bNdv̕+n_>TڢlK%DNDkᒉΦ{QWybjUMpJUT&a0VW\ *w YVWǓRT>C-\͉)Q^j+mdٕy>>>w򕻶DV[&YQ2e#"zs*Y|64s^ڗbs)kR7=$ZFkzrEGu Ip{iUqvq8j.W7&tׄ^=XMf4P6gՉ5vĦڦhG=m3JjU ̋f+XƹZ3jx[ ܈YSdXcdF2/zx_RSV0[6cvb:"b&"IV˚4Fw9N#KD.kVEj7m//:DrmWnF|C- m1rZ5Zj1ɶʻ)gkjZpn4ܚu*f':8=N&FwӇe>YLZ`;"J)ifX(#QܪTEnAM=+ Us9>;K>>E=QIyKHƣ*܊tTnmDڏɦj>NҤab78C` p~⟉9\hj=\}=$Smވ߽fzi)嫨$P=*MvNE~O;uMv:\v^2f$UV@vTStrtdNc 5^))Dѩͪ4~գD4@*IEitVrj:^P}.Ɋcsg{bb!]Eu8d[ict0VFVW=7EEVQUvo(rOCp~ Ϸ兲RSJثnrvTTjUN*잫uNNhCU|2d+չ{ Y?͡M9Nswذ;cĬt83jڭ⍩?UWG;%F|}ؑԕ[UNInNGoV; i㩅\*G7t] _M4m|USMrTvj*7t377!vFNx*o]u7EE8ׅb8SUT&?[|*>~o_;΃[V+x+ɩ*KkfĔ(VFr5E]S~:uyMIju] t(]K3jYbdY̨ήD^Kgi|[nػtc8zfܙ9uoWiN;ڶymmD+ nԯrGbYWmtUE$28m>P=im.x*=)b|qz.~g3QEUET܎3"5O>1gB#08P&TYV(nN#d_;2^S;~GF3]dՋ=ܜ]˿+mW^0bX 0tZ["j骝333â#,nu3`|C- m1rZ5Zj1ɶʻ)Ӈe>YLZ`;"J)ifX(#QܪTEnZ k;7rݹwFv.4sɧzjSpt'ig*)#O9ysE]{NZ\fh&b#H#Np#FNЯ=Ch]uQQB]Ljr""*V"j#AqĮ`*r 5,rF'yӯ+W[y;;A.m^WVeXǻu]VϟnXY,agQM?VVsW\=ȟ(4u\>/51tUoOmxtVv_Bv.tu1dg>>TWkεASR5nI9ڪ]Ru8ፐYkZ#Q;t+" 8v9 מɭUs߱j|z괧ʁhFto-~Ќ5YUSXfC,:f$OrV3.*!f׹=Q^Fw%ة[Y4G#wEb"ӽ6q}:ŮiweU95\ETj+w9YMe3ᝍ7l\{ 7y@UG-XNS7ıi_C]GO T$ rF;g3n窹wE"(vKڸGK%\[B*Z"'(j>Nj* EjPƲV`ʍj|GX:W|kYqj0S6t/scWl*#Uw*S|ر\̖AiҷzAMTkSR(.MǮxn#QέFúª$4L5jU_߇]*)Icڿ"w-|0 [c*nU+ WM }S+GTFjIPǿ;9ZռʻAqKEUm(5y;WmN$^ȫ'Dj*"tlͥzs>bbt6+Xڈcl=DyW{~7FI#6)]v! 
y} UP_ -h2kESY[P`{T;ydf|_Je3WuCO5JY")&6fE'*;ʝ˲suYar~4ΉnT]@u~7W5Zk=Ӗ6\:5U&V*moж ʬV3lQW6h$boڪ 2Ұ j]TӝBuZm1˲9ȝȪn6,5j5* xUOn^Ɉݓt*hʲWϒfyj%U2*Ȋ&;{⷇ 9k>-Wr kWddjU] 9&MyIEArK*W#br:xyXtW?Ս4ҺzJGlUttqd=F"*ovB:hMnZdӡ}J]᭥옪dvFۻ}5cvlĪ.b5٭cݳSD 3X5WэR1ۃG]r $|0NXYCOYV#;nno vѭXJ;~V+KG\ũkv%'bD}bҾ2<3k>Khʐ:x |nsZWoXɲ*5m(Lh ,2rirTgUU˲n"vlREk29$W6kUvMuӪ *IVij|rNԶ-Oj*f+6V3eD]]k%pїp<;nDĴfI25ת6U6m&EL~׽g< UDW7UWz=:Kű|_5y|MO5k^w˲.א ̯SŲ{5e4|3"_&Qup#.5Q+muS2Y)^TVت'MS:'q ]amwJUV5GtW^ZiQ9o[ޓEg=ү*j#賙ȮNZݥ?QiԜm <2+I1G*o˶W ԭ%cUķ }*dj沩V웹oUsN2kFLYU2ӲpDƎ{{]Չo0O +[lz:]֚::keMS*w*"sn}˯QQQ"+}~^T<\Ef+|5]رjyDF= u#-8^֎=I%k;~X7G/6˺5H6y-![-k[4%JFFuHC~rowu RDO|M渌¯æO9=er1MwJ,ۘO*c˔͜q\­1|HbeI*9TVĝ՞$%`XeqMrn坜q➚/*QEO &GmF1j#ZMe>Gj+κGuD9c>MӋHU'>xgiZpQ"/( [=)k+F #G l4FȽەt=>j 'GEE,xvZ٭5R#`UZHsQ\urӧB%$-7TǝoES˒uMSOYO]$lK{QQQQP4)H@nO@c'Nn_Nw+҆KW,qY+X5#wR7gYU6mK9I{ DyzoSd/xTM@i)⫤9)cr9ctsU:*****Ɯ%*3̎mrdZWMY/#'"sM>>Ỳ0=gOu_bzDhۚ rj?ZX#drk$=DǑnwuΑݫ{ٙ QxEM0<O.vOlt{/v~SbfP=Ι{? <"~?jqY?eZLCda>>uay ng6'7uDߕ_JR`C4nHkr1QQ{d6 RSe}FuMu}K)j֢u^D>ȥx426Hj9j5zzf7UII5[U7Os\5ts[+k/{j_ռ|EIy8f~?<>tuL|42r[|ryIQSIO2q 5ʯK]:QSM _PkUs\z)H؏+d1 $H@ލYo#`vߊVO%O얽 Õ扷 v "S"2EƷndm}|% Tt,} ek&YUU7{= r?;BvZ|U \jxm;3#W5zHT:)D궒^O fKW#uJ[\jj*e9 NWsLLr+z*HuL#G~3!NSHѨ5*$c9Ư2\xhkOYSU5f;H;l<2zhVgG y9bVv~"C7N0Zn5c7iڊY.ТJFy%VƏsTQ\ z ϠUP&KotW+`m;n ;:!I F;.eqWv9J@͕ENF*98;R0[*<\βR#cF@椎W/F싺Mۂ i5 Gu/,Wk5-]5N5ʒ#yvOE˱R&#]Sϓ\L4٫(21eCPzq?BSiWڭjo, _o\ TÎ\R4®!Yj-Rx'̈f+ח?̳KVzڲQ E͝F9EruxQz;ݢQQ$tS6eofznd|)i5K"Eyu]-l55HR'2VZr9Fy7/}SiU..d{e mggʎEj6ȨoU-9-Ct rihe=kkHj*߲bxA^3 䨪sj>ȩngTuw9%~ $7=.v醱S$kPŗμG/.9y= +f]QpUS+Xw zxe &2hnrO٫N]'7uTZy[')ah Wdw[5;g5P_$rf'2NůNvc׋M%#s%7#.[ʪʪ&+ޱ7 B]k'^@٥URl[䗵Jz1^I6o;1TTE#)g<6bq[Ar[ Gju38lɹU\U4YRRj˧MTVImWnvʽOC^o8|^iawV2[Nt1>3Pòn=8uiDV|JetVZJU+b]Hb9rʛ"X& ٖa6gy7\S=GVtDbnQ:o$PQn*4r*&Ȟ JT52ŸkY:-x`KbI i O+^U]sQvTmZ;m]>=Yһdyp皒Nڦا{́cW5T+2Bo7adSi'UHJOw,'Č6wXw0z˖-WtI=Ƕxv>f=Nϑ9KuVnkUQR%e-.ֲNh*#=)ZȊG+V=6^='⣎RԌ;h,Ց.DE+)#{U;ފ"+W3 +v|O=E;Sݪ"nHk79v(UFZ]9kl2:mUQeyu37-43>>=qFO}+{tzh $o#Z^dMT?N,xVԝ\'9ឭ&}lƵyӰk39[**=":Y+.m]Y4Ee 
4T{`VEM#vN͉*3~EBOF-8gljRevDDJ|~r34Tj/EU/NbXEX"5dls$HyQ{fF'%oEݲ m+(T2Iac\潭wUM(S<8>jU]<49i5m-/j1^TI6#g޻/M1ԌXQOxZ᧥c9wTŸpU gfg;"ok[lG,ŷ2UoU]C<3Jz"ƸoWS\lWH68ehcȘn)U"uѫt j_awjY%TԴDfT'ʭOI9Qvr*>h+5 ˙h}-F5yY1d^YUj$r9Z~-y_xUMC|vb*UD6HeM#~xpn"j%Z?)q}= )ΩG6:Xam~&FުS4~S3 Hu>ګ(X{dѮYOaqL&荅@Y^~Nn숋f5Q+˳vDSEN$Hk6U{u]M)u=TdG5v]t^`pF5mάNenFSvuNXFlѺ齎d/tNTFTUE/2*K6lWGR4rL^FȊjdUvEt;me-^xɬRO-5+o8-^TW+stEMXEOVGmWr#ZMUW s8=ˮ̣Xj5_$~V1'nWFˋ'eTJČwKdvM4/j b1vySP;+Ωv]1=Maj6,tWe"ZokH#"}5ŏlvvInՂ$w_ !7i!\S]ə3dxB\l{Ե3k{( ?MǽKS?l!_&r >ʛ,orӀ>P/z.yeӥ,1^|^\Jov[wJ'5Oh1JZB4}蘿?%-!&oϳo֜ZB9c>MӀy@`/l2eTshoݜVv^Wr"T3 O[|/WuB:wLU/Vg"1,q:"5;iq[7SY5K.ZuCK;XXb9U>*z=WUf#@׭ >Ft_hܴ9͞pU#"y}|;c1s) ͮySpeدG{@ ^:Iܳ+]%{3a#[~ɺ6nb.Sf3UUNfyD)3ƲxR`uV dhed*6JHdrG+wj8Lw-i]r燚paڵ/.&X_9ݝQܪQc{Uv-rt3Trq=_=Ȯh"uzYoZ8Ac 5 R'W뺱|SuTeC~bkbݮbS'8WMbyH1)?W+Tĩq2Ig-Hk+r1l{US PsWKu]#ڡYʜ_rֵ:!påYFjG:`WF=V/fۥLȮUU]gA.:/ߏ{sTU<ن;z4OaC4I혂*iIrsInYQ<ޓ/EjVW""4hc8g1\Kt] u5eKU?ؽ訊*0-᲻#\ ?R uwS6>=dk[>%FU,9qavcVܯOejvWJMI+a:*;vmlE6AX\+C9f;XFMa^znU_I7Uk~U7SD|ʧxN ]ǮX䍠>Q9;ug_#Վ6^7R71)jH33"cYhOjn`-TYvfs5|ʒ"&>drߑ%tV@tϧpV¾zw毸yr]ܼ֢ ZegmzMw@Y9 bt+;WN[an#YYn)֋+Eq4t;ӹT轛v: =ERxL΋=&e.*.Z1YR1TFƦ+Ev3UI@P?V1QETOwGʩ]_NRfreUM+Z"b ꍊi]l{}+k]4c\Nl3ˬ8'wͲSZ,trV؝# mW9QEsdD p\Lh[[)"VIVoޝ 8VU2{C-8ktOA+XGUEr#Q<2c >[V[U5S5SI UcUDz5v^UTmBbYwZaS{®:*|kaG9$]֢usӕԱ`rUXpZ+ٗIWosH⩋9z59Swc@A>+3=(ԛ.Qoɨ{6@w'5QQ6FoUUTfN"cV =;_[A ZT_$O{̰UxԌ?j_e4+AOp\"t\sY,Ek؎sZfU GbZ5.zzKrZu͑77>6VUT7|[]zJZ-443XFw**웮Ȫ}@ydqF{"5DUU{־#q1c;v*I"VEE梢65q]!=_]V *:EUNJtT|WSnႇzez[|,4*ESuGq}tU{펇"Ӛm6ƋºjO_O'Jt+PYU%z4*qW--tN娱c l +(F7OeB#Qn1X[85aTULO)j'zjբku'[/qۜK/rHڽ˳ZQ VMW3R,+t\=tv!þCީB),ڳѾj-mֆQ7ɺ+WdUDtx׫SXe-\v*"B#zlﲮŜ !xPJ|p ԛB[F]kb675tMxUU*"&9]q뎎&24Ξ E.{| \9e%{nXntk]#WQȍOIUBۮ6 ;M}=mKH*)lJs\Tr/;g<髺yOw8Hl߷~5_@ү:&#Y痸m&TM[M|vju]{U#mEe>Y+R:>m\'DTo2). 
lζٗp2,]l75zꮓ8;#N#kY*ի_cɣL&iQk־!,~6UQ5d*2rg6Uj#ݭ_.˰N<=3fxG\SUs5+\:w΋>.vIS6'wZN]ٍW5dgUTULOiiy^+W9]Yk*uG5ȜtI6^Uv$TOYڇޫpJFԵewdu1tmD^ uU7ݫ[xʹzI`aU2-519fj.6GS#\ʪ~syM3lE@<D2Ћ#KP*^YlT5)(܌z/XV̬6Y^Qίjkv홅hNs$LLGzPr*KU-wfȉ-TrE 7Yvkw^R1 ͸,-[N-jpfQ{;ŮEQQXΈݑvDUGlUm0[x[3`blи.tT=ITWEM܈ПIlG>Dc1zWG\SW4uc1b־7rl4_ZED]NUpIvLmUGRu WĒz5wNd&W(fueiiwU٭M7^~W&+Nuifʕ}{. cj1Wu_G="e5LS)oԴu e"*.^EeN'EENl$FjrDU_0~kj hoV沪d]۹⋲&hmѨ6GV%= L3 ImTTUMwE:tCM/̴Ƣkj1h.jj5Ӣvq6rS3bj#XW*'3ꫲQ0|GQqW86Jj̝j*x)i^]w)uW Zi)tVղmC-Nz*AV۩dUMO yib{UaܶÄXjL4^{g*DCun!r͛ڵLUSDqu7lTT+ZƧz7>՜Jl0XJ*ӳnyܝ"Ǔ*Sxf͈G+]O=nuM}uZNt z<{TnlT7:"w6δaj鯲:F4EdS{04΋]15iή5WJ-q:h+3$hS"߬lɲ~I >ZNß+5UW5W9duDtDp_a5MX)5?LdlG#YN^oNfw/᰷bϚj(j!!AݾE_+>$K3s\#QVNV {P8֞$r }#K ['5UjUmǫ"Iȍs*szRꚩ \70fݮ薂eqP+#睍v[,j/".ɾĠ`|N}ϸ~oϳo֜+ cjlbhqTDǢ^'ZgIRzyF⏈uTUpeK+dHwGӱSw*vix3bATMx{ҵN]^My;9۷,;\53nW:/wNf ,tx}33>L3wWgزVX/T:w5٨Y3}Tv_S#kϓ۽KkYԵ"InWS쨪ٹ56DE61pp 6;trb"uU"| ο2c7emJ\4зI'\>R8I[k/5]%4쪨j#yi\$D]sMb fKclT7E>ʪr*.ʈTENdGW:}YVUtbW`W-:/]ԑ C[Bi *"8efW6AٵSi]:O۪E5w.#x?:<,XDULWrlv]RT9]eufv+*Qtߢobi[]`P\sfdT]TsSeM6э"ԙVP4Ȫg\0TLj*$j7^_jޥsjZ$^+TU^l܉w""_\UL~1WhgӛW*֒OƲK%u®Zh[$Hcڮ^)ô+P:*HajQ޾T<\"S]^eb:ɺﺣfj/]zWGZosE2'y›2:NJ%?7kOMpe\V "ܜMԏ$Ԕ} >gω ֲ-VeVUjgTEWr".ȽUͱO >Lfڹ6ޥfs*&ބH9&]wggvE<ǹ^1p)\Eq`}:9V۾DٱQ\ըcH4Fq_[16/T9>i\$D]sM'1cn(:"4l`Wr\ xfP=Ι{? <"~?jqY?eZLG>nWV]WKXYQނGSTT#ʑT:5N=Sj'vB-}ƳL֫KcZ;K^Ѽ^ϝ[er5dUSdW*tPeVfparta}:#^M{][1z%Z2tOB7Jш^2 ꧋_J$P2c=Mt\+ Α,͍լW9WmI5u-Ap$Ro"5UF.̽}Ϭ IJSj2O =lӻ]3u:Xݔۥ\*uKe\jx69~i^Ynv,% mi₵F꘢3*9Dv}`jMU&Ԯ1(2K1&?tWO\ULLջ\k71܎Zu+-9+U6zqVD]rw5<](/>>+}ۧ9X ~W* I*[s%j=%rlIstom63O+51SـtQ8er[t@mUFܫ2(_$މnZS|}V0:FRF}ѕ"DU]ltS҉w1i3;dMUfGOяj'*5yڨe#Lp5qNh,"#z"7tMcʵ#5|/!ri5Md1֥$u.j9*=r(E8Noxׇ,O:6_ [U\ؚWM>= OKU_h1v1hx[} *[(]"b9֫o;ݾ?Cl(jͩ|+$Q;lX`5"/¥M,c\w"tn7^&_t0`jEae)wW.b7gf1j+.˴UdtRm[Zt?L(ռת٢V#s5U=#Uʿ! 
|hn3tet5|چ7^#u&[9ӝXTO*{j3ytb)wKcUdߨ"KKڲ ՈF5ۢ;mK.~/m=߈׏ެRGoKL} Tvqvn;~_g=ԭ_*؉Uȭ{vߣ7J"xpjz4XUYuM5G`ܲ[.ņ~$U%Lt*Tm'E]"F*,yק=Xlv̮9l4RTo&𹪬^ΫM೫ϋi?-L\k+F9%j"/3yWoUCIl,{'eI"їY<9p!nh'5WdDo OKGKdMvxACGveŒS$qfYH^E0z5rhs:I`tUVNmX*{IP OJ9Zjkǥ.Xh"q;s͕;kMX5]ni,V^yQٹT~EO HjxQ^e\k0( k#UwV1Ȉu6S9wTj"%s"s#XWL=Q퍼ΫojxPId9;FHۅ&Q {tVƱ5*+/ nڗxULl cm=jY\eQt 9*/ר=Xv~nl5Sе΋JY);E_`]AmtDkr*1:lg@t,GQ~]^?eUGcK`UlMj+}|KdTh"K,T2yъ9"}̀nujwuW*Jk $_FKFDtʛ*u],FzRmҭM^77tE-M?a3I:;ʝ[t]ۥ^oU-MasCU,tUR>xrz* ',f_ګ{DVM (bIKRڲѯW5mLnj?̮okެ4t7 ƞJUDrVn9ܵ ViR:gsԕ5wOT6 5<](WR(}ULǕcdNTP#j+DS{Ƥ_HZ#]mċ]vN OJ 췉[.MGkэ!r;Dlv6!bΨlNG6鹰hvܱzwc׽)ilK,-HY#'mܼ;M$ZDժ\2ST{-1yЉdwۀ(UۆOp52S7--{[mڝ\W9wM9\@|w:oIn+#b=9^UG"/E؂R5% /ckM<;6*؈U\}/VqB嘞;ؖ[ihRͿ,:*****"iƁ9RGڭrOSXfBQ'"ݕʼRkqn[\{fuڒZ*;{"=j*E+CygԹ_3퐵:$U ߷+7kQz=vt9̲pxdEȎErn96r/TTRjN3^S1ê<}eLYݼiwUYuEN9tyÓMEnsf{ښV2+DJ͕WmT=c<)V l7_&Y(fcz# v];:zbSiAv3nqƋFȊB-lVA8-:M>Li_;&.Wڧls.v=˾2ʛ"u]d1&8|Iˈ$HF*7Mne$9jW#MoF&8:m˷&жGкv1料iclwNb;ޝpnQ٬Unʈ[Wk+dbEjw*/>eZJU6-"$s=v78bu & `FCDDnȎr웪'~oЏb< ^=L{*b9|=nh[|fUw]N*ߋgl|e."7c=W*Ʋ灬'wO|mt9rgA. 
^7m;%FM9%??*sߡ??%W=}ձ?ߝ0sʳ"\7q6y]6\(S}.OTϧQOzt@Fs\2b5>Ao#DEOr쫲lw=(fa*pH)3Vp7f1LDwESxE2%emJ\4зI'\>SBxЌ?4x=ֆIwrLromM\o8ZV;ds"97hdN;'D7{trF6(MF9=myW؊78o)"=sU^Ūi&Ww|P\Xy̵ZTW5vkj T˵,AWv\3ܨ׮kNg""5ʩ/r9:tqj' kUil۬׊ɪQtNG;DEܜ1 Jl;XF$I +QvoJ0> w17(Sv=,j+;Ўwĝ+be 1r2j.TMMZl$iggFuQʡϼvuuL2n"UvEGtY(x+TۏوNuܮ\4J4H쫏ijǨ^|ܛd]+dsUXsU'>xgiZqi ُ 7N߀_;SE*[C#T|5B0:drDŽ_4OgVN9c/k' @<IUU4p I$ֱW"']Ь\}jNKiU=3,_XҤKtY9,WYlwuV-ZE_o+6n#P%0Y5äv;VU]ꚩA@һoLhܝK8E-zt9/x0J&"#<8ɬa;ի*MTGl]&4^N:E@Vꚉ-MN[Zz1 :lv^idzۑVjIf'iOO"+|eZX܈wv(A#65j&Ȉ܈zŋbK1(9v; z̮Tiv|λv篮~"*تhMG<01HElolj]M#קÍ}jP۳k7g-?2n4ċwz*5SyUzXPI4i33:%]dzhj+O#aE9&F#\EV]*"q:ۦOkv-6ܬ0'7+_EPW*.V1qiŤ*f<34߭8 ~yOG@3|mkgLfP=Y?eZu8i2ڭ`'AS rw{Be}U(d3N|o.}+FWSD)(ۿ+kS`0Zn~CGOz%TzcZ[Χ|Nhmծkts~-cgǍݱ[]r̖-TFȈݾEF ~ۨ]1## E4tnT^TsUvnvo3wuY,:GaeUީ+Ƌ޾=Գ[ۛקJc Ng j"?3#ɬ#ZTJ vΕcE_AAoխZYISȊ=}~"'ݺH4#܍RK5i;Jzy[/oDUE;֒g\O5SKW?gmW;˲.ȝTW4Y1ey)=Ư,Բ*oLwvSφl کkF_[-Ͷy{k(U0ʩ7]}e@@ ُ 7N-!V1qi[G|zQɹЎsӴorlw"ӿ6DU^tc0*/H혏_ۍgumKEN6E^rw"SR떉ؚknj5WRB]})OaVa_Uy[WwZuwߙw(d {;84rl/l;U~U#8 jS3,\Oh+8ᒆ$n!4WrMG3";tSWj"MKQFVDThV4Cl1n+Tw'@:?5ydEF-JwGVӭ;~8,ZdtlRU]wMUSVښ:v˲z6f2:(#^dhVWX"\譮qrLVSfE+lvonpf>jf?_R{'L}΋EzWL#i|;tjv7ùWm&˞ko;Mu*Im7 @Mӕ{N^v_0)E\M>[]{ ,svi>>FyF[$ '4'dr[9țrƻnX: ,z?d6+h+Y$#T-UKgD]\ULǦOkpT|5B3G3|mk`ti2ڭ:rDŽ_4OgV0YOc<l}~y)ُϱsuY,:GaeUީ+Ƌ޾=Գ[ۛקJc Ng j"?3#~2-%;ݦǵJcv;u6V:}7Uz"[Gk3VuwTCʬmsƯNUtH2=m߫Z$[f{-F DO[u[h%52CEMHI16Mވܝسw=18t9:>bf+Tnݻ뢎޺ }ET2"dqcƢ5D;CZ/GvLĵ{Oju䩤³mSe$m3cr\zۖ,^ٝAv^jȏbQHsM0]uևWPǶTݱG@LjƷd:ȍNi#iv9mͦt=uo^E]TM#G{{Y0D1V',TEUE*#V29y}-vrU'>xgiZqi ُ 7N߀_;SEUUSPM[[Q==n#xtO&d`{!'[ v^^ֽ豣\Te_K9V&qeNMX1왍`ȑȰsz]sdW9^mZYq2 P-ue5 Qn]WCl0meTŋ}|*|tFɖ eb*N##"fb՗:Q=Y$UUwk.o$pM1yٿU禩ZIjͻZ";#@a?FXc^ǢrnފGeko WR]A=3꽭hVmܨKeٮ;)W]31ӜvO{8w/SGl#+NiƖ;]X/;m"-Ԋz*U]5IFR3 YU#ɛ:EWlw_U=qZgα[}ɳ|/E#v|kzȽWSl1k9t|ލcvV+ VOǭr ͠Ws\F5Nɫ\5<-N]۷oԱZ 6kLWw+c7!W"6}Ies[o6{jrT:gT'e ،=ҝ{?29@h_#@,xE3Md!~i<"~?jBe}U(d3N|o.}+FWE@k<[ݚgWU9`DTlwڵr߷7Δ9ep8F NuΑg8-dK|YUwj;gJ1ru [r7֭I,٤)Eo쾋Qk{UAnFZթ%4=<ݗj7`or"z۪h&AL8ڌcF""'r".c,(9v; j\iv|λv篮yۤR(dD㍨1DkZ""w"!Ns:񖳐  Ʃk/į\fN5ZU9UDEDSf>[Sp<»]-uqOUngro#UTϕ 
_':}T5swI#s\&ʪꨭ{>\z=(F-4p:5ύsv%ί{ZHUF+ьEVUʈxv6H=QQwEC1&WdK]=qT XފԎ4gC*Jvڷcʝ\/c;^G܊UUUUP ُ 7N-!V1qi[G|z_]\33̦QMb#HY{ SgժHY.u ݯdꊎȊQM8Be}Uf>N7>nϣ+" ~,B7u|[yܫUN1-]5V=e9%]'צdmW9'5smO*qҶ<}ENݲr/ESXSMNj5#w/Z.~ŅW5cTbGn:#jPtw\=}MӀy@aģ/] 5er9ɿsF [ytDs-Q7.Nk3<#ʴ5LSO9|NBJ}3;CrViKM{(W<Z]Ez'Dif} ]P2JQLH+7D f"E Q6Dkvc~"ҙ8Y6]}&2y!&TJEDV@͑DNNVD>T&ܫ.jpf8MꧪutDt\"L_"I@.a]xj#siNKnILERMѓtQvg;d(m1ۂrVv5v珙ܫfb9)RPlpUjCPV;*1 &\k'*GTLgTߣ%4߉1_F|?>Obr=TJ׋DV}rTTEdG"nuDbcX@@P9@h_#:ds7ʁhFLXfC,ө,xE3Md!~h3(d3Q 7Wg8\>W>x0 n8;p[[m[5J"z(ED*&le<8j{֢nȈ|wWdy  eUu3.̊&&r8żj7aO-}d*R۲JW5ɻ^ُDMW]PXέfiuňj>5zKAtyQQrxtBm MWu3{TV{E4\ϢsY #DETG^S'FUPSyS 5*ZUɳٷPus[xϵ}O\>ZnRdYuᤵG3eN"$~UkyUlߟ3Z-6Q6))mGXHډ[m_mZVfEi ]D4싲¼EdT+̛2VSMfϚvbMR]+7uQy@tWtZ1w@,#TdvʊʊM8#Yݭۑ rڐ%,,s#QV+bZ fQ.QYg5u^dr龣Z۳*e${zI #Wv鲮F6bo{\-<)'-W΋lm"+Weӻ`@ Qjg yƭzqwt9tFU*U)UDQQUkQ~neV=?J}'<;֝+km([#`TEGHr;n9{/7C.yXe皮[n^Ŋܯ7.TGUPs-> &׫ο~'kk詨r;jYO_k_'cwUWj+!Lvc4s.a)r7V$UopZC"-Ƴj==Lb5g"'DQ6Dw6ɶ͈qEZx|dܷ))m;SoO}wkEkUmZr pCIUQWJcvj#VȬT]dG&۾F)eiZ}k5GSufvNT26'T >4t{5339MK ,4˲o.W+d%[sdž}ƛ|N}ϸ~-E=_;P=|Klݫ"J]ȢcU{ اZqYYꞾgfĊw{xڝWuNU"ԍܦ]z"\5/,jz*H ;\ٵۊ_'e[p|ryhtRdjUl7ƫEUMdMGEʶ/ȧGLgt|:ζk()m?&:1C9WGYb82"%ڽVv5?/YU^{_z4PMWډ_d%M,vZ=>{\7kz_HKs*uғNuciic{,^eEFMw7Ոv0ʈS'#pir\m?*>~C@T|5B3G3|mk`ti2ڭ:rDŽ_4OgV0YOc<l}~y)ُϱs#D7,:br]2:u7)МMusڨeN7Ҫ1rƋʛ.W̊=S-6wU6#lL7]ir>Jm*7gȫ\X n)̱%Mޖ nΙFGvtdhT7[dG[d6Y7ݫʪH/D^jv4!GS|UcnѼ#E#ފ͢7ᣂ2զW<Ž#l#}M}\ȯV٨j" WNuOOnI9U|/~Y#z#vv{Qvm@ ُ 7N-!V1qi[G|z9i趯5zQ٬gΧvҵ:PJƽEk6DsyXz.yLo$dsQ6We~1Ngj5iR6Fp5ņ\NI51Xֵm=s9Q6N~V'zԛt.,qPw#s#eN--R/e2.z.w^V\űj}\U:k4;'˵e؜ ~P0@T|5B3G3|mk`ti2ڭ:rDŽ_4OgV0B<@q:5a~̅ IPiTfH/3l)W\m$[h"q\svj'31UU}UwoW:Gj7qŻ4UODFRA&qDGiD_X |qRGOX(O֏{i9kKu8#g5Z=[T~[?㎟}?>:Hp5Z=żKu8#gx/Q|[T~[?㎟}?>:Hp5Z=żKu8#gx/Q|[T~[?㎟}?>:Hp5Z=żKu8#gx/Q|[T~[?㎟}?>:Hp5Z=żKu8#gx/Q|[T~[?㎟}?>:Hp5Z=żKu8#gx/Q|[T~[?㎟}?>:Hp5Z=żKu8#gx/Q|[T~[ !f2>8#!dyrxlAUw 
ũuo*k?VʶE3-GʧO5sk{\11W??$8}_tԑQOj>{>-?]nsO>IW??$8}~s:I_tԑ|qRG<>-?]nsO>IW??$8}~s:I_tԑ|qRG<>-?]nsO>IW??$8}~s:I_tԑ|qRG<>-?]nsO>IW??$8}~s:I_tԑ|qRG<>-?]nsO>IW??$8}~s:I_tԑ|qRG<>-?]nsO>IW??$8}~s:I_tԑ|qRG<>-?]nsO>IW??$8}~s:IrӽiֹYU/+~%O1o*IG:=fQ:{dUٮۮ#QQ;Kֱ{j5Od0S]U5DzD-z/Ą+MeT15DEVHW';m}ؚL5Z+fUleTnMXH;=grU]ʊEOq)pv`/aZłvlMhbtI6+g#U\ٷle&P-w8SIS d7EOܽQwEuW4[D5:8},ە5mDdH&=:2N^Wms}W'49|dO}gnQb9JGWHW^ʊ2WHUW76^1!,r]s%lި՝MkFJ(sxӼK8y%[ny;X**.ώDvrl7+έŃNUym1Em[.M9XU^Er!<bAR֋T&hdeDsca2Jr*59_pXkIly5%uuMaY?b':woʌUeUMmAKt 0#8ڍk5:""t`uuYlmuUT=< {Hܨn.ʉO_<VQ6Oct+V[/7]cUUW.ʪl cdlUj۪""d8@c'򵤅>'>=:sF]<܄sUr*7nȵ{:qzfYC_hOM#}vSg9Eꈯ,ѦzhqrzUԲH?6ʔ )F3f=Tj}ׂ6TuEIۙE[vVEHhf|295΍\۟#65k=H=u,f$ߩծV+oUV*f<34߭8[sdž}ƛo/)Q"YAOZkQ'*z1~5<\@*'oӻgA8reY_+5_1w#{zVnSP꼫Q*ުbUek܊mb`>7s.gȧ>TX:wU\g<~UtԳQU`K&ʋ򢩊/6Rȫ&U-|U*5{[F*tInȭq!}mEqGHƸ}ɖ Eժ~vkU:u[G; #J{>~4:ͥ+ iTqt·򥪦*z,3D{$cvN}he468jrֵ:rz=zKM h9}4c+wkj#fUjmզq]r΄}.ԗ6+koc}-NH2ꊽɻ%ٰLl^hY7r={NOp3/'h]u~ϧN2< M\vv.pl_\@kuik\wQ';[nr숒ٮo3.uLvDrZQӤZΠi67;Ku]5KoN)gժEPف{MM5&'I뉎0]ܦh5E R뮪/rU%X`UTJڈ1Un[MܷcLuOL>;|gݭTtR"")X:9n"*nܨAZCM̏vaBrroS5U\+&x\ӗmƳ›WTs kv& ;{N6M{̰h_[UOWkO^ ފruOI$&&5,T|5B3G3|mk`ti2ڭ:rDŽ_4OgV0 ?B|Ok["ڨTZkQdz$mTr[{hUTH#QsDDDE]Q!*yFq2jޚHBԎ6"o⨋ck]Lzgm6'lw=ޖ̛\E5s\j@3*ϸ,ֻܳO>&w Q[UhTnD熲^6}6/859CPţ$Da;xs;DH]b+1Ɛ6F|yk)"s"k$N֡j*Ʊ6DD@uR+qijp=4TE] t5P9RjԍDtIגEb̾#޻F&ptɞcPUI+|jMӀy@S(5ٍ>%$+k/MrnҶ>U۪ƻ"u% 7T3hp* \*:Yk읮bUcl2,N>ҪiҟWOf' [5dF9ղWtDLߟy9u6[fȴ1IIC=Czb#t;VwslHn5†q=Cc=8kq Ru|[+$E7usȕQnG릷M<`Jumg3ꙺ2'ʪstrڙcю%pn?nwد$tߕs$M̒Fz5/>P?[ect@ըliSͷUW2TNK3x| ,& V2I۵;b*5*.U58tNq;uWH+7$EVG9U\;PQ)vAKI &'lj5OٌdlHj5M6DOaBgyn<JUEQ_QbߚqW7dۣbʭj1Sy.-&+--’Lôt|( VWJg۪{@Fu \ݑ>9W28${VVʊ"$gqAa56Z3KR_I>۬r#UU{zwBhص@.i,{ONtb&*7HݕvtUGN[sdž}ƛ|N}ϸ~-E=_;P=&!rۃ[%;Y,p(W V4r-6Wd5ƾjQȉclkxˇIIUy}_S''zdWtO46_cLMiТl(09fL2{}Q>ddb.^zg3D5-Zfk&d˕+ZvEwD~ӛcm#~#Kfw{%ʪ]Kl|UR襉+WE6Y5f8{tDLtDoF9LuL,blWTDˡiE}-vFg![K(3y̱s˾jđuLGqګu7ADGG|ʛ"۲=A +ir} ST7$rnrIײ! 
g9Fz-j/7NUUU{ekh6&4M5M{Z8n>:ӯRsZYU⩙ǧ[]Y-xWXr"∧R^^gdFcxcc\IZW@VznGn|Gg9n'?ItuE41p DG2\Nm܈.I)۽MSU33O8wo64"yк@QetGQ[ YDtѪΞ5^W"*2"nȪw!.h2;$L(6UA";:#cȈ6-s,˪Ͱ8x4U\gqY^4&qMnhcXץ`׃kVO56AIlI|+UDvmu.Qyev]&HQtRG5vnܾĆ8jm&14DTvT{]@8jLj}N:ᨳ,+e5Cwc矙9Wnl^eT]i-i1%O-l %ssQ\El"Ek>*hDVDTUMc5ꭦN~.<7ۣTLGG+rxStS4%$CP.hεS$2{Mvt-X&ʈޫl:P+qz'=t[ʭEo=5nuRha&i&'<?o̾{y6j*9ʊWgӆxڮҨNs6"6.GΈfV(%w.ITh> ~kVnJ{}:JhkS'|3ݥͯfW9U>LuS)4sg28 56)= @]\ѩx=Jw=̰9Z\ǣ^ݨl@b-oٝ*bbcLN>iynS4U};I9lJ꾆/Ls41;W5Yν^^!eU3B5]ՏǨ,n l::h{uUcENlUF_=Ϩ\eLi5GUG ;4rL8 Mv'w@$Lo-~Ќ*[C29c/k' N5YUtމx~-o>=蚏7/j u_ju_9`uȏEG:G_s>"X~Ux&iF~ɸx&iF~ɸ@¶K%uƲ Zhy&Dcz!IUbcYk:Kq%{\m"_ASeg?ɲDK룚-[kQE*9V̪zF7M6'>YZ}TqLrqh3G 'cs_9kٓdTյ䒥Z٧TNtE}"5wW.NwyyQ_*)צꨊzFX^j喢ΎWүꪜv˽cMӋHU'>xgiZpQ"/(T?(_45XgY)S7~-*\>X֥5r;m4)Ȩr7eD63Xzj%oL-uէ#$Y':i)GrW˥)`YhT]UD6 ,[&"DYcc))5}T)q~wZ?Nh>Qu&A iU6G*t{G UoBӚX5Q6]nt1s,jFqW"*6E&qLWDѮ<9iϚ#V^GMwtu=[M,9YhUtT)AUZZޚ=5V.ܝ80'M͒[SzZRjm$MUdFQXn-*:L$5w'"7Sg"5[˺_.N#nUN4L׬ESG5őQ4Z<&xqWp=5WDUXfծjĭsU<3&atRhcdwh9r>6&*y Zp_(6-m.C 3{8˭ZNMd6, k{7Umqud#FI"ڹ1|~"(ToG|4קS,گEqf3gHԣؽ5aT5 T]] ǣk梫.tJ߉{MتA nbPk0~^=ӣ]VI9^UQv,}vV=+2"v1ys[>2gY~#ir[ֱUnQ1]3Q9 ~*4gGZlg7F'm-}NvUEDHC5E]֢+(U-O"M M!Ʋj CǽɨA53< wD;O|4kfk=,0tSGTFrr+]~#/&2~q똚"Tit3]'v-ⰗYDu[i> ~z Sg5E|SsL)}~\,uK7Yi$^Tv͑'>E.C ʵO:vU dƪo*D˾S|~NJmHŧONۡkU9-z+e;>OLQTy}}&}[ Rk`z{KO]f=tqV'>eEUZ5ۥc$RxUtۙzk8~Ԕ )sg kR:nFڵl[4|Nzm#SN3[E&o uU$˶**}/4bʧ\yP}UDt폵l@`s7ʁhFto-~Ќ5YUSXfC,:fDi\xq}7PMGƛahSvӏd/5_J@jҼ2PÅUG ޭOQcvUUD^-gh? &qyǵc$mZ=-;w[u(ddvWv~. 
;uV"Ǎ8F!h3|220QVNQQ 5Ll_S =}cct5*fQ0̂mP5?EENEEG'EEEMN7)SS$ [l55ݮUg+Uʩʯ5yVVC[nZWC"9(Nf訛ƪΞџd:0Vqxksjݪg^q:iyjl6fWb)ZDht&'X:QtU[^sNt(0spDЪfDG',9S"4\ll^SQ[<rHUwUʻ;;QU;F:Ν=u&/9f;!V܋6IwQ1Qүxl3L5:ߎv_sh*,U~iȫvn7NAͼLr^TpmN՝3Q{rVf"|5k˟.jĎ#`Ւb(-T.zt#9b{'wsWč K:/geէ};tTUr﮺YƄ`~dQv6Yo4ɚb/Wr"mkq9EV*ݪ+5XO b"yrBEmrˑr.S4ѥUDεUSjMj#IF ¿ %VYSO[jay__2'3ceDyEEET饜.EKeē=ѩWvX cS颚S:M]'<%y.:Wo\銢fc z#O<'Jx}}etkKnK-]GdHoEQ6Us7HCgҜ.7(觨r9ȮErϝUnG*롢؆axKaK$%|u-ɷGHE>UdDc;'ZG8:sh$dUfgv'ʊƕp׷<) Sgc}T]5agnYΆ{sSnc;:"Mzqi<5匒m.A5LTƒsWk&DzlN|?̜gӚUn܍ɪ7ff>TG>yZeݙ5~bUSNn%!^&ztZQ qؔʉ{Jx]:tW~#r?vj[<^ϳ['':˷M؍+&VfU*Η!%SajM{Fƛ5țlj  6S3xqO).{׈*qV'J)u֙u^@SM%jzhu8C 敎M"*"9}эOajZG>Rm-"[墫|uʎkUsȊs+Wb[uMUU1k> L߷F#zժJnWDE:i;PPHZMk(GGNV|OznV~߹2p†3.PŕYZ5׽ꊪƢ&`{;mN"qZHpB6cNݿTkn"f#]f9<#YӌƓ!&b<;Vkڙgm2H7oI:9:PG`0*+ OY4MHUWsH絎tZ}}v-=ӻ&ŎYґ_2"=*W}sfEg5zw79^EXQ6܇7`0qU9SMUSTFi5Q5t, oz$.GP7k_潌V>W':EmroԐ4:\F1j˝2T餦'zMY5eTUj7Z[5b"f&"{thb|#8,EtTMSrzSTGb&h ohlUObF۞Zev;j+]bk4O:Z9m]ʩWg5sU>-9ⲻQi3DlDŽ,iU`-]ƻ""f;4t8Lk(x$ɎTj㬨|ɿi[,j9ۦۢw^>4O' HrT]ZwVB$ݒȽv3b^]gIV;hpL}60nr[DLZq׆'cvlTOLS3@h箠W"7u0.*4JߺGKXm5sRy ֫hmluzwm.*%@744•$m!T8ܻOoqz' NW1tlk~a;=3iq?' 
T})Z@T5\.'nw$M?'ݎro܉nd.3r|KTRHuH"ܳ~.Mb}G}ψuaX{7}+1؆'rkeSR=wETDjꪈ<~/ʎdTDju.^U5w:{|4kk$AcrU۪'?2Uwbx]%lg Q&jZ*vv"w{zbEڎޟWJ̯⪎)ty+x֍FS-+wףՒ15"nʻjV'_PŬ:WVsӱoduln۫Z:Sur9===$,8b6dq֧:!?}v|}N}ύ̇ o ϭWQF .`_KWZT%j?}͋db.ll*ET2"dqcƢ5D;  c N"C0qUʻg];Gd!iQ[FX()*);]ŝ6*+$$ֵnDNp!Z)X35OVmWGQ,{~x}&Q=+oOQOYO]$lK{7G5SIa֌-n|U1\-|-)ب*Uz/ʈдQcOL՜CZ#t}Lxȩ/5͛k\~~*pCh/wݲA1쎑r;usQSuUoMWk&Z,!Ke܋ l7WJ"eM`0gs_lzwϨL[FM2J#1&rU;{fF75wG"*// :,C31ZSzն;u:eni]-cNʨΉWd]kwG/E詉M3K%S/6D^Jjx6EK.7ÆsvtaOZc,-?Gڣ2p%YT\SԫHTVWͼ"Du2&.oujͭ nJ{:F,LSQf}l6s.^>Us[:+jfFoSUEC5M1Y, B%eOb' E;k+s:{4dӓ`)j= M1Y.7sU]ΊO3Ç]mSG3îWV4!_W9btq\l]Ùʟ1<p;AӪ.ܷU&*F%|>Rr _њ+2|USZir"‹ת\zٵZJB_lӧ^sAgNXʼM_LבeJC>bkSRN+Uo^g_M|I7cVk2Uݑ"&Ȋ:'z`/=*Ң#f%͗11>To(&v4!Ij+ǵn䚉Ywn6m@|EC=t=SieJ#{[9Nw.DKkyfvfeYo*ʒVQG$EQPenD9&:OMm퐎vncsv^98ad6}Pګ^֎F"9S]rT^WntVwXC#dFsW**wӀ-Ji|bwZ&wz#6nɶe&šI[YmvZӶjWwP̎Erh**zvguEoxnb v7맏PP .Wp#1Iq $Jin?M?^ӽW mo4]l j*$TJb潪_j)"b94p*[sdž}ƛ|N}ϸ~-E=_;P=|K,_- q箨c:tYaEt᷇S0@yk۵áue5QXBErƜww÷x jQ=ks_d@o}I~Q~\i$pX.cVk-mt-eխfE2#w.u] [TY}Mk.tm$T*ĐT5F?U7Em 5g~RnN[j߱'N_?:n|qjװok߯.7}USb'53[_[7˅u{=ENUUzy[flZ7}JzFMT_ƀtt9f)5oTzV[%bƋ*}e=ۢz;l QռIp k{lU7s\ݞW3eFjLJTkx/@"^=Z4;X2eT%QrDc9\WuEK@Vxk[43,Z4J#uVĬW;`f{5t*oFv^SF8lu_RD6;m^6rȝ~%q+qnjXijjʫ&܍ .x 87.T,2-:Fsnv"mӼ>S/qIjTy|;߳|mxMwRـO]DHP3"t4Dzu~SZ?܁]AKi]J,k)GV2I}(z'DcrçS8$:?_E&Ix6ז=!Z=[ا27}M.NWYLj4q[nԲRT$lz+o[w*"~8k]Mݲ.3ܫ X\u@x!c/)Qa\hYU3Ǚ̍^Ls|rYixڭd+ڋU1V]_SEYJ i]=;5QZEB E<Ί׋]*i3idX]eNgǠ>*Z})ؓ;r/O[ԌA}Ԉv.ThӺTkȻssZ0Zm4ĩo,X^/M9~DT)~~]Q|Ҟ5C-Zk]=TVx;yb>W25ީU;6Xuw+5%e15|kկUTUjv߲7&a%S)44H%|֪Ʈ쑮cQʛ**u+͖/o]Oy9:fAFת+S~T麢Rol9SEݦߚ4ާ]77t4jqXbf3DդGTӷUdXQom/sښTDN*⛙pEbkgiwG"*9Z]:" eխ[`MX-Đĕ}d.vֱ&v+nvbfj҈ޘkҙӕ1TvΨ5TUWxkq:.* oeRK_5;;-j8_QOaRƱL4Ff˶5s|EmUUj)tݎόЦWto3K  QgN=۩ȭTh!n8!lkUNdG.ʩ yG7N֟_iNFW9(oMT] w(\6gW-5 % kYթXu^;7mdIO$.j9ȞN۪־/4k@r\;Pnܫ-JNŁI*B'?.{+SGxjނu_Ƃp-֒[QO ۺvXT^}T-G~RO/u.)Wg5|Nr'USWn:ƈS(o]F9r32Ɲ6%z TSYGQNN4mkdڵl4K96?_+]WWvUUW5şx:95m**[[ խzt{U5QɷTEa@XfC,ө,xE3Md!~h3_j?4߮@s C8tz&M0?Ë奔ڄ;nDz]W @&y¿ \_.$ESg\otU/sZֺ_̜w4OƥlkV#ui(-|52(o+TFƪ_(RmP3|68dȠb6Tkݒrw)rWٹ\LF#*XbxV}8kl vvt箳\&wbf56hj2]'lByM&[Z젷S۹ϕUŇjkUfNԯWֶ 
\VUEIvL.pX4>U5\[5;]z$oz*k9.95Fog6x9㦞"vcO`"ҏ&cvjȝ8i:/Ōu\Wi-o5vkpό^7P=I$w1U"uLhx*xg\ÐdT*f]:G3jjTG&]:sn6>wz۫iYKwݦe#eTfecQ]NYqCLZ97D_a򿸙vypΜ])4U7jֽgH~Nx>·X-E__#k'k+dzw1fl6jXsrjDQnqZnߛV넸wI¯cIITii<${[]7F.ʅ{ѿTq •z۵<3NJ?ٯbvj<hװ.YtxUkxONl2Xsv.1UQDVb&ccw׆ c;cng^vWLM2[YMCIsud9NvV^w9S$E^#N8lucnEK*1b9Qn芮U^Uݳ^M|#M7cOS"]-DE{ђl/zrꪥ&*3 N-U]ntLFW-{{ᅚ;IՉfUUExy*jׄD馼'buhZkjV:Ê].ܩm,TGD^%XAV4e*ifKltc%F٪T7% :m:2+}fmuKaeT2ʲr:GX"/L|0527^ϓ33<Ǚ91eM?M7"jɜf=ݪTTSgp?qs߹LngX*8NqɱrXnߦ:[fb"gZx[ p ?>rO"\W2*ml\"姥g+]֧+Q,snSC#_s9jEz*/ dmXb\hUT#Un׹lEUD|LE۪"eu{8Zuڊ^f::Ӛv7 9^+v1Sjb8S LƺƱq5hzk˨ot[YW=ERݔ 7MMӹU K>: ύcG_FOI EmfVAr=Zc쨪i੺fZ=K69Xl k*48fυ.TrUkע#U-y;Es]g>{jMN*"no61LSf*cN1vE 9k-jԪGtܦ'A[̔VѢvK*8'5Eg/"g;wdoztTO{ETD.WA2{Դp̼DQ;䓒F=y_ꪻ*[nSM*b^3=i=\sE˛Asv)wiHZyL s4ޙE?٨U-B3yr1ZN֡nz6lO7k~Kc/ykGgԲ'LٵG5ѵQ쪨vok-ܻL^{|";G`s>4Qvfi"f#U>10}:Y颭b|lN\sT\|_eY2C 4>x2|ܭ|R86vR]!lS^䬝ݜ_nN']օ?oGݞnQ+^EWcu]PGb'*ݦ+LiTOUFΜxc5j2-ʬETTƴUM^LL+(u*n{eduwv(rovDl TUo7G¶}zƬt]nWS$ܑ5>EETEbzJuDږq)]mM!׫Uw|DUDsFފxo%j˯\ծκF=r޿[ukhpzۘvjz&bgZ&|u9:9ь T>7{j+yOTK,I#{UrꪫާF]/hnUijF/_]SoFCKKxi΍c^Uqs*QVNp0^&bbf"ӟDLϭ.3}I K-YQW`ku?e k~X{Վ2\3i,Z9_(ܽ]GO3QZX%t0+*TS}WJg6xZb|z"J7^u@Z)wm4ܕ봨[B6_+I&M,2ƻO=sy\k,nioltqݭ]MSTS 䚙hr_XEEj<kw4Ozx]:S3=F kQE1LGTDi[dsDE/$b9׏"?;u/p:Y:|Z2#VriTsUUw|S!:"SgݿU}oMDrW-3y7Gkm3R趰b%73v%kwUO^r'r70nv 6'?ga06b-[;)S-stU;Rf/źs|6l{P5.FA\($w,Q]vFllX؉۹S_ _/;}4JfLN"*JGQ}gWnûn`Wݓ3OeG%;6zb(}$>W5=ژ]kEFՎ"^6KxU;,~d>}xǡq9~Ys9m7+<4ϚPo:nCJ6(dj.~Nm6*4JIKFmMkCjݷZ<|ts{իYW3ʊ .Dw2^ykn5?،O˳?UG>}^}cDTNzTwkh%VHw9EEhW˧ZyP-$QHf"j~*x4,VTKMꩾ>Yn*O&b2\3*t[TNe.ѓ^R"uۧXxܬɰW؛#56DUJek7d}ߋ_XGn6 Obd\үDڶ6 O9țu؟_\륡UQϨQw;g?oG~ѯZL~Ε hꆝe-gaJ6^_ܨ芽S}m-ݢo[&;'Tc(jꘘD_IjW̑SQws#cUUwbOi=DuU6]>UF*=˲#Jf`/#=gNue7JK"#*a]"\g=/RhꟓU5ULS=SF#7OFڮokuNywǖ?fpp#oY+h$r,r=U[u;os+Mڦ=ڢxSM=QZk1p FnWZGSh]܎E?jDOGWm9~o?61V[Dpwjg͍g L &f"=:zp[鮶ʸaeE<9b{Q{U;QQQ~35}ඬ--YE NuD+cj"+E&}4׍(Oet疒H$r5sStEswDUDg[1o.[=+gIjz9Օ,ߢw"f5uזB\TūHtKjin0$6V=jm=M.n;RZmT,6JykSٺ'wD¶K% ’i$3Fcح^)\iUMpZ:( ak{WtsU**"qt:Ctnͭp?~M 6D_M1IDh=tC̬}}kZ-Ӵ]@MMEeDOL1iƼ{C8Ȳ 
U}ǽε-3w*٤+tMewCZj5olUjzUdmFFkS"&ȇx\v-U1ZMwjU1ju5FoNDl^*四=TDpO>E2'KTWnOP7ڰ|:>U-鿮Dkc{]9pӮ^,&4^|=`i\8h4uQ 7UPvtOIywD]ߢbuo<>9v9Y};#Q=Uzuꊫ<깟ݽMTY\S+C枣48>>s\d(TT^^lgWF p_gQցUꊽ~ϓsd7U~$m۬[ESRDء5Ƣ5kQ} ^m,Y~i)p\&}MUUM9%W.{xeKo&v4)mIxð_OLC SZ,QӢqs{򪙠5/64˝5#hbxհ5{YojlF p9n;9fOÞcGf_2(Iir%;\9ʊYSd(m8Ў$5maw^{g`*y|MбrUvU {ïKtn5<$kkh>WSd^V1ˀ-U֙fUX^ՔȵUjDVCN5芛qN~[DM 9l$#ȍEvjtj"#Qy*[C29c/k' N5YUtމx~-o>=蚏7/j u_ju_9`-V\ii&r9dor.ʍW"*):NQ3::e<:ցg4{%3t]ݶ"-C\(56gt< SdꝬʨ55W6"*NI3-^8[1j޺D믪?8eYy2V"#vئ4ӄoUMcGӤsDE/$b9׏"?;u:7KL3M#7KL3Mw|23<㬠*A2Wʿ5HN.i| 䨊%YN=KV/O*oكS0xڿ:RmsҮ7SȾ ҦxE[5%;mG,m1P1a0wKaͱ1spO"/*YK"#eޚ5;ԞzA_=Q=E? Wsg2zff?'DS݋u\ӤOR8$ajs*ꩲvTzKwn7k-h_h㤸VCQSO9 b9Y 28 UNNyڼ>ۢ)[\NUӔ 63.*إ䪻RNZ$oHW1|io^t?DK{x[$s%CEEB}C$We/{,g,{iSej [ݫ[2L:xmG=f$hOՑ1ʭM6HONҜEiu]{:tR6̎f$ncvjdTLtQls. 5&/KIItQFmVcX&1UW7x՛ 9)zkr5G*9#kbT"+x\ RkliU 41UVKM+'F["#v CQɦl(o4oٶtj9$uUދ"*9.0#L)!.F“*RHnըj{;V&ꨄk[++f5&|i\o̭Oj&}@!X%d]"/TyHI2FԑFuTME_bnҧ5$W7~ʩ]WMUijՈһmUۑ[y7GMOW ?hDNTEvjR#ej^%ǽKS?l9]49UXrڷCdHM/"*RUQ#ތW5{ZjuGz~P8~5~E:s5zeWU]}!1f'y36.U4]f+ݘKks.yOgUԤ*j,3D{$cvNN{~_:ۮqXD #-RUYM*'F5.bn'iEy]U#^\` e,oڦ5f:yu>oUoM5pO)NG#%ce{湫*/r ۀ Z7> ˺;etĝQ}'sl\g^77|W=ƾ 7oު)$Wb+.Ȉs"lrӬwTWV@t{r#r/"NW*z_/Nm-ᮑarg'2wSWm㏢%|2ZŽaUMnYET)b8OәWf{d9~kq^6tE&'SryDN1:c2kN435O 8Osn|>L_뒖~Mz&/KH}6[sdž}ƛ|N}ϸ~-E=_;P=QZ~iI?AfK+rɣG*]تfaq7P8SԫBeKw~n҉R"2*DUUHJ]3ŲI%IU2*J蚲5WdQȫuCG/.M?MvBld6lQ's gP)TE%Q:$;*kV{nmO5uXmms'2ު9}GOraSS4=zO>B4zեIjVmX ף6޻M/4Z̺үgdB9WuGu I=N 7e&EI43j~2*_Kt 3zݝqiRh L^^͎ȱNM6Oίx<+U.nTS"#˩;M:[y@#mw}l`FZn<5EtdDE]CUC%nX@*SQ}ĚmkXEv?)fm-fwm\]"nƵQ3=sໍʫ_=5S:i;ÿi&;Xx2z'Ԭ6&ƓTSJF;G+^_z#UZh3mݣ-f81EVy3MƱ]t}-p8J^3kuPV]U;ϭW^(L`+`s]Mr"C8_fAVM>Jo-Ken'VUnW.58ѝF)02Awm|uMemG#Mۢlmv7|[.#j+m X)Tkvj .-%afs=s8/\igHjtv׌k<խ*>m6k1]5cиJٶmUr"^k޸]pFWoEEW%ULUQu䕩DDO}yh-oIq$:igV'I]jlp%ޟRZ^Y-bw"|}536V6~}qOu(^޿nTk΀=^ຫ#W*5]=I=ϸ'o {_+U];ZBJM%g8 }lu]RBTcdlQʜ_ D;5Dk]O}g:ǝȱ?T70s> EX.YVIjUDlq1^]W5|y<17 W;U^]%DlndTTDEEe n*w,*ϤxԱ<^G7iQUw$z-v5ilokh~ΞmfkQ8|g\qTSFsf?Ҁn'~x[&;${?29@h_#@,xE3Md!~i<"~?j|{5o 9_}M:e$L19#y,u1=]3ZcZ*k%T!qJ=#i~R9׏"?1k̏ K_5>F=U7;=4C 
:gr-3y7vm|-4*S+{T_b;zU}QvXSB/^W"~R^fᎲlt-HS*aӴbSwD.){#G5Ȩn582Ov9UO Y2z> r"~U1ٯɞcf%iX~AhUߟ|OTN6۪E|վ dk,v;^YM3Ӫ+6zz;'yi9V |^%LOTU]'wGu0f45ɞyV5cbH֫~2q*k飬f'r~'^0L<PPn;yI:pfյW|SvP/#}*s;ލr"]É陠ٮ!MEJ6m0m4(SU$_uj|;XjWb5ڟ̾ªB"6Ub}̍.Ů7$],Us#T~oc~$]GsEVy\kK}QEdXznW7DVm/"N#oD yAl>ʭǩj{.Dj5rZʎd{v.C,|7pPdd58$X:bHEGMcg,W#;RsE#0%}]ӍMкDfW&1u,\otu7>w+\EtN&3N&-kٗS\q6͕&ѣ]/yKr)`!`kk2hZd(FljV/L{g&]"ǫ_IQ+\FN9XWv*N`ΖZdӭ+,UyCTֺċ4 5[+V.EZ^1.پ?CO&³mW=S]U^gȋV1z*r".˿T@?6حwCj*VMY;ac؊DW9WdC$iiO2.v%,95/ƛUpi2j8L-jjwm5ْFCZjz#$%ǽKS?l*?µcޥ6[{ZCi-}$+tOPIMS &f,rG#Q{U6V/EENS|eh+wZ=vTjS6]߯t*˯Un=|)# cNbPϩ Ew񅊒8"l.XWm!Ưk*Rj2xƢI-$^VEGƻX+f<T*adnf=]O /&犯!UUGgʧ*yD41sGS7cow[HRS9Ȋ+w]UN}/ 5Rgmtg"슉r_o{h{l%v[KplEzMz^b^fFɦ}zi颸͗yQh+WG[.켎_Xw&Y^UG+"6gwtE{%gǺto#µ9X][Enh#}Ȟ8\#5ǯ].DY>qj=d/Lo'l?zOcO=nj[UW3Vm{~wN{|C\Fo\>]yUWJLHzHl >gvue^9Uz綊u{g>kj뼊ˏeGȰUղ8܊^T:}q^U-i:Az̾nZH)y6UHXm7[. ,\kI k,GSekZݽn(":}WxnlV#c b7w/`ut/U~lꩍjO 57;>>Uy>^(AY..ҊK-K?V9?T0>tOsNmjUujS[ULVFʮF@.˲N/ϳ,6 TQsQ4"kLL˱k6zSY}MƚMo8lcٶcel6{ p˧ ]ubڛQA,9cc˳;82ovۮ_VWE\f*تb&ߎ|YkMU!\mլ)jcI#+Wz"Q'+d^rk;$z͡XXZ\65wߪU"xOl׷GN hwm,lekU7 D׷F5wMdr~†SKe׬2Sq,lIU$ͼ"t:9׎Q; ~sy^hv9T d,dIGKP[=li/>ZkޫT5ʙlt ꛺ʯjw-Ʋh8XeM;]I"E[JeE+6rʊ97j(&|P]}otOD7]cjbwqkxff8L N*MmLonVMn]'X4>vG$p6)j%o+ݶWm+Tn׊k7YKD-uI*1d"~96״]*.QYQ](dIedH z+SeDRš^ǽ]W0TM鈦#NtO.&TU11k[ӌ ϦUTOoF|G=^ekZ]ܽN5W[WՉoR]{Kڪ6߶99m;>${ z#zk:g^n`i艊yvhf8~;cUYme}eQ["QQQZr"kwHVen_W~ǷW'^~ǻr2*b*M\f"xkפǶ8r62bES f/[q jՊYDCL?LF7|WdNLIrwkγ3\.bcx.tk?k, LkEXG,^fc7rDM&Ň*˯Un:O8SX,bݿLU.cJkruN@2jCpj5H۶;U{:+TslTL Li"֡yj:4AQ %=|Hȭotw&EڧnQXw/g 69yvk&c,M⭾يW9?_Dw<=6a.OʎyڀOY:UPsYqX܉#d*F66~wtj*Ecxkk13z180sFfۭNd:y;BoyQ_YPw9E].Fc8cEYڪ1nM*zz&܈o.Ug#nٝf{܏zu͎{?29@h_#@,xE3Md!~i<"~?ju=?p:jAZjosh7ʜۢTT8Ob̋MJYh.ڹ5$DNXk=;sSG F$Rɩ5kXNݳr9[rpUug6 [/֚gJ*:lB-j 6GDp^oj8V+ {vl_vr ah૊c'O pDKsscKq%\*DtvtNvMͧ?{NG$ dg)w{۽|)-WlD*=$o+ݺw9{cDMUrt']f7Ji:׺#|2m v{ TUvbuݟڙǜ:c?Vx[KQQ7U]N_ʀùꆑbkmCJ"$*홺^.nzͼEj:=q}q8Z1c"~bȨ=+&WGʪʋ5'NNaٮ71{u']>'xkؿ*n}Y5amRKU=†}ᙻܨTTPvǙU$l+w]Dʼlwe915 *=}#vc Ez{ySr鞩bڧae啾UNޙ"w9:/˺&I,޷~juyK7bhktp p$q{5ɺ9_Zi}S#L9h1GS""&ƕq|^)Ep'+j 6r.1Iz쉵V(Uq$[EQplMW+iY*+ 
f7>5F+ʨ!-Jz)?aNZ"v׿˺I%c{5ɺ*/z*N;AKbqaƵt4mܯFlv ?9(kӺHj:ūVii_~#'/k=?ћ ;nnT`0:ܳY]/lel+516rdj")/zeau. o4sBeb%DoVvd bV V8(,tP@uVo2\n⪪S,)!kF}evczܩ߿EYb'4dq^9vFuU_aFx]N^s+Co|D2=Nn@q纏 ~ISbdGpTH|tQ\⧑&ȩjv][hNf8u|Β❝tQ4flj.ՊʭbL\aoϟid=+$lkfscuB&;fz+Ulq21keCPҮ&[ʏ{ֱm"1Q,>q{ u);ODn8${vz[3QM64+Ҥ=sʩ쯑~6l!k͖KHꖥ#\k؊*W6\F{KqY*yS<9Txӷ"\wXF"&ꯃ)}djRLY$3F:ekzvR{W؆S;3SKj雙}vۮ':}JFːOiꈑ\iۮ^hӿq$Xls&Jvo»t,mEUStskc~&U0c\,U3IdSX xfZoX=tڗDZBIG5ɺ/TktTU؈Jףq &ifq S\jȩ|U3X!Ʒ2hSyqe̿ċd8Z:X9W^JMz&/KHU&䥤>r |N}ϸ~oϳo֜i`kN3K4"~hܴvk^ȚM=_mTZP-}slnc GO'v/{djq+جv<֍w˾aqWw^N7]ڪLo0ɱ燭_< ҹĽښzUGȋtWldN_|5+OwclQ]_#F5Rgl;drlr#lk\۔ٞ͞S~q UW_y\{㡦6=\L*sPyAz3E3b'#ު:I>2VOhD1>&8éXon.ڝi`/ hƭw uNw6'zo܇(QE2SsLsOq{ac[bYeʛ~ՌO{fUQ߃e鸸 kANlFTf;7j|Zh'/3{njݣKjRr;#mLj:lwGwujȎ vvkGOMGҟ˯^s?YǮz Hs7ʁhFto-~Ќ5YUSXfC,:f~U4մS=*d>]KY+CZCV4H2}Mr~p[ꇬ~h?h+_w?}u{Ͼ~?R8-CV4}P?;L>Sk\o?h+_>zZO]^冀߮OԷ }P?T=gkNS'y'[GzZ揪Vz@Wmw-#T=gkGY+CZӽ |TC}6oVꇬ~iސ>*d>]KY+CZCV4H2}Mr~p[ꇬ~h?h+_w-ځ~>Sx[+Y@E|?5v>"D_ft;j9-b.u*f"~g>.BP?T=gkNL>wSk\o?h+_>zZO]^冀߮OԷ }P?T=gkNS'y'[GzZ揪Vz@Wmw-#T=gkGY+CZӽ |TC}6oVꇬ~iސ>*d>]KY+CZCV4H2}Mr~p[ꇬ~h?h+_w?}u{Ͼ~?R8-CV4}P?;L>Sk\o?h+_>zZO]^冀߮OԷ }P?T=gkNS'y'[GzZ揪Vz@Wmw-#T=gkGY+CZӽ |TC}6oVꇬ~iސ>*d>]K:y)%3U۪(ע)cOZ$ $H(]I]yV[.ʛS~gfob5},Lgm[Wi?OPtx D2mnՒ<}#Z(TMS$"n'"*o"l_H 18j5jlֵ:""'rcHBꪪꚪf@W ˝$UTQ)HMފ LDƒ5MS:L*hSyWI*#'4FTUc@=7G֍%W,>wHҵ]w{ùzEQ&IV=j+\M AFZY% E.TkVxCܗ PȪQKu~2 ܽ2xZVtꊼt7ozi% JdDDOb McD(d>4l+;nQr;biWq;e&2tIvLXr[]*S}Ѫ_a+;Rʳc9m'+Ѷ9:-g^ч^#{q]IX᧼TDNG1vzyf{i'O{=p91r\~KVSu%DJjUڛ/TYj4fmt#QQyjhlȮN;tEt}*gfW!fйmA[-nrݩ]nPxwgmݾ+ʷe~M6h/ \e5=q7=Pm٨z~FmVNUg٨z٫ܾo=2~=u_jEJ.gSdwkI항SdEW"1UQ7o6EBi ,z^U`mC"1iݒ9/֞vʭD$,v6?lɫi/Q˅M<}ST%{Uʉd5s|K ,(qK۝;'~ɺnɺng|& rI1:v<ܭ{zſNtD]s,E&i֪c]=e̿ pX.N1N>j<꒮:)j>9bz=j*9:*|sL8ҺIפRw}<.wT_nй:Q.L9|rzVکSIz&"#UMfUx\KuyOOtsIvg߷6|11\&z~Un=tkI SǽKS?l*?µOn쩤_˧϶,| .0 ӒaW[we [gfn˻iZjqU[l!ִUVEvw&6]؜s|Nc"S4[trj1yzl\ގ<#WBViEŕCٞ5Kk$UT2=i_Uc~y"9eN?fshݯMcLLO)q}Ongo؝c#Tbß(i *EW.:$p<]ѿ)'^+ьv1'xqV7Wyjo3FFWȪW1?|Fv]blU<]ʹDt>efxksvLt|V;w%N X.ԑEs0뢫uMs 11ʡqe̿Bk{s/;1"ֿgWRGމsrRoɣD)i\*f<34߭8[sdž}ƛo/)Q")J;~il{1|8Xj$DȝDΧomx*fM 
I#7#nENw,vkuaEjW)hjsZ#Ův˲/CFnSsS4׌Y%it,Rm3f7~͎F' Ѳӊџa7;ijLΓ1 ~n<*1;S(<a3рb7|/% R+Scsj' YwǴY7.eI~OJw:tߪ5#r$kW!K7Q8z Ė;Wv>aDa&}>^ؑħ XκM+*%)ycV+ j ܊˶c#7o"!lAbYoƎVt^EV.uED i5?zNQG"n Sҍ.˶ʊlV]x۔Ggm=q>i幭myt0FyNq;'Y!kN9Z\NMҮ6*";j& 3 4ըE%tHqlN¶SG卧eMQ dcTQQ{PT=^@oJ{:HS*{)=yQ:]t^E,~j^?X9JGJY|X/N-Cvs7E42Q{Pa8#vY ȅL^|[ͶO ]%Lw*Y[435gT]T>߸ܣOnNʟ"1W& |f\2vlq*g!z=mT_#{6UwUEDlSSM_W~]q{uGr*p6u}ӲT~$Dq1BC_UOI{[s"{7 lM?'_IS[_r3ݭ*+zCJ"NϿjj6~mcJOrdX?P2h;J09}iL_뒖Ȁ9c>MӋHU'>xgiZpQ"/( vϯb:[g.tjWBj/3koOIȩhgUQTLLO)0Eu[+tS=6 aVcT$SKʜ?nʭ_~Cf"e9vi\et8M̞3UDYN9+,OkG5]Q{nJua^nz:S林T3*s,Djw5UTE麢;cgctK{8[qfM1""";84uUʷg@T|5B3G3|mk`ti2ڭ:rDŽ_4OgV0<)0aZf\ٱ\uꨉr:u6 7%2WUJwb>>6r5ntXVj?E; lmOspS9ȈGWmd7[}6ܴ#vӻ]g-:B׫:*9TEN5wƮʦ9β HizL队8<kgzevv3YU3+]6V֪xGwrSwTEJU_u'BfTRYn2,lU"]^^W"&Lu[u^,fZxN]/1Ph WtUuKѨjF2. ҝ#imYk>ΓOrkeoEE6bsz(Gmv>%ULI6Ne輼MM`a-Uz[<5dx1]7McX/ kdo?uZ:wjDWl_'`co=ҮocDq&:#Grcޥ6[{ZBRr~ Go!|4>ʛ~abuDqMw(vJm8Y7eY#zrE3Ӣ-r9rlȋk6_6-'qln_n&zѾZ08%v,RjY\TUb=z5nDE7c]t[sZJ@:Tntj9rtE詹"g~>9W;\-8r]%7#sN s7%dRrHEs%ȭyʩ˶8oN iͷ]'w ~癥q3wgX4<4Z0S]q=ЮpZxvIyZo2$6DF{*"m̈T6?Ye&mo߮[zԫ[QEjTS'( rqU7wuמp]4Ȝ U6{]uB:6j>/y,e^ƒXz.Qc{խ$5̠͖VTCQL沢V>79DUEsWtTTr+T߫[oȈӗ#"Woz/>1}E%m ͒Z;DZvHbݑz쨍:&.yfiT1ݱHm3FewOc# ;Wa5Sv7{N[t퇊-vViQ܂4}+쪗6W_nw)gg 7Fk#cwcܞ7MN`:hfp=}umڊ湪rtVrm$`[پ'FgwTߣ5k#3Ô=r 53ܞpt-7Γ<߲ ҁ*!oirY*+' \9ݛiFv>6b c !5칗BCo{.eg~Ц$[#a~"*JVh1JZB4}蘿?%-!K[sdž}ƛ|N}ϸ~-E=_;P=fv;xm_hʩVNro\+UQQQxewjSuH,jj5 3 Q:+vyk`@if甒>grT[H`wg'Qjʋ}V_VM]G1Ȳ<&(TE>gG,bM}] 9@h_#:ds7ʁhFLXfC,ө,xE3Md!~h3xԋ CcctaRK Lߝț>6HڼbMT53rocW:qs3DDZIn:N]Q=l>׷t}Z=YP<ǵ<KA>U("ѱMתQ=:nIU,,5>;UriUW#Q!q(tJ;<g5U 6<*rcԘqֱGbTG/'ܦ'j|t-ąP{dqcwǮ=$"wvn]jӵ{NuOO.dE\I-Z*+#T|"5ld53'rٶ}!]VxGPTNvWn"58،*? 
b"?W=ߋ҈RTp~=/Zjat;['R:9+ޛseaՍQP@C|:!,X}37uN۪$qyQʊm.졧mtвJ;W9bZ{( uj [S.@ =R99{86/39%n+˽ sw 3glgL'/ݺ$.M[B &,Ѿ7$7 |ʝ oN^:9="6?zŧogַOp.ۃ$66G6yj[m4kWȴr()r r+*cF2 Q_;Y\舲M䊵Go]jGdx}5[粲&SI#YlDTWn-L'鰪{ z~Un=tkIGOiTg >Pt`8_w bo!5칗Bl_K3Ы)[ɣD)i b\\.Doϳo֜ZB9c>MӀy@;- pj-*guʤMQծEv]8;@s "Z&K Evrlu+G*7_o]Gfck_% I`vN7g1^*I} 8\uETEEE2EiF6u4Gژ^׹Ůr^*U|dj$N ં:YG$nG5T:+wr+U7oqm5iʺgQ1LPY<,kX/537 Ʃ{zzW;4j9s>G5kk1Qn0pI^\ƻ#2pʻ#B0h֗iUp3‘ =>֯%=;6DW\lyW^<;Fq]CtC̭JDsk/'UBi zZx=˨bTZ`VsD߮6W9M9qZE=N^{MQ&>9vcQsXrʛcRŞl"#6U]OCa\kmꆮұ>ZUz*%j.W"*.ۢ4lXqLGSq],pfPIjXZRI"HEI6j5Qyq N|&M܌e"U^k#WlmEV({}/o݋<.ו99ovscޥ6[{ZBRr~ Go!4>ʒ]~ab\LM!Ʒ2hS-0?}k`^{%+y4}蘿?%-!V>L_뒖Ȁ9c>MӋHU'>xgiZpQ"/(x(dF^&sWM;\O5ZڞY<ֿk-mݖ"wTo,inX1q,>cb6.LLk-]qr1 7Nu"[jNHLXYZiU z]ܝZ*dѫ::fS:4m]zLk^:/%~*8sWϧˬ6FÐӧs*'-O+vN]lmCm_zZ}}iL6?l{ 0qV7Y$v56 ^g_ 6jQ;4X^>6V뎊bvX7U]!|V L K=L%#cqQvWOPs#cW9lN*ɱe֪_TG.ٞQ1 |F*~;)nZSe5:e#Y9ux0kOGEo4j*sz"KoetΚ'Z]qΛ7||Kջ(GkУܭNUUUUU^\μevvTs3!/k[ƀ納k\jr"~~L0yݛqϟKU].vm{=Q:Uq~et%=ݝ,n}۪yvnzf;m_oM8Læ [i!'ֽ+ڦ3<܋ gSO#V:9|b5"KUMkUFm_3=` .6tR!Ugbt\w}3UjjxO rAA.e؆Si)VRFGbo.Er*&orM˕^ L)e67GpwEWrviMU+8$ ,vQp*[ؾ(wJf*"W}Ziޖe^j ~j ]顖h dXQGY_Uٻ+OM饊BJ+m;J+Uy}i&B?P姥M>=u{ZصO79abUW57 Oq[yyAVg3| k^yUvUD%xx,@/8{骘ݛ,֦l55=TDnid=b==lXgZ,Okr57jTj9֪Eߠ W;?Mǘ\B̎ER={RIWfF]u^]m+MWĪ0̲*h*%, E"ucD:쪞&O)i⦉9d=Qds\jg9?MV#GVL_뒖~Mz&/KH}@V1qiŤ*f<34߭8 ~yOG@#VE5j}MӋ-yb[p}3cܑԳiXvGmE+u˃ u@99;FGV6$tZcSTrmkcSr'cw V<5:Xe+C2>Z.tPer5Ĩ]ʎW=$E]kj#7=OxQ Nn]m""ȊwM,QO6VMEEEBN dr %Es_mk/ʎ%3Y"?{ӹULlx Xz>U>avkH\z'h ݼp:<|^摈ܑ7*x*MYgtpUʜܱ5"jo_v,Ƴnw3M՝W5SlN4,/pݽ.ukwvNTDEz.n6:ŕW(#QT_܌b<m^|$uTO˫sb`/MNd>ke:"o&EJj~uw}WdN~ iU?ݬA.]Q殾EnR7zRx/&Я,µI3UeԪ&#iX[ez]xᄘ愈կf˨ٯtO&Jp^ukO4)l.]f9ۢ9Q{Sm^N ٍMdםJڬkfKtZx&9QUC:/[>"?-jD}ơ x7)KD5SU lEU;ʈ܍Hު߶ݩCq۞\U{+W+j+kʩW-f \`WIfUJ47=wZwәS{pVfg]rɻvffu陞r -7>% ML{%G>U"B^ʽzn =ST:?.YUIcQ<骙*zbbah٪%ݧjnW1nꨍMԫQg ti]0*W] ƭrṟ5ǪukYX哕C,7.Qfqۥ5pSHX׹rwx^%8~s*>XGMU;2U\߽QUUJfN=u2 zfoUĽZ\SV8QwETm|yثCio~Cv5QE;$u( GzS㺉ʴUW7tɳب+gB1֞K*t*ا:ethf:W9kQГu ԍ?tPzlWjv1[MӋHU'>xgiZpQ"/(jO<=PuoUQ2kjO>_YOW{RSZʩ̕0RgҪUd<]6z;WmYr3TDk> qypYg"urCɶ6^_~9y/[bjZDSs}{W&y;}ݝα# 
_7}:,m&q4|W5Fn|&cws┬٫k_3>_ČM~>`s:m YvҫETU "}ioU3־hѳwNEۼFmn=;+a׆W4ҨuOLRY^wDEfyt;oj=v۱󤂱ʼ'M2.rd߻c::Ivn]i'֓@o-~Ќ*[C29c/k' N5YUt9. tHhox)yw;Ds}ZEߡZiHe8a碘WG\EniƟc2Ly(X|W+vrj/7ETj즗-T]EFI/lVg+ͽ$jlnI[NO4Xpr J֮UY*ܫwwnEUM+G.ɧ\οsv"z}4oO8S]o%1JYs^=DkIcTTY1. -ue5 Qn]WPyC]xƈ:Qjnݢ;tuTS *VjF6h~dvj lPhg#ި׹7V_OdEvinAˏa4XeCykU_etm:*Q+HnQp+eꦍL2f};F+\uU۪9`y(쨚%kbYWnT{Z"/+W9N?]_7fUv5}*кVaX.wkYP#U4{g'?Fz]-(6k\*#8W|oo)vQno5V kcL:gG5}wyW_:blɨxh*TDv"vSk*P6pNjܜTY<+v{?;L7:-Ҽ=E[pէEN~$Q6F4ƜgؼU<]qG|t?DWj|q8&S ٲ4Wْns$'z#訽ʊas  ;9;Cx~UD_1޳sU=3qO/Z=-Wnj䱭m.輱K$)܊n7=iE|3~~mzn{ާ*c?=6MMMMx\!rUUdE_B +/a55}U.Z-RoTW>z;eeEDWFSN<jƛꭱm=wtXi#_E6wڝi6TeZ}TewTG4Msy[3g$j=XFlNJ I<gŬaWJvdG }dF&'cz۫M'Н֕mLci{%M'*&f+cYVg~h5UT'/r5UW"I#ϋ^ert*dMݪQg'_E9ox>XKfk]+m;#ok*uZm irֆxsx^ZPBo21Uʾvk<k[kJj"n# ^nokyxWQU1b&^EG" K+.-MTjP*mw^eW*"$cSdyh4ےbO #j:f+u%(DF5D+܈dV'nn&u,QO6VMEEEB9lELՎUStүNz|fusDs+5c&2$TG=nW5z*j&&m/k)5>NMյOHUұ^[1[)<==Y`zʻu_jKZu)wmz']2e|}H.7fjU}TL&}mQ¸7cy~4A6rcDs\QH!?`-jb~9- j*މ66׮Qջ"~6םU%5M7tvnUW:xGsz3 <,E]tܩ'Uz"zxTڝ4yAnNפujL瀼?{i:aoPit ʋI]}oV;ʻw'ғʵ 1vy EMȹ[颯WrAO_*=|QIQs#wm*ʩ=KS +9ZUUXc۶bM۶zns?+j>gkU*4\q~ϻk.5yȎ]DH{QSN7CO6 _mN[WXݑQQ$V&ȝ>$]׸ϳ'W㱴u>کYkɵ>>XVF@uC7yt5|uګhXI-bxw, z/z|~5[\.*y*M➜_)&^KS$H써gs+~ h]u}n_ۈr|3jrÏٲ;eIM<*/#}M]DW:4OE^"s*"cz"5vnmӢxg3Q1F=e'LͼRJ{}mZI Jh0F5"|.S|SbnLtTϪ4|, =|nY[8Q}Eʝezܒg21J4r"RTb.ʼޖ?hD05e!h_n_bYJvɻ7Ea08\߉[=TSѧz귮U3=biÉX-ku;Zhi mFQ~V7e29c>MӋHU'>xgiZpQ"/(19NWạ.Q-V=M]\qW*/DX;WF1e1_T9$D]ck]ywjFMQpcid\ r1Xs_EK"#۹e[ͺBV#d5.sɶY.V~MUNUlloڷUDj13;uGK2M.QNp4sgj}­N} 79/i2"9訍Tr=!,\ufgͭ\YJ{QWȶQ"9umOxDnw#ݣ^2EXu>}-GM8bk,)vLwM߲؆vo\f{x|-%/DSOTDDzm|4G1kíJj H\"n n5o[b{'EF.+b&=mg~i ѻHt.dZJ7#V$|b*m*&n6b3 YddjXv$c޲B#6Fs\6bto ۪x;q5amU>ϲaS4U8 ]ٻm$oz# `鹞yFugM#tMeyc\U6sn}#UWf=mDzGRYp)t~η*6;3;S<1ڽc-QdjēVa5s^TT2&fP=Ι{? 
<"~?jqY?eZL[{LR/[.!a~%#iC,wޞ&F=E_K\_B2-Gh.Ρ[u^~hyʋ٪?ngn\ G:-Q~=9Nz5}oWZȻ2e9Ն 1W,?޺Z~wh\=1kZYqRkkU["Wn.ۮԹLQ^/ڵºF5ΟusX|_|}{X?i~sVb/2^/w+db5ީRF1pnqg,XoXuLUӪc zSǖoѶ)dh[5}U i"-۞SC]rޤ}5nVTFEM@uֽ떘i2es2ԳTR,뷫/];NKn3fK2[.d*t^Nӓt輻8_(gQ{u+\ܼEmpϨa]74ïLvZ;^H,Vx\%=]j*"nhs޶:_QSQ3ёs興BxFG4/m:qReIi 6kV75r*+^DD;Kګ حbkpuDM"}4H":xUQͨ8 HSvF?3R7dEwEk~>Q;ըa` 93)s,&lrq j/6ejc:'ƪG3z&(uK[֗S(hnb#ν:UqE?;%Ͱ5i::#hLDeHVDroE4 &M8{?55KU=KpԬioNdGtzy6j^ i5rÐ.Ma~#uNmusG/j./$WU6rxNyOmvun 塧rֲT{ɲz9薘Z8lՎ$&>eerP%wK=$ǷuSnb8SQtmjf]iK+-3cU>k =I؝:.ѲlVLJFFe^I;d꫷w*kFE5W`8o[EU=g.SHw396MT-#fԼfZmgSEG;U2:6UuDTNdUH4I,4+ b9񪢫SwNpLjM1=PAW\4]gk^s(F5l_8mK]wQZqNƭj%j#VM^uW&ꪤMƶ?wwvf2٫Qbr9GwJ?5pa} ҦسKUL09%s_"9ɾb;2&8_c]DRL{Qd.Lu$cR&d=9YŖa<3Gviiܝ:mHU$=ngo$ hĸ;̨+5|ǣٜTMm\nP%rkgm*]vI:@̀oϳo֜ZB9c>MӀy@P d2h_ϕ1&Tz*+_QoNf~S˔Zkt^f3UUNR|_&3L5 -r{Z䧏eޓZJG۵+]#/v:Kf#J[5tO*eeTW\DD]?{9]E~rSW"*3U&7n L𧯦}v͗ qOͧ(&Al8ڌckQ6DDNCa4E<pIKUsC39#潪+UEE?@W1KOá9s./iT= Jkṉnz-Dz*m9uˋ:.N;W]YL}&wv_MtF 3}9"ge{gh423uk_wO|QeVWfb.u;;Q2*eQ\4͟Ww7US^6|KCOj;#k(2TFܨEܫ՛uěrtN=.+~5ʬަi8LO {ZfP=Ι{? 
<"~?jqY?eZL5Efs& {fMAPo-Û۶7~ǽjFȈjX"irZ~kl;%U;r2S.芛7^xxVc 3Y|(k6}}y&b5nUzﭺ76h#atϮ|**&Z꺙ɴ|ȭDG&ꊜȻ#)/U-/٬W:;CU航;_[ʭZl .+s=SQhdU[]Kc?=1QWoBW*: $9?D =ZףGVK}Ķy7YZ-HϔR5ԮtwTjȲ/2r4.s],Up69Sޤ׵ݝ}t^*]YV"&|5iozeZ*v/GOQEXFNIsD^frgK7,[J~Z`TV&ƹ6M̞ L֚ڨISJv1sEw#ھ}<EgZڣK{L zK57jFl̜o*;UJ@4slP{iwy}[1W7~UWshEZî܎u2UQV5UW]Ws;eo2.Z$s+gR7bt]]uUT_rt-8eq%E{Ao$1iF~dV˿]?BwJk~aՒ,ZZz=SeVD9S6NW~3y}+᧠;SQOYޫDە_m1VgWdU&-To̞*2T^*-j[]rvƳ?Ne+mn1woRTZ]D{jO="jnZ؍[ڛql펚9rs>zVgYS]y75Z.GG1gb"+-*'DV|!-HEO}g ]C"UWHEssZٽ"Ĥ޾C25{Ӿ5IWE8F-7'޲GBRz*HDٌG9QʊR}񑨚b:i6woc[|^JȒ+6rUNf7ԟ4RJӪzq{lw5.zF#dl{:'][sdž}ƛ|N}ϸ~-E=_;P=~UUT4VEOOOi^dljn9UWS\8| >EY_kU-x7.UrQvA*I˾Ȱ 2v9tˇ+uTH2뻜b,(DZӝwժMgYuTϫ{WICmJ޵ ]=uKYUvs80rudvJAf/^Q7ig4gX->Sȕ5䨢oV**'XN8]o?qY>sO-B%oDT^DOImS5}D!lj9WiD2j*:9X9uEn&*w8۟Wl}Nl,F߳?dw: nI-ڣRiѫ\iZck]:x9計DK2tJj銩b_=ܷ]Ҩ&'Lts7ʁhFto-~Ќ5YUSXfC,:f F_tNJ^o7-PTNvt*/qR8f^5q>44Ȯh xIlG56j [pV婡k֦Xt\:**.˺w;VCZm.Tjhr(j5FG+e rDT:s/76wJͦ޴[Ts>Vy%;wiYP߲17UDggIWeY5m诪e Ъ5Nbݢ@hM"QYvEVF"oܞD]sZs\**njVb7/-]sHH=j'3StV*n)X:ls-K1Si-]Xv1r5ܭEr&뺙{I26RmsKU\HϮ9jTU$M4i~y5Tѫb[)v-^vU;G:7ӳ[\t/xv7Ij [ +vO6MWmUBApMNc9{nQjuόY+Y  lȊ~{9\n"kѫzuOCq%PkDoTEٻ*'yuRuEq \ +Vv)i+\tBEEjeF=y1wrsLEUTRG~om9 s#y]ʮDMT벁&c,MH ݵ;#^k;^tf~Z18^&r1utF\N{<['4?O\UG45O8݊p`CP,ay 'kK*;wDT jY6S4&&4؞@]ƝxڭsWTUB㵗uҦqY_%d}[6 w.Akj--4Qڎ3vkZFwݾTjlrpƞ޺|Lv?9s.xq_ͮ;i:gUc=|:ӀfZU_R\rXiQpѪw/"mb}V b'xv9N/WOY"9!QgsSm8pcj= R%evV[(;FȨW+sܩWiu~/ _r+֢6n^Hg.9OU\ eŸQl[jRct]풯/Ӯɳj#}76 I0Ka}\um*+ ٭۪ [sdž}ƛ|N}ϸ~-E=_;P=UU5 4յSfWrDDEUU4b5\>LT2H6Ȑ@GK=Tꊭv^VSڗ̗P.Ih1Z eWdEF5:̸m zt\6NMF=s=Xd[kxcƩ"ȍTsnͪucU赣(u/ZFj(U2nwک΍V64kOGeWen[68(#H&5;>;h8ݳSv[D\~ߓ:Ǎ16cj+u;Z䦅o'U꫺U|LުutZ(4/@2- ʝ"lE9eөjgZgIynS4WKK1MC'V/sf[=C]hʊUg5J뤷縒:[t)%js\F:snt[A M-vS+ણ'v >٪-V:)GfjS8Wv^+iNK69^U©o#ֽver!/ټb~vx|wɼ"WG|-du/zՊq[¿@Y4%HjTt'DL0cX8fjAU j]dU=UU^]#Wywؖ4:q]?C\ke[DoCZ:59TUTw2/(I-ͰY-nbt[V}tmgh]bs*鱐Z-weUnD*ijblMg1r*9;HIxFнQ:{`}L~證誜Ȝʊ]y 7,K5^W:E6Hh%VKTTG&UED]Q)UFmPT{ kH UkQQ~T{d+nOcnWwNrlދਊT/ܸqK3rXn1;$^d"/7+o#7?&;Gnk6?>!1#K}™g|9V\N֫Drx56R9Mp4C/~hԝ4~̨K~E쑍DQt:Lu3 ը ٷ-. 
Q9^"_ڽZj*/t0s鎩|#gs'5tL}KiU_(N\+<.S/eHy舵 TjF$rzZB]U~?F z.A dݬ~s{̱U{ܢ1wYkòь(߫$UTMX淬P#s}/0O xukL{?29@h_#@,xE3Md!~i<"~?jG"z)1*Z4]ծߒؑ}U{USuo72"ʲHȘe{X"r숉ުϱ]Hɐ6㬨F>H^r{MQQ~"NbvM341:pt'N::Ǯtc,1\w nO{#kW"*;#U$[8ᶮCQW{,^mYbB~^^ȝWG-V&MdbIq}55{8㩑\?UʍkȭiLIvJj =m+y%-GgBt"#_*QUU-%8+z"vN!-[Y{;tr X#QQcQ6DDN19QvTD۪+Zx-TvǸp})"DOk+Uvs"6T^ɹd/\YskW/I^2^"l A{bfZMvlQ#{>Q4Jw4쪩ȝ:$ǔk1g%fX\(`YcW)Sγ 6EGowy1dӤ0UAHjѮ+Ȟ)+'#@) UdY%!>zT1Xꊇ'r"dw9\=ʫ%IܪMmwfMu$ksCVEXn;6%zuv_r VzbbyTO Oz.cwӭB  T^ҿu#TWoިFjpu.HV+;Q'1Nʾ;{hr{^j};ؙG ^1Q;w{7cp|Veӻ\fhu6z'S11T2>Lět>\>6*'Foi޻"&l l^6V. |͎Sb2\mvt:'cJ oXɈުX -cSe樢b1U{܊_Rr*6ᗽJ-bzڎj`yQ.nD8kh,R,ȮDݷWt/rr&}?ODҍʰo?#?GsQ>}ff7>c!YO%ҁb9);U;w5wbdswۙU'>xgiZqi ُ 7N߀_;SE)&UhN2'\9qݱQ}ecRIy|{5:qb&gHU~0ul~7)(UD\s8Yۯf15֦Ȉ"'4\NU\j^{*^idsvzm13 1TO#})Y_MGk\wS˿Y(ӆMI][ޯmڕ"#?~~ۻ0Y^7G_^-ȐJʟȫN Wg.ΰy*xi^? + =SE-TM]#ȾE?sbbtΚb@E@ƥat]qj=T ^FMET.]99uz=37Duu+)U]VȪ7zjԃo nKe%gsHIjѭ]ػbE5訛G/\Tuyd&EV4Gʻ>Ҿh`*[C#T|5B0:drDŽ_4OgVN9c/k' @y`5j=WeW6"xuznTEQ5N`f80xֺ"#}<8@ּ٤|2` x6›+eN]:"G"o1&b*+mQ֧NUU{UUzhN*Rݪ/C"HZ~*s=UV dW)#>E:->h:M-2_lOFʜf#Mڞ F]SCuFϪ{*\{\++׮ꪞ/.Դj=֋֪G'7sr*;|&?kiqIW5ʊGHs?~}Z*5F٩/8:@X$.wꜫl*,T4ΰM4]3枸鉎bf%薯E,^?+hr<}V5U{'X芛*# EF|P4556ߕ[SbHY"z^횊Nků!lV2'nczn"h鏕O)m3y,]4\io^zFU3<1S+),ٞ?[dqAp"*9ȾBiKwUt5H%KyZ!oNۑ#eQVũd۠b_}M,u6Ǯ{U4 qN3 |c:iښsU̟3p*Tf릉\b%MreK7-MӀy@sҷSlczW@˭&9\ձdM.WmU뺪'@`*򵩺z!=%\e{d{5fGTg.TDMkDMmvM6}6 uy>~Rc2έqyaj8pi_%Y 8aRVo#""ɭlRb^p;CDSZWtTn/D:_hrzדZGwnnV#ۿ*oU>5:NAN <=Ov-9h)|LC wQva:{r.ͰYn]ڛ$>NtOWǧlÒKڗ$͚KsܽW.ț޻&G\?Znқ5L-U$Խ^TOVmH-ͯxmtpOl96}^T@o ^jq2Q eUdL5oO4jګ0Wb=luu[ܮbJXepǪlfLkU |7U{Siu"N!VY V<}IM ʮY]"u舝:]d)X?N2qo95fŌmG;8q/l|_ztߘ4*5#^Û'z{ɋ[QR? 
]h[kv<k5b/\j wjf/T81Ֆ-]QLSC'G#6sߙp^)z9sSSoѳtWzg߳Le|AĽݢ-1Dl?wzrtR8 -xz*7"#:%^yޣ4Sӌ "">i&ر6%DSs>w96z*0sJ;pfv+l /%BSC$3›쪛Qyz*n ȯӱ6jx`{u%s=&TS]憖V54,yV摜vjFǯHJ?8F3\ӻti!D#zw9AtG+- 7Sc!7)ܮighx] "vEZtUyJf9p{*~g߈(k#Zѥv=YIxo<35z+6y=Wnj9?d]1Nk;őּ] wJ(+Yw11UOȝA(s7ʁhFto-~Ќ5YUSXfC,:f@Z\q]*[=69UcU;bݬoOz Q$2]΢ﯙr%?} }"+>6JأZB.Q['3{QW+Z]ȢjIuٛV#HDu7icl&?P-hY Z:hrUMPO=u}cxTr|D{ʦ;:'X鎀xj' $9s^MQS~.ԌUK/|S,&mdWjˌRg{ܙ5ciLʌWVwDj]Z'Гlt⪹8Ҟ˱Z'k‚COrUNf9Z/T;igI Ėp\tVRUfj[7*uV?G{}e'=%^aԹ Z'e_HEu4Q|yWnfStMo[mwvTRVBy$oj_QU }[ \Z%F7iO+%r'_TH"'#N.ܟ.?6:V_];mUv,DͩG;s=q{#J]mm5[]\i,fܨIbb99tUnc Ü&jU壤/g:Ʈ^9r[sdž}ƛ|N}ϸ~-E=_;P=$8XԼ;9}–7z=T5}-z̝<{'m>[пQ~]%1OS,²C6{UCQW~϶'s(/N!վ:{}-_2EMG Ecs")޾`}Ǭ W NWX٘DUbEvKmMyuE:W SuT;ЀxxhlT:}bɡIG,5btqCf3~^oׯ] jfSsN2\+ފޟYUN'5%6}Sr{NѨlMrlDUztncҞ&1E]J{t.'g2eV]jqѧcSyd5Wkj<өbc#E卾?oQN_vNEdϝvCa0w{F/b߳D6-.E]iz&x#=f{feYJ~ۣ~Ȫǵzɺt_j*ni5F`]?rie6uz̍ߔFʪ'ZyL/_Ͱ8{t^vu3<&QGMjFbChkq!Tx>[ԓGRE LE5cUN]S7lo~F۞<0>O{sGx}mRx!{4]"R]m- wV9U.ʉ⨝I7E_LEfY~miW"ztw0c6 vIjeg3.4S9IS}Bp ϙu#jmdcܪJ9bnqFxoꨪUifO$z-j$X^e<2âLvN~0񻆿ɏnǀvq=|P=Z"W4I44ѶIU^*+m{v)6[TWE^U,N;k\9bwmf w*t>~:U?Ldp=b3ov<^<ӳ.ռ| ~wMM{sh?9qS 8(η3 ۜM5gRݎ '3?*gvͭi C Kr\)׈Mu݈Dh jmݍ:$|.vNV/r"#e?fFg.6r*wx-W릾Yڋk?:&=1w˕pyHflΧg`U_^u*Y/t39%NOGn^[O+{g_,$+spA !6Zt ʨ>]+UֳÛ~&F75wG"*/i_2ml+u]uj*66rY+YwC3{[E;2磟eΎ?jȋŻ7U3|mke.){k]ޛK|.ҮZ[Ȝkwh3tR*[C29c/k' N5YUt u:I4K]cEzrJ"ZTMӘHJ$W7Hf׍̮ؤ1O1wD]K8iOT{xE.vWcϢ+6LoQꥱ]6"Tov]芨w'f73ֺk<}uOO%.RMڧ(M Hc2~,n˛4UT^hl9L:q{rU'I'a pW]t~ VK_S3MglMswrk .cZ;WKNGJ;NUuNN˿/Xo'4j4Sv#Syc hjz6=O*qĮRT}4芍?eOUClD۹daqWWakuN]Dȴ0vj\~gFSLM+rrz,2.*w=[3E);}.KgzҫS\IU9܎U55Ze$_0˾QGZNލ߽/Wb#-NOꞯ͞}a,Nv/"<1GD;**`@^Պ]'Tiݗ dIcdVdV:NNnTr**knMw2;fukXcwddUQvT4o\9/m_otvب_%zi^tsU9#wFl^vyE^wRA%*ϊVV{H͜*mʛ{E%]KElxhwsK7f9UUDUVcUPG׍zi?y2 hn2=u#sSwFjyy+0gZ3[OOKe_r2/>t4V1qiŤ*f<34߭8 ~yOG@)HtO!lȪu65­?ZC22Yj,Փ=Sdg~ńVG-Odந>&}09K 7ӌK 7ӌ_ᘞ6ǖ϶1_3kn4UUvj]ev]zգZI8.)O>촰VyҫdTr?mD^3F7sXKE4sM= Hx­X3nt 1%U+_̯EeE^{e6ft<ڜYfjci7bb4ɞ9-rJ::KqV$wʨ'=;;pDϺ^gȗ?5_{Nכn~tn_kΡOq,OV&]Ub5S۲q Z|rr=QݡԱ8l""+jU볶T޹柹*EUDi'N=:ըEw7mۚtLݙr`x9R+ N4gEr5OhK-ndtpmOsHw3,eW߯`{Ieu(E Uۢ#U^~خ:~0Wgv4k0To 
bv:U{uL:à{hmktw[D~G<)ӽ>/)EFqǭzwWk槩uUT7'://rѩCGsNK{KlE%M*:YuwE^4#hͱ,Jk]i"HU)]3RW+Dbw: 4#h}P^SO;ΐ54d}8.2*% EޝKkcb$W'/ Q=i>l:YM+j:Ekn0XQg$nҾ/)EH@w Y>EjMKNY$LԎF6gJv+mQ {md+Q<{9ӹR6¼oBT {Ćujɥ'SGQLRKjZk^"*3 wĖ n*8vŪosT˫!yflk'V'_}P^SO;ΐ* NeYcKjXe>vՓ#n"Mto!hK_KRGFN˴Im_m OAyM>?/:BCP;ާALj7Z1@ةz&_0eW*f[e\)j%FW*69AyM>?/:BW 6iUjv}N,袎UGH+Q]Ϻ+UȨHNyLul IS3S eEDꝣ&*xƱ1csF{#Q6NSO1Aӗhew[֟nv-kFQV\iI,٢율9y2(nxZ= :55k6vTTd|{U C(1edZde%%;a|lNTMދ"'y֙\1Mfz <2=rk&w$nd({\C4S1]6隫":ef&O9cVܨއQ:|qaɒd6Sj[U2SmѼʛw{JMU1\Efi>hVfp+VHQE7EMb]#[--8ƒ#rjaz鲶zfOv&En|}~WstRtx)`h,I#cQ7W*DDNkM3GDzfz1~jk㬡 S9bz=Oj*tS-#L (-S۬nR-闖E,m`mf1=1= Cb6?USyŖZZ^sQVMyvOd}5/Ai),q}-44ѵOOɳ,ʮvss8X|-wCGidUʯsr.پ Y<g&3Lu72vnS4^Rjsu}Jě"oNb4 SzUQQ+vlX~]`,7h=|Ty+eMdUbwDj"իlLlvK&m-,q#SDFm޿Ҧڏ3ޒa4DF)/c.(jrIV"GΝ7UC Wߔ(M-ƻ5Oj޾.w{Kz~EϹTDGUu~S9έ5Z#m$>o]~䭨 $kW#U~>.yE1<e4QS .qaÉE"UC6K#ETc:;_,+ACO<WW*;f-^՚ORUn"O/D^?WG[[=(5Rur4-r/wztD؞AvG:i4<e|ǞG$vZjd|5N=]M_^wmrO,H\A۷i- QS}7>3t>zkѩdӻL6~}DFj'DT8c^d-qgu撾"&L^Q=W|[8'rM1qs(9ߙR yd9U}^헹QVxt|?: g"WRm~蕨& rnmKUM.w)khpZ|5ښk'_A+&~:5]RU[OGKpar97]r]_*a#tINzT K]7Q[Ѯ7EDMULkjxܺtZStn5@:oSUVj~_N<л-9IH`[sdž}ƛ|N}ϸ~-E=_;P=xE\0WAhKi']ޝ!U_;j'tK+f#p"m޽XD';+firΞd\X̦f?tJVxvIrZioJK &V7sYW#6j*,=n?S9*, K5W"/sQ$~X~?Ի4쭰y}k&XV\]پ[);"lY*9Un,b|juIRT獮߯^2?;oz?U5ʫ zE_"ĩ;x~*vc/rDbvΦH,3+yث_mv;}\hz1HTZ"\((d%D51ʻ"m~qyKdaf\)_G2t"vUղmPg(YJ`H[l=Oi(mU3'I"׆cp"&3]<~Tp^i9~mŘ5njR\!EDTk\E*1#ڼ盱9vTm}Bt < 2rocߧT+W/W/6.27޵2rZ(V7q|XUl? 
513wne{sEuQi)u'z5\;]{fRRb~ʭ]T]E0w=Nӫ5Z]35K\|2WFcMo؅rPJ:SOQ ֺujTW>DW{HK4L2bKuv-l=i]OC+ WnDNODXζnE4UM5Q:kq:{ig+{OIH] ߻vDD?KzGU +#I7}d\WgV)3gDLs|ʈ YÖzfSYV巬1<"ȏ^ʨwţ-rؼUsT3>,\eYmftU1M1<:O }/62ʻ@ 7;UUKQ-TM j-'EE;]4Fux*&n-vkS;z׹\cY&lpu0Ꙛ1Ѥg:q颛{ߧͧn燌ŴFU_u]C"wo̩̿!+c~+cY",IR=v2ހ\ounQ:%ʨVzw]޺/zA,ڦum$GѳF6=῵8{$LΚpeYe,0ah"5ӣb&8OJfmGU%O)T*m;ToU# e6dzym+e:E{TGQ:Ob&>P(ywyv~ɿOB,н ]]T[4q5@5WuSn,xΜc3|:`vf("|妚p]+$W mdt G<#㑫ܭrtT+R{>1=̛Ajjf|hrB|'S~pU|]g.^s1ǟO49 u1331[bqkV| ucQZs=ʛ/G7tkSmEG/ߧ;[gsYvSLgXiF*:`5ග#UԶnlNT3mEZ-&Rһ-kVG+Z}[&8;~;E"0 T"W[ ]oݶ?~7UmEMě>A- k2[κ;uƪY%|퓞["3=Ȼf]2C-m;)nG.j'ƪu@עkk(6jQXhQ6F֧ebkSe<"~uZ˭SN'11L=)Ji)j_(6ӜF]PUYmDtU2E=5O+VxlW7eTʪt5L-sUavꨏtn_UT ^gӏ*wnSƚ}6m%̆V08\<:%éx6S[/M:"+ƛʊ WFfm&Agd<*S,j$}^E^W_sRᙞ=c63\4u)|OOZ9Mȩ**3*:1zqq+Me]Ws2~Dqǚr>KȕCjG+5wkQQ\4ZߞM=#buV?dlr:9bkTr9=& UpX]p*Yj=%"I=B=VF;U鋍ӂ7㧵ɲ2%^s:y~q^.EZiz#NGʪ䉉Dm om56^?NO]\΂q\:N2j"0y]5O@0並7_h,:GF_5{43ܷEfXquTڢu+|LwJ$"!-r|N}ϸ~oϳo֜0+k^ck.4HtsvltUUIԶoQMS5F=q b݉c '--׫NOWdOPM3]QM1%zm>n13DPDzK_S|YǷ+UY'{6ISSS;acZ"E[ݦ\zjy)oj Obj㫊nW5j/3wk3._+odTUR]dr9#Uom{gmMu~"=|l3ݳG,EUq54MuGhiꥷ{饩ț#s>M7AhiIQzn47${*Ui̪DZbZ/wB՗/Uc *+'*rQQQ:^/\RIU$u$r!Is2` r۪TF9ݠٌTf0("*jݘbfc:xtB46ɜi_W|M^~ёzn?>9&pAM&_YOrI;H"Nxsl뱖֝or}ρryoߴf wk5LO:5Wcq;? gML14^}1N VI5RȶɿN~dEn7جZ[gn7o\+ G$9]ܩQ:*՜] . G5榺 -T ] 7X˺=ͷU^l>zL\M^>4ȭG+wvj~=̾8{׻EuNk~oi5f? ښXLpkڒ7@8G[zmW*SXSeUz쫶DE]|Y x~/|dEIʨj׵WtG5ȎE۽5-[PJeUVYEDez|[XJ5k:W?sk9.b7f78Q1=McHޖuePRPY;#I>s\vk#O?W;b1+f)cmIj&wq03so6ṅWUiDF'NhX ,71e2LTamDߝv&bτuM}FUձwJz_jߗ~+kOцYv>iS{rNϙwd20XL^*)9$emXMܫi&ˣ{CSuj5r;}YW_z%f~W4MmKQ򲖥+*/Dܯ_Q/? EAs\BA='+~]c"ߚIL.uM:Tj~]{S:s*xN hF5gB讛QƯ|ojSۛ۹ȋ3Q⼩*9p;}{p+R)9d{oZ*<;;諢#(G|`ZW&)N<:3o-~Ќ*[C29c/k' N5YUtܗ%a )S[-6SWWRdp_vDֿ+ɢX5l{ 殧G6flTMߙz. 
E9i5M/"^y:56Sޛz=z˷ ]lˊ{R9fLS{f1Zt0"1kӚMUzW|&saɣbTU;*nݢwt{t9MFeeh]bߑLU-QfW&ŤvUYbtOUz$~Rm}?¯Z/9*OC:ܡjU5wV"zoe$%N?3 WlEFuj>Gͺ^}0n2ߛ.'>p } sWU^4?lV1+bnWD WzU8uIzݮ3wXakbo""Ne\ yXxr9ޱwGAѫv}]ܾ)׿l}Gt˗@h㺱Zޞ*}Tr/%ԵS<-ƚ;t!t[𳳸]t4Ou^2ͱUqoܮu#-^Ə E-)^ۤEʩKY#tLR4s7ETT_1%W._GAmTI;?HLNcگ!ͫn}™m:lU_EwkS5]?n2NOzhA[ie$> ؏U՟^z7Gz#*mk^W _ ؎k*j5ܟµʊ;t^m6v,)yw߱|ۼoKs|3ĔQ5zlߺsly^Zvq7o+1+u9G(sX<#~nȘNU&^%Vz*Շ{^'z!e^1A#UPԲ9V^)%}@˂UƖJvMڲWevOn^4ihkVo$S=Z_QSa36tv&zOt5.:!RB;v۠r#*; ݽ[ʽ:StN}PUf<_nmF:[u feQQTU3Ֆ#X2$ꨊUUۢ;mP$|ly%iGruK evTMkE*ΧѭE0%jvζ"boUOUG1z ;".I4wJ,-s7g#jr"1=T<\Ew^ئj"f}Ng\pjTTvV($EOIcEdOu܈_+n[X12=Z饊'7nbzsRw,¶&˓eRu}[&ܫ{}E1_EEڊǯ_S3Ys;[`tOL>XnLG]}L_|_i6ڏLr UCwm9kb5mMv=l6-cѻd}+)id(DF5Q7ӻΎFsGW'3zxlOE>J th\:wW'i0STΑؑcsj3 &WjLQN-|ѯTk3p!m_* [)nWIIn,hUZgKr{u'?!V1tR1giuCZk] Hݑvʻ.󁨕$}%$]tEM'wNKγsyxw;*-uQ}F;M"-]uȳ`y{Q<^(剘JWy]2㿊}SWߨڢє1kӧ"'D{'s[""{ ,X].}mqoMW]V7NTUUM< Vb)d#=\ꊊz >+ՖՑڪwh+cXTݯjQvTTꊈQ;&n^K!utsuc~mIQ7oiqOgk5lf#hʋފj,1۝۴zR}:J~1UGDI.&3x95X1ȉ=;jiW`\oMafCxx[Yaz5Y/n׹7^`_kFVyzӺ*5~nl{"NDi0&^Lk3)a{7% $#O1QOz"j*/:1/:Os3*1~|= ~'8`R[|O;[,][Qm@z9۫۲߫S|.a|/W"[dl$b&'/GN17TUuE^p_lӼmoWtYn7 Lުj"oƱB]\W.>sxCzmۜ W?.c?7z{8t5qdiܹ Kzw[OPH9kiB#թثܻmUD͍(k!{OqDG5mN˺"QRWSME[OE=Dnheb=1ɳEEETT^1,Z8\l]*,IcVő;##;OxSΎ}]+m\}x ݎJ|k N5kJ:pNގXdeݻ3U;tЂa=5ttv\j5T,.k[TLM=S(gvkW4nW?H#6KIGF̩_DE2668؍kQD,bEWN]QfeY&%hQu F2JLCJU]܉>7;fƨ[N4wn9_s^%,̟jab{Jm.!k[k+~2f\]*+7oJNoآ 18j5jlֵ:""'r4߫~p 93pTOj8*Ϣ4^T|5B3G3|mk`ti2ڭ:rDŽ_4OgV0qSƮA5y=ͪ)Vj $ 1]ђT:Ev܉6ꑷyš}Lՙ#גhlmwIսQNVG"VpqkU巻l k_-'wk$G.L]ܽZaf9)UWQO9duDqvVqƥAj}mVߏtVkvsG"Mi^Iиz1_EZI=ĦsGձvjV;ED^G'.詺-]Ӎlv;]$I$Y,Ri,UsSuF'Cv>vڟ x֪Vmr珓k7l^U3a3lpcXV9nICNc]rSr˺UU3 5ܙsfߗtU醿[ƎA"yB0v,Q[MrݞUd.9խo%Љ=7ݣQ_Wf芋*=(ߖTۗdGl9ӿ>PYkehYIb*_JS=WWdG̿E =ӌ׽Dbz:&?phnڮ[G@l4Mb 2la2ObQ4U.ʽQ1_jnVSS⇊,@kHF]2'W*䅻EN"+^F^@qɬ-97z办;mUUU-黕Uv۫˔Zk:DsVf9%%.2bև㸔>kVpDck^DFn.M# \*~m)l"=[!k̝E^MFlSṳX*7W2* OYW'Ser͜?<:<{.WM>U34CH(YGavl# kb7y&Trʪ&o`kn^kTLm() @ZFk,5X#:IS;7+VXQR)wUDz9}OeRf4e">F:ud`O΢ZJ# F*lr/EEEQIWܶb:qab0I?yVʊ$29\ltI˲%dmOIQj"ӤV۝ApbKM4)XkڪEjVnK{KSFT73wܽyWfۍ\l&o OeeF*Z&s|"IU**"9UWe6p˜O8Gkjkvlk%f6 
S\78SIWL|sF_wEH,UUMEM-eeDPSK,s""*q_/|f[7:kujdH&&{7|ZSUzmVL$F9m{eaՅ;+W ?)4i$JzhmB+}&ڭEMRHu[_n9Ue}UDUskoD뻜$MxEeE?Ika*x28j1cQ֢lȈF36gtmZӉ̵O*;9"=Lp6Q5ۋJmW+UDr&zP@"wUvu\ XZ"{#P^4 蒃0i.q3tV5QUMn|]3%ʛuz[jb%/'nUIK &cO<69&+^׆55 q8bΉ[溟n"UnS i)>meUD9tz#v;mڪխf K}Fu4Y[rH$gĪV XHZk-EQ'iKpUsWdݎMQr"/mcv8Uq|^EV^UT{&:'= mX=m7};קk_ wF@E==]ܽk Wg2"4&pAO{ >Cy!;8$NT#U=nU.5=0j&5SIإh׶7_=ne]\aOfKټ٫2ٞqƻӏW&-8>keyi$3{$jE>EFSUM5Fcޥ6K7 BRr~ %bW!/Gڜ>Wnnj|ְܑ4܊*=]6 yם _0ȎWꈲA.=6뺯D-"|=S;0ټasM>ݫUuV0E|*F!ԕ27.˿ȝ +UhH*6;:ſDhӿnNgRPXen4];&g"d)CYsImt{NG1u6jw"wt2<~]efuD#8{ަ:.Dc|NdXAXoIuLȽUT2%X@^GPFqnm~w/~xèFȎkz F|p?DW5HfaƮ6pǪnhFyve_UzCi\Lk{[]]+ʞ-hzM])G]UL;,LbWknnyu;d>'U}RvWNzfVcMAmQU$d-6wrt?'{F=>R;UYoT=>u"{Ww ʇfK'W%+{ݷz#޿$b%:Y|ƥUlh֯ź./R'-U\i>XpYޟE?l#CP>D5[LuD۫T-'36],Rpmvf#9cb9?QUy7U_EVX{JvAlDD)WeUDߪU!NKQOl#6r?6"uݪfw erlM5M1n>h1JZB4}蘿?%-!џ;[sdž}ƛ|N}ϸ~-E=_;P=0cEbIMP&sakƲlRQU=vG7e~dM, LDƒ5MS:L9]e9I[کlEȨU3¢誒FWmm͸HpgKe]&=evDr{H{تVRj5/CrH>"q_U3eTvۮݧNM^W#|wFfVfocZzc;%Ӄ*ݹ*UwUm1푨9&註gN ?+ki/y% k#?sDW'yε0~pcv^˕$eͲ/YkW5*%ᗅC;EC*s[_rzQ*jܪ3uUTMEꨊJ2DTiGDtϺ=iykL2ʢqOwDꎞ72:jrlUTshWzG'Do2Qklbi]u\ku3 R(ޙV[x ͡Ezu9_j$MjO#B-EzUPvuIMQ27ZEMU7EE<\M&Xs vJ0CxmQU٠5ݯctTGjey}ǨKW#έsܴtY DTMsDTh9f?^U9vDȳ{YeqTswf &>;ݏjwjS˲5M"xC5cs[-+kjT#u;:{vT"*v ~"4鞨]mdX w9:QdJ;nEf"=)6WRXڈOgh=}lK߹pOʖ**xlQEMֵ:"""""w袛tƑ! 
.UzUS333@h9@h_#:ds7ʁhFLXfC,ө,xE3Md!~h3+^Rpӄg%|!A$}(Z}F*1?ֽ Tu->jF_UZ4S2#Q#8 zI<&tɳ@ga=UʛgUE4S33"9ʴ5LS7 |4Ԋ܃/Lb՗%mC^ɶ+y֪z=cĬt85k⍩?UW-9ti#D{m!fɳ{zW9Sh>JMXٮ5(S'aiQL5i)夫9Xj9cekz***㋂iEFi} aswFsz**:|'ceΝt7D\Ob'Ml61gZ'H3 lNMϢU3OG:s\aιRDrD(ث̪"jÎ%4c1[blbwhe^Ѭcx2DFblf쨤m(CzZnׯ&dU}n}u3 VƱ?Lr*TW8L@14mk OB9K[Im{ dQ{:xOdG9z5MHxG8OuR[-9KW7$<+ܜD۫ #-wl5u*KJU[FE;ur{~^hna x3\kd6kN:W]5:'Z-ڎ񍪼U#=8QXӿneiMU]=3$`;*؀:S}Q--aK4FOY P\Pf`1~1ūhFn] ^fKDI{[*#=gTI}Socr7bNOyDb~භ2rrFH{K[ߴrjzߓڍDª_;|lkduDIj||^גHUNՖfL58]Q.>O{=@:jzީcr?LDieۣؿNNmnV&طUSW sUF+(6,ԕ}5•۵{~7+\ފܨG'i{i?J4GI4sr٩U}P=ʩ W;t!}.Eǿ C<^ֵԢ<2ݮy>X==3:h#Xjo~"8@ES \8Xʭ}*}x2Mz&/KHU&䥤:[|N}ϸ~oϳo֜;$zcvz>Ի$\%M[Nc#Qr9MQwEOi%ɵ:ZլO&sYn[ 5U~?oe708;_L#QcUlU]7mKX^]g|mLUN^WU5[jםb|9 :oj ;4To컢"M&v?]1X=ͭDW{z|uڟo$֜M>yۢGscp=.g5.?;W'r.jߐM sa<39[EUT7eTVK"?~麜\.nIj5GujYxv_:H9p3NetTߑsyժ~ʊ]7 q[nQCW.dtkkrmC"dOKseٌ]7f)LzZj=۱?WzUǪ-j%NF?ʮݭoj9wMبJzc6m3G+\nED:F9wVb*h}UzWMM4zȸ6rG(ɾΖEvG=ʨ7ۉ6#gM쎏okg{mgTͩ۟O {pƷqN-M5Gb9s9WU^7(xxӔO11nܛ2İU/*;#k]DTke&].Wmo[]Y5j{$ߍ}u o~_!CEmej4;"XhUEk*?Mq֏O.cw􊣫؄z N+XK6E2wjTS,szIVznux)y{0]eq̚R3}GS^+OTQUʢcYhngJb5QfepƭvlZ,$mDy*j^_\_ ըn^wں*ܚtr[#{J"ƮsXỺ+? :kYie1R: F`I湨PUUv˲ȗY>Yu*s|IV7G cTϪ4m0o-~Ќ*[C29c/k' N5YUt eVfw[{CjvqFT|WmP9Zߢ ՠXo59s;31/rmyh$:WW%l iD_Wmyڋ"mA8⦖ ȯ_W9c%g{9h8cd0FƣZ֦NDCx` ٷY5U&<>hmݞ>o:E1&26^$d{Udj""+Ue߮/=zs {²ZU\jXU 5ʈ׶FNUfHVi9(6)4~ *G%=Riv䙫";n\S̖3wʈ>ii[]vo :ЬKQ*)WФ7TY]s.ɻڮFѮn)T)愻KrK,rrI#ZU+v]s#;d]Q u~aQy-dc=-ح7SSRjn/w(+Uhœ-חqޑ0[h3=e+eos{Q|[=E62 ۝*GlEۻʟoBC$c ~ɕ+qOq\au=U-C97w}*u8y̸2y$eݵtHp7k\N5;7+;2Q)Xul窰M]֡%zmeV-Gre!Nzɹ]^xks;2sfX5X_o0H#9DrnMs{ȨP͔S77;j̪QU*6fʪY7&ȭ87u/>olqm8!HvkUNfF]ŗw&mFMqT79^}7bxr|6QWG: yk;u\YbЧi;ټ:߷qMx$Ր^h+nW}D1_Uv9wsg|~LεwG9ڱK 7dH(m6"&NTrުL8fymQDpQ*'_SlLy#b"vV9o'3\۱.-فJ,u1\_ ydd}ekѮEz:/qGOsKU94tUQ{%!y1面+oiΎ>wzS߿97] &lqn'"Q]"u/.̑ynwwG'oN|14Β VI㙅IrDhU{ڿ*)\ߢM(G=C7j"; 㲼.aD :Am>iUsȞtO'Ǯ4E%1W(b:h#9ZF]znU31ODwUVyZ(b&yrv'U %X{&GF5xWYoMUKGiZo],*j9x秕1"B_as:7UǦ:c>Lf3=Tq'ICl{Ե3‰p!UH^6=ZOaD? 
S~$08Q>ŧU GխX zVWVme/'29Xʉ_"z$D[;D3,E20c3/17b^2?MsUJLf.lyRj8i*˴b=:бfE-=4W_*XkYv3:ڝڸt;`a2eke=DOʈ쫲m׹4\˔Yk13~T0VSEYK*I lktTQOԧ'(_@JQ?'\~UkS톯= ᕠh1JZB4}蘿?%-!7[sdž}ƛ|N}ϸ~-E=_;P=<9\4&aךœ3YGʕmF'v`,M ?rc%ݫEE訽PB8#)[[xk-}ɰJv["tVENV6;d==׷;d,g<'ѯW]=q玩1Vq{OFXRTUCb 5۪SEqCؿonj11$@ w8ά+m4Md]LsUvvekUE4(}VUbU9E\Z*V/W= j} /Zi)J7gH/ESelkW}\5=[CC[SzJ7/V'sەR5WFr%:MӁ{^p]*ʧ {:avn2&dreܥ9ɝ6Q8s*IA]s.6Yu_6nh՟Y t"F͜LVD]\a中Nu1q[zШ>S=&Jl8[m^V-SEUEDY]L7[o_hsMZߺ[nG∨o6g0*0ظ]:LW&agoڪaC-Mv1ieudeoFtts}EBsuYQʽ 8fMKeeG$Y%SgڔޞnSuU;}|ZVu+쵷4G;asѻUZ/qK͌Tv3_*mMr*nuCd8W6x2uk[up:5CΕnȾoJy5,SRPۢwW#ڍ1mާDKXM"#$]sl@brܫ.Rڭ6ZU]"*FʼZKkKGʕR::H9gZ,kksVLYgh:; Z(0Yr;)QL+\kQ:o:Z Yqz -%ʒy%Z&br6߿k!hݯMxV26dN`̫{pUxQQR6,,{˶꫷S]3ESLLN^:N&}}<Σμ7{NNt/Ɠ7迮@5kҬk9Y);^޳QvvW"./Mܪqrq:ǙJb\pge#)]uYbݪ'$ߢ+7E!St7EM9%ꗖ>ƕMEoܨwb_MTDcT6w*g^@t ^YN& R I'6JMg/k]N9RmU^炆9*Q*v'Ӻ(m6*#)tUۙ'V/C85Mwkcvz嚦>@9WswGɌn6 U:tkםzo&2k?O5Wz;՝W{e*ߢʺ陏L0s5v]1_gʟJc>ʗ|~O7Cc\۫Luu]?z~42{Lmim=#Z1˺u7N֟,H`V{QɿkF2?j"k'nQʭU=v^ڮ6XջagnNgDO fGžT’u;uNk|O~i.=::6껪rk5h|$;UPj[ BSPzΓבNTDD}1MB2|i^wOor[S]sU:Uktӌ]&cD#n, :]KGo8GۦYȉ#ɷHn,Ƨ7ΤieMu+\,|Hj2W5sU;;%jQS7#{s*]nqkN=աgkmy^߰|r26Ƚkr墨k'qKd5Nl{GI#UwroEC~鶽xjDZ&F?xjYV$Z˺tr9;誊EMZUEMtק3n٧Qb&n-6s~ \r.S8?l !e|풱6j{jH^ZTY%kOS.^fߗi=Kc*/ߣfۺc~6ݝ~ %N[zT'&uvUES4i3gѪ|Sfu6.JER&zr97o QɨVk=̶95FkUkQ6D]nU„Ԑk*^8]o]IIѲicw$h"+ݲ]Wof_Q:򧞼x=F3?bJܦtHtO_Q!:6ᆳL\Cq'&V3 =Y;<#F:E}|kjn nUo3D5nun bgI"U-IcSv&ʨW9:9:l[bˑFO\y#aN\W8DoU>h:>fuxc{iַ#dKIZSwHD܃iu^hh(dX jKQ+DN:*GU+t,STAI;%͒tTVVĵ(gtvVSY$OzWۧG&;-Yb.4DǓ# qX>7\77hW<~tkդS.ꭶ*{>ؽ Obz=v]u6]H>f̳eT[mo۹w^ewf͓僆]fRSXgRVVM:iYڮDE^Fl _ Lea# 3i(5Ҭ87k7֪g^ǟ>.dDV@ɖ&nԋ>WX =a#r9R\*vMhc/ETSptV M\uu1;ES \8Xʭ}*}x2Mz&/KHU&䥤:[|N}ϸ~oϳo֜2 18 ָ֞i*@\Ju$n~Qyzx)uD5z+tlZ#ZHeܦ|?5coUW9lQYROJV57FNn!gۋy*EG\'hYvQ["K˺td[Mw:= \fzO2DT\+wu4r5#U1N )q,T(;T6v*,ډ5svhz&#w]uzӆr45gg/Z.ONW"n}|Ys yCP52lYnG+TQDYE$g/3MO!cfbX=CO+-*1;]$9s^ru-FZ)!n 68ټnU٭DDUWR)2lv6/SfoQq3i43ÜΜY4YEUux(Q5.U]1jJ ?| K$gS][O=KkkԎJsQ}oKȎu G+ӫ>Oj]d a*'|洎^9BݜOhfg4%x.z;e97{QmzEGTZl\jE%W{y!VOٲWS ol0hllr#ȭW+Tv"u^P]E\gYsUBĥ-u/KM[ݦHG]fwj5еQ?w'y"]-*bf8ND5z)7#]9#<= 
]PL"[mxZMH%o"ԗjg9ztByL=tnYΩ{)bSLk3ۢbӋ8^[bw,>#AWU*Tci#]<\:"uRSmTߧ|Ems)}ЧF*G#XTFo.ے77zox 29cz$iB~=eXلfYX;S2HjWnV0x\ oNDj]vp~Rˍ(ŵR̔ZYJTDY7ߙrrگU.~v-8Pꗶl*5FsꨈsgWY,BRWZ)!S\+QGoQȻx"fުU7>1τ uҒWEBֱW;K ӹMxz&=bγO)c/)d7CDsZYj4{zwOq]YNqk&/;;Khhjb̋$jB9ʽ?:JJ H(h)C ,F28ڛ5jtDDDDD?cc/шUtE(6)jgnFB<ϊ\;uFI{mwo){}ךWyQ*cwUׯSQfroZ#6_Eݗ6G"3-jR#d3X)"X-ljf\+)4Eb.ӽ j%g"%JkmusY[G+܋ཛ$|e+O Z!i+Y,d~nǫDDBj!,(ao^ڦS1G9צ<.`]=hծ=H+/e3T:{{í4+UU*4T߳FQrnDN?䬿?є~5כ7;f5eDR2=菗Ƚj"cƴ:ueL[ުwOmPh8KulS߱5M$bӹ1TqjQ,S8ޑHx$۴oǪ*/*/qce\tV_YO-rOI*ҍ8ݲn׹?D ^VsQߴ|tvbbw6d~ܩu K0ѹU31IӖbM+jcH{6ci/걍?MoUWV:"wNeTMvEN|[pŬ0H9kuTSJ^l-x#e3Gkc}T^E^dڊ_z*B8'u;XHӹ߹~4.c~LaNfJ*>6{H"&4'T9SDEzJ"M\Ӿ%iiF:4<<\WdER3U>(oˏOmS-=3Qr/U"'?/ngHesY3Lk\լF~K7E^fKa|jZ$J:nn$Gn̾Z"3ׁ>=[JDIi]܉ۢakUO;lM7qwu:F4@fƐ7g淊iwn]㕛~ߌO5 ۹]u+RHj:),qܬ{YnTbm޾_ާkΠl8~e?k@zs[!,x˫cz\ov<BFEVU\2*M 9&o&jբl]Nrmnx^yu. >c,MwQRMQj9[#Qވo$ۢ^ɐ&ZjRT%k6mq7U7gNX˲blѮ334UF2MtNrU9$3*+6Swn*&ܻlgڭgFmY=tvw=DsTYtU,+_6LLiqӽOye8LEZESNVS8uuEnY' XؕyWoKoZkMG[Lo"y~vT*-}kn4pqUL^jzxαդCabuOu.yMe4G"*I ]MtU_ #j.'~)ar*F骥cU}E|MV:'_Galirs8Oj1-عO5oNGV]ZW]*]Zj<Ԭ;ZRuյr ۚDDb'>7mZ [[nv -čHgʋjt]nh'27DU#Hwrӂ13Es33:[KA 70[-9y҉j5j1_Y 8Ue~2pey]Lj릺gG f")?ES V>jOZ&䥤*ߓGމsrR-pU'>xgiZqi ُ 7N߀_;SEU yqm9"ʔW&;dFd_6&&5*N~u7I^24*QLݻW*E*&޶ʽB(ZlwU1ORy jtvꩻYTkU:g1/f6v;lZ6Tɷw4r"l^CNW/R_sQFƳbU")vTjoDYO>_,M~O܁=xZx|&f֥ʲ "﷤z1:/Wlc>Fֹ4kj&jӮ跨¿|ǽ{or ^4ی4nxz>;p=ĕ8$ٯk!* Uȝ;XunIrL@T|5B3G8<{&iY:ae^оwe9TVnG˯+wM?wwo>)4\ݾpE.)4\ݾq(}E\zC-Ja7T3nhɷƀul|p{uuԀruGiMw_RXfC,|p{uFo(mUҌ\wUyUTS:6:f&o259/ks!N\Zw"k L9_U:s2H AqO*+}$$&]yʛ&ޓ^"(Mp,jlU9͝+ǹ.ɓCL]g榙cz.͆mbvet}Uz*jpsXy4V:Ǫa,Uf ,x/9Kޙx7Ǿ[Eb4g,OEܽ6n{mmm. &c>=αlTiZg?:n4fiMSVoN\E}$ :VKt]Ѯk]+sNsZw)? Wyf eEMn>KJXݤ{_Ʈ+:7|!33#v7bΘמ:HЌ 6u^$/Zei6\n, N#F5d]7^溟_IWzqB܋rFoWe:幒m+e>:$΅3m1QwSj\UEZ:ǓD==-5khwrMbɗ Pܒ`J*DMoVQsګ9;MK8<+~c5TvJcUU_6{Wy)ro˳G}܉l>W3/EZwxqcWbtE|6k݃2le-%KŽ5\V9k~IP.ؽ&fbwc]#];D1NKX75I&sʈدr&Ƚv>/6=3-h^W#~d]nTz*.̷VauSObVT9lLJXjnszQRO".ȩ"5:{{7Cg'|xeE[|u=2k*jbFc<uNF! 
[`uv|g; Sm**t7&l]EQMjZ-eR~767UTy+MO3|w塽AZn'Se}^L;&=Dly5&WnWe~w:pqͫ6s[VbR==Uoh6Z|'Z8ZUsyFW5uFjq9ڟUPPבPPW"Uwu߯Yj\WijU ^7N4F f.&=SxUnʈŗ;m =N+ERJYk;G͓MQ:L=tnYΧp#6kSdO{+ch,q񌱇iwwbt3?1^ |Uu:*kKZjJuqS#rsFUv9ͮ7vm2U]iȑ񫑳B979ڮc;5SGWCE_c_GLh=7s ~\\3ߧsv᫐o\ENH_ )Dە]^e{/o5OVO)%JY,ZX۹Nc8ڍkSdj&ȉ=mܱDQMO8ׇ 4QTW:\Y8QK 9JiVzMh䥪F9sUQG"ʛl~ 8־ s c в^tdsދ#D-](*RXUFĎ65jly{ۮc ܫZ9ܳT~fql^䫳]衬y)jb>9}Z9ZUEEE,CRZ[R>EEM#{&2:ܨcTkl/.ڛӈ~.7f5*FT.r*w^\f$sZevfȫ,I{\͗\ aU~ͽȫǧNeطU"]:@2I1Ydkƫ.NUC uǾ9/+Q%cͦF;W>:h~dTNMNb8^ύH4,(dN>D8wtrqG@Ջ-]t/;[J.U\t]dMC'{ٜ6ݙz!_U]s [ʪihY tr7uMStܧF$W;uLdWmR9ț+NU7Uvz (X^qwERGmumdh.ݪ7~ZޡEOpUXjbdѪEMR!Eθ\o1TU?eZΤ`FQheT߱r~'"*n%Vn$J9`zG]ޛ]Hѵ^7EFNWLUE\Qg(ъrm^Q4rNpZevw\~d^͎5T9mLPRRy4P؍_j~VglE:=:{3VC~o]H"#-g<q=BQSM(*^esUݜucs7EMx)PqW9׎Xn'[mNӟnw=ݷywD.[5Z梢ʊथEm-4PlFw鉿f*#Mu4` F *jiutULU>( ќRbbɫk[5tr&kbDMT,1WN݋q:>}v?,qoSv٧N\)u׳9fCʬc-zkMUGi괓yRdOz,sFɡF^EC-f1أv:ed9=ܒ᪽(Zu&YuzlݣӛEvvfȈג8̨+?bv9itK/dG5 i¼ӻy7zSij|wrHӽ^~Ol!G [gU}dݫεbwE_;z$#TӬ gWb':S2l!.6=ZOaD? S~$/o-L'鰢\)U~->}σi{a6;Q?'\ȣ>O~ҧ ^{+Ab\\[h1JZCn ُ 7N-!V1qi[G|zZil2O 6t.opfr*H""wƈ7zwlL]Qu<څR9^iFo:5wnʯMq=>A⊪,jGRr!jBJ'ȯ;Z|NDe:祚cխ;j>8*$arEtr5nO<0duakqݍڧt}ޢmO86[Pߕ[T!e}\nY^x֬TȻx9_P/ UvGMGE *'vdQ1=!Bhk;.sC|b?>Z-TktʰD6ۧ{^ψízy\Ns>8clhm˺t>km\a+x7tTTStTSp|JJ2Qs7=Ew4Uە7ZZvV*I "7#*njEEE{yXtRmsk`q{cDn1{5s~7}vD8huOC<'Z=Uϧ|nV>нz"|nm|^v)Sjzxʀnހ!nwY=|9+/[EWTT^Tٱy6誄rEIvIiZ>F\E^d>Vn*9ҧƻ0(cɧʞΐ^fzg6$&kީEaV^J|mstNv:WN4BCw[oP+K*78ݝFĎEdLŝrߥ縞QC7+ju܋9SvֺUw{Ht2-e=ʨg7VhXՉvgE+u߇_-ܗj+^ϕT|Πjg9?M1MBl{Ե3‰p!UHh8Q>ŧU G`|rW")?V_Jl5y^'u GމsrRoɣD)i*f<34߭8[sdž}ƛo/)Q"$jA*n>Zh\9p>i$Zpw,ݍ;\*]N1yeEFDMV&g@rzR~W*rCT WHجDΎN!ybx ѶHjrn5z**/zbÅJO62k,Jke_W]VՊh;E\ގG)O>:W^"u-mv8.?ȶhiW3 :LS1].FtbJr'J=7TTF77rTcޥ6K7 BRr~ %bW!/Gڜ>Wh`ES \8Xʭ}*}x2Mz&/KHU&䥤:[|N}ϸ~oϳo֜4s> ўej'쑨z***>u|&el<5"ulrޜ]^dGm ]/ȽLy5uqՆv8W)%&\?_t]C9(_ I{y4UONDZɺ9͞6w|}t페/ӽD@^*_PM?Ubm%_#Hlֺ'l$oG=mj]QcDap溧3#j,ӽ\m;~GܚɚnA#VDlU9_Eު֯(2 q~{yH(DUꨊsިUQ8Vj.Q]ڲTmTm@ljElLD䉾슼udž׆LYlu3E_joֿ'EOF$]UUz7[ [~>*5tG<8#ؼmX8RxhM0v*.s5^$*Nv7uF3}ʲM55m4uOO;Js$cg5^?mu_LvHUUQ)߶r]g44y4v+˓iH752#bddU_Ve"~ȋmOj[髨OMSeV/{\"}*xv꾫Qt]y*+5MzV4nYDOHfV6=1?8_+UC@/ 
PO!µ~*Ĉ,E9 Nay#>UdwUVM,N;kzk::okǖ-S埲Vht)&U_'"GtF9;S$Ԓ65]IeUEEHYUP-莊}֏s [Opbh]Mvyy冒Xq)^۽Sd^Deu V^"1i,F =DS*NnͲ.~٪rEu"м~V>Gt%E5C}=~ܲv*"zʥwX6k6Q@ơuD잷z6^gs56%ӉUU?ߣN/X3K%6E\o3$z|Zkū >E+Wgc녚s^NS~YT[(l'Vio jk]KӶDN1{oƝSf9Et񣯫=\昵vw/utOџ45 hd6,^%"[h#fWD:G73UsGL%w+Jl:*#dYoMܲT#h6^j'c^g}m*P89y%Ux*'o-Mfo9Q״l+ȧX}Km6S4Մ l|xcsZ EZMY4^rұʨȝQ7j*9} l&AOOipFkvF"&70 /id b28cjtj'}껪S$Jܯ8;ԝ0ZϮ*W+kfֵvE]*0;-Ln[qSGNٲ3Q5yO9#OWN孻lɷTE^OTG#0q6z;ۜ6$S#\O?|vlx.qqXu.%^"TnjN.}QZzܖ:ij>9bz=j*9:*|hsNw6n:c9i+#6j&ꪫ܈@|b6. FĎM R6GH߷3U}~Ŏ#혢3$MG+U"uU誈TTN&pUoq1l6VUP,rU76M*mU\,)]qJcu9m^bf9SϏLp9tܞ;pXoTQt7sMӀy@T?߁|7d-_4 .c zŕk1̚It"X*lwEC KK5+Ք_dow\)ߟ5ʒ8EO"c~χ~Xը̇sM7RoK5ҶmM-?#P'o/c;v]sW11 .efpq]1nw%L4yYD1PjыS]YBUN?DQkpICuN Y/i=ήubs+wsZMڨoSS!d]a|^#f/%o,Nd.UŮ紘jԻ5h[7쾇ڹ}Tٷ T׀UO&bKek4O _S@ӗrEv˵*Œu*WzmVӻkO"mMݹ%[} ]H]y3٪'ѻ>]9`8Y ΧԚjN]lt n Qćqt$VH9Q)ݑy/2tRρ-Td3ݏi9vFۆgY4iuRƽZ#;\+FyHlxk3Xk-R#QcڊHq \:s-C\nMBѽҥ2lU}U,y"*lUѪ-\P9~Sd]ӱTz9Q:vbWg<}Was5W #OZ[ {_V(^婏f9["J+g?*L8.sZ,\뾅,8گ5TsN"tr+ܩtSH8wѽ ۦx-/*$3]G'U^]ѩ̼j.ĎuL,6cQy{g[rwgzZޞi&5C̰w{ުWUrg<)M:֊ڬNk`²I5J6}3\6ꪻﺪ=WAI*7.nVTĩ-&ɼ5 wv^'N=`msY2{njj:fTA#UǢSeT'Lr5ZjgXVyQWynjr_&ȋN;[Ը6WSUjjZ"oH6]׹QW@_%ք?`yڮ>Y흧zӿj/vƧ| {L%ꚇ6ǪC-NnEt3yUy\]׻x7cp޳3n{8Ǣ~Ɇiz 42bRblj7쯆W&ourxw2T|y[FTtg\X9?ɱغgW)cm9TvqS4.<.I\޾*J0x*YGOڊbحTP@G*'OUWuDEMe-Q*#?9W[itnz[lQw9QuTOP5;c~kIqbP6rYQRꈻ}MsA8i"E* ''#QVTUQ'M7t-c\͊1]K@ոV9lYmUUVvMD]zkS:wbo}8C>_Fy{5[##UVtFGrnp/\I&d+[Y6NQ7j4O ]Ubm>tD8CbJV"܎_w[dޑZD*b9ZsKYv/s DSr]rruD=<6ہYVAwKK1;UU#nf͚:*Yl5h Qn>}ADdHUW}nߪȭ_fNXqT1vtl\-D-W:=FӪzoi`)X,쪑)"*;sU}Xl+ #csePͷEM'2SuR/] mࣇskMf˻{FJ}ZI"'*U#Kz{ϸףdr;$9_qZޝ5U]w˿Hx-kdSO?" $M۳u^'C:?|*u]oە1+ztYy]יʗ'qJ6+l䤷GO vj56dhM֧wsQ< .`Orf{Af&lpDs*wCz|X*s\QIM$e+}ڎTZeGHƵ?sX5W!8"*}Zj{pPTFU6jnNT{PapQ=^N,(4"Ww9˶غG*tut+Ewkcvj嚦'IÍk7k$ftUFs& FEHfU-'^vFurw{:20;8m469\{UQU"^3& Y4ev-Tp|;o"m'bi`7#'ӪeKgr;e͎ZJKm^U]HZڝ<ֺEbnmUȝ&ܼq`; {}#"%Ownzknl鄓&1=5}jj{ikpWXRDTZXrn)D]*&s >*yH؎GIS"EOITjrvۘu.SokSsEt}+: aj=;"҈HY+F;mٲAC_^Qok{:{Nj"Vs|Y7_5Mvy=?ƯZ0UϫsWdg/q$i{}h?qRv#VB=BJzJUVrDQ9.|gqƉ s%+qvȪT^Sý?v<G {Ewk˵Y((;:k_ vgO.+}W]Lюbnv۹F's]6ڮ#bZdX>}ۘ櫪d)ʝ$E9-vCm+Gݽ^Ջ! 
I*ȫCk.鳖yv2m.9aM5~/wgul܊ʻ8(vZ'Mֶj?gxG f{jUqdiFܫ%߱Jg?ݷsתV? 8dwJ~e^chaNәdb*WnԼ7Op=;u,$6["}SzUz .g[tF|։fE4n#kWz{hvIWKt9[hUEVGਯYzw9 ُ 7N-!V1qi[G|zZil2O06qCUv{v_˿~۷rAk{ښLVo$vZ6]r(c6XƦkS""w!1gvni#mߙ}OC$zqUA  % hed1d~VP"Hk੍FM=_'eӍMӀy@T?߁|7d-_4 .U'>xgiZqi ُ 7N߀_;SESp~IKdS ƋWq ws\3O5DuOV""ww?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eogN) +6?xw?eog>fq94m^VFn諎շuj}eo_WM.#~j*xe W5_}49XMU&{ Vʉ 7rӌK߃ k7u{<ˬ)a=DlH^eWnݺ_;SE_qY%%;g^ٜk7b7]=]iy@R̳*n*JJHdYb9U軪w&ijMWvj쏒5r~5dO:,R}ѸkiwBXyU:&BjϷ닝˲*_z˺>_j.P%]1csQ}OL顨Ȟk gH}MTuX*R4vyUUOj)t5DUt|r"^kWVv*(R58#Gmtۼ UWrJi5dxbDEs%S⧖J{5'rM*#FUwm9;;r{m"n詥uXU^^]S~(jaZՕjJTE鱃-Rוug76DZU$T=Tj쿈4KQUYp6Zdcb߯'76뷷&("uG,._w"݊EP6\ydcf;ؿ[UMG"*{HJdmֲ{9)UMZD%+ªfb9 0ZG|zuKߧmm5;fZJY̩o߶秱1Y#滢tR^o &wҫɭ4(#lPDލk")呒o|k梫W_i3#{w/Vҏ8~?8W#d]ެj'2{w`* )EVJcn)ST]t#%!v쫲MԖʢ5)^ORջD{y;}-+UEEE 1 L'Y@IDATxLJ^J|ޑfA]b,QG11hlI,+ * ]E"E?<}l|ݙ3go̙ iR+1$$`$@I p Mx9H8 $\ / Ǟ'K4sp u:$p 
8&{N8*T`΄~Ŋm۹s/5$qwa+UT|H菗#yh׮]ŜKy.-~/D>!~k#(-7~!_WuKh$#Hęg(+W\ N۶m3$($q9UFĒ$ jժ&=RYUR8yGKtĭ[K eo pjy U2X#p;o #CGI0ܗ{^$ ~kR8( kM̱`g%K͛7 4P***2 0K:((;ϓL PY*۷-[kתsŋ5kYddѡCչsgUn]sp_dC%C$7YEP I&>@-_\}+2cEQd۳gOu_(" ΙXRXH-@,]=@rJ5uTE`b9ȧXOk׮:udҽ{wSa=ŒQp -E.%|닛m I,^a.*x 3/n0~_ 2үJ 8P)a!$9Ûs|4HQD yϪ~4M.Qk2xW&M5\ZhaJ% VX|@7ry]weaicyw:uLyg}Yy/H+])$("#/z 4(0JHW@{M#kH33yWԣ>jKe"md!âΜ9Ӏ\6IrTЖJǠ@k/46D)IH&^ǎym7ə{7n4M `EPmi9P1!![j.BcQ '{y -ۂ)ꫯ("'#SkMK~YgCJ@$ E 3XB=CjɦeA?Y**2yYg0| | -.J'nʧ%Ei(8@E!i^eŢ# &)Un0Z~ ȁvM CN4u-[֭[g YO:T0T+Lf\pf @PT£ ($}D0A)IKރKqf9hr-̲*-ʊ||Okt*_r|s/(orCBRg](ZPNsOp9sLDyWQPX9UK^*zM!‰=A>'):'LxMy38'yH"~oi,NįY$OT,$(3L@Z&(eRt #/2X]<-o@2R@9Pd io X + l!F K D%e/yJhVʛO0X f-7%R`Z EWQFY ~ǫ6mژ)rXD,/Ģ2Owĉ%.2# $goIGxFO/"3Aq$YȯI,`J,Q\K>Ƃ}34SiEOZXя>xAThApGڴiSu*Gi%䈭 U9YjUĂX4_ d).EHJ8s@˂$CrF&(„! #DȷȃANiߏ|>rh.ΛjCN( 'u@z4P!Q<b*%aܗksH` 4yNLJEFqFӊJ*|+o>vr^\R)B?Ms2n֎2H$ϩtX@fSH|yx lcCs=/LqTE}\|RRQ@B 1+.I!qd?53}'7pYM8, ^Gi=/ܿb=8  ( N2 ȉ <_~f %-04DwmC``5ɕ 4d63]}K͙3ĥR"-kU#I?cFȎ2%g#iT@$s1].@`D4ϰv x(Dx2si (%xA8JDN'x%Cf8Tx B֬ 5%A0wS,G9NX@"y,)="6nlȵ(@'sWQJE4L LI.漟K:$ H+݁ŋFJvI M7QSX6%&gџd]<\&3DIg\r!}|k}ϟ__FX$$!IޘBkA*+0e?nSC_gXiL]PJ~Y5 0`7-ʬΝ; #0kRGq"S Lj1z*9; *7ykdDZ[:t9HJ-iRԈwq:uD)Qj #RA@K.55kf,!#,MÚBV@5#BCh >,% pIWb< |#I^5b8ħ?%,y+]2@QLi)S"\‘44Bb`TXSQ:)L2&MߧLx߫R_}f2>!PԼ&є%X ,v<+V_4GFȈV YV;sypZ8efWr5XOO](A8@9߈i-1ػ뀲1PfC,QJi мNrS ;*o'+6ꢒB ѵ$S"Z$Rp91x>xwA*$dY)Ĭ#Ɓy%o _f!a1Q% RP5(XL) ض/Mz&l`hT6&G~"cMIC&>GQ:vwg VBI%_ dkHeeepr&( >gFaAP(v@!+H5 |rn>r.팬$>,>䓦##B$ YP"/`KXƔ7Cs(gR-^+D )yGRy01N;8!GyFX ȹ.)8x}|P5e_ƍ3/S/\6L(L(WǏ7PDd@& 8и=yL׀nl׮:H)ZRO=NHВv!!yFB5l1BY>Bȋ4Md/iji ڒ?B|RU)jn[|D<'Mi^a! #$z_><#0yAsV sMaeJ [iqYFd=1cm;E^8a lFi1"S)H^ v=ؓ,Lin &X'5O2H@)9ɳ!< /q3$ 'y&D/4J~("΢GW^ƙ8-ރ{0)DQ!#s!+"+.  ;!+Z)?F<Ǣr)V4 I^|ɫ7% h8A<ƛMe,43]Z&?W^y)tv@(i4)HtA(([K>H@'-ᐭ4)?0p'98Cmw fPxe0$֔$I28܃q/qAм幀{%JfQ@F"?ș2!k*AIU׮]g ʈg~)Rwq)U0-J! IQJx*䆜3G< pi >R 9`cUEz `VE OF`Ls-B O#+TVd=) !*E4 \8$ iʖW^Ȑ2FSʍx|>;k81| '&9B1$vd|/@0| !rVBb9DC( G㌒L\KO#Dz<WH$2C֜! 
#-#G*V>Kqf h#!|R/e QrǀB(H)pPy( }ΔtI礃e%>/);'Ds@r)d sd)xvݸP!j#Gwy/)0&LnX֓CKg\W@@j*&(Voajq sk)RL-4s@$ث hI h#+ʚC;&:?cg}:CL_$ z HXWbc`L`sE2[2ӹ |f+A6m׃7Jͬ <[@7ZMmzvn>[-{CT\P!ֵz]WePUwK飺_CkҊU]zDAXV}1ϛ;Jp?gʞkr}ETNWNA is ^֋T)jQΐ0-H{Auc^W[=VlKd\TK.JrZMz0L4z-3Fq:5:Q/z;̬Lbd=IHR2C1M&0$1d5x@lb WQ/*NK|{Iz3#n?ZAOv;vlФb/H̀PE{$XI0Xe, }G5V0Xlzg&֕2Ǒx'Xj9;q<3z_7(\ [9:zOOm(lBdB*sQEz:`%%")K(`ڇ/Mʷ3֧Rާ" lB_)[l 1*;M r#t~eÌL3 }D%> Vy@`08")!2&!=> (u _. zh9!z_T>8Wtll{8&|yah]9rj@e#: Pd `12͜RbẺ)R!.I yӖt>huy癚{2~wPOct3zBl6tlKCG tīg# 3GtOIX蓤%g%B~GnI0CX2PiV, 0+a2C0C_;/K(($Yrh=vדּ"8`/-$_#*vOTRӿ0~)LR@/Z|z煠$ %@MHɄZˉlX.}ٸ z"61YD/C'\%XMLzej/=XWBC ?+g% 9֞&5qRS=a^ Ql8y ]V@ְQZzUt+jU_@JE_ZZIHKT`f4q" 0i2ub^0)bf^[f~X @\۱ڛ>; Vwj%NԙG"Hw-^~bQހtuשt&j*9m;*[u+L닚8e98j~Ҕ>cb&J:(_kک)i^( Oy^(h0^B(-kOTC.4 Z2?檫k̨dg)YT|C[􄃏4PDy:BHT䵬R R>DAD F 9E+F+nijzIzFE|,i W\v)]QP=p2]Q|ecU bYH^3}(4=HޥxS_s6~啾̓Mz)^MˍU/W6bM-?l:Hż߰$T@fM ۶US3[(]z$[ P?z` 4~R$VA:՛7:Qr%0OU+^4¤45 }zaI pQ:$綉ag!y_K) ]=w%ʏ-ͷic )1 &1၉IDt"q픘Eakކ%ܦ_{<,N H*%lk=#%,r۰$tXl0Tp"8>'G? bzn[4[0'kXIT0r#7ZаoԇQS3ݮ}bq–4J H?NgB(aЩm'{X .Zٌ ( waN衬 .J`n)e﫷$QAJ.4A~QaIoz/O2D9Hqy5Cva$HؒULaV@9o ethO #w _G/fg1@fTK"|[;9)ZE;( ʇ G30Cbr燶Իε (yӹs`0&8P%ϡyHu$PaLNp͡)Lb)'Luѻy;r$v gwLAt:W+H?#hfٲ:r(Ol1ķ8.+H߹6ﻭ; Vq]”@[Y&Ju@/_(.֎J Ėag͆X@~R5)UK39 Hfo)+6~p}62-NQ,I+F?Eҥ}}fdw!0_-XEqQ @+W80thn$: {HY%)H’!ʖJ HjAGi0@r6lm:i]$O7v?ȟOylKҰM ɔ72KK/)ہks0`$1Kxh,Lxؼ:2KѣJ>}[u3Xz[zW0&g3K:]jn'}Le!>v*j l%EqnUj֌2.<MnqX(t.=1ăōF|POva޷[FrRS儰zIȶTbмG?;~aΙ5f5o:/tom7[3[}t"6XL0Wa7)Bmږˈ#'L)zߙ43n@ݶ-C)S}ez:q[>EF%)f4)КAF6k: ^^KbTHbѪ'k( e ku\B-';Ma`pN옯|PA1YQu86m2Nأ6n22;Pg>XD_}5 Œ7Κ)vXݢ8rzB`#JUI*cfsr饪vJW=-ut}Q[M<*y,^ efQycdž%c`鋶>PMOj$Hbp(nb$3xvfx՘*־A:;egEVd׬4M Q ]Όb/΍=SQ 8h+o|ϷWsivҤcGk@0\S׺q9 &zk ;Hy-9J.93DGj'[EI-4H߿W#}|[!ԵZLh 8 b3{YD~i~ |!FjɈ O6'x5\@ l`&D0{Ɇ|4Lt. 
еZ|$PрV /f6S m\ gwxvذ|g.~Q[*I #p#M(R8Jv교Lk[?4j5I*5jX> E7Hv!ZqX;Ifp$(kt/ I&>ZgRZs2mյZmߣ: _ bKΒJ.xǤD -Ghڅ ƕ+FvEzĭ#Ώx"KĂl-)y|˒nZJ;tPװX ##}Nijk[Oju HQn%s|z2z5\[o]RHP|ib>9i[WN/vt|*d/MBsJ=0w1ml@j;Yx p#sk\|&EeHm,˹G>ݴZ?SjZ\̯/ެAj9*{oLK X׉dgkP kqR e 9+j#GrP, ߬_/Y6?Ԇl6*8ȇ/!,' 4w뗲)Tlٵ[8/cB }l5HonLb9KQ<ڹ}{ q7kضd2?r4l2=)T|*UcKgR `YI.=o'֓}</[} 8/ `kImfe Rvܻ>'sOvYV|I)ò,֢f RP!ZҩLm#cGF| r˝#]];wZlAZN+ 9w zG>+e)lfH _ߊBh:|VH^3ɰYJ>[Ҝ{M۶VxKZe R:]j448{|rZu<|E1v"_B7Kjf #_'-Hkԫg-)2/HՂ[? s|zM J6wmwҺ͚YI) ͹G> l-iƍR[k%mo^"9|}lkI.ڐ/ok1GrO[O<9Jv>zM2 }"i{ഭm |qٰ.)>tm>ì_L1!9KG>k++v+_ DmljiU#_#̬5ˊz#}QvV e(I ut#r<+4*|IV/r\#y SM]%jS'a2@.y˥ݫf϶HnݬIw$3{y l$Ӡue7HIZagB.<9-h8|E(CtlݡD+c=@tX@<I_QKZּW/x /t#fsv c#E-GyO޽lҦm>7ްbr)S|i?>S;|6Q $Hܲo_e 'QJ. u=Xhű~[tR#a0N[3^0qۨ{ Com˻uF7yI*Ga g<=P7Κᶺ+^z{wK|SG5p:ڇ0;u9iOF]@<1EA #iRj*V4z:|aQ~Z{_m7dH m9ĬehGƽ52q) ^S\R|jI"QmI̗)| rfne7g+42 8v7I ߝҰ)4҄o:WwzUˌز #'l% ]W&Q3z&a)ᆱuTщB)5*R;,ӴTJTrqA}")/S-6-T;igCM%Vҏ>-ǖo?=@WԃXAJYoʔA[ڹcF3#'$ї_^V1^CA{/9[j 7];wɫ [@8=XwrI9)\[b ifc |w18"n\l,7nN@bp)ʔ-m5  7A7Oy&,ytR.tWjEs6J`ȥ*7ݤ}e NŘFjfM VNH}bET4qZ0w\kZڡF"9dΆH&@GsOJI$0Ҳo_5+ Dchڢ: zT; ˂ӟ.bF)aЁp6檫;zZX=b'R/]lo^b|z+ChϓO4 .j 'b਴LQԼW/5B[ZMOCmY6nإ%I,HKz!xehܡ߻}bҢ=Xho?I|-(J4HszyP{˰Ԉ~^G[xoԾ:[UuCK3ҝzy ԰w `eSGɗ@[rܰMu^VQ @sx"E߆bQIINQn{@[+pJHE Ǝ,@?@[8 hF{3C$Jө)3nT"|ֺ͚KRlؠhpN}q2GcۓNUZ5s>IH?<V,h6|RC4@tMTKJILY5odM6+>ׯ_dIj/7A7E|M~`+gFݪk{A L |~AXM| E2 +sHVx@rnԮin3ʗ`1Lb16^6/ {](T0 @r6o{w?y3rfc#j&HpL ~/[&b?ӏ^Vo1#36T|x9gEATwUy3n^0QKY_{o?I]{Oի(#~e C?Y*'`=Կ R䔇RS&\rbVREC[^X`cDnܘȼd8q q6N&s(EE Y塰68fx$8{Uߘi)mۼx@ ( Ьyj_ZQf 8fy` b]7-}nXؠu`HlR/N2#Tqt5R{u (/fT,7nڤVuTc튦>\tesׁ4)jMe,GmbvCv*VT8(! L['G} DCIEz~2赶nwPZi(½`~ho3G7֨m[BoҼ d5J@.ZdZbaLH1ei׮uLs=} ~yu Qkbjjj[L)lIJKeMZGin^[qJi*3I,/-oW͙Sj>> |\.\.\. 
|\$ " copybutton_prompt_is_regexp = True # Disable autodoc's built-in type hints, and use sphinx_autodoc_typehints extension instead autodoc_typehints = "none" # Auto-generage module docs with sphinx-apidoc apidoc_module_dir = PACKAGE_DIR apidoc_output_dir = MODULE_DOCS_DIR apidoc_module_first = True apidoc_separate_modules = True apidoc_toc_file = False # HTML general settings html_show_sphinx = False pygments_style = "friendly" pygments_dark_style = "material" # HTML theme settings html_logo = "_static/logo.png" html_static_path = ["_static"] html_theme = "furo" html_theme_options = { "sidebar_hide_name": True, } PyrateLimiter-3.9.0/docs/contributing.md000066400000000000000000000001101504242573000202700ustar00rootroot00000000000000# Contributing Guide ```{include} ../CONTRIBUTING.md :start-line: 1 ``` PyrateLimiter-3.9.0/docs/index.md000066400000000000000000000004711504242573000167020ustar00rootroot00000000000000# PyrateLimiter ```{include} ../README.md :start-line: 3 :end-before: '## Contents' ``` ## Features ```{include} ../README.md :start-after: '## Features' ``` # Reference Documentation ```{toctree} :maxdepth: 2 reference changelog contributing ``` PyrateLimiter-3.9.0/docs/reference.md000066400000000000000000000003321504242573000175250ustar00rootroot00000000000000# API Reference This section documents the public interfaces of pyrate-limiter. 
```{toctree} :glob: true :maxdepth: 2 modules/pyrate_limiter.* ``` PyrateLimiter-3.9.0/examples/000077500000000000000000000000001504242573000161355ustar00rootroot00000000000000PyrateLimiter-3.9.0/examples/asyncio_decorator.py000066400000000000000000000017731504242573000222260ustar00rootroot00000000000000import asyncio import logging import time from datetime import datetime from pyrate_limiter.limiter_factory import create_inmemory_limiter logging.basicConfig(level=logging.DEBUG) async def ticker(): for i in range(10): print(f"[TICK] {datetime.now()}") await asyncio.sleep(0.5) def mapping(name, weight, i): return "mytask", 1 async def main(): print("Running task_async using try_acquire_async and AsyncBucketWrapper") print("Note that the TICKs continue while the tasks are waiting") start = time.time() limiter = create_inmemory_limiter(async_wrapper=True) @limiter.as_decorator()(lambda name, weight: (name, weight)) # type: ignore[arg-type] async def task_async(name: str, weight: int): print(f"try_acquire_async: {datetime.now()} {name}: {weight}") await asyncio.gather(ticker(), *[task_async("mytask", 1) for i in range(10)]) print(f'Run 10 calls in {time.time() - start:,.2f} sec') if __name__ == "__main__": asyncio.run(main()) PyrateLimiter-3.9.0/examples/asyncio_ratelimit.py000066400000000000000000000020171504242573000222260ustar00rootroot00000000000000import asyncio import logging import time from datetime import datetime from pyrate_limiter import Limiter from pyrate_limiter.limiter_factory import create_inmemory_limiter logging.basicConfig(level=logging.DEBUG) async def ticker(): for i in range(10): print(f"[TICK] {datetime.now()}") await asyncio.sleep(0.5) def mapping(name, weight, i): return "mytask", 1 async def main(): print("Running task_async using try_acquire_async and BucketAsyncWrapper") print("Note that the TICKs continue while the tasks are waiting") start = time.time() limiter = create_inmemory_limiter(async_wrapper=True) async def task_async(name, 
weight, i, limiter: Limiter): await limiter.try_acquire_async(name, weight) print(f"try_acquire_async: {datetime.now()} {name}: {weight}") await asyncio.gather(ticker(), *[task_async(str(i), 1, i, limiter) for i in range(10)]) print(f'Run 10 calls in {time.time() - start:,.2f} sec') if __name__ == "__main__": asyncio.run(main()) PyrateLimiter-3.9.0/examples/httpx_ratelimiter.py000066400000000000000000000107631504242573000222660ustar00rootroot00000000000000""" Example of using pyrate_limiter with httpx. """ import logging from httpx import AsyncHTTPTransport from httpx import HTTPTransport from httpx import Request from httpx import Response from pyrate_limiter import Limiter from pyrate_limiter import limiter_factory logger = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO, format="%(asctime)s.%(msecs)03d [%(levelname)s] %(message)s", datefmt="%Y-%m-%d %H:%M:%S") logger.setLevel(logging.DEBUG) class RateLimiterTransport(HTTPTransport): def __init__(self, limiter: Limiter, **kwargs): super().__init__(**kwargs) self.limiter = limiter def handle_request(self, request: Request, **kwargs) -> Response: # using a constant string for item name means that the same # rate is applied to all requests. 
while not self.limiter.try_acquire("httpx_ratelimiter"): logger.debug("Lock acquisition timed out, retrying") logger.debug("Acquired lock") return super().handle_request(request, **kwargs) class AsyncRateLimiterTransport(AsyncHTTPTransport): def __init__(self, limiter: Limiter, **kwargs): super().__init__(**kwargs) self.limiter = limiter async def handle_async_request(self, request: Request, **kwargs) -> Response: while not await self.limiter.try_acquire_async("httpx_ratelimiter"): logger.debug("Lock acquisition timed out, retrying") logger.debug("Acquired lock") response = await super().handle_async_request(request, **kwargs) return response # Example below def fetch(start_time: int): import httpx url = "https://httpbin.org/get" assert limiter_factory.LIMITER is not None with httpx.Client(transport=RateLimiterTransport(limiter=limiter_factory.LIMITER)) as client: client.get(url) def singleprocess_example(): from pyrate_limiter import limiter_factory, Duration import httpx import time import os start_time = time.time() url = "https://httpbin.org/get" limiter = limiter_factory.create_inmemory_limiter(rate_per_duration=1, duration=Duration.SECOND, max_delay=Duration.HOUR ) transport = RateLimiterTransport(limiter=limiter) with httpx.Client(transport=transport) as client: for _ in range(10): response = client.get(url) print(f"{round(time.time() - start_time, 2)}s-{os.getpid()}: {response.json()}") def asyncio_example(): import asyncio import time import httpx from pyrate_limiter import limiter_factory, Duration url = "https://httpbin.org/get" async def ticker(): """loops and prints time, showing the eventloop isn't blocked""" while True: print(f"[TICK] {time.time()}") await asyncio.sleep(1) async def afetch(client: httpx.AsyncClient, start_time: int): await client.get(url) async def example(): limiter = limiter_factory.create_inmemory_limiter( rate_per_duration=1, duration=Duration.SECOND, max_delay=Duration.HOUR, async_wrapper=True ) transport = 
AsyncRateLimiterTransport(limiter=limiter) client = httpx.AsyncClient(transport=transport) tasks = [afetch(client, url) for _ in range(10)] asyncio.create_task(ticker()) results = await asyncio.gather(*tasks) await client.aclose() return results asyncio.run(example()) def multiprocess_example(): import time from concurrent.futures import ProcessPoolExecutor, wait from functools import partial from pyrate_limiter import Duration, MultiprocessBucket, Rate rate = Rate(1, Duration.SECOND) bucket = MultiprocessBucket.init([rate]) start_time = time.time() with ProcessPoolExecutor(initializer=partial(limiter_factory.init_global_limiter, bucket)) as executor: futures = [executor.submit(fetch, start_time) for _ in range(10)] wait(futures) for f in futures: try: f.result() except Exception: logger.exception("Task raised") if __name__ == "__main__": print("Single Process example: 10 requests") singleprocess_example() print("Multiprocessing example: 10 requests") multiprocess_example() print("Asyncio example: 10 requests") asyncio_example() PyrateLimiter-3.9.0/examples/in_memory_multiprocess.py000066400000000000000000000044751504242573000233300ustar00rootroot00000000000000""" Demonstrates using a MultiprocessBucket using a ProcessPoolExecutor, running a simple task. A MultiprocessBucket is useful when the rate is to be shared among a multiprocessing pool or ProcessPoolExecutor. The mp_bucket stores its items in a multiprocessing ListProxy, and a multiprocessing lock is shared across Limiter instances. 
""" import logging import os import time from concurrent.futures import ProcessPoolExecutor from concurrent.futures import wait from functools import partial from multiprocessing import Lock from pyrate_limiter import Duration from pyrate_limiter import Limiter from pyrate_limiter import MonotonicClock from pyrate_limiter import MultiprocessBucket from pyrate_limiter import Rate LIMITER: Limiter | None = None MAX_DELAY = Duration.DAY REQUESTS_PER_SECOND = 100 NUM_REQUESTS = REQUESTS_PER_SECOND * 5 # Run for ~5 seconds logger = logging.getLogger(__name__) def init_process(bucket: MultiprocessBucket): global LIMITER LIMITER = Limiter(bucket, raise_when_fail=False, clock=MonotonicClock(), max_delay=MAX_DELAY, retry_until_max_delay=True) def my_task(): assert LIMITER is not None LIMITER.try_acquire("my_task") result = {"time": time.monotonic(), "pid": os.getpid()} return result if __name__ == "__main__": logging.basicConfig( format="%(asctime)s %(name)s %(levelname)-8s %(message)s", level=logging.INFO, datefmt="%Y-%m-%d %H:%M:%S", ) rate = Rate(REQUESTS_PER_SECOND, Duration.SECOND) bucket = MultiprocessBucket.init([rate]) mp_lock = Lock() # create a limiter and feed it 100 requests to prime it # Otherwise, the test appears to run too fast init_process(bucket) assert LIMITER is not None [LIMITER.try_acquire("test") for _ in range(REQUESTS_PER_SECOND)] start = time.monotonic() with ProcessPoolExecutor( initializer=partial(init_process, bucket) ) as executor: futures = [executor.submit(my_task) for _ in range(NUM_REQUESTS)] wait(futures) times = [] for f in futures: try: t = f.result() times.append(t) except Exception as e: print(f"Task raised: {e}") end = time.monotonic() print(f"Completed {NUM_REQUESTS=} in {end - start} seconds, at a rate of {REQUESTS_PER_SECOND=}") PyrateLimiter-3.9.0/examples/sqlite_filelock_multiprocess.py000066400000000000000000000041701504242573000244730ustar00rootroot00000000000000""" Demonstrates using a SQLite Bucket across multiple processes, 
using a filelock to enforce synchronization. This is useful in cases where multiple processes are created, possibly at different times or from different applications. The SQLite Bucket uses a .lock file to ensure that only one process is active at a time. """ import logging import os import time from concurrent.futures import ProcessPoolExecutor from concurrent.futures import wait from functools import partial from pyrate_limiter import Duration from pyrate_limiter import Limiter from pyrate_limiter import limiter_factory LIMITER: Limiter | None = None REQUESTS_PER_SECOND = 10 NUM_REQUESTS = REQUESTS_PER_SECOND * 5 # Run for ~5 seconds logger = logging.getLogger(__name__) def init_process(): global LIMITER LIMITER = limiter_factory.create_sqlite_limiter(rate_per_duration=REQUESTS_PER_SECOND, duration=Duration.SECOND, db_path="pyrate_limiter.sqlite", use_file_lock=True) def my_task(): assert LIMITER is not None LIMITER.try_acquire("my_task") result = {"time": time.monotonic(), "pid": os.getpid()} return result if __name__ == "__main__": logging.basicConfig( format="%(asctime)s %(name)s %(levelname)-8s %(message)s", level=logging.INFO, datefmt="%Y-%m-%d %H:%M:%S", ) # prime the rates, to show realistic rates init_process() assert LIMITER is not None [LIMITER.try_acquire("test") for _ in range(REQUESTS_PER_SECOND)] start = time.monotonic() with ProcessPoolExecutor( initializer=partial(init_process) ) as executor: futures = [executor.submit(my_task) for _ in range(NUM_REQUESTS)] wait(futures) times = [] for f in futures: try: t = f.result() times.append(t) except Exception as e: print(f"Task raised: {e}") end = time.monotonic() print(f"Completed {NUM_REQUESTS=} in {end - start} seconds, at a rate of {REQUESTS_PER_SECOND=}") PyrateLimiter-3.9.0/noxfile.py000066400000000000000000000031311504242573000163330ustar00rootroot00000000000000import nox from nox_poetry import session # Reuse virtualenv created by poetry instead of creating new ones 
nox.options.reuse_existing_virtualenvs = True

# Serial test selection is still manual. TODO: introduce a "serial" marker.
#
# MP args: the multiprocessing test module must run alone (and first).
PYTEST_MP_ARGS = [
    "--verbose",
    "--cov=pyrate_limiter",
    "--maxfail=1",
    "tests/test_multiprocessing.py",
]
# MP2 args: remaining mpbucket/monotonic-marked tests, also run serially.
PYTEST_MP2_ARGS = [
    "--verbose",
    "--cov=pyrate_limiter",
    "--cov-append",
    "--maxfail=1",
    "-m", "mpbucket and monotonic",
    "--ignore=tests/test_multiprocessing.py",
]
# Everything else runs concurrently. Only 3 workers (one fewer than the GHA
# runner's cores): the timing-sensitive tests misbehave under heavy load.
PYTEST_ARGS = [
    "--verbose",
    "--maxfail=1",
    "-m", "not mpbucket",
    "--numprocesses=3",
    "--ignore=tests/test_multiprocessing.py",
]
COVERAGE_ARGS = [
    "--cov=pyrate_limiter",
    "--cov-append",
    "--cov-report=term",
    "--cov-report=xml",
    "--cov-report=html",
]


@session(python=False)
def lint(session) -> None:
    """Run every configured pre-commit hook over the whole tree."""
    session.run("pre-commit", "run", "--all-files")


@session(python=False)
def cover(session) -> None:
    """Run tests and generate coverage reports in both terminal output and XML (for Codecov)"""
    # Serial phase 1: the dedicated multiprocessing test file.
    session.run("pytest", *PYTEST_MP_ARGS)
    # Serial phase 2: the marker-selected mpbucket tests.
    session.run("pytest", *PYTEST_MP2_ARGS)
    # Concurrent phase: everything else, with full coverage reporting.
    session.run("pytest", *PYTEST_ARGS, *COVERAGE_ARGS)


@session(python=False)
def test(session) -> None:
    """Quick test run: serial MP file first, then the concurrent suite."""
    session.run("pytest", *PYTEST_MP_ARGS)
    session.run("pytest", *PYTEST_ARGS)


@session(python=False)
def docs(session):
    """Build Sphinx documentation"""
    session.run("sphinx-build", "docs", "docs/_build/html", "-j", "auto")
[[package]] name = "alabaster" version = "0.7.13" description = "A configurable sidebar-enabled Sphinx theme" optional = true python-versions = ">=3.6" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, ] [[package]] name = "argcomplete" version = "3.6.2" description = "Bash tab completion for argparse" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "argcomplete-3.6.2-py3-none-any.whl", hash = "sha256:65b3133a29ad53fb42c48cf5114752c7ab66c1c38544fdf6460f450c09b42591"}, {file = "argcomplete-3.6.2.tar.gz", hash = "sha256:d0519b1bc867f5f4f4713c41ad0aba73a4a5f007449716b16f385f2166dc6adf"}, ] [package.extras] test = ["coverage", "mypy", "pexpect", "ruff", "wheel"] [[package]] name = "async-timeout" version = "5.0.1" description = "Timeout context manager for asyncio programs" optional = true python-versions = ">=3.8" groups = ["main"] markers = "extra == \"all\" and python_full_version < \"3.11.3\"" files = [ {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, ] [[package]] name = "attrs" version = "25.3.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, ] [package.extras] benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; 
platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] [[package]] name = "babel" version = "2.17.0" description = "Internationalization utilities" optional = true python-versions = ">=3.8" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, {file = "babel-2.17.0.tar.gz", hash 
= "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, ] [package.dependencies] pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} [package.extras] dev = ["backports.zoneinfo ; python_version < \"3.9\"", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata ; sys_platform == \"win32\""] [[package]] name = "backports-zoneinfo" version = "0.2.1" description = "Backport of the standard library zoneinfo module" optional = false python-versions = ">=3.6" groups = ["main", "dev"] markers = "python_version == \"3.8\"" files = [ {file = "backports.zoneinfo-0.2.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:da6013fd84a690242c310d77ddb8441a559e9cb3d3d59ebac9aca1a57b2e18bc"}, {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:89a48c0d158a3cc3f654da4c2de1ceba85263fafb861b98b59040a5086259722"}, {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:1c5742112073a563c81f786e77514969acb58649bcdf6cdf0b4ed31a348d4546"}, {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win32.whl", hash = "sha256:e8236383a20872c0cdf5a62b554b27538db7fa1bbec52429d8d106effbaeca08"}, {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8439c030a11780786a2002261569bdf362264f605dfa4d65090b64b05c9f79a7"}, {file = "backports.zoneinfo-0.2.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:f04e857b59d9d1ccc39ce2da1021d196e47234873820cbeaad210724b1ee28ac"}, {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:17746bd546106fa389c51dbea67c8b7c8f0d14b5526a579ca6ccf5ed72c526cf"}, {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5c144945a7752ca544b4b78c8c41544cdfaf9786f25fe5ffb10e838e19a27570"}, {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win32.whl", hash = "sha256:e55b384612d93be96506932a786bbcde5a2db7a9e6a4bb4bffe8b733f5b9036b"}, {file = 
"backports.zoneinfo-0.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a76b38c52400b762e48131494ba26be363491ac4f9a04c1b7e92483d169f6582"}, {file = "backports.zoneinfo-0.2.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:8961c0f32cd0336fb8e8ead11a1f8cd99ec07145ec2931122faaac1c8f7fd987"}, {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e81b76cace8eda1fca50e345242ba977f9be6ae3945af8d46326d776b4cf78d1"}, {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7b0a64cda4145548fed9efc10322770f929b944ce5cee6c0dfe0c87bf4c0c8c9"}, {file = "backports.zoneinfo-0.2.1-cp38-cp38-win32.whl", hash = "sha256:1b13e654a55cd45672cb54ed12148cd33628f672548f373963b0bff67b217328"}, {file = "backports.zoneinfo-0.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:4a0f800587060bf8880f954dbef70de6c11bbe59c673c3d818921f042f9954a6"}, {file = "backports.zoneinfo-0.2.1.tar.gz", hash = "sha256:fadbfe37f74051d024037f223b8e001611eac868b5c5b06144ef4d8b799862f2"}, ] [package.extras] tzdata = ["tzdata"] [[package]] name = "beautifulsoup4" version = "4.13.4" description = "Screen-scraping library" optional = true python-versions = ">=3.7.0" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "beautifulsoup4-4.13.4-py3-none-any.whl", hash = "sha256:9bbbb14bfde9d79f38b8cd5f8c7c85f4b8f2523190ebed90e950a8dea4cb1c4b"}, {file = "beautifulsoup4-4.13.4.tar.gz", hash = "sha256:dbb3c4e1ceae6aefebdaf2423247260cd062430a410e38c66f2baa50a8437195"}, ] [package.dependencies] soupsieve = ">1.2" typing-extensions = ">=4.0.0" [package.extras] cchardet = ["cchardet"] chardet = ["chardet"] charset-normalizer = ["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] [[package]] name = "certifi" version = "2025.4.26" description = "Python package for providing Mozilla's CA Bundle." 
optional = true python-versions = ">=3.6" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3"}, {file = "certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6"}, ] [[package]] name = "cfgv" version = "3.4.0" description = "Validate configuration and produce human readable error messages." optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, ] [[package]] name = "charset-normalizer" version = "3.4.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = true python-versions = ">=3.7" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941"}, {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd"}, {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6"}, {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d"}, {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86"}, {file = 
"charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c"}, {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0"}, {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef"}, {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6"}, {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366"}, {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db"}, {file = "charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a"}, {file = "charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509"}, {file = "charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2"}, {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645"}, {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd"}, {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8"}, {file = 
"charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f"}, {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7"}, {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9"}, {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544"}, {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82"}, {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0"}, {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5"}, {file = "charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a"}, {file = "charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28"}, {file = "charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7"}, {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3"}, {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a"}, {file = 
"charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214"}, {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a"}, {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd"}, {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981"}, {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c"}, {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b"}, {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d"}, {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f"}, {file = "charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c"}, {file = "charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e"}, {file = "charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0"}, {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf"}, {file = 
"charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e"}, {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1"}, {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c"}, {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691"}, {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0"}, {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b"}, {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff"}, {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b"}, {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148"}, {file = "charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7"}, {file = "charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980"}, {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cad5f45b3146325bb38d6855642f6fd609c3f7cad4dbaf75549bf3b904d3184"}, {file = 
"charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2680962a4848b3c4f155dc2ee64505a9c57186d0d56b43123b17ca3de18f0fa"}, {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:36b31da18b8890a76ec181c3cf44326bf2c48e36d393ca1b72b3f484113ea344"}, {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4074c5a429281bf056ddd4c5d3b740ebca4d43ffffe2ef4bf4d2d05114299da"}, {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9e36a97bee9b86ef9a1cf7bb96747eb7a15c2f22bdb5b516434b00f2a599f02"}, {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:1b1bde144d98e446b056ef98e59c256e9294f6b74d7af6846bf5ffdafd687a7d"}, {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:915f3849a011c1f593ab99092f3cecfcb4d65d8feb4a64cf1bf2d22074dc0ec4"}, {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:fb707f3e15060adf5b7ada797624a6c6e0138e2a26baa089df64c68ee98e040f"}, {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:25a23ea5c7edc53e0f29bae2c44fcb5a1aa10591aae107f2a2b2583a9c5cbc64"}, {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:770cab594ecf99ae64c236bc9ee3439c3f46be49796e265ce0cc8bc17b10294f"}, {file = "charset_normalizer-3.4.2-cp37-cp37m-win32.whl", hash = "sha256:6a0289e4589e8bdfef02a80478f1dfcb14f0ab696b5a00e1f4b8a14a307a3c58"}, {file = "charset_normalizer-3.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6fc1f5b51fa4cecaa18f2bd7a003f3dd039dd615cd69a2afd6d3b19aed6775f2"}, {file = "charset_normalizer-3.4.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76af085e67e56c8816c3ccf256ebd136def2ed9654525348cfa744b6802b69eb"}, {file = 
"charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e45ba65510e2647721e35323d6ef54c7974959f6081b58d4ef5d87c60c84919a"}, {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:046595208aae0120559a67693ecc65dd75d46f7bf687f159127046628178dc45"}, {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75d10d37a47afee94919c4fab4c22b9bc2a8bf7d4f46f87363bcf0573f3ff4f5"}, {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6333b3aa5a12c26b2a4d4e7335a28f1475e0e5e17d69d55141ee3cab736f66d1"}, {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8323a9b031aa0393768b87f04b4164a40037fb2a3c11ac06a03ffecd3618027"}, {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:24498ba8ed6c2e0b56d4acbf83f2d989720a93b41d712ebd4f4979660db4417b"}, {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:844da2b5728b5ce0e32d863af26f32b5ce61bc4273a9c720a9f3aa9df73b1455"}, {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:65c981bdbd3f57670af8b59777cbfae75364b483fa8a9f420f08094531d54a01"}, {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:3c21d4fca343c805a52c0c78edc01e3477f6dd1ad7c47653241cf2a206d4fc58"}, {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dc7039885fa1baf9be153a0626e337aa7ec8bf96b0128605fb0d77788ddc1681"}, {file = "charset_normalizer-3.4.2-cp38-cp38-win32.whl", hash = "sha256:8272b73e1c5603666618805fe821edba66892e2870058c94c53147602eab29c7"}, {file = "charset_normalizer-3.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:70f7172939fdf8790425ba31915bfbe8335030f05b9913d7ae00a87d4395620a"}, {file = 
"charset_normalizer-3.4.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4"}, {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7"}, {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836"}, {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597"}, {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7"}, {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f"}, {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba"}, {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12"}, {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518"}, {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5"}, {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3"}, {file = "charset_normalizer-3.4.2-cp39-cp39-win32.whl", hash = "sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471"}, {file = 
"charset_normalizer-3.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e"}, {file = "charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0"}, {file = "charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63"}, ] [[package]] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" groups = ["main", "dev"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] markers = {main = "extra == \"docs\" and sys_platform == \"win32\"", dev = "sys_platform == \"win32\""} [[package]] name = "colorlog" version = "6.9.0" description = "Add colours to the output of Python's logging module." 
optional = false python-versions = ">=3.6" groups = ["dev"] files = [ {file = "colorlog-6.9.0-py3-none-any.whl", hash = "sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff"}, {file = "colorlog-6.9.0.tar.gz", hash = "sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2"}, ] [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} [package.extras] development = ["black", "flake8", "mypy", "pytest", "types-colorama"] [[package]] name = "coverage" version = "6.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.6" groups = ["dev"] files = [ {file = "coverage-6.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:3dfb23cc180b674a11a559183dff9655beb9da03088f3fe3c4f3a6d200c86f05"}, {file = "coverage-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5dd5ae0a9cd55d71f1335c331e9625382239b8cede818fb62d8d2702336dbf8"}, {file = "coverage-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8426fec5ad5a6e8217921716b504e9b6e1166dc147e8443b4855e329db686282"}, {file = "coverage-6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:aa5d4d43fa18cc9d0c6e02a83de0b9729b5451a9066574bd276481474f0a53ab"}, {file = "coverage-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b78dd3eeb8f5ff26d2113c41836bac04a9ea91be54c346826b54a373133c8c53"}, {file = "coverage-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:581fddd2f883379bd5af51da9233e0396b6519f3d3eeae4fb88867473be6d56e"}, {file = "coverage-6.0-cp310-cp310-win32.whl", hash = "sha256:43bada49697a62ffa0283c7f01bbc76aac562c37d4bb6c45d56dd008d841194e"}, {file = "coverage-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:fa816e97cfe1f691423078dffa39a18106c176f28008db017b3ce3e947c34aa5"}, {file = "coverage-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", 
hash = "sha256:5c191e01b23e760338f19d8ba2470c0dad44c8b45e41ac043b2db84efc62f695"}, {file = "coverage-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:274a612f67f931307706b60700f1e4cf80e1d79dff6c282fc9301e4565e78724"}, {file = "coverage-6.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9dbfcbc56d8de5580483cf2caff6a59c64d3e88836cbe5fb5c20c05c29a8808"}, {file = "coverage-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e63490e8a6675cee7a71393ee074586f7eeaf0e9341afd006c5d6f7eec7c16d7"}, {file = "coverage-6.0-cp36-cp36m-win32.whl", hash = "sha256:72f8c99f1527c5a8ee77c890ea810e26b39fd0b4c2dffc062e20a05b2cca60ef"}, {file = "coverage-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:88f1810eb942e7063d051d87aaaa113eb5fd5a7fd2cda03a972de57695b8bb1a"}, {file = "coverage-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:befb5ffa9faabef6dadc42622c73de168001425258f0b7e402a2934574e7a04b"}, {file = "coverage-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7dbda34e8e26bd86606ba8a9c13ccb114802e01758a3d0a75652ffc59a573220"}, {file = "coverage-6.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b4ee5815c776dfa3958ba71c7cd4cdd8eb40d79358a18352feb19562fe4408c4"}, {file = "coverage-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d82cbef1220703ce56822be7fbddb40736fc1a928ac893472df8aff7421ae0aa"}, {file = "coverage-6.0-cp37-cp37m-win32.whl", hash = "sha256:d795a2c92fe8cb31f6e9cd627ee4f39b64eb66bf47d89d8fcf7cb3d17031c887"}, {file = "coverage-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:6e216e4021c934246c308fd3e0d739d9fa8a3f4ea414f584ab90ef9c1592f282"}, {file = "coverage-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:8305e14112efb74d0b5fec4df6e41cafde615c2392a7e51c84013cafe945842c"}, {file = "coverage-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4865dc4a7a566147cbdc2b2f033a6cccc99a7dcc89995137765c384f6c73110b"}, {file = "coverage-6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:25df2bc53a954ba2ccf230fa274d1de341f6aa633d857d75e5731365f7181749"}, {file = "coverage-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:08fd55d2e00dac4c18a2fa26281076035ec86e764acdc198b9185ce749ada58f"}, {file = "coverage-6.0-cp38-cp38-win32.whl", hash = "sha256:11ce082eb0f7c2bbfe96f6c8bcc3a339daac57de4dc0f3186069ec5c58da911c"}, {file = "coverage-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:7844a8c6a0fee401edbf578713c2473e020759267c40261b294036f9d3eb6a2d"}, {file = "coverage-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bea681309bdd88dd1283a8ba834632c43da376d9bce05820826090aad80c0126"}, {file = "coverage-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e735ab8547d8a1fe8e58dd765d6f27ac539b395f52160d767b7189f379f9be7a"}, {file = "coverage-6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7593a49300489d064ebb6c58539f52cbbc4a2e6a4385de5e92cae1563f88a425"}, {file = "coverage-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:adb0f4c3c8ba8104378518a1954cbf3d891a22c13fd0e0bf135391835f44f288"}, {file = "coverage-6.0-cp39-cp39-win32.whl", hash = "sha256:8da0c4a26a831b392deaba5fdd0cd7838d173b47ce2ec3d0f37be630cb09ef6e"}, {file = "coverage-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:7af2f8e7bb54ace984de790e897f858e88068d8fbc46c9490b7c19c59cf51822"}, {file = "coverage-6.0-pp36-none-any.whl", hash = "sha256:82b58d37c47d93a171be9b5744bcc96a0012cbf53d5622b29a49e6be2097edd7"}, {file = 
"coverage-6.0-pp37-none-any.whl", hash = "sha256:fff04bfefb879edcf616f1ce5ea6f4a693b5976bdc5e163f8464f349c25b59f0"}, {file = "coverage-6.0.tar.gz", hash = "sha256:17983f6ccc47f4864fd16d20ff677782b23d1207bf222d10e4d676e4636b0872"}, ] [package.dependencies] tomli = {version = "*", optional = true, markers = "extra == \"toml\""} [package.extras] toml = ["tomli"] [[package]] name = "dependency-groups" version = "1.3.1" description = "A tool for resolving PEP 735 Dependency Group data" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "dependency_groups-1.3.1-py3-none-any.whl", hash = "sha256:51aeaa0dfad72430fcfb7bcdbefbd75f3792e5919563077f30bc0d73f4493030"}, {file = "dependency_groups-1.3.1.tar.gz", hash = "sha256:78078301090517fd938c19f64a53ce98c32834dfe0dee6b88004a569a6adfefd"}, ] [package.dependencies] packaging = "*" tomli = {version = "*", markers = "python_version < \"3.11\""} [package.extras] cli = ["tomli ; python_version < \"3.11\""] [[package]] name = "distlib" version = "0.3.9" description = "Distribution utilities" optional = false python-versions = "*" groups = ["dev"] files = [ {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, ] [[package]] name = "docutils" version = "0.17.1" description = "Docutils -- Python Documentation Utilities" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, ] [[package]] name = "exceptiongroup" version = "1.3.0" description = "Backport of PEP 654 (exception groups)" optional 
= false python-versions = ">=3.7" groups = ["dev"] markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, ] [package.dependencies] typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} [package.extras] test = ["pytest (>=6)"] [[package]] name = "execnet" version = "2.1.1" description = "" optional = false python-versions = "*" groups = ["dev"] files = [ {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, ] [[package]] name = "filelock" version = "3.16.1" description = "A platform independent file lock." optional = false python-versions = ">=3.8" groups = ["main", "dev"] files = [ {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, ] markers = {main = "extra == \"all\""} [package.extras] docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""] [[package]] name = "flake8" version = "5.0.4" description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.6.1" groups = ["dev"] files = [ {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = 
"sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, ] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" pycodestyle = ">=2.9.0,<2.10.0" pyflakes = ">=2.5.0,<2.6.0" [[package]] name = "flake8-polyfill" version = "1.0.2" description = "Polyfill package for Flake8 plugins" optional = false python-versions = "*" groups = ["dev"] files = [ {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"}, {file = "flake8_polyfill-1.0.2-py2.py3-none-any.whl", hash = "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9"}, ] [package.dependencies] flake8 = "*" [[package]] name = "furo" version = "2022.9.29" description = "A clean customisable Sphinx documentation theme." optional = true python-versions = ">=3.7" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "furo-2022.9.29-py3-none-any.whl", hash = "sha256:559ee17999c0f52728481dcf6b1b0cf8c9743e68c5e3a18cb45a7992747869a9"}, {file = "furo-2022.9.29.tar.gz", hash = "sha256:d4238145629c623609c2deb5384f8d036e2a1ee2a101d64b67b4348112470dbd"}, ] [package.dependencies] beautifulsoup4 = "*" pygments = ">=2.7" sphinx = ">=4.0,<6.0" sphinx-basic-ng = "*" [[package]] name = "identify" version = "2.6.1" description = "File identification library for Python" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0"}, {file = "identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98"}, ] [package.extras] license = ["ukkonen"] [[package]] name = "idna" version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = true python-versions = ">=3.6" groups = ["main"] markers = "extra == \"docs\"" 
files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] [[package]] name = "imagesize" version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, ] [[package]] name = "importlib-metadata" version = "8.5.0" description = "Read metadata from Python packages" optional = true python-versions = ">=3.8" groups = ["main"] markers = "extra == \"docs\" and python_version < \"3.10\"" files = [ {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, ] [package.dependencies] zipp = ">=3.20" [package.extras] check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] test = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] [[package]] name = "iniconfig" version = "2.1.0" description = "brain-dead simple config-ini 
parsing" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, ] [[package]] name = "jinja2" version = "3.1.6" description = "A very fast and expressive template engine." optional = true python-versions = ">=3.7" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, ] [package.dependencies] MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] [[package]] name = "markdown-it-py" version = "2.2.0" description = "Python port of markdown-it. Markdown parsing, done right!" optional = true python-versions = ">=3.7" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "markdown-it-py-2.2.0.tar.gz", hash = "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1"}, {file = "markdown_it_py-2.2.0-py3-none-any.whl", hash = "sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30"}, ] [package.dependencies] mdurl = ">=0.1,<1.0" [package.extras] benchmarking = ["psutil", "pytest", "pytest-benchmark"] code-style = ["pre-commit (>=3.0,<4.0)"] compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] linkify = ["linkify-it-py (>=1,<3)"] plugins = ["mdit-py-plugins"] profiling = ["gprof2dot"] rtd = ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" version = "2.1.5" description = "Safely add untrusted 
strings to HTML/XML markup." optional = true python-versions = ">=3.7" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, {file = 
"MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" optional = false python-versions = ">=3.6" groups = ["dev"] files = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] 
[[package]] name = "mdit-py-plugins" version = "0.3.5" description = "Collection of plugins for markdown-it-py" optional = true python-versions = ">=3.7" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "mdit-py-plugins-0.3.5.tar.gz", hash = "sha256:eee0adc7195e5827e17e02d2a258a2ba159944a0748f59c5099a4a27f78fcf6a"}, {file = "mdit_py_plugins-0.3.5-py3-none-any.whl", hash = "sha256:ca9a0714ea59a24b2b044a1831f48d817dd0c817e84339f20e7889f392d77c4e"}, ] [package.dependencies] markdown-it-py = ">=1.0.0,<3.0.0" [package.extras] code-style = ["pre-commit"] rtd = ["attrs", "myst-parser (>=0.16.1,<0.17.0)", "sphinx-book-theme (>=0.1.0,<0.2.0)"] testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" optional = true python-versions = ">=3.7" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] [[package]] name = "myst-parser" version = "0.18.1" description = "An extended commonmark compliant parser, with bridges to docutils & sphinx." 
optional = true python-versions = ">=3.7" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "myst-parser-0.18.1.tar.gz", hash = "sha256:79317f4bb2c13053dd6e64f9da1ba1da6cd9c40c8a430c447a7b146a594c246d"}, {file = "myst_parser-0.18.1-py3-none-any.whl", hash = "sha256:61b275b85d9f58aa327f370913ae1bec26ebad372cc99f3ab85c8ec3ee8d9fb8"}, ] [package.dependencies] docutils = ">=0.15,<0.20" jinja2 = "*" markdown-it-py = ">=1.0.0,<3.0.0" mdit-py-plugins = ">=0.3.1,<0.4.0" pyyaml = "*" sphinx = ">=4,<6" typing-extensions = "*" [package.extras] code-style = ["pre-commit (>=2.12,<3.0)"] linkify = ["linkify-it-py (>=1.0,<2.0)"] rtd = ["ipython", "sphinx-book-theme", "sphinx-design", "sphinxcontrib.mermaid (>=0.7.1,<0.8.0)", "sphinxext-opengraph (>=0.6.3,<0.7.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"] testing = ["beautifulsoup4", "coverage[toml]", "pytest (>=6,<7)", "pytest-cov", "pytest-param-files (>=0.3.4,<0.4.0)", "pytest-regressions", "sphinx (<5.2)", "sphinx-pytest"] [[package]] name = "nodeenv" version = "1.9.1" description = "Node.js virtual environment builder" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" groups = ["dev"] files = [ {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, ] [[package]] name = "nox" version = "2025.5.1" description = "Flexible test automation." 
optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "nox-2025.5.1-py3-none-any.whl", hash = "sha256:56abd55cf37ff523c254fcec4d152ed51e5fe80e2ab8317221d8b828ac970a31"}, {file = "nox-2025.5.1.tar.gz", hash = "sha256:2a571dfa7a58acc726521ac3cd8184455ebcdcbf26401c7b737b5bc6701427b2"}, ] [package.dependencies] argcomplete = ">=1.9.4,<4" attrs = ">=23.1" colorlog = ">=2.6.1,<7" dependency-groups = ">=1.1" packaging = ">=20.9" tomli = {version = ">=1", markers = "python_version < \"3.11\""} virtualenv = ">=20.14.1" [package.extras] tox-to-nox = ["importlib-resources ; python_version < \"3.9\"", "jinja2", "tox (>=4)"] uv = ["uv (>=0.1.6)"] [[package]] name = "nox-poetry" version = "1.1.0" description = "nox-poetry" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "nox_poetry-1.1.0-py3-none-any.whl", hash = "sha256:30510b183f92f63f6b8d3d9b0d371b8d5ac57e7934dcf4a7042474bb91691756"}, {file = "nox_poetry-1.1.0.tar.gz", hash = "sha256:b19c597ec20cfb071eaa853aee2e28cedf8d88799d68b482e7b6c915948a2817"}, ] [package.dependencies] nox = ">=2020.8.22" packaging = ">=20.9" tomlkit = ">=0.7" [[package]] name = "packaging" version = "25.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" groups = ["main", "dev"] files = [ {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] markers = {main = "extra == \"docs\""} [[package]] name = "pbr" version = "6.1.1" description = "Python Build Reasonableness" optional = true python-versions = ">=2.6" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "pbr-6.1.1-py2.py3-none-any.whl", hash = "sha256:38d4daea5d9fa63b3f626131b9d34947fd0c8be9b05a29276870580050a25a76"}, {file = "pbr-6.1.1.tar.gz", hash = 
"sha256:93ea72ce6989eb2eed99d0f75721474f69ad88128afdef5ac377eb797c4bf76b"}, ] [package.dependencies] setuptools = "*" [[package]] name = "platformdirs" version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" version = "2.21.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
optional = false python-versions = ">=3.7" groups = ["dev"] files = [ {file = "pre_commit-2.21.0-py2.py3-none-any.whl", hash = "sha256:e2f91727039fc39a92f58a588a25b87f936de6567eed4f0e673e0507edc75bad"}, {file = "pre_commit-2.21.0.tar.gz", hash = "sha256:31ef31af7e474a8d8995027fefdfcf509b5c913ff31f2015b4ec4beb26a6f658"}, ] [package.dependencies] cfgv = ">=2.0.0" identify = ">=1.0.0" nodeenv = ">=0.11.1" pyyaml = ">=5.1" virtualenv = ">=20.10.0" [[package]] name = "psycopg" version = "3.2.9" description = "PostgreSQL database adapter for Python" optional = false python-versions = ">=3.8" groups = ["main", "dev"] files = [ {file = "psycopg-3.2.9-py3-none-any.whl", hash = "sha256:01a8dadccdaac2123c916208c96e06631641c0566b22005493f09663c7a8d3b6"}, {file = "psycopg-3.2.9.tar.gz", hash = "sha256:2fbb46fcd17bc81f993f28c47f1ebea38d66ae97cc2dbc3cad73b37cefbff700"}, ] [package.dependencies] "backports.zoneinfo" = {version = ">=0.2.0", markers = "python_version < \"3.9\""} psycopg-pool = {version = "*", optional = true, markers = "extra == \"pool\""} typing-extensions = {version = ">=4.6", markers = "python_version < \"3.13\""} tzdata = {version = "*", markers = "sys_platform == \"win32\""} [package.extras] binary = ["psycopg-binary (==3.2.9) ; implementation_name != \"pypy\""] c = ["psycopg-c (==3.2.9) ; implementation_name != \"pypy\""] dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "isort-psycopg", "isort[colors] (>=6.0)", "mypy (>=1.14)", "pre-commit (>=4.0.1)", "types-setuptools (>=57.4)", "types-shapely (>=2.0)", "wheel (>=0.37)"] docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] pool = ["psycopg-pool"] test = ["anyio (>=4.0)", "mypy (>=1.14)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] [[package]] name = "psycopg-pool" version = "3.2.6" description = "Connection Pool for Psycopg" optional = 
false python-versions = ">=3.8" groups = ["main", "dev"] files = [ {file = "psycopg_pool-3.2.6-py3-none-any.whl", hash = "sha256:5887318a9f6af906d041a0b1dc1c60f8f0dda8340c2572b74e10907b51ed5da7"}, {file = "psycopg_pool-3.2.6.tar.gz", hash = "sha256:0f92a7817719517212fbfe2fd58b8c35c1850cdd2a80d36b581ba2085d9148e5"}, ] [package.dependencies] typing-extensions = ">=4.6" [[package]] name = "py" version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" groups = ["dev"] files = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] [[package]] name = "pycodestyle" version = "2.9.1" description = "Python style guide checker" optional = false python-versions = ">=3.6" groups = ["dev"] files = [ {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, ] [[package]] name = "pyflakes" version = "2.5.0" description = "passive checker of Python programs" optional = false python-versions = ">=3.6" groups = ["dev"] files = [ {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, ] [[package]] name = "pygments" version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." 
optional = true python-versions = ">=3.8" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, ] [package.extras] windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyjwt" version = "2.9.0" description = "JSON Web Token implementation in Python" optional = true python-versions = ">=3.8" groups = ["main"] markers = "extra == \"all\"" files = [ {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, ] [package.extras] crypto = ["cryptography (>=3.4.0)"] dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pytest" version = "8.3.5" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, ] [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" pluggy = ">=1.5,<2" tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", 
"setuptools", "xmlschema"] [[package]] name = "pytest-asyncio" version = "0.24.0" description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b"}, {file = "pytest_asyncio-0.24.0.tar.gz", hash = "sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276"}, ] [package.dependencies] pytest = ">=8.2,<9" [package.extras] docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] [[package]] name = "pytest-cov" version = "4.1.0" description = "Pytest plugin for measuring coverage." optional = false python-versions = ">=3.7" groups = ["dev"] files = [ {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, ] [package.dependencies] coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] [[package]] name = "pytest-forked" version = "1.6.0" description = "run tests in isolated forked subprocesses" optional = false python-versions = ">=3.7" groups = ["dev"] files = [ {file = "pytest-forked-1.6.0.tar.gz", hash = "sha256:4dafd46a9a600f65d822b8f605133ecf5b3e1941ebb3588e943b4e3eb71a5a3f"}, {file = "pytest_forked-1.6.0-py3-none-any.whl", hash = "sha256:810958f66a91afb1a1e2ae83089d8dc1cd2437ac96b12963042fbb9fb4d16af0"}, ] [package.dependencies] py = "*" pytest = ">=3.10" [[package]] name = "pytest-xdist" version = "2.5.0" description = "pytest xdist plugin for distributed testing and loop-on-failing modes" optional = false python-versions = ">=3.6" groups = ["dev"] files = [ {file = "pytest-xdist-2.5.0.tar.gz", hash = 
"sha256:4580deca3ff04ddb2ac53eba39d76cb5dd5edeac050cb6fbc768b0dd712b4edf"}, {file = "pytest_xdist-2.5.0-py3-none-any.whl", hash = "sha256:6fe5c74fec98906deb8f2d2b616b5c782022744978e7bd4695d39c8f42d0ce65"}, ] [package.dependencies] execnet = ">=1.1" pytest = ">=6.2.0" pytest-forked = "*" [package.extras] psutil = ["psutil (>=3.0)"] setproctitle = ["setproctitle"] testing = ["filelock"] [[package]] name = "pytz" version = "2025.2" description = "World timezone definitions, modern and historical" optional = true python-versions = "*" groups = ["main"] markers = "extra == \"docs\" and python_version == \"3.8\"" files = [ {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, ] [[package]] name = "pyyaml" version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" groups = ["main", "dev"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, {file = 
"PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = 
"sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, {file = 
"PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] name = "redis" version = "5.3.0" description = "Python client for Redis database and key-value store" optional = true python-versions = ">=3.8" groups = ["main"] markers = "extra == \"all\"" files = [ {file = "redis-5.3.0-py3-none-any.whl", hash = "sha256:f1deeca1ea2ef25c1e4e46b07f4ea1275140526b1feea4c6459c0ec27a10ef83"}, {file = "redis-5.3.0.tar.gz", hash = "sha256:8d69d2dde11a12dc85d0dbf5c45577a5af048e2456f7077d87ad35c1c81c310e"}, ] [package.dependencies] async-timeout = {version = ">=4.0.3", markers = "python_full_version < \"3.11.3\""} PyJWT = ">=2.9.0,<2.10.0" [package.extras] hiredis = ["hiredis (>=3.0.0)"] ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==23.2.1)", "requests (>=2.31.0)"] [[package]] name = "requests" version = "2.32.3" description = "Python HTTP for Humans." 
optional = true python-versions = ">=3.8" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] certifi = ">=2017.4.17" charset-normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "schedule" version = "1.2.2" description = "Job scheduling for humans." optional = false python-versions = ">=3.7" groups = ["dev"] files = [ {file = "schedule-1.2.2-py3-none-any.whl", hash = "sha256:5bef4a2a0183abf44046ae0d164cadcac21b1db011bdd8102e4a0c1e91e06a7d"}, {file = "schedule-1.2.2.tar.gz", hash = "sha256:15fe9c75fe5fd9b9627f3f19cc0ef1420508f9f9a46f45cd0769ef75ede5f0b7"}, ] [package.extras] timezone = ["pytz"] [[package]] name = "setuptools" version = "75.3.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = true python-versions = ">=3.8" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "setuptools-75.3.2-py3-none-any.whl", hash = "sha256:90ab613b6583fc02d5369cbca13ea26ea0e182d1df2d943ee9cbe81d4c61add9"}, {file = "setuptools-75.3.2.tar.gz", hash = "sha256:3c1383e1038b68556a382c1e8ded8887cd20141b0eb5708a6c8d277de49364f5"}, ] [package.extras] check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.5.2) ; sys_platform != \"cygwin\""] core = ["importlib-metadata (>=6) ; python_version < \"3.10\"", "importlib-resources (>=5.10.2) ; python_version < \"3.9\"", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel 
(>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "ruff (<=0.7.1)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] type = ["importlib-metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.12.*)", "pytest-mypy"] [[package]] name = "snowballstemmer" version = "3.0.1" description = "This package provides 32 stemmers for 30 languages generated from Snowball algorithms." optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064"}, {file = "snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895"}, ] [[package]] name = "soupsieve" version = "2.7" description = "A modern CSS selector implementation for Beautiful Soup." 
optional = true python-versions = ">=3.8" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "soupsieve-2.7-py3-none-any.whl", hash = "sha256:6e60cc5c1ffaf1cebcc12e8188320b72071e922c2e897f737cadce79ad5d30c4"}, {file = "soupsieve-2.7.tar.gz", hash = "sha256:ad282f9b6926286d2ead4750552c8a6142bc4c783fd66b0293547c8fe6ae126a"}, ] [[package]] name = "sphinx" version = "4.5.0" description = "Python documentation generator" optional = true python-versions = ">=3.6" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "Sphinx-4.5.0-py3-none-any.whl", hash = "sha256:ebf612653238bcc8f4359627a9b7ce44ede6fdd75d9d30f68255c7383d3a6226"}, {file = "Sphinx-4.5.0.tar.gz", hash = "sha256:7bf8ca9637a4ee15af412d1a1d9689fec70523a68ca9bb9127c2f3eeb344e2e6"}, ] [package.dependencies] alabaster = ">=0.7,<0.8" babel = ">=1.3" colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} docutils = ">=0.14,<0.18" imagesize = "*" importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} Jinja2 = ">=2.3" packaging = "*" Pygments = ">=2.0" requests = ">=2.5.0" snowballstemmer = ">=1.1" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" sphinxcontrib-htmlhelp = ">=2.0.0" sphinxcontrib-jsmath = "*" sphinxcontrib-qthelp = "*" sphinxcontrib-serializinghtml = ">=1.1.5" [package.extras] docs = ["sphinxcontrib-websupport"] lint = ["docutils-stubs", "flake8 (>=3.5.0)", "isort", "mypy (>=0.931)", "types-requests", "types-typed-ast"] test = ["cython", "html5lib", "pytest", "pytest-cov", "typed-ast ; python_version < \"3.8\""] [[package]] name = "sphinx-autodoc-typehints" version = "1.19.1" description = "Type hints (PEP 484) support for the Sphinx autodoc extension" optional = true python-versions = ">=3.7" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "sphinx_autodoc_typehints-1.19.1-py3-none-any.whl", hash = "sha256:9be46aeeb1b315eb5df1f3a7cb262149895d16c7d7dcd77b92513c3c3a1e85e6"}, {file = 
"sphinx_autodoc_typehints-1.19.1.tar.gz", hash = "sha256:6c841db55e0e9be0483ff3962a2152b60e79306f4288d8c4e7e86ac84486a5ea"}, ] [package.dependencies] Sphinx = ">=4.5" [package.extras] testing = ["covdefaults (>=2.2)", "coverage (>=6.3)", "diff-cover (>=6.4)", "nptyping (>=2.1.2)", "pytest (>=7.1)", "pytest-cov (>=3)", "sphobjinv (>=2)", "typing-extensions (>=4.1)"] type-comments = ["typed-ast (>=1.5.2) ; python_version < \"3.8\""] [[package]] name = "sphinx-basic-ng" version = "1.0.0b2" description = "A modern skeleton for Sphinx themes." optional = true python-versions = ">=3.7" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "sphinx_basic_ng-1.0.0b2-py3-none-any.whl", hash = "sha256:eb09aedbabfb650607e9b4b68c9d240b90b1e1be221d6ad71d61c52e29f7932b"}, {file = "sphinx_basic_ng-1.0.0b2.tar.gz", hash = "sha256:9ec55a47c90c8c002b5960c57492ec3021f5193cb26cebc2dc4ea226848651c9"}, ] [package.dependencies] sphinx = ">=4.0" [package.extras] docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-tabs"] [[package]] name = "sphinx-copybutton" version = "0.5.2" description = "Add a copy button to each of your code cells." 
optional = true python-versions = ">=3.7" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "sphinx-copybutton-0.5.2.tar.gz", hash = "sha256:4cf17c82fb9646d1bc9ca92ac280813a3b605d8c421225fd9913154103ee1fbd"}, {file = "sphinx_copybutton-0.5.2-py3-none-any.whl", hash = "sha256:fb543fd386d917746c9a2c50360c7905b605726b9355cd26e9974857afeae06e"}, ] [package.dependencies] sphinx = ">=1.8" [package.extras] code-style = ["pre-commit (==2.12.1)"] rtd = ["ipython", "myst-nb", "sphinx", "sphinx-book-theme", "sphinx-examples"] [[package]] name = "sphinxcontrib-apidoc" version = "0.3.0" description = "A Sphinx extension for running 'sphinx-apidoc' on each build" optional = true python-versions = "*" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "sphinxcontrib-apidoc-0.3.0.tar.gz", hash = "sha256:729bf592cf7b7dd57c4c05794f732dc026127275d785c2a5494521fdde773fb9"}, {file = "sphinxcontrib_apidoc-0.3.0-py2.py3-none-any.whl", hash = "sha256:6671a46b2c6c5b0dca3d8a147849d159065e50443df79614f921b42fbd15cb09"}, ] [package.dependencies] pbr = "*" Sphinx = ">=1.6.0" [[package]] name = "sphinxcontrib-applehelp" version = "1.0.4" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = true python-versions = ">=3.8" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, ] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] name = "sphinxcontrib-devhelp" version = "1.0.2" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 
optional = true python-versions = ">=3.5" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, ] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] name = "sphinxcontrib-htmlhelp" version = "2.0.1" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = true python-versions = ">=3.8" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, ] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] test = ["html5lib", "pytest"] [[package]] name = "sphinxcontrib-jsmath" version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" optional = true python-versions = ">=3.5" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, ] [package.extras] test = ["flake8", "mypy", "pytest"] [[package]] name = "sphinxcontrib-qthelp" version = "1.0.3" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." 
optional = true python-versions = ">=3.5" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, ] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] name = "sphinxcontrib-serializinghtml" version = "1.1.5" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." optional = true python-versions = ">=3.5" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, ] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] name = "tomli" version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, {file = "tomli-2.2.1.tar.gz", hash = 
"sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] [[package]] name = "tomlkit" version = "0.13.2" description = "Style preserving TOML library" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, ] [[package]] name = "typing-extensions" version = "4.13.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" groups = ["main", "dev"] files = [ {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, ] [[package]] name = "tzdata" version = "2025.2" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" groups = ["main", "dev"] markers = "sys_platform == \"win32\"" files = [ {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, ] [[package]] name = "urllib3" version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = true python-versions = ">=3.8" groups = ["main"] markers = "extra == \"docs\"" files = [ {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" version = "20.31.2" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "virtualenv-20.31.2-py3-none-any.whl", hash = "sha256:36efd0d9650ee985f0cad72065001e66d49a6f24eb44d98980f630686243cf11"}, {file = "virtualenv-20.31.2.tar.gz", hash = "sha256:e10c0a9d02835e592521be48b332b6caee6887f332c111aa79a09b9e79efc2af"}, ] [package.dependencies] distlib = ">=0.3.7,<1" filelock = ">=3.12.2,<4" platformdirs = ">=3.9.1,<5" [package.extras] docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"GraalVM\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""] [[package]] name = "zipp" version = "3.20.2" description = "Backport of pathlib-compatible object wrapper for 
zip files" optional = true python-versions = ">=3.8" groups = ["main"] markers = "extra == \"docs\" and python_version < \"3.10\"" files = [ {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, ] [package.extras] check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] [extras] all = ["filelock", "psycopg", "redis"] docs = ["furo", "myst-parser", "sphinx", "sphinx-autodoc-typehints", "sphinx-copybutton", "sphinxcontrib-apidoc"] [metadata] lock-version = "2.1" python-versions = "^3.8" content-hash = "a68e0552e97a1ff8fa3e2c9e779c73ae04f3206fa726de34389d68b23d0a5a9f" PyrateLimiter-3.9.0/pyproject.toml000066400000000000000000000050351504242573000172360ustar00rootroot00000000000000[tool.poetry] name = "pyrate-limiter" version = "3.9.0" description = "Python Rate-Limiter using Leaky-Bucket Algorithm" authors = ["vutr "] license = "MIT" readme = "README.md" homepage = "https://github.com/vutran1710/PyrateLimiter" repository = "https://github.com/vutran1710/PyrateLimiter" documentation = "https://pyrate-limiter.readthedocs.io" keywords = [ "rate", "rate-limiter", "rate_limiter", "ratelimiter", "leaky-bucket", "ratelimit", "ratelimiting", ] classifiers = [ "Development Status :: 5 - Production/Stable", "Operating System :: OS Independent", "Topic :: Software Development :: Documentation", "Topic :: Software Development :: Libraries :: Python Modules", "Typing :: Typed", ] 
include = [ { path = "LICENSE", format = "sdist" }, { path = "CHANGELOG.md", format = "sdist" }, { path = "docs", format = "sdist" }, { path = "tests", format = "sdist" }, ] [tool.poetry.dependencies] python = "^3.8" # Optional backend dependencies filelock = { optional = true, version = ">=3.0" } redis = { optional = true, version = "^5.0.0" } psycopg = { extras = ["pool"], version = "^3.1.18", optional = true } # Documentation dependencies needed for Readthedocs builds furo = { optional = true, version = "^2022.3.4" } myst-parser = { optional = true, version = ">=0.17" } sphinx = { optional = true, version = "^4.3.0" } sphinx-autodoc-typehints = { optional = true, version = "^1.17" } sphinx-copybutton = { optional = true, version = ">=0.5" } sphinxcontrib-apidoc = { optional = true, version = "^0.3" } [tool.poetry.extras] all = ["filelock", "redis", "psycopg"] docs = [ "furo", "myst-parser", "sphinx", "sphinx-autodoc-typehints", "sphinx-copybutton", "sphinxcontrib-apidoc", ] [tool.poetry.group.dev.dependencies] coverage = "6" flake8_polyfill = "^1.0.2" nox = "^2025.5" nox-poetry = ">=1.0" pre-commit = "^2.17.0" psycopg = { extras = ["pool"], version = "^3.1.18" } pytest = ">=8.3" pytest-asyncio = ">=0.24" pytest-cov = "^4.1.0" pytest-xdist = "^2.5.0" schedule = "^1.1.0" pyyaml = "^6.0.1" [tool.black] line-length = 120 [tool.coverage.run] branch = true source = ['pyrate_limiter'] [tool.coverage.report] exclude_lines = [ "except ImportError:", # Used for missing optional dependencies ] [tool.coverage.xml] output = 'test-reports/coverage.xml' [tool.ipdb] context = 7 [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" [tool.pytest.ini_options] asyncio_mode = "auto" asyncio_default_fixture_loop_scope = "function" 
PyrateLimiter-3.9.0/pyrate_limiter/000077500000000000000000000000001504242573000173505ustar00rootroot00000000000000PyrateLimiter-3.9.0/pyrate_limiter/__init__.py000066400000000000000000000002331504242573000214570ustar00rootroot00000000000000# flake8: noqa from .abstracts import * from .buckets import * from .clocks import * from .exceptions import * from .limiter import * from .utils import * PyrateLimiter-3.9.0/pyrate_limiter/abstracts/000077500000000000000000000000001504242573000213365ustar00rootroot00000000000000PyrateLimiter-3.9.0/pyrate_limiter/abstracts/__init__.py000066400000000000000000000001671504242573000234530ustar00rootroot00000000000000from .bucket import * # noqa from .clock import * # noqa from .rate import * # noqa from .wrappers import * # noqa PyrateLimiter-3.9.0/pyrate_limiter/abstracts/bucket.py000066400000000000000000000227571504242573000232020ustar00rootroot00000000000000""" Implement this class to create a workable bucket for Limiter to use """ import asyncio import logging from abc import ABC from abc import abstractmethod from collections import defaultdict from inspect import isawaitable from inspect import iscoroutine from threading import Thread from typing import Awaitable from typing import Dict from typing import List from typing import Optional from typing import Type from typing import Union from .clock import AbstractClock from .rate import Rate from .rate import RateItem logger = logging.getLogger("pyrate_limiter") class AbstractBucket(ABC): """Base bucket interface Assumption: len(rates) always > 0 TODO: allow empty rates """ rates: List[Rate] failing_rate: Optional[Rate] = None @abstractmethod def put(self, item: RateItem) -> Union[bool, Awaitable[bool]]: """Put an item (typically the current time) in the bucket return true if successful, otherwise false """ @abstractmethod def leak( self, current_timestamp: Optional[int] = None, ) -> Union[int, Awaitable[int]]: """leaking bucket - removing items that are outdated""" 
@abstractmethod def flush(self) -> Union[None, Awaitable[None]]: """Flush the whole bucket - Must remove `failing-rate` after flushing """ @abstractmethod def count(self) -> Union[int, Awaitable[int]]: """Count number of items in the bucket""" @abstractmethod def peek(self, index: int) -> Union[Optional[RateItem], Awaitable[Optional[RateItem]]]: """Peek at the rate-item at a specific index in latest-to-earliest order NOTE: The reason we cannot peek from the start of the queue(earliest-to-latest) is we can't really tell how many outdated items are still in the queue """ def waiting(self, item: RateItem) -> Union[int, Awaitable[int]]: """Calculate time until bucket become availabe to consume an item again""" if self.failing_rate is None: return 0 assert item.weight > 0, "Item's weight must > 0" if item.weight > self.failing_rate.limit: return -1 bound_item = self.peek(self.failing_rate.limit - item.weight) if bound_item is None: # NOTE: No waiting, bucket is immediately ready return 0 def _calc_waiting(inner_bound_item: RateItem) -> int: assert self.failing_rate is not None # NOTE: silence mypy lower_time_bound = item.timestamp - self.failing_rate.interval upper_time_bound = inner_bound_item.timestamp return upper_time_bound - lower_time_bound async def _calc_waiting_async() -> int: nonlocal bound_item while isawaitable(bound_item): bound_item = await bound_item if bound_item is None: # NOTE: No waiting, bucket is immediately ready return 0 assert isinstance(bound_item, RateItem) return _calc_waiting(bound_item) if isawaitable(bound_item): return _calc_waiting_async() assert isinstance(bound_item, RateItem) return _calc_waiting(bound_item) def limiter_lock(self) -> Optional[object]: # type: ignore """An additional lock to be used by Limiter in-front of the thread lock. Intended for multiprocessing environments where a thread lock is insufficient. 
""" return None class Leaker(Thread): """Responsible for scheduling buckets' leaking at the background either through a daemon task(for sync buckets) or a task using asyncio.Task """ daemon = True name = "PyrateLimiter's Leaker" sync_buckets: Optional[Dict[int, AbstractBucket]] = None async_buckets: Optional[Dict[int, AbstractBucket]] = None clocks: Optional[Dict[int, AbstractClock]] = None leak_interval: int = 10_000 aio_leak_task: Optional[asyncio.Task] = None def __init__(self, leak_interval: int): self.sync_buckets = defaultdict() self.async_buckets = defaultdict() self.clocks = defaultdict() self.leak_interval = leak_interval super().__init__() def register(self, bucket: AbstractBucket, clock: AbstractClock): """Register a new bucket with its associated clock""" assert self.sync_buckets is not None assert self.clocks is not None assert self.async_buckets is not None try_leak = bucket.leak(0) bucket_id = id(bucket) if iscoroutine(try_leak): try_leak.close() self.async_buckets[bucket_id] = bucket else: self.sync_buckets[bucket_id] = bucket self.clocks[bucket_id] = clock def deregister(self, bucket_id: int) -> bool: """Deregister a bucket""" if self.sync_buckets and bucket_id in self.sync_buckets: del self.sync_buckets[bucket_id] assert self.clocks del self.clocks[bucket_id] return True if self.async_buckets and bucket_id in self.async_buckets: del self.async_buckets[bucket_id] assert self.clocks del self.clocks[bucket_id] if not self.async_buckets and self.aio_leak_task: self.aio_leak_task.cancel() self.aio_leak_task = None return True return False async def _leak(self, buckets: Dict[int, AbstractBucket]) -> None: assert self.clocks while buckets: try: for bucket_id, bucket in list(buckets.items()): clock = self.clocks[bucket_id] now = clock.now() while isawaitable(now): now = await now assert isinstance(now, int) leak = bucket.leak(now) while isawaitable(leak): leak = await leak assert isinstance(leak, int) await asyncio.sleep(self.leak_interval / 1000) except 
RuntimeError as e: logger.info("Leak task stopped due to event loop shutdown. %s", e) return def leak_async(self): if self.async_buckets and not self.aio_leak_task: self.aio_leak_task = asyncio.create_task(self._leak(self.async_buckets)) def run(self) -> None: """ Override the original method of Thread Not meant to be called directly """ assert self.sync_buckets asyncio.run(self._leak(self.sync_buckets)) def start(self) -> None: """ Override the original method of Thread Call to run leaking sync buckets """ if self.sync_buckets and not self.is_alive(): super().start() class BucketFactory(ABC): """Asbtract BucketFactory class. It is reserved for user to implement/override this class with his own bucket-routing/creating logic """ _leaker: Optional[Leaker] = None _leak_interval: int = 10_000 @property def leak_interval(self) -> int: """Retrieve leak-interval from inner Leaker task""" if not self._leaker: return self._leak_interval return self._leaker.leak_interval @leak_interval.setter def leak_interval(self, value: int): """Set leak-interval for inner Leaker task""" if self._leaker: self._leaker.leak_interval = value self._leak_interval = value @abstractmethod def wrap_item( self, name: str, weight: int = 1, ) -> Union[RateItem, Awaitable[RateItem]]: """Add the current timestamp to the receiving item using any clock backend - Turn it into a RateItem - Can return either a coroutine or a RateItem instance """ @abstractmethod def get(self, item: RateItem) -> Union[AbstractBucket, Awaitable[AbstractBucket]]: """Get the corresponding bucket to this item""" def create( self, clock: AbstractClock, bucket_class: Type[AbstractBucket], *args, **kwargs, ) -> AbstractBucket: """Creating a bucket dynamically""" bucket = bucket_class(*args, **kwargs) self.schedule_leak(bucket, clock) return bucket def schedule_leak(self, new_bucket: AbstractBucket, associated_clock: AbstractClock) -> None: """Schedule all the buckets' leak, reset bucket's failing rate""" assert new_bucket.rates, 
"Bucket rates are not set" if not self._leaker: self._leaker = Leaker(self.leak_interval) self._leaker.register(new_bucket, associated_clock) self._leaker.start() self._leaker.leak_async() def get_buckets(self) -> List[AbstractBucket]: """Iterator over all buckets in the factory """ if not self._leaker: return [] buckets = [] if self._leaker.sync_buckets: for _, bucket in self._leaker.sync_buckets.items(): buckets.append(bucket) if self._leaker.async_buckets: for _, bucket in self._leaker.async_buckets.items(): buckets.append(bucket) return buckets def dispose(self, bucket: Union[int, AbstractBucket]) -> bool: """Delete a bucket from the factory""" if isinstance(bucket, AbstractBucket): bucket = id(bucket) assert isinstance(bucket, int), "not valid bucket id" if not self._leaker: return False return self._leaker.deregister(bucket) PyrateLimiter-3.9.0/pyrate_limiter/abstracts/clock.py000066400000000000000000000004541504242573000230060ustar00rootroot00000000000000from abc import ABC from abc import abstractmethod from typing import Awaitable from typing import Union class AbstractClock(ABC): """Clock that return timestamp for `now`""" @abstractmethod def now(self) -> Union[int, Awaitable[int]]: """Get time as of now, in miliseconds""" PyrateLimiter-3.9.0/pyrate_limiter/abstracts/rate.py000066400000000000000000000046271504242573000226540ustar00rootroot00000000000000"""Unit classes that deals with rate, item & duration """ from enum import Enum from typing import Union class Duration(Enum): """Interval helper class""" SECOND = 1000 MINUTE = 1000 * 60 HOUR = 1000 * 60 * 60 DAY = 1000 * 60 * 60 * 24 WEEK = 1000 * 60 * 60 * 24 * 7 def __mul__(self, mutiplier: float) -> int: return int(self.value * mutiplier) def __rmul__(self, multiplier: float) -> int: return self.__mul__(multiplier) def __add__(self, another_duration: Union["Duration", int]) -> int: return self.value + int(another_duration) def __radd__(self, another_duration: Union["Duration", int]) -> int: return 
self.__add__(another_duration) def __int__(self) -> int: return self.value def __eq__(self, duration: object) -> bool: if not isinstance(duration, (Duration, int)): return NotImplemented return self.value == int(duration) @staticmethod def readable(value: int) -> str: notes = [ (Duration.WEEK, "w"), (Duration.DAY, "d"), (Duration.HOUR, "h"), (Duration.MINUTE, "m"), (Duration.SECOND, "s"), ] for note, shorten in notes: if value >= note.value: decimal_value = value / note.value return f"{decimal_value:0.1f}{shorten}" # noqa: E231 return f"{value}ms" class RateItem: """RateItem is a wrapper for bucket to work with""" name: str weight: int timestamp: int def __init__(self, name: str, timestamp: int, weight: int = 1): self.name = name self.timestamp = timestamp self.weight = weight def __str__(self) -> str: return f"RateItem(name={self.name}, weight={self.weight}, timestamp={self.timestamp})" class Rate: """Rate definition. Args: limit: Number of requests allowed within ``interval`` interval: Time interval, in miliseconds """ limit: int interval: int def __init__( self, limit: int, interval: Union[int, Duration], ): self.limit = limit self.interval = int(interval) assert self.interval assert self.limit def __str__(self) -> str: return f"limit={self.limit}/{Duration.readable(self.interval)}" def __repr__(self) -> str: return f"limit={self.limit}/{self.interval}" PyrateLimiter-3.9.0/pyrate_limiter/abstracts/wrappers.py000066400000000000000000000036031504242573000235550ustar00rootroot00000000000000""" Wrappers over different abstract types """ from inspect import isawaitable from typing import Optional from .bucket import AbstractBucket from .rate import RateItem class BucketAsyncWrapper(AbstractBucket): """BucketAsyncWrapper is a wrapping over any bucket that turns a async/synchronous bucket into an async one """ def __init__(self, bucket: AbstractBucket): assert isinstance(bucket, AbstractBucket) self.bucket = bucket async def put(self, item: RateItem): result = 
self.bucket.put(item) while isawaitable(result): result = await result return result async def count(self): result = self.bucket.count() while isawaitable(result): result = await result return result async def leak(self, current_timestamp: Optional[int] = None) -> int: result = self.bucket.leak(current_timestamp) while isawaitable(result): result = await result assert isinstance(result, int) return result async def flush(self) -> None: result = self.bucket.flush() while isawaitable(result): # TODO: AbstractBucket.flush() may not have correct type annotation? result = await result # type: ignore return None async def peek(self, index: int) -> Optional[RateItem]: item = self.bucket.peek(index) while isawaitable(item): item = await item assert item is None or isinstance(item, RateItem) return item async def waiting(self, item: RateItem) -> int: wait = super().waiting(item) if isawaitable(wait): wait = await wait assert isinstance(wait, int) return wait @property def failing_rate(self): return self.bucket.failing_rate @property def rates(self): return self.bucket.rates PyrateLimiter-3.9.0/pyrate_limiter/buckets/000077500000000000000000000000001504242573000210105ustar00rootroot00000000000000PyrateLimiter-3.9.0/pyrate_limiter/buckets/__init__.py000066400000000000000000000005361504242573000231250ustar00rootroot00000000000000# flake8: noqa """Conrete bucket implementations """ from .in_memory_bucket import InMemoryBucket from .mp_bucket import MultiprocessBucket from .postgres import PostgresBucket from .postgres import Queries as PgQueries from .redis_bucket import RedisBucket from .sqlite_bucket import Queries as SQLiteQueries from .sqlite_bucket import SQLiteBucket PyrateLimiter-3.9.0/pyrate_limiter/buckets/in_memory_bucket.py000066400000000000000000000052561504242573000247250ustar00rootroot00000000000000"""Naive bucket implementation using built-in list """ from typing import List from typing import Optional from ..abstracts import AbstractBucket from ..abstracts 
import Rate from ..abstracts import RateItem from ..utils import binary_search class InMemoryBucket(AbstractBucket): """Simple In-memory Bucket using native list Clock can be either `time.time` or `time.monotonic` When leak, clock is required Pros: fast, safe, and precise Cons: since it resides in local memory, the data is not persistent, nor scalable Usecase: small applications, simple logic """ items: List[RateItem] failing_rate: Optional[Rate] def __init__(self, rates: List[Rate]): self.rates = sorted(rates, key=lambda r: r.interval) self.items = [] def put(self, item: RateItem) -> bool: if item.weight == 0: return True current_length = len(self.items) after_length = item.weight + current_length for rate in self.rates: if after_length < rate.limit: break lower_bound_value = item.timestamp - rate.interval lower_bound_idx = binary_search(self.items, lower_bound_value) if lower_bound_idx >= 0: count_existing_items = len(self.items) - lower_bound_idx space_available = rate.limit - count_existing_items else: space_available = rate.limit if space_available < item.weight: self.failing_rate = rate return False self.failing_rate = None if item.weight > 1: self.items.extend([item for _ in range(item.weight)]) else: self.items.append(item) return True def leak(self, current_timestamp: Optional[int] = None) -> int: assert current_timestamp is not None if self.items: max_interval = self.rates[-1].interval lower_bound = current_timestamp - max_interval if lower_bound > self.items[-1].timestamp: remove_count = len(self.items) del self.items[:] return remove_count if lower_bound < self.items[0].timestamp: return 0 idx = binary_search(self.items, lower_bound) del self.items[:idx] return idx return 0 def flush(self) -> None: self.failing_rate = None del self.items[:] def count(self) -> int: return len(self.items) def peek(self, index: int) -> Optional[RateItem]: if not self.items: return None return self.items[-1 - index] if abs(index) < self.count() else None 
PyrateLimiter-3.9.0/pyrate_limiter/buckets/mp_bucket.py000066400000000000000000000030451504242573000233350ustar00rootroot00000000000000"""multiprocessing In-memory Bucket using a multiprocessing.Manager.ListProxy and a multiprocessing.Lock. """ from multiprocessing import Manager from multiprocessing import RLock from multiprocessing.managers import ListProxy from multiprocessing.synchronize import RLock as LockType from typing import List from typing import Optional from ..abstracts import Rate from ..abstracts import RateItem from pyrate_limiter.buckets import InMemoryBucket class MultiprocessBucket(InMemoryBucket): items: List[RateItem] # ListProxy mp_lock: LockType def __init__(self, rates: List[Rate], items: List[RateItem], mp_lock: LockType): if not isinstance(items, ListProxy): raise ValueError("items must be a ListProxy") self.rates = sorted(rates, key=lambda r: r.interval) self.items = items self.mp_lock = mp_lock def put(self, item: RateItem) -> bool: with self.mp_lock: return super().put(item) def leak(self, current_timestamp: Optional[int] = None) -> int: with self.mp_lock: return super().leak(current_timestamp) def limiter_lock(self): return self.mp_lock @classmethod def init( cls, rates: List[Rate], ): """ Creates a single ListProxy so that this bucket can be shared across multiple processes. 
""" shared_items: List[RateItem] = Manager().list() # type: ignore[assignment] mp_lock: LockType = RLock() return cls(rates=rates, items=shared_items, mp_lock=mp_lock) PyrateLimiter-3.9.0/pyrate_limiter/buckets/postgres.py000066400000000000000000000124761504242573000232420ustar00rootroot00000000000000"""A bucket using PostgreSQL as backend """ from __future__ import annotations from contextlib import contextmanager from typing import Awaitable from typing import List from typing import Optional from typing import TYPE_CHECKING from typing import Union from ..abstracts import AbstractBucket from ..abstracts import Rate from ..abstracts import RateItem if TYPE_CHECKING: from psycopg_pool import ConnectionPool # type: ignore[import-untyped] class Queries: CREATE_BUCKET_TABLE = """ CREATE TABLE IF NOT EXISTS {table} ( name VARCHAR, weight SMALLINT, item_timestamp TIMESTAMP ) """ CREATE_INDEX_ON_TIMESTAMP = """ CREATE INDEX IF NOT EXISTS {index} ON {table} (item_timestamp) """ COUNT = """ SELECT COUNT(*) FROM {table} """ PUT = """ INSERT INTO {table} (name, weight, item_timestamp) VALUES (%s, %s, TO_TIMESTAMP(%s)) """ FLUSH = """ DELETE FROM {table} """ PEEK = """ SELECT name, weight, (extract(EPOCH FROM item_timestamp) * 1000) as item_timestamp FROM {table} ORDER BY item_timestamp DESC LIMIT 1 OFFSET {offset} """ LEAK = """ DELETE FROM {table} WHERE item_timestamp < TO_TIMESTAMP({timestamp}) """ LEAK_COUNT = """ SELECT COUNT(*) FROM {table} WHERE item_timestamp < TO_TIMESTAMP({timestamp}) """ class PostgresBucket(AbstractBucket): table: str pool: ConnectionPool def __init__(self, pool: ConnectionPool, table: str, rates: List[Rate]): self.table = table.lower() self.pool = pool assert rates self.rates = rates self._full_tbl = f'ratelimit___{self.table}' self._create_table() @contextmanager def _get_conn(self): with self.pool.connection() as conn: yield conn def _create_table(self): with self._get_conn() as conn: 
conn.execute(Queries.CREATE_BUCKET_TABLE.format(table=self._full_tbl)) index_name = f'timestampIndex_{self.table}' conn.execute(Queries.CREATE_INDEX_ON_TIMESTAMP.format(table=self._full_tbl, index=index_name)) def put(self, item: RateItem) -> Union[bool, Awaitable[bool]]: """Put an item (typically the current time) in the bucket return true if successful, otherwise false """ if item.weight == 0: return True with self._get_conn() as conn: for rate in self.rates: bound = f"SELECT TO_TIMESTAMP({item.timestamp / 1000}) - INTERVAL '{rate.interval} milliseconds'" query = f'SELECT COUNT(*) FROM {self._full_tbl} WHERE item_timestamp >= ({bound})' cur = conn.execute(query) count = int(cur.fetchone()[0]) cur.close() if rate.limit - count < item.weight: self.failing_rate = rate return False self.failing_rate = None query = Queries.PUT.format(table=self._full_tbl) # https://www.psycopg.org/docs/extras.html#fast-exec for _ in range(item.weight): conn.execute(query, (item.name, item.weight, item.timestamp / 1000)) return True def leak( self, current_timestamp: Optional[int] = None, ) -> Union[int, Awaitable[int]]: """leaking bucket - removing items that are outdated""" assert current_timestamp is not None, "current-time must be passed on for leak" lower_bound = current_timestamp - self.rates[-1].interval if lower_bound <= 0: return 0 count = 0 with self._get_conn() as conn: conn = conn.execute(Queries.LEAK_COUNT.format(table=self._full_tbl, timestamp=lower_bound / 1000)) result = conn.fetchone() if result: conn.execute(Queries.LEAK.format(table=self._full_tbl, timestamp=lower_bound / 1000)) count = int(result[0]) return count def flush(self) -> Union[None, Awaitable[None]]: """Flush the whole bucket - Must remove `failing-rate` after flushing """ with self._get_conn() as conn: conn.execute(Queries.FLUSH.format(table=self._full_tbl)) self.failing_rate = None return None def count(self) -> Union[int, Awaitable[int]]: """Count number of items in the bucket""" count = 0 with 
self._get_conn() as conn: conn = conn.execute(Queries.COUNT.format(table=self._full_tbl)) result = conn.fetchone() assert result count = int(result[0]) return count def peek(self, index: int) -> Union[Optional[RateItem], Awaitable[Optional[RateItem]]]: """Peek at the rate-item at a specific index in latest-to-earliest order NOTE: The reason we cannot peek from the start of the queue(earliest-to-latest) is we can't really tell how many outdated items are still in the queue """ item = None with self._get_conn() as conn: conn = conn.execute(Queries.PEEK.format(table=self._full_tbl, offset=index)) result = conn.fetchone() if result: name, weight, timestamp = result[0], int(result[1]), int(result[2]) item = RateItem(name=name, weight=weight, timestamp=timestamp) return item PyrateLimiter-3.9.0/pyrate_limiter/buckets/redis_bucket.py000066400000000000000000000126111504242573000240260ustar00rootroot00000000000000"""Bucket implementation using Redis """ from __future__ import annotations from inspect import isawaitable from typing import Awaitable from typing import List from typing import Optional from typing import Tuple from typing import TYPE_CHECKING from typing import Union from ..abstracts import AbstractBucket from ..abstracts import Rate from ..abstracts import RateItem from ..utils import id_generator if TYPE_CHECKING: from redis import Redis from redis.asyncio import Redis as AsyncRedis class LuaScript: """Scripts that deal with bucket operations""" PUT_ITEM = """ local bucket = KEYS[1] local now = ARGV[1] local space_required = tonumber(ARGV[2]) local item_name = ARGV[3] local rates_count = tonumber(ARGV[4]) for i=1,rates_count do local offset = (i - 1) * 2 local interval = tonumber(ARGV[5 + offset]) local limit = tonumber(ARGV[5 + offset + 1]) local count = redis.call('ZCOUNT', bucket, now - interval, now) local space_available = limit - tonumber(count) if space_available < space_required then return i - 1 end end for i=1,space_required do redis.call('ZADD', 
bucket, now, item_name..i) end return -1 """ class RedisBucket(AbstractBucket): """A bucket using redis for storing data - We are not using redis' built-in TIME since it is non-deterministic - In distributed context, use local server time or a remote time server - Each bucket instance use a dedicated connection to avoid race-condition - can be either sync or async """ rates: List[Rate] failing_rate: Optional[Rate] bucket_key: str script_hash: str redis: Union[Redis, AsyncRedis] def __init__( self, rates: List[Rate], redis: Union[Redis, AsyncRedis], bucket_key: str, script_hash: str, ): self.rates = rates self.redis = redis self.bucket_key = bucket_key self.script_hash = script_hash self.failing_rate = None @classmethod def init( cls, rates: List[Rate], redis: Union[Redis, AsyncRedis], bucket_key: str, ): script_hash = redis.script_load(LuaScript.PUT_ITEM) if isawaitable(script_hash): async def _async_init(): nonlocal script_hash script_hash = await script_hash return cls(rates, redis, bucket_key, script_hash) return _async_init() return cls(rates, redis, bucket_key, script_hash) def _check_and_insert(self, item: RateItem) -> Union[Rate, None, Awaitable[Optional[Rate]]]: keys = [self.bucket_key] args = [ item.timestamp, item.weight, # NOTE: this is to avoid key collision since we are using ZSET f"{item.name}:{id_generator()}:", # noqa: E231 len(self.rates), *[value for rate in self.rates for value in (rate.interval, rate.limit)], ] idx = self.redis.evalsha(self.script_hash, len(keys), *keys, *args) def _handle_sync(returned_idx: int): assert isinstance(returned_idx, int), "Not int" if returned_idx < 0: return None return self.rates[returned_idx] async def _handle_async(returned_idx: Awaitable[int]): assert isawaitable(returned_idx), "Not corotine" awaited_idx = await returned_idx return _handle_sync(awaited_idx) return _handle_async(idx) if isawaitable(idx) else _handle_sync(idx) def put(self, item: RateItem) -> Union[bool, Awaitable[bool]]: """Add item to key""" 
failing_rate = self._check_and_insert(item) if isawaitable(failing_rate): async def _handle_async(): self.failing_rate = await failing_rate return not bool(self.failing_rate) return _handle_async() assert isinstance(failing_rate, Rate) or failing_rate is None self.failing_rate = failing_rate return not bool(self.failing_rate) def leak(self, current_timestamp: Optional[int] = None) -> Union[int, Awaitable[int]]: assert current_timestamp is not None return self.redis.zremrangebyscore( self.bucket_key, 0, current_timestamp - self.rates[-1].interval, ) def flush(self): self.failing_rate = None return self.redis.delete(self.bucket_key) def count(self): return self.redis.zcard(self.bucket_key) def peek(self, index: int) -> Union[RateItem, None, Awaitable[Optional[RateItem]]]: items = self.redis.zrange( self.bucket_key, -1 - index, -1 - index, withscores=True, score_cast_func=int, ) if not items: return None def _handle_items(received_items: List[Tuple[str, int]]): if not received_items: return None item = received_items[0] rate_item = RateItem(name=str(item[0]), timestamp=item[1]) return rate_item if isawaitable(items): async def _awaiting(): nonlocal items items = await items return _handle_items(items) return _awaiting() assert isinstance(items, list) return _handle_items(items) PyrateLimiter-3.9.0/pyrate_limiter/buckets/sqlite_bucket.py000066400000000000000000000203711504242573000242230ustar00rootroot00000000000000"""Bucket implementation using SQLite""" import logging import sqlite3 from contextlib import nullcontext from pathlib import Path from tempfile import gettempdir from threading import RLock from time import time from typing import List from typing import Optional from typing import Tuple from typing import Union from ..abstracts import AbstractBucket from ..abstracts import Rate from ..abstracts import RateItem logger = logging.getLogger(__name__) class Queries: CREATE_BUCKET_TABLE = """ CREATE TABLE IF NOT EXISTS '{table}' ( name VARCHAR, item_timestamp 
INTEGER ) """ CREATE_INDEX_ON_TIMESTAMP = """ CREATE INDEX IF NOT EXISTS '{index_name}' ON '{table_name}' (item_timestamp) """ COUNT_BEFORE_INSERT = """ SELECT :interval{index} as interval, COUNT(*) FROM '{table}' WHERE item_timestamp >= :current_timestamp - :interval{index} """ PUT_ITEM = """ INSERT INTO '{table}' (name, item_timestamp) VALUES %s """ LEAK = """ DELETE FROM "{table}" WHERE rowid IN ( SELECT rowid FROM "{table}" ORDER BY item_timestamp ASC LIMIT {count}); """.strip() COUNT_BEFORE_LEAK = """SELECT COUNT(*) FROM '{table}' WHERE item_timestamp < {current_timestamp} - {interval}""" FLUSH = """DELETE FROM '{table}'""" # The below sqls are for testing only DROP_TABLE = "DROP TABLE IF EXISTS '{table}'" DROP_INDEX = "DROP INDEX IF EXISTS '{index}'" COUNT_ALL = "SELECT COUNT(*) FROM '{table}'" GET_ALL_ITEM = "SELECT * FROM '{table}' ORDER BY item_timestamp ASC" GET_FIRST_ITEM = ( "SELECT name, item_timestamp FROM '{table}' ORDER BY item_timestamp ASC" ) GET_LAG = """ SELECT (strftime ('%s', 'now') || substr(strftime ('%f', 'now'), 4)) - ( SELECT item_timestamp FROM '{table}' ORDER BY item_timestamp ASC LIMIT 1 ) """ PEEK = 'SELECT * FROM "{table}" ORDER BY item_timestamp DESC LIMIT 1 OFFSET {count}' class SQLiteBucket(AbstractBucket): """For sqlite bucket, we are using the sql time function as the clock item's timestamp wont matter here """ rates: List[Rate] failing_rate: Optional[Rate] conn: sqlite3.Connection table: str full_count_query: str lock: RLock use_limiter_lock: bool def __init__( self, rates: List[Rate], conn: sqlite3.Connection, table: str, lock=None ): self.conn = conn self.table = table self.rates = rates if not lock: self.use_limiter_lock = False self.lock = RLock() else: self.use_limiter_lock = True self.lock = lock def limiter_lock(self): if self.use_limiter_lock: return self.lock else: return None def _build_full_count_query(self, current_timestamp: int) -> Tuple[str, dict]: full_query: List[str] = [] parameters = {"current_timestamp": 
current_timestamp} for index, rate in enumerate(self.rates): parameters[f"interval{index}"] = rate.interval query = Queries.COUNT_BEFORE_INSERT.format(table=self.table, index=index) full_query.append(query) join_full_query = ( " union ".join(full_query) if len(full_query) > 1 else full_query[0] ) return join_full_query, parameters def put(self, item: RateItem) -> bool: with self.lock: query, parameters = self._build_full_count_query(item.timestamp) cur = self.conn.execute(query, parameters) rate_limit_counts = cur.fetchall() cur.close() for idx, result in enumerate(rate_limit_counts): interval, count = result rate = self.rates[idx] assert interval == rate.interval space_available = rate.limit - count if space_available < item.weight: self.failing_rate = rate return False items = ", ".join( [f"('{name}', {item.timestamp})" for name in [item.name] * item.weight] ) query = (Queries.PUT_ITEM.format(table=self.table)) % items self.conn.execute(query).close() self.conn.commit() return True def leak(self, current_timestamp: Optional[int] = None) -> int: """Leaking/clean up bucket""" with self.lock: assert current_timestamp is not None query = Queries.COUNT_BEFORE_LEAK.format( table=self.table, interval=self.rates[-1].interval, current_timestamp=current_timestamp, ) cur = self.conn.execute(query) count = cur.fetchone()[0] query = Queries.LEAK.format(table=self.table, count=count) cur.execute(query) cur.close() self.conn.commit() return count def flush(self) -> None: with self.lock: self.conn.execute(Queries.FLUSH.format(table=self.table)).close() self.conn.commit() self.failing_rate = None def count(self) -> int: with self.lock: cur = self.conn.execute( Queries.COUNT_ALL.format(table=self.table) ) ret = cur.fetchone()[0] cur.close() return ret def peek(self, index: int) -> Optional[RateItem]: with self.lock: query = Queries.PEEK.format(table=self.table, count=index) cur = self.conn.execute(query) item = cur.fetchone() cur.close() if not item: return None return 
RateItem(item[0], item[1]) @classmethod def init_from_file( cls, rates: List[Rate], table: str = "rate_bucket", db_path: Optional[str] = None, create_new_table: bool = True, use_file_lock: bool = False ) -> "SQLiteBucket": if db_path is None and use_file_lock: raise ValueError("db_path must be specified when using use_file_lock") if db_path is None: temp_dir = Path(gettempdir()) db_path = str(temp_dir / f"pyrate_limiter_{time()}.sqlite") # TBD: FileLock switched to a thread-local FileLock in 3.11.0. # Should we set FileLock's thread_local to False, for cases where user is both multiprocessing & threading? # As is, the file lock should be Multi Process - Single Thread and non-filelock is Single Process - Multi Thread # A hybrid lock may be needed to gracefully handle both cases file_lock = None file_lock_ctx = nullcontext() if use_file_lock: try: from filelock import FileLock # type: ignore[import-untyped] file_lock = FileLock(db_path + ".lock") # type: ignore[no-redef] file_lock_ctx: Union[nullcontext, FileLock] = file_lock # type: ignore[no-redef] except ImportError: raise ImportError( "filelock is required for file locking. 
" "Please install it as optional dependency" ) with file_lock_ctx: assert db_path is not None assert db_path.endswith(".sqlite"), ( "Please provide a valid sqlite file path" ) sqlite_connection = sqlite3.connect( db_path, isolation_level="DEFERRED", check_same_thread=False, ) cur = sqlite_connection.cursor() if use_file_lock: # https://www.sqlite.org/wal.html cur.execute("PRAGMA journal_mode=WAL;") # https://www.sqlite.org/pragma.html#pragma_synchronous cur.execute("PRAGMA synchronous=NORMAL;") if create_new_table: cur.execute( Queries.CREATE_BUCKET_TABLE.format(table=table) ) create_idx_query = Queries.CREATE_INDEX_ON_TIMESTAMP.format( index_name=f"idx_{table}_rate_item_timestamp", table_name=table, ) cur.execute(create_idx_query) cur.close() sqlite_connection.commit() return cls(rates, sqlite_connection, table=table, lock=file_lock) PyrateLimiter-3.9.0/pyrate_limiter/clocks.py000066400000000000000000000044751504242573000212120ustar00rootroot00000000000000"""Clock implementation using different backend""" from __future__ import annotations import sqlite3 from contextlib import nullcontext from time import monotonic from time import time from typing import Optional from typing import TYPE_CHECKING from typing import Union from .abstracts import AbstractClock from .buckets import SQLiteBucket from .utils import dedicated_sqlite_clock_connection if TYPE_CHECKING: from psycopg_pool import ConnectionPool from threading import RLock class MonotonicClock(AbstractClock): def __init__(self): monotonic() def now(self): return int(1000 * monotonic()) class TimeClock(AbstractClock): def now(self): return int(1000 * time()) class TimeAsyncClock(AbstractClock): """Time Async Clock, meant for testing only""" async def now(self) -> int: return int(1000 * time()) class SQLiteClock(AbstractClock): """Get timestamp using SQLite as remote clock backend""" time_query = ( "SELECT CAST(ROUND((julianday('now') - 2440587.5)*86400000) As INTEGER)" ) def __init__(self, conn: 
Union[sqlite3.Connection, SQLiteBucket]): """ In multiprocessing cases, use the bucket, so that a shared lock is used. """ self.lock: Optional[RLock] = None if isinstance(conn, SQLiteBucket): self.conn = conn.conn self.lock = conn.lock else: self.conn = conn @classmethod def default(cls): conn = dedicated_sqlite_clock_connection() return cls(conn) def now(self) -> int: with self.lock if self.lock else nullcontext(): cur = self.conn.execute(self.time_query) now = cur.fetchone()[0] cur.close() return int(now) class PostgresClock(AbstractClock): """Get timestamp using Postgres as remote clock backend""" def __init__(self, pool: "ConnectionPool"): self.pool = pool def now(self) -> int: value = 0 with self.pool.connection() as conn: with conn.cursor() as cur: cur.execute("SELECT EXTRACT(epoch FROM current_timestamp) * 1000") result = cur.fetchone() assert result, "unable to get current-timestamp from postgres" value = int(result[0]) return value PyrateLimiter-3.9.0/pyrate_limiter/exceptions.py000066400000000000000000000027771504242573000221200ustar00rootroot00000000000000# pylint: disable=C0114,C0115 from typing import Dict from typing import Union from .abstracts.rate import Rate from .abstracts.rate import RateItem class BucketFullException(Exception): def __init__(self, item: RateItem, rate: Rate): error = f"Bucket for item={item.name} with Rate {rate} is already full" self.item = item self.rate = rate self.meta_info: Dict[str, Union[str, float]] = { "error": error, "name": item.name, "weight": item.weight, "rate": str(rate), } super().__init__(error) def __reduce__(self): return (self.__class__, (self.item, self.rate)) class LimiterDelayException(Exception): def __init__(self, item: RateItem, rate: Rate, actual_delay: int, max_delay: int): self.item = item self.rate = rate self.actual_delay = actual_delay self.max_delay = max_delay error = f""" Actual delay exceeded allowance: actual={actual_delay}, allowed={max_delay} Bucket for {item.name} with Rate {rate} is 
already full """ self.meta_info: Dict[str, Union[str, float]] = { "error": error, "name": item.name, "weight": item.weight, "rate": str(rate), "max_delay": max_delay, "actual_delay": actual_delay, } super().__init__(error) def __reduce__(self): return (self.__class__, (self.item, self.rate, self.actual_delay, self.max_delay)) PyrateLimiter-3.9.0/pyrate_limiter/limiter.py000066400000000000000000000412061504242573000213720ustar00rootroot00000000000000"""Limiter class implementation """ import asyncio import logging from contextlib import contextmanager from functools import wraps from inspect import isawaitable from threading import local from threading import RLock from time import sleep from typing import Any from typing import Awaitable from typing import Callable from typing import Iterable from typing import List from typing import Optional from typing import Tuple from typing import Union from .abstracts import AbstractBucket from .abstracts import AbstractClock from .abstracts import BucketFactory from .abstracts import Duration from .abstracts import Rate from .abstracts import RateItem from .buckets import InMemoryBucket from .clocks import TimeClock from .exceptions import BucketFullException from .exceptions import LimiterDelayException logger = logging.getLogger("pyrate_limiter") ItemMapping = Callable[[Any], Tuple[str, int]] DecoratorWrapper = Callable[[Callable[[Any], Any]], Callable[[Any], Any]] class SingleBucketFactory(BucketFactory): """Single-bucket factory for quick use with Limiter""" bucket: AbstractBucket clock: AbstractClock def __init__(self, bucket: AbstractBucket, clock: AbstractClock): self.clock = clock self.bucket = bucket self.schedule_leak(bucket, clock) def wrap_item(self, name: str, weight: int = 1): now = self.clock.now() async def wrap_async(): return RateItem(name, await now, weight=weight) def wrap_sync(): return RateItem(name, now, weight=weight) return wrap_async() if isawaitable(now) else wrap_sync() def get(self, _: RateItem) 
-> AbstractBucket: return self.bucket @contextmanager def combined_lock(locks: Iterable, timeout_sec: Optional[float] = None): """Acquires and releases multiple locks. Intended to be used in multiprocessing for a cross-process lock combined with in process thread RLocks""" acquired = [] try: for lock in locks: if timeout_sec is not None: if not lock.acquire(timeout=timeout_sec): raise TimeoutError("Timeout while acquiring combined lock.") else: lock.acquire() acquired.append(lock) yield finally: for lock in reversed(acquired): lock.release() class Limiter: """This class responsibility is to sum up all underlying logic and make working with async/sync functions easily """ bucket_factory: BucketFactory raise_when_fail: bool retry_until_max_delay: bool max_delay: Optional[int] = None lock: Union[RLock, Iterable] buffer_ms: int # async_lock is thread local, created on first use _thread_local: local def __init__( self, argument: Union[BucketFactory, AbstractBucket, Rate, List[Rate]], clock: AbstractClock = TimeClock(), raise_when_fail: bool = True, max_delay: Optional[Union[int, Duration]] = None, retry_until_max_delay: bool = False, buffer_ms: int = 50 ): """Init Limiter using either a single bucket / multiple-bucket factory / single rate / rate list. Parameters: argument (Union[BucketFactory, AbstractBucket, Rate, List[Rate]]): The bucket or rate configuration. clock (AbstractClock, optional): The clock instance to use for rate limiting. Defaults to TimeClock(). raise_when_fail (bool, optional): Whether to raise an exception when rate limiting fails. Defaults to True. max_delay (Optional[Union[int, Duration]], optional): The maximum delay allowed for rate limiting. Defaults to None. retry_until_max_delay (bool, optional): If True, retry operations until the maximum delay is reached. Useful for ensuring operations eventually succeed within the allowed delay window. Defaults to False. 
""" self.bucket_factory = self._init_bucket_factory(argument, clock=clock) self.raise_when_fail = raise_when_fail self.retry_until_max_delay = retry_until_max_delay self.buffer_ms = buffer_ms if max_delay is not None: if isinstance(max_delay, Duration): max_delay = int(max_delay) assert max_delay >= 0, "Max-delay must not be negative" self.max_delay = max_delay self.lock = RLock() self._thread_local = local() if isinstance(argument, AbstractBucket): limiter_lock = argument.limiter_lock() if limiter_lock is not None: self.lock = (limiter_lock, self.lock) def buckets(self) -> List[AbstractBucket]: """Get list of active buckets """ return self.bucket_factory.get_buckets() def dispose(self, bucket: Union[int, AbstractBucket]) -> bool: """Dispose/Remove a specific bucket, using bucket-id or bucket object as param """ return self.bucket_factory.dispose(bucket) def _init_bucket_factory( self, argument: Union[BucketFactory, AbstractBucket, Rate, List[Rate]], clock: AbstractClock, ) -> BucketFactory: if isinstance(argument, Rate): argument = [argument] if isinstance(argument, list): assert len(argument) > 0, "Rates must not be empty" assert isinstance(argument[0], Rate), "Not valid rates list" rates = argument logger.info("Initializing default bucket(InMemoryBucket) with rates: %s", rates) argument = InMemoryBucket(rates) if isinstance(argument, AbstractBucket): argument = SingleBucketFactory(argument, clock) assert isinstance(argument, BucketFactory), "Not a valid bucket/bucket-factory" return argument def _raise_bucket_full_if_necessary( self, bucket: AbstractBucket, item: RateItem, ): if self.raise_when_fail: assert bucket.failing_rate is not None # NOTE: silence mypy raise BucketFullException(item, bucket.failing_rate) def _raise_delay_exception_if_necessary( self, bucket: AbstractBucket, item: RateItem, delay: int, ): if self.raise_when_fail: assert bucket.failing_rate is not None # NOTE: silence mypy assert isinstance(self.max_delay, int) raise LimiterDelayException( 
item, bucket.failing_rate, delay, self.max_delay, ) def delay_or_raise( self, bucket: AbstractBucket, item: RateItem, ) -> Union[bool, Awaitable[bool]]: """On `try_acquire` failed, handle delay or raise error immediately""" assert bucket.failing_rate is not None if self.max_delay is None: self._raise_bucket_full_if_necessary(bucket, item) return False delay = bucket.waiting(item) def _handle_reacquire(re_acquire: bool) -> bool: if not re_acquire: logger.error("""Failed to re-acquire after the expected delay. If it failed, either clock or bucket is unstable. If asyncio, use try_acquire_async(). If multiprocessing, use retry_until_max_delay=True.""") self._raise_bucket_full_if_necessary(bucket, item) return re_acquire if isawaitable(delay): async def _handle_async(): nonlocal delay delay = await delay assert isinstance(delay, int), "Delay not integer" total_delay = 0 delay += self.buffer_ms while True: total_delay += delay if self.retry_until_max_delay: if self.max_delay is not None and total_delay > self.max_delay: logger.error("Total delay exceeded max_delay: total_delay=%s, max_delay=%s", total_delay, self.max_delay) self._raise_delay_exception_if_necessary(bucket, item, total_delay) return False else: if self.max_delay is not None and delay > self.max_delay: logger.error( "Required delay too large: actual=%s, expected=%s", delay, self.max_delay, ) self._raise_delay_exception_if_necessary(bucket, item, delay) return False await asyncio.sleep(delay / 1000) item.timestamp += delay re_acquire = bucket.put(item) if isawaitable(re_acquire): re_acquire = await re_acquire if not self.retry_until_max_delay: return _handle_reacquire(re_acquire) elif re_acquire: return True return _handle_async() assert isinstance(delay, int) if delay < 0: logger.error( "Cannot fit item into bucket: item=%s, rate=%s, bucket=%s", item, bucket.failing_rate, bucket, ) self._raise_bucket_full_if_necessary(bucket, item) return False total_delay = 0 while True: logger.debug("delay=%d, 
total_delay=%s", delay, total_delay) delay = bucket.waiting(item) assert isinstance(delay, int) delay += self.buffer_ms total_delay += delay if self.max_delay is not None and total_delay > self.max_delay: logger.error( "Required delay too large: actual=%s, expected=%s", delay, self.max_delay, ) if self.retry_until_max_delay: self._raise_delay_exception_if_necessary(bucket, item, total_delay) else: self._raise_delay_exception_if_necessary(bucket, item, delay) return False sleep(delay / 1000) item.timestamp += delay re_acquire = bucket.put(item) # NOTE: if delay is not Awaitable, then `bucket.put` is not Awaitable assert isinstance(re_acquire, bool) if not self.retry_until_max_delay: return _handle_reacquire(re_acquire) elif re_acquire: return True def handle_bucket_put( self, bucket: AbstractBucket, item: RateItem, ) -> Union[bool, Awaitable[bool]]: """Putting item into bucket""" def _handle_result(is_success: bool): if not is_success: return self.delay_or_raise(bucket, item) return True acquire = bucket.put(item) if isawaitable(acquire): async def _put_async(): nonlocal acquire acquire = await acquire result = _handle_result(acquire) while isawaitable(result): result = await result return result return _put_async() return _handle_result(acquire) # type: ignore def _get_async_lock(self): """Must be called before first try_acquire_async for each thread""" try: return self._thread_local.async_lock except AttributeError: lock = asyncio.Lock() self._thread_local.async_lock = lock return lock async def try_acquire_async(self, name: str, weight: int = 1) -> bool: """ async version of try_acquire. This uses a top level, thread-local async lock to ensure that the async loop doesn't block This does not make the entire code async: use an async bucket for that. 
""" async with self._get_async_lock(): acquired = self.try_acquire(name=name, weight=weight) if isawaitable(acquired): return await acquired else: logger.warning("async call made without an async bucket.") return acquired def try_acquire(self, name: str, weight: int = 1) -> Union[bool, Awaitable[bool]]: """Try acquiring an item with name & weight Return true on success, false on failure """ with self.lock if not isinstance(self.lock, Iterable) else combined_lock(self.lock): assert weight >= 0, "item's weight must be >= 0" if weight == 0: # NOTE: if item is weightless, just let it go through # NOTE: this might change in the future return True item = self.bucket_factory.wrap_item(name, weight) if isawaitable(item): async def _handle_async(): nonlocal item item = await item bucket = self.bucket_factory.get(item) if isawaitable(bucket): bucket = await bucket assert isinstance(bucket, AbstractBucket), f"Invalid bucket: item: {name}" result = self.handle_bucket_put(bucket, item) while isawaitable(result): result = await result return result return _handle_async() assert isinstance(item, RateItem) # NOTE: this is to silence mypy warning bucket = self.bucket_factory.get(item) if isawaitable(bucket): async def _handle_async_bucket(): nonlocal bucket bucket = await bucket assert isinstance(bucket, AbstractBucket), f"Invalid bucket: item: {name}" result = self.handle_bucket_put(bucket, item) while isawaitable(result): result = await result return result return _handle_async_bucket() assert isinstance(bucket, AbstractBucket), f"Invalid bucket: item: {name}" result = self.handle_bucket_put(bucket, item) if isawaitable(result): async def _handle_async_result(): nonlocal result while isawaitable(result): result = await result return result return _handle_async_result() return result def as_decorator(self) -> Callable[[ItemMapping], DecoratorWrapper]: """Use limiter decorator Use with both sync & async function """ def with_mapping_func(mapping: ItemMapping) -> DecoratorWrapper: 
def decorator_wrapper(func: Callable[[Any], Any]) -> Callable[[Any], Any]: """Actual function wrapper""" if asyncio.iscoroutinefunction(func): @wraps(func) async def wrapper_async(*args, **kwargs): (name, weight) = mapping(*args, **kwargs) assert isinstance(name, str), "Mapping name is expected but not found" assert isinstance(weight, int), "Mapping weight is expected but not found" accquire_ok = self.try_acquire_async(name, weight) if isawaitable(accquire_ok): await accquire_ok return await func(*args, **kwargs) return wrapper_async else: @wraps(func) def wrapper(*args, **kwargs): (name, weight) = mapping(*args, **kwargs) assert isinstance(name, str), "Mapping name is expected but not found" assert isinstance(weight, int), "Mapping weight is expected but not found" accquire_ok = self.try_acquire(name, weight) if not isawaitable(accquire_ok): return func(*args, **kwargs) async def _handle_accquire_async(): nonlocal accquire_ok accquire_ok = await accquire_ok result = func(*args, **kwargs) if isawaitable(result): return await result return result return _handle_accquire_async() return wrapper return decorator_wrapper return with_mapping_func PyrateLimiter-3.9.0/pyrate_limiter/limiter_factory.py000066400000000000000000000120601504242573000231150ustar00rootroot00000000000000""" A collection of common use cases and patterns for pyrate_limiter """ import logging from typing import List from typing import Optional from typing import Union from pyrate_limiter import AbstractBucket from pyrate_limiter import BucketAsyncWrapper from pyrate_limiter import Duration from pyrate_limiter import InMemoryBucket from pyrate_limiter import Limiter from pyrate_limiter import Rate from pyrate_limiter import SQLiteBucket logger = logging.getLogger(__name__) # Global for convenience in multiprocessing, populated by init_mp_limiter. 
# Intended to be called by a ProcessPoolExecutor's initializer LIMITER: Optional[Limiter] = None def create_sqlite_bucket( rates: List[Rate], db_path: Optional[str], table_name: str = "pyrate_limiter", use_file_lock: bool = False, ): """ Create and initialize a SQLite bucket for rate limiting. Args: rates: List of rate limit configurations. db_path: Path to the SQLite database file (or in-memory if None). table_name: Name of the table to store rate bucket data. use_file_lock: Enable file locking for multi-process synchronization. Returns: SQLiteBucket: Initialized SQLite-backed bucket. """ logger.info(f"{table_name=}") bucket = SQLiteBucket.init_from_file( rates, db_path=str(db_path), table=table_name, create_new_table=True, use_file_lock=use_file_lock, ) return bucket def create_sqlite_limiter( rate_per_duration: int = 3, duration: Union[int, Duration] = Duration.SECOND, db_path: Optional[str] = None, table_name: str = "rate_bucket", max_delay: Union[int, Duration] = Duration.DAY, buffer_ms: int = 50, use_file_lock: bool = False, async_wrapper: bool = False, ) -> Limiter: """ Create a SQLite-backed rate limiter with configurable rate, persistence, and optional async support. Args: rate_per_duration: Number of allowed requests per duration. duration: Time window for the rate limit. db_path: Path to the SQLite database file (or in-memory if None). table_name: Name of the table used for rate buckets. max_delay: Maximum delay before failing requests. buffer_ms: Extra wait time in milliseconds to account for clock drift. use_file_lock: Enable file locking for multi-process synchronization. async_wrapper: Whether to wrap the bucket for async usage. Returns: Limiter: Configured SQLite-backed limiter instance. 
""" rate = Rate(rate_per_duration, duration) rate_limits = [rate] bucket: AbstractBucket = SQLiteBucket.init_from_file( rate_limits, db_path=str(db_path), table=table_name, create_new_table=True, use_file_lock=use_file_lock, ) if async_wrapper: bucket = BucketAsyncWrapper(bucket) limiter = Limiter( bucket, raise_when_fail=False, max_delay=max_delay, retry_until_max_delay=True, buffer_ms=buffer_ms ) return limiter def create_inmemory_limiter( rate_per_duration: int = 3, duration: Union[int, Duration] = Duration.SECOND, max_delay: Union[int, Duration] = Duration.DAY, buffer_ms: int = 50, async_wrapper: bool = False, ) -> Limiter: """ Create an in-memory rate limiter with configurable rate, duration, delay, and optional async support. Args: rate_per_duration: Number of allowed requests per duration. duration: Time window for the rate limit. max_delay: Maximum delay before failing requests. buffer_ms: Extra wait time in milliseconds to account for clock drift. async_wrapper: Whether to wrap the bucket for async usage. Returns: Limiter: Configured in-memory limiter instance. """ rate = Rate(rate_per_duration, duration) rate_limits = [rate] bucket: AbstractBucket = InMemoryBucket(rate_limits) if async_wrapper: bucket = BucketAsyncWrapper(InMemoryBucket(rate_limits)) limiter = Limiter( bucket, raise_when_fail=False, max_delay=max_delay, retry_until_max_delay=True, buffer_ms=buffer_ms ) return limiter def init_global_limiter(bucket: AbstractBucket, max_delay: Union[int, Duration] = Duration.HOUR, raise_when_fail: bool = False, retry_until_max_delay: bool = True, buffer_ms: int = 50): """ Initialize a global Limiter instance using the provided bucket. Intended for use as an initializer for ProcessPoolExecutor. Args: bucket: The rate-limiting bucket to be used. max_delay: Maximum delay before failing requests. raise_when_fail: Whether to raise an exception when a request fails. retry_until_max_delay: Retry until the maximum delay is reached. 
buffer_ms: Additional buffer time in milliseconds for retries. """ global LIMITER LIMITER = Limiter(bucket, raise_when_fail=raise_when_fail, max_delay=max_delay, retry_until_max_delay=retry_until_max_delay, buffer_ms=buffer_ms) PyrateLimiter-3.9.0/pyrate_limiter/py.typed000066400000000000000000000000001504242573000210350ustar00rootroot00000000000000PyrateLimiter-3.9.0/pyrate_limiter/utils.py000066400000000000000000000040161504242573000210630ustar00rootroot00000000000000import random import sqlite3 import string from pathlib import Path from tempfile import gettempdir from typing import List from .abstracts import Rate from .abstracts import RateItem def binary_search(items: List[RateItem], value: int) -> int: """Find the index of item in list where left.timestamp < value <= right.timestamp this is to determine the current size of some window that stretches from now back to lower-boundary = value and """ if not items: return 0 if value > items[-1].timestamp: return -1 if value <= items[0].timestamp: return 0 if len(items) == 2: return 1 left_pointer, right_pointer, mid = 0, len(items) - 1, -2 while left_pointer <= right_pointer: mid = (left_pointer + right_pointer) // 2 left, right = items[mid - 1].timestamp, items[mid].timestamp if left < value <= right: break if left >= value: right_pointer = mid if right < value: left_pointer = mid + 1 return mid def validate_rate_list(rates: List[Rate]) -> bool: """Raise false if rates are incorrectly ordered.""" if not rates: return False for idx, current_rate in enumerate(rates[1:]): prev_rate = rates[idx] if current_rate.interval <= prev_rate.interval: return False if current_rate.limit <= prev_rate.limit: return False if (current_rate.limit / current_rate.interval) > (prev_rate.limit / prev_rate.interval): return False return True def id_generator( size=6, chars=string.ascii_uppercase + string.digits + string.ascii_lowercase, ) -> str: return "".join(random.choice(chars) for _ in range(size)) def 
dedicated_sqlite_clock_connection(): temp_dir = Path(gettempdir()) default_db_path = temp_dir / "pyrate_limiter_clock_only.sqlite" conn = sqlite3.connect( default_db_path, isolation_level="EXCLUSIVE", check_same_thread=False, ) return conn PyrateLimiter-3.9.0/setup.cfg000066400000000000000000000004501504242573000161370ustar00rootroot00000000000000[flake8] max-line-length = 120 ignore = E203, E401 # Code Climate still uses the 'pep8' section; renamed to 'pycodestyle' when used locally [pep8] max-line-length = 120 ignore = E203 [pycodestyle] max-line-length = 120 ignore = E203 [tool.pylint] disable = C0330,C0116,R0903,C0115,C0114,R0801 PyrateLimiter-3.9.0/tests/000077500000000000000000000000001504242573000154615ustar00rootroot00000000000000PyrateLimiter-3.9.0/tests/__init__.py000066400000000000000000000000001504242573000175600ustar00rootroot00000000000000PyrateLimiter-3.9.0/tests/conftest.py000066400000000000000000000102071504242573000176600ustar00rootroot00000000000000"""Pytest config & fixtures""" from logging import basicConfig from logging import getLogger from os import getenv from pathlib import Path from tempfile import gettempdir from typing import List from typing import Union import pytest from pyrate_limiter import Duration from pyrate_limiter import id_generator from pyrate_limiter import InMemoryBucket from pyrate_limiter import limiter_factory from pyrate_limiter import MonotonicClock from pyrate_limiter import MultiprocessBucket from pyrate_limiter import PostgresBucket from pyrate_limiter import Rate from pyrate_limiter import RedisBucket from pyrate_limiter import TimeAsyncClock from pyrate_limiter import TimeClock # Make log messages visible on test failure (or with pytest -s) basicConfig(level="INFO") # Uncomment for more verbose output: logger = getLogger("pyrate_limiter") logger.setLevel(getenv("LOG_LEVEL", "INFO")) DEFAULT_RATES = [Rate(3, 1000), Rate(4, 1500)] clocks = [ pytest.param(MonotonicClock(), marks=pytest.mark.monotonic), 
pytest.param(TimeClock(), marks=pytest.mark.timeclock), pytest.param(TimeAsyncClock(), marks=pytest.mark.asyncclock), ] ClockSet = Union[ MonotonicClock, TimeClock, TimeAsyncClock, ] @pytest.fixture(params=clocks) def clock(request): """Parametrization for different clock.""" return request.param async def create_in_memory_bucket(rates: List[Rate]): return InMemoryBucket(rates) async def create_redis_bucket(rates: List[Rate]): from redis import ConnectionPool from redis import Redis pool = ConnectionPool.from_url(getenv("REDIS", "redis://localhost:6379")) redis_db = Redis(connection_pool=pool) bucket_key = f"test-bucket/{id_generator()}" redis_db.delete(bucket_key) bucket = RedisBucket.init(rates, redis_db, bucket_key) assert bucket.count() == 0 return bucket async def create_async_redis_bucket(rates: List[Rate]): from redis.asyncio import ConnectionPool as AsyncConnectionPool from redis.asyncio import Redis as AsyncRedis pool: AsyncConnectionPool = AsyncConnectionPool.from_url( getenv("REDIS", "redis://localhost:6379") ) redis_db: AsyncRedis = AsyncRedis(connection_pool=pool) bucket_key = f"test-bucket/{id_generator()}" await redis_db.delete(bucket_key) bucket = await RedisBucket.init(rates, redis_db, bucket_key) assert await bucket.count() == 0 return bucket async def create_mp_bucket(rates: List[Rate]): bucket = MultiprocessBucket.init(rates) return bucket async def create_sqlite_bucket(rates: List[Rate], file_lock: bool = False): temp_dir = Path(gettempdir()) default_db_path = temp_dir / f"pyrate_limiter_{id_generator(size=5)}.sqlite" table_name = f"pyrate-test-bucket-{id_generator(size=10)}" logger.info("SQLite db path: %s", default_db_path) return limiter_factory.create_sqlite_bucket(rates=rates, db_path=str(default_db_path), table_name=table_name, use_file_lock=file_lock) async def create_filelocksqlite_bucket(rates: List[Rate]): return await create_sqlite_bucket(rates=rates, file_lock=True) async def create_postgres_bucket(rates: List[Rate]): from 
psycopg_pool import ConnectionPool as PgConnectionPool pool = PgConnectionPool("postgresql://postgres:postgres@localhost:5432") table = f"test_bucket_{id_generator()}" bucket = PostgresBucket(pool, table, rates) assert bucket.count() == 0 return bucket @pytest.fixture( params=[ create_in_memory_bucket, create_redis_bucket, create_sqlite_bucket, create_async_redis_bucket, create_postgres_bucket, create_filelocksqlite_bucket, pytest.param(create_mp_bucket, marks=pytest.mark.mpbucket) ] ) def create_bucket(request): """Parametrization for different bucket.""" return request.param @pytest.fixture(params=[True, False]) def limiter_should_raise(request): return request.param @pytest.fixture(params=[None, 500, Duration.SECOND * 2, Duration.MINUTE]) def limiter_delay(request): return request.param PyrateLimiter-3.9.0/tests/demo_bucket_factory.py000066400000000000000000000074251504242573000220530ustar00rootroot00000000000000from inspect import isawaitable from os import getenv from typing import Dict from typing import Optional from redis.asyncio import ConnectionPool as AsyncConnectionPool from redis.asyncio import Redis as AsyncRedis from .conftest import DEFAULT_RATES from .helpers import flushing_bucket from pyrate_limiter import AbstractBucket from pyrate_limiter import AbstractClock from pyrate_limiter import BucketFactory from pyrate_limiter import id_generator from pyrate_limiter import InMemoryBucket from pyrate_limiter import RateItem from pyrate_limiter import RedisBucket class DemoBucketFactory(BucketFactory): """Multi-bucket factory used for testing schedule-leaks""" buckets: Optional[Dict[str, AbstractBucket]] = None clock: AbstractClock auto_leak: bool def __init__(self, bucket_clock: AbstractClock, auto_leak=False, **buckets: AbstractBucket): self.auto_leak = auto_leak self.clock = bucket_clock self.buckets = {} self.leak_interval = 300 for item_name_pattern, bucket in buckets.items(): assert isinstance(bucket, AbstractBucket) self.schedule_leak(bucket, 
bucket_clock) self.buckets[item_name_pattern] = bucket def wrap_item(self, name: str, weight: int = 1): now = self.clock.now() async def wrap_async(): return RateItem(name, await now, weight=weight) def wrap_sync(): return RateItem(name, now, weight=weight) return wrap_async() if isawaitable(now) else wrap_sync() def get(self, item: RateItem) -> AbstractBucket: assert self.buckets is not None if item.name in self.buckets: bucket = self.buckets[item.name] assert isinstance(bucket, AbstractBucket) return bucket bucket = self.create(self.clock, InMemoryBucket, DEFAULT_RATES) self.buckets[item.name] = bucket return bucket def schedule_leak(self, *args): if self.auto_leak: super().schedule_leak(*args) class DemoAsyncGetBucketFactory(BucketFactory): """Async multi-bucket factory used for testing schedule-leaks""" def __init__(self, bucket_clock: AbstractClock, auto_leak=False, **buckets: AbstractBucket): self.auto_leak = auto_leak self.clock = bucket_clock self.buckets = {} self.leak_interval = 300 for item_name_pattern, bucket in buckets.items(): assert isinstance(bucket, AbstractBucket) self.schedule_leak(bucket, bucket_clock) self.buckets[item_name_pattern] = bucket def wrap_item(self, name: str, weight: int = 1): now = self.clock.now() async def wrap_async(): return RateItem(name, await now, weight=weight) def wrap_sync(): return RateItem(name, now, weight=weight) return wrap_async() if isawaitable(now) else wrap_sync() async def get(self, item: RateItem) -> AbstractBucket: assert self.buckets is not None if item.name in self.buckets: bucket = self.buckets[item.name] assert isinstance(bucket, AbstractBucket) return bucket pool: AsyncConnectionPool = AsyncConnectionPool.from_url(getenv("REDIS", "redis://localhost:6379")) redis_db: AsyncRedis = AsyncRedis(connection_pool=pool) key = f"test-bucket/{id_generator()}" await redis_db.delete(key) bucket = await RedisBucket.init(DEFAULT_RATES, redis_db, key) self.schedule_leak(bucket, self.clock) 
self.buckets.update({item.name: bucket}) return bucket def schedule_leak(self, *args): if self.auto_leak: super().schedule_leak(*args) async def flush(self): for bucket in self.buckets.values(): await flushing_bucket(bucket) PyrateLimiter-3.9.0/tests/helpers.py000066400000000000000000000055131504242573000175010ustar00rootroot00000000000000"""Duh.... """ from asyncio import sleep from concurrent.futures import ThreadPoolExecutor from inspect import isawaitable from time import time from typing import List from typing import Tuple from .conftest import logger from pyrate_limiter import AbstractBucket from pyrate_limiter import Limiter from pyrate_limiter import RateItem async def inspect_bucket_items(bucket: AbstractBucket, expected_item_count: int): """Inspect items in the bucket - Assert number of item == expected-item-count - Assert that items are ordered by timestamps, from latest to earliest """ collected_items = [] for idx in range(expected_item_count): item = bucket.peek(idx) if isawaitable(item): item = await item assert isinstance(item, RateItem) collected_items.append(item) item_names = [item.name for item in collected_items] for i in range(1, expected_item_count): item = collected_items[i] prev_item = collected_items[i - 1] assert item.timestamp <= prev_item.timestamp return item_names async def concurrent_acquire(limiter: Limiter, items: List[str]): with ThreadPoolExecutor() as executor: result = list(executor.map(limiter.try_acquire, items)) for idx, coro in enumerate(result): while isawaitable(coro): coro = await coro result[idx] = coro return result async def async_acquire(limiter: Limiter, item: str, weight: int = 1) -> Tuple[bool, int]: start = time() acquire = await limiter.try_acquire_async(item, weight=weight) time_cost_in_ms = int((time() - start) * 1000) assert isinstance(acquire, bool) return acquire, time_cost_in_ms async def async_count(bucket: AbstractBucket) -> int: count = bucket.count() if isawaitable(count): count = await count assert 
isinstance(count, int) return count async def prefilling_bucket(limiter: Limiter, sleep_interval: float, item: str): """Pre-filling bucket to the limit before testing the time cost might vary depending on the bucket's backend - For in-memory bucket, this should be less than a 1ms - For external bucket's source ie Redis, this mostly depends on the network latency """ acquire_ok, cost = await async_acquire(limiter, item) logger.info("cost = %s", cost) assert cost <= 50 assert acquire_ok await sleep(sleep_interval) acquire_ok, cost = await async_acquire(limiter, item) logger.info("cost = %s", cost) assert cost <= 50 assert acquire_ok await sleep(sleep_interval) acquire_ok, cost = await async_acquire(limiter, item) logger.info("cost = %s", cost) assert cost <= 50 assert acquire_ok async def flushing_bucket(bucket: AbstractBucket): flush = bucket.flush() if isawaitable(flush): await flush PyrateLimiter-3.9.0/tests/test_bucket_all.py000066400000000000000000000170571504242573000212110ustar00rootroot00000000000000""" Testing buckets of all implementations """ import asyncio from inspect import isawaitable from time import time import pytest from .conftest import ClockSet from .conftest import logger from pyrate_limiter import AbstractClock from pyrate_limiter import BucketAsyncWrapper from pyrate_limiter import Rate from pyrate_limiter import RateItem from pyrate_limiter import TimeClock async def get_now(clock: AbstractClock) -> int: """Util function to get time now""" now = clock.now() if isawaitable(now): now = await now assert isinstance(now, int) return now @pytest.mark.asyncio async def test_bucket_01(clock: ClockSet, create_bucket): rates = [Rate(20, 1000)] bucket = BucketAsyncWrapper(await create_bucket(rates)) assert bucket is not None peek = await bucket.peek(0) assert peek is None await bucket.put(RateItem("my-item", await get_now(clock))) assert await bucket.count() == 1 await bucket.put(RateItem("my-item", await get_now(clock), weight=10)) assert await 
bucket.count() == 11 assert await bucket.put(RateItem("my-item", await get_now(clock), weight=20)) is False assert bucket.failing_rate == rates[0] assert await bucket.put(RateItem("my-item", await get_now(clock), weight=9)) is True assert await bucket.count() == 20 assert await bucket.put(RateItem("my-item", await get_now(clock))) is False await asyncio.sleep(2) assert await bucket.put(RateItem("my-item", await get_now(clock))) is True await asyncio.sleep(2) assert await bucket.put(RateItem("my-item", await get_now(clock), weight=30)) is False @pytest.mark.asyncio async def test_bucket_02(clock: ClockSet, create_bucket): rates = [Rate(30, 1000), Rate(50, 2000)] bucket = BucketAsyncWrapper(await create_bucket(rates)) start = time() while await bucket.count() < 150: await bucket.put(RateItem("item", await get_now(clock))) if await bucket.count() == 31: cost = time() - start logger.info(">30 items: %s", cost) assert cost > 0.99 if await bucket.count() == 51: cost = time() - start logger.info(">50 items: %s", cost) assert cost > 2 if await bucket.count() == 81: cost = time() - start logger.info(">80 items: %s", cost) assert cost > 3 if await bucket.count() == 101: cost = time() - start logger.info(">100 items: %s", cost) assert cost > 4 @pytest.mark.asyncio async def test_bucket_03(clock: ClockSet, create_bucket): rates = [Rate(30, 1000), Rate(50, 2000)] bucket = BucketAsyncWrapper(await create_bucket(rates)) peek = await bucket.peek(0) assert peek is None await bucket.put(RateItem("item1", await get_now(clock))) peek = await bucket.peek(0) assert isinstance(peek, RateItem) assert "item1" in peek.name await bucket.put(RateItem("item2", await get_now(clock))) peek = await bucket.peek(0) assert isinstance(peek, RateItem) assert "item2" in peek.name peek = await bucket.peek(1) assert isinstance(peek, RateItem) assert "item1" in peek.name await bucket.put(RateItem("item3", await get_now(clock))) peek = await bucket.peek(0) assert isinstance(peek, RateItem) assert "item3" 
in peek.name peek = await bucket.peek(1) assert isinstance(peek, RateItem) assert "item2" in peek.name peek = await bucket.peek(2) assert isinstance(peek, RateItem) assert "item1" in peek.name peek = await bucket.peek(3) assert peek is None @pytest.mark.asyncio async def test_bucket_waiting(clock: ClockSet, create_bucket): rates = [Rate(3, 500)] bucket = await create_bucket(rates) logger.info("Testing `bucket.waiting` with Bucket: %s, \nclock=%s", bucket, clock) bucket = BucketAsyncWrapper(bucket) async def create_item(weight: int = 1) -> RateItem: now = clock.now() if isawaitable(now): now = await now assert isinstance(now, int) return RateItem("item", now, weight) start = await get_now(clock) assert start > 0 assert await bucket.waiting(await create_item()) == 0 for _ in range(3): assert await bucket.put(await create_item()) is True # NOTE: sleep 100ms between each item await asyncio.sleep(0.1) end = await get_now(clock) assert end > 0 elapsed = end - start assert elapsed > 0 logger.info("Elapsed: %s", elapsed) assert await bucket.put(await create_item()) is False availability = await bucket.waiting(await create_item()) # type: ignore assert isinstance(availability, int) logger.info("1 space available in: %s", availability) await asyncio.sleep(availability / 1000 - 0.03) assert await bucket.put(await create_item()) is False await asyncio.sleep(0.04) assert await bucket.put(await create_item()) is True assert await bucket.put(await create_item(2)) is False availability = await bucket.waiting(await create_item(2)) # type: ignore assert isinstance(availability, int) logger.info("2 space available in: %s", availability) await asyncio.sleep(availability / 1000 - 0.03) assert await bucket.put(await create_item(2)) is False await asyncio.sleep(0.04) assert await bucket.put(await create_item(2)) is True assert await bucket.put(await create_item(3)) is False availability = await bucket.waiting(await create_item(3)) # type: ignore assert isinstance(availability, int) 
logger.info("3 space available in: %s", availability) await asyncio.sleep(availability / 1000 - 0.03) assert await bucket.put(await create_item(3)) is False await asyncio.sleep(0.04) assert await bucket.put(await create_item(3)) is True @pytest.mark.asyncio async def test_bucket_leak(clock: ClockSet, create_bucket): rates = [Rate(100, 3000)] bucket = BucketAsyncWrapper(await create_bucket(rates)) while await bucket.count() < 200: await bucket.put(RateItem("item", await get_now(clock))) await bucket.leak(await get_now(clock)) assert await bucket.count() == 100 assert await bucket.leak(await get_now(clock)) == 0 assert await bucket.count() == 100 await asyncio.sleep(3.01) assert await bucket.leak(await get_now(clock)) == 100 assert await bucket.leak(await get_now(clock)) == 0 assert await bucket.count() == 0 @pytest.mark.asyncio async def test_bucket_flush(create_bucket): """Testing bucket's flush, only need 1 single clock type""" rates = [Rate(50, 1000)] bucket = BucketAsyncWrapper(await create_bucket(rates)) assert isinstance(bucket.rates[0], Rate) clock = TimeClock() while await bucket.put(RateItem("item", clock.now())): pass assert await bucket.count() == 50 assert bucket.failing_rate is not None await bucket.flush() assert await bucket.count() == 0 assert bucket.failing_rate is None @pytest.mark.asyncio async def test_bucket_performance(create_bucket): """Bucket's performance test Putting a very large number of item into bucket Only need to test with a single clock type """ clock = TimeClock() rates = [Rate(30000, 50000)] bucket = BucketAsyncWrapper(await create_bucket(rates)) before = time() for _ in range(10_000): item = RateItem("item", clock.now()) assert await bucket.put(item) is True after = time() elapsed = after - before assert await bucket.count() == 10_000 logger.info("Bucket: %s \nPerformance test: insert 10k items %s(secs)", bucket.bucket, elapsed) 
PyrateLimiter-3.9.0/tests/test_bucket_factory.py000066400000000000000000000045251504242573000221040ustar00rootroot00000000000000"""Complete Limiter test suite """ import asyncio from inspect import isawaitable from time import sleep import pytest from .conftest import DEFAULT_RATES from .conftest import logger from .demo_bucket_factory import DemoBucketFactory from .helpers import async_count from pyrate_limiter import AbstractBucket from pyrate_limiter import RateItem @pytest.mark.asyncio async def test_factory_01(clock, create_bucket): factory = DemoBucketFactory( clock, hello=await create_bucket(DEFAULT_RATES), ) item = factory.wrap_item("hello", 1) if isawaitable(item): item = await item assert isinstance(item, RateItem) assert item.weight == 1 bucket = factory.get(item) assert isinstance(bucket, AbstractBucket) @pytest.mark.asyncio async def test_factory_leak(clock, create_bucket): bucket1 = await create_bucket(DEFAULT_RATES) bucket2 = await create_bucket(DEFAULT_RATES) assert id(bucket1) != id(bucket2) factory = DemoBucketFactory(clock, auto_leak=True, b1=bucket1, b2=bucket2) assert len(factory.buckets) == 2 logger.info("Factory initiated with %s buckets", len(factory.buckets)) for item_name in ["b1", "b2", "a1"]: for _ in range(3): is_async = False item = factory.wrap_item(item_name) if isawaitable(item): is_async = True item = await item bucket = factory.get(item) put_ok = bucket.put(item) if isawaitable(put_ok): is_async = True put_ok = await put_ok assert put_ok sleep(0.1) if item_name == "b1": assert await async_count(bucket1) == 3 if item_name == "b2": assert await async_count(bucket2) == 3 if item_name == "a1": assert await async_count(factory.buckets[item_name]) == 3 if is_async: await asyncio.sleep(6) else: sleep(6) assert await async_count(bucket1) == 0 assert await async_count(bucket2) == 0 assert await async_count(factory.buckets[item_name]) == 0 assert len(factory.buckets) == 3 for bucket in factory.get_buckets(): factory.dispose(bucket) sleep(1) 
assert factory._leaker.is_alive() is False assert factory._leaker.aio_leak_task is None PyrateLimiter-3.9.0/tests/test_limiter.py000066400000000000000000000267061504242573000205520ustar00rootroot00000000000000"""Complete Limiter test suite """ import time from inspect import isawaitable import pytest from .conftest import DEFAULT_RATES from .conftest import logger from .demo_bucket_factory import DemoAsyncGetBucketFactory from .demo_bucket_factory import DemoBucketFactory from .helpers import async_acquire from .helpers import concurrent_acquire from .helpers import flushing_bucket from .helpers import inspect_bucket_items from .helpers import prefilling_bucket from pyrate_limiter import AbstractBucket from pyrate_limiter import BucketAsyncWrapper from pyrate_limiter import BucketFactory from pyrate_limiter import BucketFullException from pyrate_limiter import Duration from pyrate_limiter import InMemoryBucket from pyrate_limiter import Limiter from pyrate_limiter import LimiterDelayException from pyrate_limiter import Rate from pyrate_limiter import SingleBucketFactory from pyrate_limiter import TimeClock @pytest.mark.asyncio async def test_limiter_constructor_01(clock): limiter = Limiter(DEFAULT_RATES[0], clock=clock) assert isinstance(limiter.bucket_factory, BucketFactory) assert isinstance(limiter.bucket_factory.bucket, InMemoryBucket) assert limiter.bucket_factory.bucket.rates == [DEFAULT_RATES[0]] assert limiter.bucket_factory.clock == clock limiter = Limiter(DEFAULT_RATES, clock=clock) assert isinstance(limiter.bucket_factory, BucketFactory) assert isinstance(limiter.bucket_factory.bucket, InMemoryBucket) assert limiter.bucket_factory.bucket.rates == DEFAULT_RATES assert limiter.bucket_factory.clock == clock assert len(limiter.buckets()) == 1 @pytest.mark.asyncio async def test_limiter_constructor_02( clock, create_bucket, limiter_should_raise, limiter_delay, ): bucket = await create_bucket(DEFAULT_RATES) limiter = Limiter(bucket) assert 
isinstance(limiter.bucket_factory, SingleBucketFactory) assert isinstance(limiter.bucket_factory.clock, TimeClock) assert limiter.max_delay is None assert limiter.raise_when_fail is True limiter = Limiter( bucket, clock=clock, raise_when_fail=limiter_should_raise, max_delay=limiter_delay, ) assert isinstance(limiter.bucket_factory, BucketFactory) assert limiter.raise_when_fail == limiter_should_raise assert limiter.max_delay == limiter_delay acquire_ok = limiter.try_acquire("example") if isawaitable(acquire_ok): acquire_ok = await acquire_ok assert acquire_ok factory = DemoBucketFactory(clock, demo=bucket) limiter = Limiter( factory, raise_when_fail=limiter_should_raise, max_delay=limiter_delay, ) assert limiter.bucket_factory is factory assert limiter.raise_when_fail == limiter_should_raise assert limiter.max_delay == limiter_delay @pytest.mark.asyncio async def test_limiter_01( clock, create_bucket, limiter_should_raise, limiter_delay, ): bucket = await create_bucket(DEFAULT_RATES) factory = DemoBucketFactory(clock, demo=bucket) limiter = Limiter( factory, raise_when_fail=limiter_should_raise, max_delay=limiter_delay, buffer_ms=1 ) bucket = BucketAsyncWrapper(bucket) item = "demo" logger.info("If weight = 0, it just passes thru") acquire_ok, cost = await async_acquire(limiter, item, weight=0) assert acquire_ok assert cost <= 10 assert await bucket.count() == 0 logger.info("Limiter Test #1") await prefilling_bucket(limiter, 0.3, item) if not limiter_should_raise: acquire_ok, cost = await async_acquire(limiter, item) if limiter_delay is None: assert cost <= 50 assert not acquire_ok else: assert acquire_ok else: if limiter_delay is None: with pytest.raises(BucketFullException): acquire_ok, cost = await async_acquire(limiter, item) else: acquire_ok, cost = await async_acquire(limiter, item) assert cost > 350 assert acquire_ok # # Flush before testing again await flushing_bucket(bucket) logger.info("Limiter Test #2") await prefilling_bucket(limiter, 0, item) if 
limiter_should_raise: if limiter_delay == 500: with pytest.raises(LimiterDelayException) as err: await async_acquire(limiter, item) assert err.meta_info["max_delay"] == 500 assert err.meta_info["actual_delay"] > 600 assert err.meta_info["name"] == item elif limiter_delay == 2000: acquire_ok, cost = await async_acquire(limiter, item) assert acquire_ok elif limiter_delay == Duration.MINUTE: acquire_ok, cost = await async_acquire(limiter, item) assert acquire_ok else: with pytest.raises(BucketFullException) as err: await async_acquire(limiter, item) else: acquire_ok, cost = await async_acquire(limiter, item) if limiter_delay == 500 or limiter_delay is None: assert not acquire_ok else: assert acquire_ok # Flush before testing again await flushing_bucket(bucket) logger.info("Limiter Test #3: exceeding weight") await prefilling_bucket(limiter, 0, item) if limiter_should_raise: with pytest.raises(BucketFullException) as err: await async_acquire(limiter, item, 5) else: acquire_ok, cost = await async_acquire(limiter, item, 5) assert cost <= 50 assert not acquire_ok @pytest.mark.asyncio async def test_limiter_async_factory_get( clock, limiter_should_raise, limiter_delay, ): factory = DemoAsyncGetBucketFactory(clock) limiter = Limiter( factory, raise_when_fail=limiter_should_raise, max_delay=limiter_delay, buffer_ms=5 ) item = "demo" logger.info("If weight = 0, it just passes thru") acquire_ok, cost = await async_acquire(limiter, item, weight=0) assert acquire_ok assert cost <= 10 logger.info("Limiter Test #1") await prefilling_bucket(limiter, 0.3, item) if not limiter_should_raise: acquire_ok, cost = await async_acquire(limiter, item) if limiter_delay is None: assert cost <= 50 assert not acquire_ok else: assert acquire_ok else: if limiter_delay is None: with pytest.raises(BucketFullException): acquire_ok, cost = await async_acquire(limiter, item) else: acquire_ok, cost = await async_acquire(limiter, item) assert cost > 350 assert acquire_ok # # Flush before testing again 
await factory.flush() logger.info("Limiter Test #2") await prefilling_bucket(limiter, 0, item) if limiter_should_raise: if limiter_delay == 500: with pytest.raises(LimiterDelayException) as err: await async_acquire(limiter, item) assert err.meta_info["max_delay"] == 500 assert err.meta_info["actual_delay"] > 600 assert err.meta_info["name"] == item elif limiter_delay == 2000: acquire_ok, cost = await async_acquire(limiter, item) assert acquire_ok elif limiter_delay == Duration.MINUTE: acquire_ok, cost = await async_acquire(limiter, item) assert acquire_ok else: with pytest.raises(BucketFullException) as err: await async_acquire(limiter, item) else: acquire_ok, cost = await async_acquire(limiter, item) if limiter_delay == 500 or limiter_delay is None: assert not acquire_ok else: assert acquire_ok # Flush before testing again await factory.flush() logger.info("Limiter Test #3: exceeding weight") await prefilling_bucket(limiter, 0, item) if limiter_should_raise: with pytest.raises(BucketFullException) as err: await async_acquire(limiter, item, 5) else: acquire_ok, cost = await async_acquire(limiter, item, 5) assert cost <= 50 assert not acquire_ok @pytest.mark.asyncio async def test_limiter_concurrency( clock, create_bucket, limiter_should_raise, limiter_delay, ): bucket: AbstractBucket = await create_bucket(DEFAULT_RATES) factory = DemoBucketFactory(clock, demo=bucket) limiter = Limiter( factory, raise_when_fail=limiter_should_raise, max_delay=limiter_delay, ) logger.info("Test Limiter Concurrency: inserting 4 items") items = ["demo" for _ in range(4)] if not limiter_should_raise: if not limiter_delay or limiter_delay == 500: result = await concurrent_acquire(limiter, items) item_names = await inspect_bucket_items(bucket, 3) logger.info( "(No raise, delay is None or delay > max_delay) Result = %s, Item = %s", result, item_names, ) else: result = await concurrent_acquire(limiter, items) item_names = await inspect_bucket_items(bucket, 3) logger.info( "(No raise, delay 
< max_delay) Result = %s, Item = %s", result, item_names, ) else: if not limiter_delay: with pytest.raises(BucketFullException): await concurrent_acquire(limiter, items) elif limiter_delay == 500: with pytest.raises(LimiterDelayException): await concurrent_acquire(limiter, items) else: result = await concurrent_acquire(limiter, items) item_names = await inspect_bucket_items(bucket, 4) logger.info("(Raise, delay) Result = %s, Item = %s", result, item_names) @pytest.mark.asyncio async def test_limiter_decorator( clock, create_bucket, limiter_should_raise, limiter_delay, ): bucket = await create_bucket(DEFAULT_RATES) factory = DemoBucketFactory(clock, demo=bucket) limiter = Limiter( factory, raise_when_fail=limiter_should_raise, max_delay=limiter_delay, ) limiter_wrapper = limiter.as_decorator() def mapping(_: int): return "demo", 1 counter = 0 @limiter_wrapper(mapping) def inc_counter(num: int): nonlocal counter counter += num @limiter_wrapper(mapping) async def async_inc_counter(num: int): nonlocal counter counter += num inc = inc_counter(1) if isawaitable(inc): await inc assert counter == 1 await async_inc_counter(1) assert counter == 2 def test_wait_too_long(): requests_per_second = 10 rate = Rate(requests_per_second, Duration.SECOND) bucket = InMemoryBucket([rate]) limiter = Limiter(bucket, raise_when_fail=False, clock=TimeClock(), max_delay=Duration.SECOND, retry_until_max_delay=True) # raise_when_fail = False for i in range(500): success = limiter.try_acquire("mytest", 1) if not success: break assert not success # retried and then failed time.sleep(1) # raise_when_fail = True limiter = Limiter(bucket, raise_when_fail=True, clock=TimeClock(), max_delay=Duration.SECOND, retry_until_max_delay=True) with pytest.raises(LimiterDelayException): for i in range(500): success = limiter.try_acquire("mytest", 1) if not success: break PyrateLimiter-3.9.0/tests/test_multiprocessing.py000066400000000000000000000225011504242573000223210ustar00rootroot00000000000000"""Rate 
limiter multiprocessing tests""" import asyncio import time from collections import deque from concurrent.futures import ProcessPoolExecutor from concurrent.futures import wait from functools import partial from pathlib import Path from tempfile import gettempdir from typing import List from typing import Optional import pytest from pyrate_limiter import AbstractBucket from pyrate_limiter import BucketAsyncWrapper from pyrate_limiter import BucketFullException from pyrate_limiter import Duration from pyrate_limiter import Limiter from pyrate_limiter import LimiterDelayException from pyrate_limiter import Rate from pyrate_limiter import SQLiteBucket from pyrate_limiter import SQLiteClock from pyrate_limiter import TimeClock from pyrate_limiter.buckets.mp_bucket import MultiprocessBucket MAX_DELAY = Duration.DAY LIMITER: Optional[Limiter] = None BUCKET: Optional[AbstractBucket] = None def init_process_mp( bucket: MultiprocessBucket, use_async_bucket: bool, raise_when_fail: bool = False, max_delay: Duration = MAX_DELAY, ): global LIMITER global BUCKET BUCKET = bucket if not use_async_bucket: # if we're doing async, don't initialize the limiter here, we'll do it in the task so it's in the event loop LIMITER = Limiter( bucket, raise_when_fail=raise_when_fail, clock=TimeClock(), max_delay=max_delay, retry_until_max_delay=not raise_when_fail, ) def my_task(): assert LIMITER is not None while not LIMITER.try_acquire("my_task"): time.sleep(0.01) result = time.time() time.sleep(0.01) return result def analyze_times(start: float, requests_per_second: int, times: List[float]): elapsed = sorted(t - start for t in times) w: deque[float] = deque() ops_last_sec: List[int] = [] for t in elapsed: w.append(t) while w and w[0] <= t - 1: w.popleft() ops_last_sec.append(len(w)) print(f"{max(ops_last_sec)=}, {requests_per_second=}") assert ( max(ops_last_sec) <= requests_per_second * 1.05 ) # a small amount of error is observed when multiprocessing def 
init_process_sqlite(requests_per_second, db_path): global LIMITER rate = Rate(requests_per_second, Duration.SECOND) bucket = SQLiteBucket.init_from_file([rate], db_path=db_path, use_file_lock=True) LIMITER = Limiter( bucket, raise_when_fail=False, max_delay=MAX_DELAY, retry_until_max_delay=True, clock=SQLiteClock(bucket), ) def my_task_async(num_requests): async def task_async(limiter: Limiter, name="mytask", weight=1): while not await limiter.try_acquire_async(name, weight): pass return time.monotonic() async def run_many_async_tasks(): assert BUCKET is not None bucket = BucketAsyncWrapper(BUCKET) limiter = Limiter( bucket, raise_when_fail=False, clock=SQLiteClock.default(), max_delay=MAX_DELAY, retry_until_max_delay=True, ) return await asyncio.gather( *(task_async(limiter, str(i), 1) for i in range(num_requests)) ) return asyncio.run(run_many_async_tasks()) def test_mp_bucket(): requests_per_second = 250 num_seconds = 5 num_requests = requests_per_second * num_seconds rate = Rate(requests_per_second, Duration.SECOND) bucket = MultiprocessBucket.init([rate]) def prime_bucket(): # Prime the bucket limiter = Limiter(bucket) [limiter.try_acquire("mytest") for i in range(requests_per_second)] start = time.time() with ProcessPoolExecutor( initializer=partial(init_process_mp, bucket, False), ) as executor: prime_bucket() futures = [executor.submit(my_task) for _ in range(num_requests)] wait(futures) times = [] for f in futures: try: t = f.result() times.append(t) except Exception as e: raise e analyze_times(start, requests_per_second, times) def test_sqlite_filelock_bucket(): requests_per_second = 250 num_seconds = 5 num_requests = requests_per_second * num_seconds # Initialize the table temp_dir = Path(gettempdir()) db_path = str(temp_dir / f"pyrate_limiter_{time.time()}.sqlite") # prime the bucket def prime_bucket(): rate = Rate(requests_per_second, Duration.SECOND) bucket = SQLiteBucket.init_from_file( [rate], db_path=db_path, use_file_lock=True ) limiter = Limiter( 
bucket, raise_when_fail=False, max_delay=MAX_DELAY, retry_until_max_delay=True, clock=SQLiteClock(bucket), ) [limiter.try_acquire("mytest") for i in range(requests_per_second)] # Start the ProcessPoolExecutor start = time.time() with ProcessPoolExecutor( initializer=partial( init_process_sqlite, requests_per_second=requests_per_second, db_path=db_path, ) ) as executor: prime_bucket() futures = [executor.submit(my_task) for _ in range(num_requests)] wait(futures) times = [] for f in futures: try: t = f.result() times.append(t) except Exception as e: raise e analyze_times(start, requests_per_second, times) @pytest.mark.asyncio async def test_mp_bucket_async(): requests_per_second = 250 num_seconds = 5 num_requests = requests_per_second * num_seconds rate = Rate(requests_per_second, Duration.SECOND) bucket = MultiprocessBucket.init([rate]) async def prime_bucket(): # prime the bucket limiter = Limiter( BucketAsyncWrapper(bucket), retry_until_max_delay=True, max_delay=MAX_DELAY, clock=SQLiteClock.default(), ) for i in range(100): await limiter.try_acquire_async("mytest") start = time.time() with ProcessPoolExecutor( initializer=partial(init_process_mp, bucket, True), ) as executor: # make sure requests is divisible by num workers num_workers = executor._max_workers num_requests = num_workers * (num_requests // num_workers) wait([executor.submit(my_task_async, 250)]) futures = [ executor.submit(my_task_async, num_requests // num_workers) for _ in range(num_workers) ] wait(futures) time.sleep(2) futures = [ executor.submit(my_task_async, num_requests // num_workers) for _ in range(num_workers) ] wait(futures) times = [] for f in futures: try: t = f.result() times += t except Exception as e: raise e analyze_times(start, requests_per_second, times) def test_mp_bucket_failures(): requests_per_second = 1 num_seconds = 5 num_requests = requests_per_second * num_seconds rate = Rate(requests_per_second, Duration.SECOND) bucket = MultiprocessBucket.init([rate]) with 
ProcessPoolExecutor( initializer=partial(init_process_mp, bucket, False, True, Duration.SECOND), ) as executor: futures = [executor.submit(my_task) for _ in range(num_requests)] wait(futures) with pytest.raises(LimiterDelayException): for f in futures: try: f.result() except Exception as e: raise e def test_limiter_delay(): requests_per_second = 1 num_seconds = 5 num_requests = requests_per_second * num_seconds rate = Rate(requests_per_second, Duration.SECOND) bucket = MultiprocessBucket.init([rate]) with pytest.raises(LimiterDelayException): limiter = Limiter( bucket, raise_when_fail=True, clock=TimeClock(), max_delay=Duration.SECOND, retry_until_max_delay=False, ) for i in range(1000): limiter.try_acquire("mytest", 1) with ProcessPoolExecutor( initializer=partial(init_process_mp, bucket, False, True, Duration.SECOND), ) as executor: futures = [executor.submit(my_task) for _ in range(num_requests)] wait(futures) with pytest.raises(LimiterDelayException): for f in futures: try: f.result() except Exception as e: raise e def test_bucket_full(): requests_per_second = 1 num_seconds = 5 num_requests = requests_per_second * num_seconds rate = Rate(requests_per_second, Duration.SECOND) bucket = MultiprocessBucket.init([rate]) limiter = Limiter( bucket, raise_when_fail=True, clock=TimeClock(), max_delay=None, retry_until_max_delay=False, ) with pytest.raises(BucketFullException): for i in range(1000): limiter.try_acquire("mytest", 1) with ProcessPoolExecutor( initializer=partial(init_process_mp, bucket, False, True, None), ) as executor: futures = [executor.submit(my_task) for _ in range(num_requests)] wait(futures) with pytest.raises(BucketFullException): for f in futures: try: f.result() except Exception as e: raise e PyrateLimiter-3.9.0/tests/test_others.py000066400000000000000000000104121504242573000203740ustar00rootroot00000000000000import logging from inspect import isawaitable from time import time import pytest from pyrate_limiter import binary_search from 
pyrate_limiter import Duration from pyrate_limiter import Rate from pyrate_limiter import RateItem from pyrate_limiter import SQLiteClock from pyrate_limiter import validate_rate_list def test_duration(): assert int(Duration.SECOND) == 1000 assert Duration.SECOND.value == 1000 assert Duration.SECOND * 60 == 60 * Duration.SECOND == Duration.MINUTE.value == int(Duration.MINUTE) assert Duration.MINUTE * 60 == 60 * Duration.MINUTE == Duration.HOUR.value == int(Duration.HOUR) assert Duration.HOUR * 24 == 24 * Duration.HOUR == Duration.DAY.value == int(Duration.DAY) assert Duration.DAY * 7 == 7 * Duration.DAY == Duration.WEEK.value == int(Duration.WEEK) assert Duration.DAY + Duration.DAY == Duration.DAY * 2 assert Duration.MINUTE + 30000 == 30000 + Duration.MINUTE == 90000 def test_readable_duration(): assert Duration.readable(300) == "300ms" assert Duration.readable(1000) == "1.0s" assert Duration.readable(1300) == "1.3s" assert Duration.readable(Duration.SECOND * 3.5) == "3.5s" assert Duration.readable(Duration.SECOND * 60 * 24 + Duration.SECOND * 30) == "24.5m" assert Duration.readable(Duration.MINUTE * 3.5) == "3.5m" assert Duration.readable(Duration.MINUTE * 60 + Duration.MINUTE * 30) == "1.5h" assert Duration.readable(Duration.HOUR * 3.5) == "3.5h" assert Duration.readable(Duration.DAY * 3.5) == "3.5d" assert Duration.readable(Duration.WEEK * 3.5) == "3.5w" def test_rate(): rate = Rate(1000, Duration.SECOND) assert str(rate) == "limit=1000/1.0s" assert repr(rate) == "limit=1000/1000" rate = Rate(1000, Duration.SECOND * 3) assert str(rate) == "limit=1000/3.0s" assert repr(rate) == "limit=1000/3000" rate = Rate(1000, 3500) assert str(rate) == "limit=1000/3.5s" rate = Rate(1000, Duration.MINUTE * 3.5) assert str(rate) == "limit=1000/3.5m" rate = Rate(1000, Duration.MINUTE * 3) assert str(rate) == "limit=1000/3.0m" def test_binary_search(): """Testing binary-search that find item in array""" # Normal list of items items = [RateItem("item", nth * 2) for nth in range(5)] 
print([item.timestamp for item in items]) assert binary_search(items, 0) == 0 assert binary_search(items, 1) == 1 assert binary_search(items, 2) == 1 assert binary_search(items, 3) == 2 assert binary_search(items, 9) == -1 assert binary_search(items, 8) == 4 # If the value is larger than the last item, idx would be -1 assert binary_search(items, 11) == -1 # Empty list items = [] assert binary_search(items, 1) == 0 assert binary_search(items, 2) == 0 assert binary_search(items, 3) == 0 def test_rate_validator(): rates = [] assert validate_rate_list(rates) is False rates = [Rate(1, 1)] assert validate_rate_list(rates) is True rates = [Rate(2, 1), Rate(1, 1)] assert validate_rate_list(rates) is False rates = [Rate(1, 1), Rate(2, 1)] assert validate_rate_list(rates) is False rates = [Rate(1, 1), Rate(2, 2)] assert validate_rate_list(rates) is True rates = [Rate(2, 1), Rate(1, 2)] assert validate_rate_list(rates) is False rates = [Rate(2, 1), Rate(3, 2)] assert validate_rate_list(rates) is True rates = [Rate(1, 1), Rate(3, 2), Rate(4, 1)] assert validate_rate_list(rates) is False rates = [Rate(2, 1), Rate(3, 2), Rate(4, 3)] assert validate_rate_list(rates) is True @pytest.mark.asyncio async def test_clock(clock): """Testing clock backends """ now = clock.now() while isawaitable(now): now = await now logging.info("Testing clock: %s -> %d", clock, now) assert now > 0 if now > 1000: # NOTE: if not MonotonicClock, the time values should be almost equal use_time = time() * 1000 assert int(now) - round(use_time) < 2 @pytest.mark.asyncio async def test_sqlite_clock(): """Testing clock backends """ await test_clock(SQLiteClock.default()) from .conftest import create_sqlite_bucket bucket = await create_sqlite_bucket([Rate(1, Duration.SECOND)]) await test_clock(SQLiteClock(bucket.conn)) await test_clock(SQLiteClock(bucket))