pax_global_header00006660000000000000000000000064147231207400014512gustar00rootroot0000000000000052 comment=ae3845a18ec8721426cd0c95c46388a4c6963c02 returns-0.24.0/000077500000000000000000000000001472312074000132775ustar00rootroot00000000000000returns-0.24.0/.editorconfig000066400000000000000000000004521472312074000157550ustar00rootroot00000000000000# Check http://editorconfig.org for more information # This is the main config file for this project: root = true [*] charset = utf-8 trim_trailing_whitespace = true end_of_line = lf indent_style = space insert_final_newline = true indent_size = 2 [*.py] indent_size = 4 [*.pyi] indent_size = 4 returns-0.24.0/.github/000077500000000000000000000000001472312074000146375ustar00rootroot00000000000000returns-0.24.0/.github/ISSUE_TEMPLATE/000077500000000000000000000000001472312074000170225ustar00rootroot00000000000000returns-0.24.0/.github/ISSUE_TEMPLATE/Bug.md000066400000000000000000000012471472312074000200650ustar00rootroot00000000000000--- name: Bug about: Create a report to help us improve labels: 'bug' --- # Bug report ## What's wrong ## How is that should be ## System information - `python` version: - `returns` version: - `mypy` version: - `hypothesis` version (if any): - `pytest` version (if any): returns-0.24.0/.github/ISSUE_TEMPLATE/config.yml000066400000000000000000000004651472312074000210170ustar00rootroot00000000000000# Ref: https://help.github.com/en/github/building-a-strong-community/configuring-issue-templates-for-your-repository#configuring-the-template-chooser blank_issues_enabled: true # default contact_links: - name: >- 💬 Telegram: @drypython url: https://t.me/drypython about: Chat with dry-python devs returns-0.24.0/.github/dependabot.yml000066400000000000000000000003611472312074000174670ustar00rootroot00000000000000version: 2 updates: - package-ecosystem: pip directory: "/" schedule: interval: daily open-pull-requests-limit: 10 - package-ecosystem: github-actions directory: "/" schedule: interval: daily open-pull-requests-limit: 10 returns-0.24.0/.github/pull_request_template.md000066400000000000000000000022641472312074000216040ustar00rootroot00000000000000# I have made things! ## Checklist - [ ] I have double checked that there are no unrelated changes in this pull request (old patches, accidental config files, etc) - [ ] I have created at least one test case for the changes I have made - [ ] I have updated the documentation for the changes I have made - [ ] I have added my changes to the `CHANGELOG.md` ## Related issues 🙏 Please, if you or your company finds `dry-python` valuable, help us sustain the project by sponsoring it transparently on https://github.com/sponsors/dry-python. As a thank you, your profile/company logo will be added to our main README which receives hundreds of unique visitors per day. 
returns-0.24.0/.github/workflows/000077500000000000000000000000001472312074000166745ustar00rootroot00000000000000returns-0.24.0/.github/workflows/test.yml000066400000000000000000000052161472312074000204020ustar00rootroot00000000000000name: test on: push: branches: - master pull_request: workflow_dispatch: permissions: contents: read concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true jobs: tests: runs-on: ubuntu-latest strategy: fail-fast: false matrix: python-version: ['3.10', '3.11', '3.12', '3.13'] steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install poetry run: | curl -sSL "https://install.python-poetry.org" | python # Adding `poetry` to `$PATH`: echo "$HOME/.poetry/bin" >> $GITHUB_PATH - name: Install dependencies run: | poetry config virtualenvs.in-project true poetry install --all-extras poetry run pip install -U pip - name: Run tests run: | poetry run flake8 . # In order to make `exclude` option work, we need to separate the checks # of returns and its tests into two separated commands poetry run mypy --enable-error-code=unused-awaitable returns poetry run mypy tests # Different python versions are covered differently: poetry run pytest returns docs/pages tests poetry run codespell returns tests docs typesafety README.md CONTRIBUTING.md CHANGELOG.md poetry run poetry check poetry run pip check poetry run python -m slotscheck returns --verbose - name: Upload coverage to Codecov if: matrix.python-version == 3.11 uses: codecov/codecov-action@v5 with: file: ./coverage.xml typesafety-tests: runs-on: ubuntu-latest strategy: fail-fast: false matrix: python-version: ['3.10', '3.11', '3.12', '3.13'] shard: [0, 1, 2, 3] steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install poetry run: | curl -sSL "https://install.python-poetry.org" | python # Adding `poetry` to `$PATH`: echo "$HOME/.poetry/bin" >> $GITHUB_PATH - name: Install dependencies run: | poetry config virtualenvs.in-project true poetry install --all-extras poetry run pip install -U pip - name: Run typesafety tests run: | poetry run pytest typesafety \ --num-shards=4 --shard-id=${{ matrix.shard }} \ -p no:cov -o addopts="" --mypy-ini-file=setup.cfg returns-0.24.0/.gitignore000066400000000000000000000056021472312074000152720ustar00rootroot00000000000000#### joe made this: http://goel.io/joe #### python #### # Byte-compiled / optimized / DLL files .pytest_cache __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ wheels/ *.egg-info/ .installed.cfg *.egg pip-wheel-metadata/ # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. 
*.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover .hypothesis/ # Translations *.mo *.pot # Django stuff: *.log local_settings.py # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation docs/_build/ # PyBuilder target/ # Jupyter Notebook .ipynb_checkpoints # pyenv .python-version # celery beat schedule file celerybeat-schedule # SageMath parsed files *.sage.py # Environments .env .venv env/ venv/ ENV/ # Spyder project settings .spyderproject .spyproject # Rope project settings .ropeproject # mkdocs documentation /site # mypy .mypy_cache/ #### macos #### # General *.DS_Store .AppleDouble .LSOverride # Icon must end with two \r Icon # Thumbnails ._* # Files that might appear in the root of a volume .DocumentRevisions-V100 .fseventsd .Spotlight-V100 .TemporaryItems .Trashes .VolumeIcon.icns .com.apple.timemachine.donotpresent # Directories potentially created on remote AFP share .AppleDB .AppleDesktop Network Trash Folder Temporary Items .apdisk #### windows #### # Windows thumbnail cache files Thumbs.db ehthumbs.db ehthumbs_vista.db # Dump file *.stackdump # Folder config file Desktop.ini # Recycle Bin used on file shares $RECYCLE.BIN/ # Windows Installer files *.cab *.msi *.msm *.msp # Windows shortcuts *.lnk #### linux #### *~ # temporary files which can be created if a process still has a handle open of a deleted file .fuse_hidden* # KDE directory preferences .directory # Linux trash folder which might appear on any partition or disk .Trash-* # .nfs files are created when an open file is removed but is still being accessed .nfs* #### jetbrains #### # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 # User-specific stuff: .idea/ ## File-based project format: *.iws ## Plugin-specific files: # IntelliJ /out/ # mpeltonen/sbt-idea plugin .idea_modules/ # JIRA plugin atlassian-ide-plugin.xml # Cursive Clojure plugin .idea/replstate.xml # Crashlytics plugin (for Android Studio and IntelliJ) com_crashlytics_export_strings.xml crashlytics.properties crashlytics-build.properties fabric.properties ### Custom ### ex.py experiments/ .vscode/tags .pyre .pytype build/ returns-0.24.0/.readthedocs.yml000066400000000000000000000012541472312074000163670ustar00rootroot00000000000000# .readthedocs.yml version: 2 # Set the version of Python and other tools you might need build: os: ubuntu-lts-latest tools: {python: "3.11"} jobs: pre_create_environment: - asdf plugin add poetry - asdf install poetry latest - asdf global poetry latest - poetry config virtualenvs.create false - poetry self add poetry-plugin-export - poetry export --only main --only docs --extras check_laws --format=requirements.txt --output=requirements.txt python: install: - requirements: requirements.txt # Build documentation in the docs/ directory with Sphinx sphinx: configuration: docs/conf.py fail_on_warning: true formats: all returns-0.24.0/CHANGELOG.md000066400000000000000000000475651472312074000151310ustar00rootroot00000000000000# Version history We follow Semantic Versions since the `1.0.0` release. Versions before `1.0.0` are `0Ver`-based: incremental in minor, bugfixes only are patches. See [0Ver](https://0ver.org/). 
## 0.24.0 ### Features - Drop `python3.9` support - Add `python3.13` support - Add support for `mypy>=1.12` - Add picky exceptions to `future_safe` decorator like `safe` has - Improve inference of `ResultLike` objects when exception catching decorator is applied with explicit exception types - Add picky exceptions to `impure_safe` decorator like `safe` has. Issue #1543 - Add partition function to result module. Issue #1905 - Add `default_error` parameter to `returns.converters.maybe_to_result`, which provides a default error value for `Failure` ## 0.23.0 ### Features - *Breaking*: Remove `success_type` and `failure_type` fields from `IOResult`, `Maybe` and `Result` types ### Misc - Now requires `mypy>=1.10` - Adds `[check-laws]` extra for installation ## 0.22.0 ### Features - *Breaking*: Drops `python3.7` support - Now requires `mypy>=1.5` - Adds `trampolines` support ## 0.21.0 ### Features - Now requires `mypy>=1.4` - Adds `[compatible-mypy]` extra for installation ## 0.20.1 ### Bugfixes - Fixed HKT `to_str` example in documentation - Fixed backward deserialization compatibility for BaseContainer ## 0.20.0 ### Features - Now requires `mypy>=1.2` ### Bugfixes - Fixes a problem with `do-notation` and type aliases - Fixes custom pickle protocol to handle `None` values gracefully - Removes broken drylabs.io link in README - Revises pointfree documentation ## 0.19.0 aka The Do Notation ### Features - Adds `do` notation - Adds `attempt` decorator ### Misc - Check ``__slots__`` correctness with `slotscheck` ## 0.18.0 New Year Release! 🎄 ### Features - Now requires `typing_extensions>=4.0` - Now requires `mypy>=0.930` - Removes plugin for `@safe`, `@maybe`, `@future`, etc. Because we now use `ParamSpec` type to properly type decorators ### Bugfixes - Fixes `__slots__` not being set properly in containers and their base classes - Fixes patching of containers in pytest plugin not undone after each test ## 0.17.0 ### Features - Enables Pattern Matching support for `Result` containers - Enables Pattern Matching support for `Maybe` container - Enables Pattern Matching support for `IOResult` container - Improves `hypothesis` plugin, now we detect when type cannot be constructed and give a clear error message - Adds the option to pass what exceptions `@safe` will handle ## 0.16.0 ### Features - Makes `_Nothing` a singleton - Refactor `flow` function to be faster ### Bugfixes - Fixes that `assert_trace` was not catching containers from `@safe`-wrapped functions ### Misc - Fixes typos in documentation ## 0.15.0 aka The initial HKT release ### Features - Adds Higher Kinded Types partial support - **Breaking**: drops `python3.6` support - **Breaking**: makes our `mypy` plugin not optional, but required! - **Breaking**: changes all `RequiresContext`-based type arguments order, previously we used to specify `_EnvType` as the first type argument, now it is the last one. 
This is done to respect new HKT rules - **Breaking**: renames `.rescue` to `.lash` - **Breaking**: removes all old interfaces from `primitives/interfaces.py`, use new typeclasses instead - **Breaking**: ``Maybe`` is fully reworked to be lawful - **Breaking**: removes `value_or` pointfree method, because it is impossible to express with HKT - **Breaking**: removes `.value_or`, `.unwrap`, and `.failure` methods from `FutureResult` and `RequiresContext`-based types, because we do require these methods to raise an exception on failure, but these methods were lazy and did not raise the required exception - **Breaking**: changes how `is_successful` is typed: now we allow any `Unwrappable` interface instances there, including custom ones - **Breaking**: changes `UnwrapFailedError` constructor, now it does accept an `Unwrappable` instance instead of a `BaseContainer` - **Breaking**: removes `.fix` method from all containers, also removes `fix` pointfree function - **Breaking**: Removes `coalesce` function, because it is impossible to properly type it - **Breaking**: Removes all `Context*` based types with `.ask()` method, use new `.ask()` methods on the `Reader`-based containers - **Breaking**: Now `Future` and `FutureResult` can be awaited multiple times - **Breaking**: Removes `.unify()` method from several containers, use `unify()` pointfree function instead - **Breaking**: Removes ``.from_iterable`` method from all containers, instead adds better `iterables` support, we now have `returns.iterables` module with `Fold` helper - **Breaking**: Renames property `empty` to `no_args` of all `RequiresContext`-based classes - Adds new public interfaces: see `returns.interfaces` - Adds `methods` package with several helpful things inside - Adds `FutureSuccess` and `FutureFailure` unit functions to be similar to `Result` and `IOResult` - Adds `.swap` method to `Result`, `IOResult`, `FutureResult`, and other result based containers - Adds `.modify_env` method to all `RequiresContext*` types - Adds `.rescue` to `Maybe` - Adds `.equals` methods to types that can be compared directly: `Result`, `Maybe`, `IO`, `IOResult` - Adds missing `from_requires_context_future_result` to `RequiresContext` - Adds `.from_optional` and `.bind_optional` to `Maybe` container - Adds `__slots__` to `UnwrapFailedError` with `halted_container` - Changes `flatten` to work with `KindN` and any possible container - Adds a helper to test traces to our `pytest` plugin - Adds `cond` function to `pointfree` and `methods` packages - Adds `compose_result` HKT method and pointfree function - Adds `unify` HKT pointfree function - Adds `bimap` pointfree function - Adds `unwrap_or_failure` function to `methods` package - Adds `collect_trace` helper function for better development experience - Adds `hypothesis` integration and pre-defined "monad laws as values" - Adds `assert_equal` method to our `pytest` plugin ### Bugfixes - **Breaking**: fixes serious typing issue and changes how `flow` works - **Breaking**: fixes serious typing issue and changes how `pipe` works, now it has a hard limit of 20 parameters - Fixes that `RequiresContextFutureResult` was not supported by `pytest` plugin - Fixes incorrect `partial` behaviour in an edge case, #618 - Fixes that `.apply` method of `IOResult` was working incorrectly, it was returning `IOFailure(2)` as a result of `IOFailure(1).apply(IOFailure(2))` - Fixes bug that `safe(tap(...))` was revealing invalid types sometimes ### Misc - Adds a lot of new typetests - Checks that now all math laws are checked 
for all types - Changes docs structure, adds new `Interfaces`, `HKT`, and `Methods` pages - Changed `__str__` method in `BaseContainer` class to `__repr__` method - Adds `Quickstart` guide ## 0.14.0 ### Features - **Breaking**: renames mypy plugin from `decorator_plugin` to `returns_plugin` because of a complete rewrite and lots of new features - **Breaking**: changes `@safe`, `@impure`, `impure_safe`, `@maybe` semantics: they do not work with `async` functions anymore; now you are forced to use `Future` and its helpers to work with `async` functions - **Breaking**: renames `Maybe.new` to `Maybe.from_value`. Because all our other containers support this protocol. Only `Maybe` was different, sorry for that! - **Breaking**: renames `.from_success()` to `.from_value()`, there's no need in two separate methods - **Breaking**: renames `.from_successful_io()` to `.from_io()`, there's no need in two separate methods - **Breaking**: renames `.from_successful_context()` to `.from_context()`, there's no need in two separate methods - **Breaking**: since we now support `.apply()` method, there's no more need in `*_squash` converters, they are removed - **Breaking**: renamed `Instanceable` to `Applicative` - **Breaking**: changes `.from_io` and `.from_failed_io` of `IOResult` to return `Any` instead of `NoReturn` unfilled type - **Breaking**: removes `.lift` and `.lift_*` methods from all containers, use `map_`, `bind_result`, `bind_io`, and other pointfree helpers instead - **Breaking**: removes `@pipeline` function. It was a mistake: it does not work with mixed container types, it does not type failures properly, it does not work with ``IO`` and ``Future``, it enforces to write imperative code in a functional codebase. Use ``flow`` instead - Adds typed `partial` and `curry` mypy plugins! 
- Adds typed `flow` plugin, now it can accept any number of arguments, it now also has **excellent** type inference - Adds typed `pipe` plugin, now it can accept any number of arguments, it now also has good type inference - Adds `managed` pipeline function that is useful for working with stateful computations - Adds typed `map_`, `fix`, and `alt` pointfree functions - Adds typed `bind_result`, `bind_io`, `bind_ioresult`, `bind_context`, `bind_context_result`, `bind_future`, `bind_async`, and `bind_awaitable` pointfree functions - Adds typed `bind_async_future` and `bind_async_future_result` pointfree functions - Adds typed `unify` pointfree function - Adds typed `apply` pointfree function - Adds typed `value_or` pointfree function - Adds `pytest` plugin with the ability to tests error handling - Adds `Future` container to easily work with `async` functions - Adds `FutureResult` container to easily work with `async` function that might fail - Adds `RequiresContextFutureResult` container - Adds `ReaderFutureResult` alias for `RequiresContextFutureResult` - Adds `RequiresContextFutureResultE` and `ReaderFutureResultE` aliases - Adds `Future`, `FutureResult` and `RequiresContextFutureResult` support for all existing pointfree functions - Adds `bind_io` method to `IOResult` - Adds `bind_io` method to `RequiresContextIOResult` - Adds `or_else` method to `Maybe` - Adds `.from_io` and `.from_failed_io` to `RequiresContextIOResult` - Syncs naming in `from_*` methods, now all parameters are named `inner_value` - Adds `not_` composition helper - Adds `flatten` support for `Future`, `FutureResult` and `RequiresContextFutureResult` - Adds `__copy__` and `__deepcopy__` magic methods to `Immutable` class - Speeds up ``is_successful`` function - Makes all `Context` context helpers abstract, so you cannot create new instances of this class, also adds `__slots__` to these classes - Improves `RequiresContext*` types with `NoDeps` where it is logically true ### Bugfixes - Fixes that `@safe` decorator was generating incorrect signatures for functions with `Any` - Fixes that `.rescue()` of `RequiresContextResult` was returning `Any` - Fixes that `.rescue()` of `RequiresContextIOResult` was returning `Any` - Fixes that `RequiresContextResult` and `RequiresContextIOResult` were not `final` - Fixes that `ImmutableStateError` was not a subclass of `AttributeError` - Fixes that `IOResult` was not showing `str` representation of wrapped `inner_value` ### Misc - Replaces `pytest-asyncio` with `anyio` plugin, now we test compatibility with any IO stack: `asyncio`, `trio`, `curio` - Updates lots of dependencies - Adds lots of new tests - Updates lots of docs - Removes "IO marker" name from docs in favor for "IO container", it is not special at all. Why would we call it differently? ## 0.13.0 ### Features - **Breaking**: renames `join` to `flatten`, sorry! 
- **Breaking**: renames `box` to `bind` and moves it to `returns.pointfree` - **Breaking**: removes `Maybe.rescue` and `Maybe.fix` methods - **Breaking**: renames `io_squash` to `squash_io` and moves it to `returns.converters` - **Breaking**: moves all interfaces from `returns.primitives.container` to `returns.primitives.interfaces` - Adds `rescue` pointfree function - Adds `ResultE` alias for `Result[..., Exception]` - Adds `RequiresContext` container and `Context` helper class - Adds `RequiresContext` support for `bind` pointfree function - Adds `RequiresContext` support for `flatten` function - Adds `RequiresContextResult` container - Adds `RequiresContextResultE` alias - Adds `ReaderResult` and `ReaderResultE` aliases for `RequiresContextResult[..., ..., Exception]` - Adds `RequiresContextResult` support for `bind` and `rescue` - Adds `RequiresContextResult` support for `flatten` - Adds `IOResult` helper to work better with `IO[Result[a, b]]` - Adds `IOResultE` alias for `IOResult[a, Exception]` - Adds `IOResult` support for `bind` - Adds `IOResult` support for `flatten` - Adds `IOResult` support for `@pipeline` - Adds `IOResult` support for `coalesce` - Adds `IOResult` support for `is_successful` - Adds `RequiresContextIOResult` container - Adds `RequiresContextIOResultE` alias - Adds `ReaderIOResult` and `ReaderIOResultE` aliases for `RequiresContextIOResult[..., ..., Exception]` - Adds `RequiresContextIOResult` support for `bind` and `rescue` - Adds `RequiresContextIOResult` support for `flatten` - Adds `Result.lift`, `Maybe.lift`, `RequiresContext.lift`, and `RequiresContextResult.lift` functions in addition to `IO.lift` - Adds `Immutable` primitive type - Adds `Unitable` protocol and `.from_success()` and `.from_failure()` methods for all `Result` related classes - Adds `Instanceable` protocol and `.from_value()` method for `IO` and `RequiresContext` - Adds `flow` function, which is similar to `pipe` - Adds `swap` converter for `Result` and `IOResult` - Adds `squash_context` function to squash `RequiresContext` similar to `IO` ### Bugfixes - Now `Success` and `Failure` (both `io` and pure) return `Any` and not `NoReturn` - Fixes how `flatten` works, also adds more tests and docs about `Failure` case - Fixes `Unwrappable` type being parametrized with only one `TypeVar` - Changes `Success` and `Failure` to return `Any` instead of `NoReturn` ### Misc - Updates `poetry` version in `travis` - Improves ``pipe`` docs with ``lambda`` and `Generic` problem - Improves docs in several places - Now examples in docs tries to be docstests where possible - Changes how tests are checked with `mypy` in CI ## 0.12.0 ### Features - **Breaking**: now `@pipeline` requires a container type when created: `@pipeline(Result)` or `@pipeline(Maybe)` - `Maybe` and `Result` now has `success_type` and `failure_type` aliases - Adds `Result.unify` utility method for better error type composition - We now support `dry-python/classes` as a first-class citizen - Adds `io_squash` to squash several `IO` containers into one container with a tuple inside, currently works with `9` containers max at a time - Adds `untap` function which does convert return type to `None` ### Bugfixes - Fixes that containers were not usable with `multiprocessing` - Changes the inheritance order, now `BaseContainer` is the first child - Fixes that `Nothing` had incorrect docstrings ### Misc - Now `generated` package is protected - Updates `poetry` to `1.0` ## 0.11.0 ### Features - **Breaking**: now `pipe()` does not require argument to be 
the first value, instead it is required to use: `pipe(f1, f2, f3, f4)(value)` - **Breaking**: dropped everything from `returns/__init__.py`, because we now have quite a lot of stuff - **Breaking**: dropped support of zero argument functions for `Nothing.fix` - **Breaking**: dropped support of zero argument functions for `Nothing.rescue` - `Maybe` now has `.failure()` to match the same API as `Result` - Adds `identity` function - Adds `tap` function - Now `pipe` allows to pipe 8 steps - Adds `coalesce_result` and `coalesce_maybe` converters ### Bugfixes - Fixes that code inside `.fix` and `.rescue` of `Maybe` might be called twice ### Misc - Now all methods have doctests - Updates docs about `Success` and `_Success`, `Failure` and `_Failure` - Updates docs about `@pipeline` - Typechecks async functions and decorators inside `typesafety/` tests ## 0.10.0 ### Features - **Breaking**: `python>=3.7,<=3.7.2` are not supported anymore, because of a bug inside `typing` module - **Breaking**: Now `bind` does not change the type of an error - **Breaking**: Now `rescue` does not change the type of a value - **Breaking**: Renames `map_failure` to `alt` - Adds `box()` function with the ability to box function for direct container composition like: `a -> Container[b]` to `Container[a] -> Container[b]` - Adds `IO.lift()` function to lift `a -> a` to `IO[a] -> IO[a]` - Adds `pipe()` function to `pipeline.py` - Adds `__hash__()` magic methods to all containers ### Bugfixes - Changes `Any` to `NoReturn` in `Success` and `Failure` - Now all type parameters in `Result`, `Maybe`, and `IO` are covariant ### Misc - Massive docs rewrite - Updates `mypy` version - Updates `wemake-python-styleguide` and introduces `nitpick` - Updates `pytest-plugin-mypy`, all tests now use `yml` ## 0.9.0 ### Features - Provides a bunch of primitive interfaces to write your own containers - Adds `.map_failure()` method - Adds `flatten()` function to join nested containers ### Bugfixes - Fixes type of `Maybe.fix` and `Maybe.rescue` to work with both `lambda: 1` and `lambda _: 1` ### Misc - Improves `README` ## 0.8.0 ### Features - Reintroduces the `Maybe` container, typed! 
- Introduces converters from one type to another - Adds `mypy` plugin to type decorators - Complete rewrite of `Result` types - Partial API change, now `Success` and `Failure` are not types, but functions - New internal types introduced: `FixableContainer` and `ValueUnwrapContainer` ### Bugfixes - Fixes issue when you could return `IO` container from `Result.bind` - Fixes `@pipeline` return type ### Misc - Reapplied all types to `.py` files - Improved docs about `IO` and `Container` concept - Adds docs about container composition - Moves from `Alpha` to `Beta` ## 0.7.0 ### Features - Adds `IO` container - Adds `unsafe` module with unsafe functions - Changes how functions are located inside the project ### Bugfixes - Fixes container type in `@pipeline` - Now `is_successful` is public - Now `raise_exception` is public ### Misc - Changes how `str()` function works for container types - Total rename to "container" in the source code ## Version 0.6.0 ### Features - `safe` and `pipeline` now supports `asyncio` - `is_successful` now returns `Literal` types if possible ## Version 0.5.0 ### Features - Adds `compose` helper function - Adds public API to `import returns` - Adds `raise_exception` helper function - Adds full traceback to `.unwrap()` ### Misc - Updates multiple dev-dependencies, including `mypy` - Now search in the docs is working again - Relicenses this project to `BSD` - Fixes copyright notice in the docs ## Version 0.4.0 aka Goodbye, containers! ### Features - Moves all types to `.pyi` files - Renames all classes according to new naming pattern - **HUGE** improvement of types - Renames `fmap` to `map` - Renames `do_notation` to `pipeline`, moves it to `functions.py` - Renames `ebind` to `rescue` - Renames `efmap` to `fix` - Renames `container` to `Container` - Removes `Maybe` container, since typing does not have `NonNullable` type ## Version 0.3.1 ### Bugfixes - Adds `py.typed` file to be `PEP561` compatible ## Version 0.3.0, Renamed to `returns` The project is renamed to `returns` and moved to `dry-python` org. ### Features - Adds `.pyi` files for all modules, to enable `mypy` support for 3rd party users ## Version 0.2.0 ### Features - Adds `Maybe` container - Adds immutability and `__slots__` to all containers - Adds methods to work with failures - Adds `safe` decorator to convert exceptions to `Result` container - Adds `is_successful()` function to detect if your result is a success - Adds `failure()` method to unwrap values from failed containers ### Bugfixes - Changes the type of `.bind` method for `Success` container - Changes how equality works, so now `Failure(1) != Success(1)` - Changes how new instances created on unused methods ### Misc - Improves docs ## Version 0.1.1 ### Bugfixes - Changes how `PyPI` renders package's page ### Misc - Improves `README` with new badges and installation steps ## Version 0.1.0 Initial release. Featuring only `Result` and `do_notation`. returns-0.24.0/CONTRIBUTING.md000066400000000000000000000073611472312074000155370ustar00rootroot00000000000000# How to contribute Do you have questions or issues? Join our chat! 
[![Telegram chat](https://img.shields.io/badge/chat-join-blue?logo=telegram)](https://t.me/drypython) ## Tutorials If you want to start working on this project, you will need to get familiar with these concepts: - http://learnyouahaskell.com/functors-applicative-functors-and-monoids - https://github.com/dbrattli/OSlash/wiki/Functors,-Applicatives,-And-Monads-In-Pictures - https://gcanti.github.io/fp-ts/ and https://dev.to/gcanti Here are some practical examples of what we are doing here: - https://medium.com/@rnesytov/using-Result-monad-in-python-b6eac698dff5 - https://www.morozov.is/2018/09/08/monad-laws-in-ruby.html - https://beepb00p.xyz/mypy-error-handling.html ## Dependencies We use [`poetry`](https://github.com/python-poetry/poetry) to manage the dependencies. To install them you would need to run the `install` command: ```bash poetry install ``` To activate your `virtualenv` run `poetry shell`. ## Tests We use `pytest` and `flake8` for quality control. We also use `wemake_python_styleguide` to enforce code quality. To run standard tests: ```bash poetry run pytest returns docs/pages tests ``` **NOTE:** type-safety tests not included, see section on type tests below To run linting: ```bash poetry run flake8 . ``` Keep in mind: default virtual environment folder excluded by flake8 style checking is `.venv`. If you want to customize this parameter, you should do this in `setup.cfg`. These steps are mandatory during CI. ### Type tests We also use `pytest-mypy-plugins`. Tests cases are located inside `./typesafety` If you create new types or typed functions, it is required to test their types. The type-safety tests can be run with the following: ```bash poetry run pytest typesafety ``` **NOTE:** This can take upwards of 20 minutes, only recommended to run if necessary. Here's [a helpful tutorial](https://sobolevn.me/2019/08/testing-mypy-types) if you are looking for more information. ## Type checks We use `mypy` to run type checks on our code. To use it: ```bash poetry run mypy returns tests/**/*.py ``` This step is mandatory during CI. ## Submitting your code We use [trunk based](https://trunkbaseddevelopment.com/) development (we also sometimes call it `wemake-git-flow`). What the point of this method? 1. We use protected `master` branch, so the only way to push your code is via pull request 2. We use issue branches: to implement a new feature or to fix a bug create a new branch named `issue-$TASKNUMBER` 3. Then create a pull request to `master` branch 4. We use `git tag`s to make releases, so we can track what has changed since the latest release So, this way we achieve an easy and scalable development process which frees us from merging hell and long-living branches. In this method, the latest version of the app is always in the `master` branch. ### Before submitting Before submitting your code please do the following steps: 1. Run `pytest` to make sure everything was working before 2. Add any changes you want 3. Add tests for the new changes 4. Edit documentation if you have changed something significant 5. Update `CHANGELOG.md` with a quick summary of your changes 6. Run `pytest` again to make sure it is still working 7. Run `mypy` to ensure that types are correct 8. Run `flake8` to ensure that style is correct 9. Run `slotscheck` to ensure that slots are correct ## Other help You can contribute by spreading a word about this library. It would also be a huge contribution to write a short article on how you are using this project. You can also share your best practices with us. 
Join in the conversation with us on our Telegram. [![Telegram chat](https://img.shields.io/badge/chat-join-blue?logo=telegram)](https://t.me/drypython) returns-0.24.0/LICENSE000066400000000000000000000024111472312074000143020ustar00rootroot00000000000000Copyright 2016-2021 dry-python organization Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. returns-0.24.0/README.md000066400000000000000000000554411472312074000145670ustar00rootroot00000000000000[![Returns logo](https://raw.githubusercontent.com/dry-python/brand/master/logo/returns_white-outline.png)](https://github.com/dry-python/returns) ----- [![Build Status](https://github.com/dry-python/returns/workflows/test/badge.svg?branch=master&event=push)](https://github.com/dry-python/returns/actions?query=workflow%3Atest) [![codecov](https://codecov.io/gh/dry-python/returns/branch/master/graph/badge.svg)](https://codecov.io/gh/dry-python/returns) [![Documentation Status](https://readthedocs.org/projects/returns/badge/?version=latest)](https://returns.readthedocs.io/en/latest/?badge=latest) [![Python Version](https://img.shields.io/pypi/pyversions/returns.svg)](https://pypi.org/project/returns/) [![conda](https://img.shields.io/conda/v/conda-forge/returns?label=conda)](https://anaconda.org/conda-forge/returns) [![wemake-python-styleguide](https://img.shields.io/badge/style-wemake-000000.svg)](https://github.com/wemake-services/wemake-python-styleguide) [![Telegram chat](https://img.shields.io/badge/chat-join-blue?logo=telegram)](https://t.me/drypython) ----- Make your functions return something meaningful, typed, and safe! ## Features - Brings functional programming to Python land - Provides a bunch of primitives to write declarative business logic - Enforces better architecture - Fully typed with annotations and checked with `mypy`, [PEP561 compatible](https://www.python.org/dev/peps/pep-0561/) - Adds emulated [Higher Kinded Types](https://returns.readthedocs.io/en/latest/pages/hkt.html) support - Provides type-safe interfaces to create your own data-types with enforced laws - Has a bunch of helpers for better composition - Pythonic and pleasant to write and to read 🐍 - Support functions and coroutines, framework agnostic - Easy to start: has lots of docs, tests, and tutorials [Quickstart](https://returns.readthedocs.io/en/latest/pages/quickstart.html) right now! 
## Installation ```bash pip install returns ``` You can also install `returns` with the latest supported `mypy` version: ```bash pip install returns[compatible-mypy] ``` You would also need to configure our [`mypy` plugin](https://returns.readthedocs.io/en/latest/pages/contrib/mypy_plugins.html): ```ini # In setup.cfg or mypy.ini: [mypy] plugins = returns.contrib.mypy.returns_plugin ``` or: ```toml [tool.mypy] plugins = ["returns.contrib.mypy.returns_plugin"] ``` We also recommend to use the same `mypy` settings [we use](https://github.com/wemake-services/wemake-python-styleguide/blob/master/styles/mypy.toml). Make sure you know how to get started, [check out our docs](https://returns.readthedocs.io/en/latest/)! [Try our demo](https://repl.it/@sobolevn/returns#ex.py). ## Contents - [Maybe container](#maybe-container) that allows you to write `None`-free code - [RequiresContext container](#requirescontext-container) that allows you to use typed functional dependency injection - [Result container](#result-container) that lets you to get rid of exceptions - [IO container](#io-container) and [IOResult](#troublesome-io) that marks all impure operations and structures them - [Future container](#future-container) and [FutureResult](#async-code-without-exceptions) to work with `async` code - [Write your own container!](https://returns.readthedocs.io/en/latest/pages/create-your-own-container.html) You would still have all the features for your own types (including full existing code reuse and type-safety) - Use [`do-notation`](https://returns.readthedocs.io/en/latest/pages/do-notation.html) to make your code easier! ## Maybe container `None` is called the [worst mistake in the history of Computer Science](https://www.infoq.com/presentations/Null-References-The-Billion-Dollar-Mistake-Tony-Hoare/). So, what can we do to check for `None` in our programs? You can use builtin [Optional](https://mypy.readthedocs.io/en/stable/kinds_of_types.html#optional-types-and-the-none-type) type and write a lot of `if some is not None:` conditions. But, **having `null` checks here and there makes your code unreadable**. ```python user: Optional[User] discount_program: Optional['DiscountProgram'] = None if user is not None: balance = user.get_balance() if balance is not None: credit = balance.credit_amount() if credit is not None and credit > 0: discount_program = choose_discount(credit) ``` Or you can use [Maybe](https://returns.readthedocs.io/en/latest/pages/maybe.html) container! It consists of `Some` and `Nothing` types, representing existing state and empty (instead of `None`) state respectively. ```python from typing import Optional from returns.maybe import Maybe, maybe @maybe # decorator to convert existing Optional[int] to Maybe[int] def bad_function() -> Optional[int]: ... maybe_number: Maybe[float] = bad_function().bind_optional( lambda number: number / 2, ) # => Maybe will return Some[float] only if there's a non-None value # Otherwise, will return Nothing ``` You can be sure that `.bind_optional()` method won't be called for `Nothing`. Forget about `None`-related errors forever! We can also bind a `Optional`-returning function over a container. To achieve this, we are going to use `.bind_optional` method. 
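Here is a minimal, self-contained sketch of that pattern (`parse_int` below is just an illustrative helper, it is not part of this library):

```python
from typing import Optional

from returns.maybe import Maybe, Some, Nothing

def parse_int(raw: str) -> Optional[int]:
    """A plain `Optional`-returning function."""
    return int(raw) if raw.isdigit() else None

# `.bind_optional` only runs `parse_int` for the `Some` case
# and wraps its `Optional` result back into a `Maybe`:
assert Maybe.from_optional('10').bind_optional(parse_int) == Some(10)
assert Maybe.from_optional(None).bind_optional(parse_int) == Nothing
```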
And here's how your initial refactored code will look: ```python user: Optional[User] # Type hint here is optional, it only helps the reader here: discount_program: Maybe['DiscountProgram'] = Maybe.from_optional( user, ).bind_optional( # This won't be called if `user is None` lambda real_user: real_user.get_balance(), ).bind_optional( # This won't be called if `real_user.get_balance()` is None lambda balance: balance.credit_amount(), ).bind_optional( # And so on! lambda credit: choose_discount(credit) if credit > 0 else None, ) ``` Much better, isn't it? ## RequiresContext container Many developers do use some kind of dependency injection in Python. And usually it is based on the idea that there's some kind of a container and assembly process. Functional approach is much simpler! Imagine that you have a `django` based game, where you award users with points for each guessed letter in a word (unguessed letters are marked as `'.'`): ```python from django.http import HttpRequest, HttpResponse from words_app.logic import calculate_points def view(request: HttpRequest) -> HttpResponse: user_word: str = request.POST['word'] # just an example points = calculate_points(user_word) ... # later you show the result to user somehow # Somewhere in your `words_app/logic.py`: def calculate_points(word: str) -> int: guessed_letters_count = len([letter for letter in word if letter != '.']) return _award_points_for_letters(guessed_letters_count) def _award_points_for_letters(guessed: int) -> int: return 0 if guessed < 5 else guessed # minimum 6 points possible! ``` Awesome! It works, users are happy, your logic is pure and awesome. But, later you decide to make the game more fun: let's make the minimal accountable letters threshold configurable for an extra challenge. You can just do it directly: ```python def _award_points_for_letters(guessed: int, threshold: int) -> int: return 0 if guessed < threshold else guessed ``` The problem is that `_award_points_for_letters` is deeply nested. And then you have to pass `threshold` through the whole callstack, including `calculate_points` and all other functions that might be on the way. All of them will have to accept `threshold` as a parameter! This is not useful at all! Large code bases will struggle a lot from this change. Ok, you can directly use `django.settings` (or similar) in your `_award_points_for_letters` function. And **ruin your pure logic with framework specific details**. That's ugly! Or you can use `RequiresContext` container. Let's see how our code changes: ```python from django.conf import settings from django.http import HttpRequest, HttpResponse from words_app.logic import calculate_points def view(request: HttpRequest) -> HttpResponse: user_word: str = request.POST['word'] # just an example points = calculate_points(user_words)(settings) # passing the dependencies ... 
# later you show the result to user somehow # Somewhere in your `words_app/logic.py`: from typing import Protocol from returns.context import RequiresContext class _Deps(Protocol): # we rely on abstractions, not direct values or types WORD_THRESHOLD: int def calculate_points(word: str) -> RequiresContext[int, _Deps]: guessed_letters_count = len([letter for letter in word if letter != '.']) return _award_points_for_letters(guessed_letters_count) def _award_points_for_letters(guessed: int) -> RequiresContext[int, _Deps]: return RequiresContext( lambda deps: 0 if guessed < deps.WORD_THRESHOLD else guessed, ) ``` And now you can pass your dependencies in a really direct and explicit way. And have the type-safety to check what you pass to cover your back. Check out [RequiresContext](https://returns.readthedocs.io/en/latest/pages/context.html) docs for more. There you will learn how to make `'.'` also configurable. We also have [RequiresContextResult](https://returns.readthedocs.io/en/latest/pages/context.html#requirescontextresult-container) for context-related operations that might fail. And also [RequiresContextIOResult](https://returns.readthedocs.io/en/latest/pages/context.html#requirescontextioresult-container) and [RequiresContextFutureResult](https://returns.readthedocs.io/en/latest/pages/context.html#requirescontextfutureresult-container). ## Result container Please, make sure that you are also aware of [Railway Oriented Programming](https://fsharpforfunandprofit.com/rop/). ### Straight-forward approach Consider this code that you can find in **any** `python` project. ```python import requests def fetch_user_profile(user_id: int) -> 'UserProfile': """Fetches UserProfile dict from foreign API.""" response = requests.get('/api/users/{0}'.format(user_id)) response.raise_for_status() return response.json() ``` Seems legit, does it not? It also seems like a pretty straightforward code to test. All you need is to mock `requests.get` to return the structure you need. But, there are hidden problems in this tiny code sample that are almost impossible to spot at the first glance. ### Hidden problems Let's have a look at the exact same code, but with the all hidden problems explained. ```python import requests def fetch_user_profile(user_id: int) -> 'UserProfile': """Fetches UserProfile dict from foreign API.""" response = requests.get('/api/users/{0}'.format(user_id)) # What if we try to find user that does not exist? # Or network will go down? Or the server will return 500? # In this case the next line will fail with an exception. # We need to handle all possible errors in this function # and do not return corrupt data to consumers. response.raise_for_status() # What if we have received invalid JSON? # Next line will raise an exception! return response.json() ``` Now, all (probably all?) problems are clear. How can we be sure that this function will be safe to use inside our complex business logic? We really cannot be sure! We will have to create **lots** of `try` and `except` cases just to catch the expected exceptions. Our code will become complex and unreadable with all this mess! Or we can go with the top level `except Exception:` case to catch literally everything. And this way we would end up with catching unwanted ones. This approach can hide serious problems from us for a long time. 
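To make that mess concrete, here is roughly what such a defensive version tends to look like (only a sketch, and the list of caught exceptions is never quite complete):

```python
from typing import Optional

import requests

def fetch_user_profile(user_id: int) -> Optional['UserProfile']:
    """The 'defensive' version we would rather not write."""
    try:
        response = requests.get('/api/users/{0}'.format(user_id))
        response.raise_for_status()
        return response.json()
    except requests.RequestException:
        return None  # network and HTTP errors collapse into `None`
    except ValueError:
        return None  # invalid JSON hides behind the very same `None`
```

Callers now have to guess why they got `None`, and any exception we forgot to list still escapes.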
### Pipe example ```python import requests from returns.result import Result, safe from returns.pipeline import flow from returns.pointfree import bind def fetch_user_profile(user_id: int) -> Result['UserProfile', Exception]: """Fetches `UserProfile` TypedDict from foreign API.""" return flow( user_id, _make_request, bind(_parse_json), ) @safe def _make_request(user_id: int) -> requests.Response: # TODO: we are not yet done with this example, read more about `IO`: response = requests.get('/api/users/{0}'.format(user_id)) response.raise_for_status() return response @safe def _parse_json(response: requests.Response) -> 'UserProfile': return response.json() ``` Now we have a clean and a safe and declarative way to express our business needs: - We start from making a request, that might fail at any moment, - Then parsing the response if the request was successful, - And then return the result. Now, instead of returning regular values we return values wrapped inside a special container thanks to the [@safe](https://returns.readthedocs.io/en/latest/pages/result.html#safe) decorator. It will return [Success[YourType] or Failure[Exception]](https://returns.readthedocs.io/en/latest/pages/result.html). And will never throw exception at us! We also use [flow](https://returns.readthedocs.io/en/latest/pages/pipeline.html#flow) and [bind](https://returns.readthedocs.io/en/latest/pages/pointfree.html#bind) functions for handy and declarative composition. This way we can be sure that our code won't break in random places due to some implicit exception. Now we control all parts and are prepared for the explicit errors. We are not yet done with this example, let's continue to improve it in the next chapter. ## IO container Let's look at our example from another angle. All its functions look like regular ones: it is impossible to tell whether they are [pure](https://en.wikipedia.org/wiki/Pure_function) or impure from the first sight. It leads to a very important consequence: *we start to mix pure and impure code together*. We should not do that! When these two concepts are mixed we suffer really bad when testing or reusing it. Almost everything should be pure by default. And we should explicitly mark impure parts of the program. That's why we have created `IO` container to mark impure functions that never fail. These impure functions use `random`, current datetime, environment, or console: ```python import random import datetime as dt from returns.io import IO def get_random_number() -> IO[int]: # or use `@impure` decorator return IO(random.randint(1, 10)) # isn't pure, because random now: Callable[[], IO[dt.datetime]] = impure(dt.datetime.now) @impure def return_and_show_next_number(previous: int) -> int: next_number = previous + 1 print(next_number) # isn't pure, because does IO return next_number ``` Now we can clearly see which functions are pure and which ones are impure. This helps us a lot in building large applications, unit testing you code, and composing business logic together. ### Troublesome IO As it was already said, we use `IO` when we handle functions that do not fail. What if our function can fail and is impure? Like `requests.get()` we had earlier in our example. Then we have to use a special `IOResult` type instead of a regular `Result`. Let's find the difference: - Our `_parse_json` function always returns the same result (hopefully) for the same input: you can either parse valid `json` or fail on invalid one. 
That's why we return pure `Result`, there's no `IO` inside - Our `_make_request` function is impure and can fail. Try to send two similar requests with and without internet connection. The result will be different for the same input. That's why we must use `IOResult` here: it can fail and has `IO` So, in order to fulfill our requirement and separate pure code from impure one, we have to refactor our example. ### Explicit IO Let's make our [IO](https://returns.readthedocs.io/en/latest/pages/io.html) explicit! ```python import requests from returns.io import IOResult, impure_safe from returns.result import safe from returns.pipeline import flow from returns.pointfree import bind_result def fetch_user_profile(user_id: int) -> IOResult['UserProfile', Exception]: """Fetches `UserProfile` TypedDict from foreign API.""" return flow( user_id, _make_request, # before: def (Response) -> UserProfile # after safe: def (Response) -> ResultE[UserProfile] # after bind_result: def (IOResultE[Response]) -> IOResultE[UserProfile] bind_result(_parse_json), ) @impure_safe def _make_request(user_id: int) -> requests.Response: response = requests.get('/api/users/{0}'.format(user_id)) response.raise_for_status() return response @safe def _parse_json(response: requests.Response) -> 'UserProfile': return response.json() ``` And later we can use [unsafe_perform_io](https://returns.readthedocs.io/en/latest/pages/io.html#unsafe-perform-io) somewhere at the top level of our program to get the pure (or "real") value. As a result of this refactoring session, we know everything about our code: - Which parts can fail, - Which parts are impure, - How to compose them in a smart, readable, and typesafe manner. ## Future container There are several issues with `async` code in Python: 1. You cannot call `async` function from a sync one 2. Any unexpectedly thrown exception can ruin your whole event loop 3. Ugly composition with lots of `await` statements `Future` and `FutureResult` containers solve these issues! ### Mixing sync and async code The main feature of [Future](https://returns.readthedocs.io/en/latest/pages/future.html) is that it allows to run async code while maintaining sync context. Let's see an example. Let's say we have two functions, the `first` one returns a number and the `second` one increments it: ```python async def first() -> int: return 1 def second(): # How can we call `first()` from here? return first() + 1 # Boom! Don't do this. We illustrate a problem here. ``` If we try to just run `first()`, we will just create an unawaited coroutine. It won't return the value we want. But, if we would try to run `await first()`, then we would need to change `second` to be `async`. And sometimes it is not possible for various reasons. However, with `Future` we can "pretend" to call async code from sync code: ```python from returns.future import Future def second() -> Future[int]: return Future(first()).map(lambda num: num + 1) ``` Without touching our `first` async function or making `second` async we have achieved our goal. Now, our async value is incremented inside a sync function. However, `Future` still requires to be executed inside a proper eventloop: ```python import anyio # or asyncio, or any other lib # We can then pass our `Future` to any library: asyncio, trio, curio. # And use any event loop: regular, uvloop, even a custom one, etc assert anyio.run(second().awaitable) == 2 ``` As you can see `Future` allows you to work with async functions from a sync context. And to mix these two realms together. 
Use raw `Future` for operations that cannot fail or raise exceptions. Pretty much the same logic we had with our `IO` container. ### Async code without exceptions We have already covered how [`Result`](#result-container) works for both pure and impure code. The main idea is: we don't raise exceptions, we return them. It is **especially** critical in async code, because a single exception can ruin all our coroutines running in a single eventloop. We have a handy combination of `Future` and `Result` containers: `FutureResult`. Again, this is exactly like `IOResult`, but for impure async code. Use it when your `Future` might have problems: like HTTP requests or filesystem operations. You can easily turn any wild throwing coroutine into a calm `FutureResult`: ```python import anyio from returns.future import future_safe from returns.io import IOFailure @future_safe async def raising(): raise ValueError('Not so fast!') ioresult = anyio.run(raising.awaitable) # all `Future`s return IO containers assert ioresult == IOFailure(ValueError('Not so fast!')) # True ``` Using `FutureResult` will keep your code safe from exceptions. You can always `await` or execute inside an eventloop any `FutureResult` to get sync `IOResult` instance to work with it in a sync manner. ### Better async composition Previously, you had to do quite a lot of `await`ing while writing `async` code: ```python async def fetch_user(user_id: int) -> 'User': ... async def get_user_permissions(user: 'User') -> 'Permissions': ... async def ensure_allowed(permissions: 'Permissions') -> bool: ... async def main(user_id: int) -> bool: # Also, don't forget to handle all possible errors with `try / except`! user = await fetch_user(user_id) # We will await each time we use a coro! permissions = await get_user_permissions(user) return await ensure_allowed(permissions) ``` Some people are ok with it, but some people don't like this imperative style. The problem is that there was no choice. But now, you can do the same thing in functional style! With the help of `Future` and `FutureResult` containers: ```python import anyio from returns.future import FutureResultE, future_safe from returns.io import IOSuccess, IOFailure @future_safe async def fetch_user(user_id: int) -> 'User': ... @future_safe async def get_user_permissions(user: 'User') -> 'Permissions': ... @future_safe async def ensure_allowed(permissions: 'Permissions') -> bool: ... def main(user_id: int) -> FutureResultE[bool]: # We can now turn `main` into a sync function, it does not `await` at all. # We also don't care about exceptions anymore, they are already handled. return fetch_user(user_id).bind(get_user_permissions).bind(ensure_allowed) correct_user_id: int # has required permissions banned_user_id: int # does not have required permissions wrong_user_id: int # does not exist # We can have correct business results: assert anyio.run(main(correct_user_id).awaitable) == IOSuccess(True) assert anyio.run(main(banned_user_id).awaitable) == IOSuccess(False) # Or we can have errors along the way: assert anyio.run(main(wrong_user_id).awaitable) == IOFailure( UserDoesNotExistError(...), ) ``` Or even something really fancy: ```python from returns.pointfree import bind from returns.pipeline import flow def main(user_id: int) -> FutureResultE[bool]: return flow( fetch_user(user_id), bind(get_user_permissions), bind(ensure_allowed), ) ``` Later we can also refactor our logical functions to be sync and to return `FutureResult`. Lovely, isn't it? ## More! Want more? 
[Go to the docs!](https://returns.readthedocs.io) Or read these articles: - [Python exceptions considered an anti-pattern](https://sobolevn.me/2019/02/python-exceptions-considered-an-antipattern) - [Enforcing Single Responsibility Principle in Python](https://sobolevn.me/2019/03/enforcing-srp) - [Typed functional Dependency Injection in Python](https://sobolevn.me/2020/02/typed-functional-dependency-injection) - [How Async Should Have Been](https://sobolevn.me/2020/06/how-async-should-have-been) - [Higher Kinded Types in Python](https://sobolevn.me/2020/10/higher-kinded-types-in-python) - [Make Tests a Part of Your App](https://sobolevn.me/2021/02/make-tests-a-part-of-your-app) Do you have an article to submit? Feel free to open a pull request! returns-0.24.0/docs/000077500000000000000000000000001472312074000142275ustar00rootroot00000000000000returns-0.24.0/docs/Makefile000066400000000000000000000011351472312074000156670ustar00rootroot00000000000000# Minimal makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build SPHINXPROJ = returns SOURCEDIR = . BUILDDIR = _build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) returns-0.24.0/docs/_static/000077500000000000000000000000001472312074000156555ustar00rootroot00000000000000returns-0.24.0/docs/_static/overrides.css000066400000000000000000000003561472312074000203750ustar00rootroot00000000000000.globaltoc > p.caption { display: block; font-size: 1.05em; font-weight: 700; text-decoration: none; margin-bottom: 1em; border: 0; } /* For some reason it did not have a scroll attached. */ .mermaid { overflow: scroll; } returns-0.24.0/docs/conf.py000066400000000000000000000100571472312074000155310ustar00rootroot00000000000000# Configuration file for the Sphinx documentation builder. # # This file does only contain a selection of the most common options. For a # full list see the documentation: # http://www.sphinx-doc.org/en/master/config # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. import os import sys import tomli sys.path.insert(0, os.path.abspath('..')) # -- Project information ----------------------------------------------------- def _get_project_meta(): with open('../pyproject.toml', mode='rb') as pyproject: return tomli.load(pyproject)['tool']['poetry'] pkg_meta = _get_project_meta() project = str(pkg_meta['name']) copyright = '2019, dry-python team' # noqa: WPS125 author = 'dry-python team' # The short X.Y version version = str(pkg_meta['version']) # The full version, including alpha/beta/rc tags release = version # -- General configuration --------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. 
extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.viewcode', 'sphinx.ext.autosummary', 'sphinx.ext.napoleon', # Used to include .md files: 'myst_parser', # Used to insert typehints into the final docs: 'sphinx_autodoc_typehints', # Used to build graphs: 'sphinxcontrib.mermaid', ] autoclass_content = 'class' autodoc_member_order = 'bysource' autodoc_member_order = 'bysource' autodoc_default_options = { 'members': True, 'undoc-members': '_laws', 'exclude-members': '__dict__,__weakref__', 'show-inheritance': True, } suppress_warnings = [ 'myst.header', 'myst.xref_missing', ] # https://pypi.org/project/sphinx-autodoc-typehints/ always_document_param_types = True # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: source_suffix = ['.rst', '.md'] # The master toctree document. master_doc = 'index' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = 'en' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path . exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' add_module_names = False # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'furo' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. html_theme_options = {} # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Custom sidebar templates, must be a dictionary that maps document names # to template names. html_sidebars = {} # -- Extension configuration ------------------------------------------------- napoleon_numpy_docstring = False # -- Options for todo extension ---------------------------------------------- # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True returns-0.24.0/docs/index.rst000066400000000000000000000020541472312074000160710ustar00rootroot00000000000000returns ======= .. include:: ../README.md :parser: myst_parser.sphinx_ Contents -------- .. toctree:: :maxdepth: 2 pages/quickstart.rst .. toctree:: :maxdepth: 2 :caption: Userguide pages/container.rst pages/railway.rst pages/hkt.rst pages/interfaces.rst .. toctree:: :maxdepth: 2 :caption: Containers pages/maybe.rst pages/result.rst pages/io.rst pages/future.rst pages/context.rst pages/create-your-own-container.rst .. toctree:: :maxdepth: 2 :caption: Composition helpers pages/pipeline.rst pages/converters.rst pages/pointfree.rst pages/methods.rst pages/do-notation.rst pages/functions.rst pages/curry.rst pages/trampolines.rst pages/types.rst .. 
toctree:: :maxdepth: 2 :caption: Integration pages/development.rst pages/contrib/mypy_plugins.rst pages/contrib/pytest_plugins.rst pages/contrib/hypothesis_plugins.rst .. toctree:: :maxdepth: 1 :caption: Changelog pages/changelog.rst Indices and tables ------------------ * :ref:`genindex` * :ref:`modindex` * :ref:`search` returns-0.24.0/docs/pages/000077500000000000000000000000001472312074000153265ustar00rootroot00000000000000returns-0.24.0/docs/pages/changelog.rst000066400000000000000000000001001472312074000177760ustar00rootroot00000000000000.. include:: ../../CHANGELOG.md :parser: myst_parser.sphinx_ returns-0.24.0/docs/pages/container.rst000066400000000000000000000264471472312074000200570ustar00rootroot00000000000000Container: the concept ====================== .. currentmodule:: returns.primitives.container Container is a concept that allows you to write code around the existing wrapped values while maintaining the execution context. List of supported containers: - :class:`Maybe ` to handle ``None`` cases - :class:`Result ` to handle possible exceptions - :class:`IO ` to mark explicit ``IO`` actions - :class:`Future ` to work with ``async`` code - :class:`RequiresContext ` to pass context to your functions (DI and similar) There are also some combinations like :class:`IOResult `, :class:`FutureResult `, :class:`RequiresContextResult <.RequiresContextResult>`, :class:`RequiresContextIOResult <.RequiresContextIOResult>` and :class:`RequiresContextFutureResult <.RequiresContextFutureResult>`. We will show you container's simple API of one attribute and several simple methods. Basics ------ The main idea behind a container is that it wraps some internal state. That's what :attr:`._inner_value ` is used for. And we have several functions to create new containers based on the previous state. And we can see how this state is evolving during the execution. .. mermaid:: :caption: State evolution. graph LR F1["Container(Initial)"] --> F2["Container(UserId(1))"] F2 --> F3["Container(UserAccount(156))"] F3 --> F4["Container(FailedLoginAttempt(1))"] F4 --> F5["Container(SentNotificationId(992))"] Working with a container ------------------------ We use two methods to create a new container from the previous one. ``bind`` and ``map``. The difference is simple: - ``map`` works with functions that return regular value - ``bind`` works with functions that return new container of the same type We have :func:`returns.interfaces.mappable.MappableN.map` to compose containers with regular functions. Here's how it looks: .. mermaid:: :caption: Illustration of ``map`` method. graph LR F1["Container[A]"] -- "map(function)" --> F2["Container[B]"] style F1 fill:green style F2 fill:green .. code:: python >>> from typing import Any >>> from returns.result import Success, Result >>> def double(state: int) -> int: ... return state * 2 >>> result: Result[int, Any] = Success(1).map(double) >>> assert str(result) == '' >>> result: Result[int, Any] = result.map(lambda state: state + 1) >>> assert str(result) == '' The same works with built-in functions as well: .. code:: python >>> from returns.io import IO >>> io = IO('bytes').map(list) >>> str(io) "" The second method is ``bind``. It is a bit different. We pass a function that returns another container to it. :func:`returns.interfaces.bindable.BindableN.bind` is used to literally bind two different containers together. Here's how it looks: .. mermaid:: :caption: Illustration of ``bind`` method. 
graph LR F1["Container[A]"] -- "bind(function)" --> F2["Container[B]"] F1["Container[A]"] -- "bind(function)" --> F3["Container[C]"] style F1 fill:green style F2 fill:green style F3 fill:red .. code:: python from returns.result import Result, Success def may_fail(user_id: int) -> Result[float, str]: ... value: Result[int, str] = Success(1) # Can be assumed as either Success[float] or Failure[str]: result: Result[float, str] = value.bind(may_fail) .. note:: All containers support these methods. Because all containers implement :class:`returns.interfaces.mappable.MappableN` and :class:`returns.interfaces.bindable.BindableN`. You can read more about methods that some other containers support and :ref:`interfaces ` behind them. Instantiating a container ------------------------- All :class:`returns.interfaces.applicative.ApplicativeN` containers support special ``.from_value`` method to construct a new container from a raw value. .. code:: python >>> from returns.result import Result >>> assert str(Result.from_value(1)) == '' There are also other methods in other interfaces. For example, here are some of them: - :func:`returns.interfaces.specific.maybe.MaybeLikeN.from_optional` creates a value from ``Optional`` value .. code:: python >>> from returns.maybe import Maybe, Some, Nothing >>> assert Maybe.from_optional(1) == Some(1) >>> assert Maybe.from_optional(None) == Nothing - :func:`returns.interfaces.failable.DiverseFailableN.from_failure` creates a failing container from a value .. code:: python >>> from returns.result import Result, Failure >>> assert Result.from_failure(1) == Failure(1) There are many other constructors! Check out concrete types and their interfaces. Working with multiple containers -------------------------------- Multiple container arguments ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We have already seen how we can work with one container and functions that receive a single argument. Let's say you have a function of two arguments and two containers: .. code:: python >>> def sum_two_numbers(first: int, second: int) -> int: ... return first + second And here are our two containers: .. code:: python >>> from returns.io import IO >>> one = IO(1) >>> two = IO(2) The naive approach to compose two ``IO`` containers and a function would be too hard to show here. Luckily, we support partial application and the ``.apply()`` method. Here are the required steps: 0. We make ``sum_two_numbers`` to receive :ref:`partial arguments ` 1. We create a new container that wraps ``sum_two_numbers`` function as a value 2. We then call ``.apply()`` twice to pass each value It can be done like so: .. code:: python >>> from returns.curry import curry >>> from returns.io import IO >>> @curry ... def sum_two_numbers(first: int, second: int) -> int: ... return first + second >>> one = IO(1) >>> two = IO(2) >>> assert two.apply(one.apply(IO(sum_two_numbers))) == IO(3) But, there are other ways to make ``sum_two_numbers`` partial. One can use ``partial`` as well: .. code:: python >>> from returns.curry import partial >>> one = IO(1) >>> two = IO(2) >>> assert two.apply(one.apply( ... IO(lambda x: partial(sum_two_numbers, x)), ... )) == IO(3) Or even native ``lambda`` functions: .. code:: python >>> one = IO(1) >>> two = IO(2) >>> assert two.apply(one.apply( ... IO(lambda x: lambda y: sum_two_numbers(x, y)), ... )) == IO(3) It would be faster, but not as elegant (and type-safe). 
Working with iterable of containers ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Imagine that you have to take 10 random numbers and then sum them to get the final result. So, here's what your code will look like: .. code:: python >>> import random >>> from returns.io import IO >>> def random_number() -> IO[int]: ... return IO(2) # Example, basically alias of ``random.randint(1, 5)`` >>> numbers = [random_number() for _ in range(10)] >>> assert len(numbers) == 10 >>> assert all(isinstance(number, IO) for number in numbers) So, how to sum these random values into a single ``IO[int]`` value? That's where :meth:`Fold.loop ` really helps! .. code:: python >>> from typing import Callable >>> from returns.iterables import Fold >>> def sum_two_numbers(first: int) -> Callable[[int], int]: ... return lambda second: first + second >>> assert Fold.loop( ... numbers, # let's loop on our ``IO`` values ... IO(0), # starting from ``0`` value ... sum_two_numbers, # and getting the sum of each two numbers in a loop ... ) == IO(20) We can also change the initial element to some other value: .. code:: python >>> assert Fold.loop( ... numbers, ... IO(5), # now we will start from ``5``, not ``0`` ... sum_two_numbers, ... ) == IO(25) ``Fold.loop`` is eager. It will be executed for all items in your iterable. Collecting an iterable of containers into a single container ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ You might end up with an iterable of containers: .. code:: python >>> from typing import List >>> from returns.maybe import Maybe, Some, Nothing, maybe >>> source = {'a': 1, 'b': 2} >>> fetched_values: List[Maybe[int]] = [ ... maybe(source.get)(key) ... for key in ('a', 'b') ... ] To work with an iterable of containers, it is recommended to cast it into a container with the iterable inside using the :meth:`Fold.collect ` method: .. code:: python >>> from returns.iterables import Fold >>> assert Fold.collect(fetched_values, Some(())) == Some((1, 2)) Any falsy values will result in a falsy result (pun intended): .. code:: python >>> fetched_values: List[Maybe[int]] = [ ... maybe(source.get)(key) ... for key in ('a', 'c') # 'c' is missing! ... ] >>> assert Fold.collect(fetched_values, Some(())) == Nothing You can also use a different strategy to fetch the values you need; to do just that we have the :meth:`Fold.collect_all ` method: .. code:: python >>> fetched_values: List[Maybe[int]] = [ ... maybe(source.get)(key) ... for key in ('a', 'c') # 'c' is missing! ... ] >>> assert Fold.collect_all(fetched_values, Some(())) == Some((1,)) We support any ``Iterable[T]`` input type and return a ``Container[Sequence[T]]``. You can subclass ``Fold`` type to change how any of these methods work. .. _immutability: Immutability ------------ We like to think of ``returns`` as :ref:`immutable ` structures. You cannot mutate the inner state of the created container, because we redefine ``__setattr__`` and ``__delattr__`` magic methods. You also cannot set new attributes on container instances, since we are using ``__slots__`` for better performance and strictness. Well, nothing is **really** immutable in python, but you were warned. We also provide :class:`returns.primitives.types.Immutable` mixin that users can use to quickly make their classes immutable. .. _type-safety: Type safety ----------- We try to make our containers optionally type safe. What does it mean? 1. It is still good old ``python``, do whatever you want without ``mypy`` 2.
If you are using ``mypy`` you will be notified about type violations We also ship `PEP561 `_ compatible ``.pyi`` files together with the source code. In this case these types will be available to users when they install our application. We also ship custom ``mypy`` plugins to overcome some existing problems, please make sure to use them, since they increase your developer experience and type-safety level: Check out our docs on using our :ref:`mypy plugins `. Further reading --------------- - :ref:`Railway oriented programming ` .. _base-interfaces: API Reference ------------- ``BaseContainer`` is a base class for all other containers. It defines some basic things like representation, hashing, pickling, etc. .. autoclasstree:: returns.primitives.container :strict: .. automodule:: returns.primitives.container :members: :special-members: returns-0.24.0/docs/pages/context.rst000066400000000000000000000623411472312074000175520ustar00rootroot00000000000000Context ======= Dependency injection is a popular software architecture pattern. Its main idea is that you provide `Inversion of Control `_ and can pass different things into your logic instead of hardcoding your stuff. And by doing this you are on your way to achieve `Single Responsibility `_ for your functions and objects. Using the context ----------------- A lot of programs we write rely on the context implicitly or explicitly. We can rely on configuration, env variables, stubs, logical dependencies, etc. Let's look at an example. Simple app ~~~~~~~~~~ One of the most common mistakes Python developers make in ``Django`` is overusing the ``settings`` object inside the business logic. This makes your logic framework-oriented and hard to reason about in large projects, because values just pop out of nowhere in deeply nested functions and can be changed from the outside, from the context of your app. Imagine that you have a ``django`` based game, where you award users with points for each guessed letter in a word (unguessed letters are marked as ``'.'``): .. code:: python from django.http import HttpRequest, HttpResponse from words_app.logic import calculate_points def view(request: HttpRequest) -> HttpResponse: user_word: str = request.POST['word'] # just an example points = calculate_points(user_word) ... # later you show the result to user somehow .. code:: python # Somewhere in your `words_app/logic.py`: def calculate_points(word: str) -> int: guessed_letters_count = len([letter for letter in word if letter != '.']) return _award_points_for_letters(guessed_letters_count) def _award_points_for_letters(guessed: int) -> int: return 0 if guessed < 5 else guessed # minimum 5 points possible! Straight and simple! Adding configuration ~~~~~~~~~~~~~~~~~~~~ But, later you decide to make the game more fun: let's make the minimal accountable letters threshold configurable for an extra challenge. You can just do it directly: .. code:: python def _award_points_for_letters(guessed: int, threshold: int) -> int: return 0 if guessed < threshold else guessed And now your code simply won't type-check, because that's what our caller looks like: .. code:: python def calculate_points(word: str) -> int: guessed_letters_count = len([letter for letter in word if letter != '.']) return _award_points_for_letters(guessed_letters_count) To fix this, the ``calculate_points`` function (and all other callers up the stack) will have to accept ``threshold: int`` as a parameter and pass it to ``_award_points_for_letters``.
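For example, just to thread a single new setting through, every function in the chain has to change. Here is a hypothetical sketch of that manual fix (not the approach we are going to recommend; it assumes ``from django.conf import settings`` in the view module):

.. code:: python

    def view(request: HttpRequest) -> HttpResponse:
        user_word: str = request.POST['word']
        # The view now has to know about the setting too:
        points = calculate_points(user_word, settings.WORD_THRESHOLD)
        ...

    def calculate_points(word: str, threshold: int) -> int:
        guessed_letters_count = len([letter for letter in word if letter != '.'])
        # And forward it further down the callstack:
        return _award_points_for_letters(guessed_letters_count, threshold)

    def _award_points_for_letters(guessed: int, threshold: int) -> int:
        return 0 if guessed < threshold else guessed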
Imagine that your large project has multiple things to configure in multiple functions. What a mess it would be! Ok, you can directly use ``django.settings`` (or similar) in your ``_award_points_for_letters`` function. And ruin your pure logic with framework-specific details. That's ugly! Explicitly relying on context ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ This tiny change showed us that it is not so easy to rely on implicit app context. Instead of passing parameters through the whole callstack or using dirty framework-specific magic, you can use the ``RequiresContext`` container. It was built just for this case. Let's see how our code changes: .. code:: python from django.conf import settings from django.http import HttpRequest, HttpResponse from words_app.logic import calculate_points def view(request: HttpRequest) -> HttpResponse: user_word: str = request.POST['word'] # just an example points = calculate_points(user_word)(settings) # passing the dependencies ... # later you show the result to user somehow .. code:: python # Somewhere in your `words_app/logic.py`: from typing import Protocol from returns.context import RequiresContext class _Deps(Protocol): # we rely on abstractions, not direct values or types WORD_THRESHOLD: int def calculate_points(word: str) -> RequiresContext[int, _Deps]: guessed_letters_count = len([letter for letter in word if letter != '.']) return _award_points_for_letters(guessed_letters_count) def _award_points_for_letters(guessed: int) -> RequiresContext[int, _Deps]: return RequiresContext( lambda deps: 0 if guessed < deps.WORD_THRESHOLD else guessed, ) And now you can pass your dependencies in a really direct and explicit way. .. _ask: ask ~~~ Let's try to configure how we mark our unguessed letters (previously unguessed letters were marked as ``'.'``). Let's say, we want to change this to be ``_``. How can we do that with our existing function? .. code:: python def calculate_points(word: str) -> RequiresContext[int, _Deps]: guessed_letters_count = len([letter for letter in word if letter != '.']) return _award_points_for_letters(guessed_letters_count) We are already using ``RequiresContext``, but its dependencies are just hidden from us! We have a special helper for this case: ``.ask()``, which returns us the current dependencies. The only thing we need to do is to properly annotate the type for our case: ``RequiresContext[int, _Deps].ask()`` Sadly, currently ``mypy`` is not able to infer the dependency type out of the context and we need to explicitly provide it. Let's see the final result: .. code:: python from returns.context import RequiresContext class _Deps(Protocol): # we rely on abstractions, not direct values or types WORD_THRESHOLD: int UNGUESSED_CHAR: str def calculate_points(word: str) -> RequiresContext[int, _Deps]: def factory(deps: _Deps) -> RequiresContext[int, _Deps]: guessed_letters_count = len([ letter for letter in word if letter != deps.UNGUESSED_CHAR ]) return _award_points_for_letters(guessed_letters_count) return RequiresContext[int, _Deps].ask().bind(factory) And now we can access the current context from any place in our callstack. Isn't it convenient? .. warning:: ``RequiresContext`` and similar types are not recursion safe. If you have nesting deeper than ``sys.getrecursionlimit()`` you will end up with a ``RecursionError``. Will this ever happen to you? Probably not. RequiresContext container ------------------------- The concept behind the :class:`~returns.context.requires_context.RequiresContext` container is really simple.
It is a container around a ``Callable[[EnvType], ReturnType]`` function. By its definition it works with pure functions that never fail. It can be illustrated as a simple nested function: .. code:: python >>> from typing import Callable >>> def first(limit: int) -> Callable[[str], bool]: ... def inner(deps: str) -> bool: ... return len(deps) > limit ... return inner >>> assert first(2)('abc') # first(limit)(deps) >>> assert not first(5)('abc') # first(limit)(deps) That's basically enough to make dependency injection possible. But how would you compose the ``first`` function? Let's say with the following function: .. code:: python >>> def bool_to_str(arg: bool) -> str: ... return 'ok' if arg else 'nope' It would be hard, knowing that it returns another function to be called later when the context is known. We can wrap it in a ``RequiresContext`` container to allow better composition! .. code:: python >>> from returns.context import RequiresContext >>> def first(limit: int) -> RequiresContext[bool, str]: ... def inner(deps: str) -> bool: ... return len(deps) > limit ... return RequiresContext(inner) # wrapping function here! >>> assert first(1).map(bool_to_str)('abc') == 'ok' >>> assert first(5).map(bool_to_str)('abc') == 'nope' Here's how execution flows: .. mermaid:: :caption: RequiresContext execution flow. graph LR F1["first(1)"] --> F2["RequiresContext(inner)"] F2 --> F3 F3["container('abc')"] --> F4["True"] F4 --> F5 F5["bool_to_str(True)"] --> F6["'ok'"] The rule is: the dependencies are injected at the very last moment in time. And then normal logical execution happens. RequiresContextResult container ------------------------------- .. currentmodule:: returns.context.requires_context_result :class:`~RequiresContextResult` container is a combination of ``RequiresContext[Result[a, b], env]``. Which means that it is a wrapper around a pure function that might fail. We also added a lot of useful methods for this container, so you can work easily with it: - :meth:`~RequiresContextResult.bind_result` allows to bind functions that return ``Result`` with just one call - :meth:`~RequiresContextResult.bind_context` allows to bind functions that return ``RequiresContext`` easily - There are also several useful constructors from any possible type Use it when you work with pure context-related functions that might fail. RequiresContextIOResult container --------------------------------- .. currentmodule:: returns.context.requires_context_ioresult :class:`~RequiresContextIOResult` container is a combination of ``RequiresContext[IOResult[a, b], env]``. Which means that it is a wrapper around an impure function that might fail. We also added a lot of useful methods for this container, so you can work easily with it: - :meth:`~RequiresContextIOResult.bind_result` allows to bind functions that return ``Result`` with just one call - :meth:`~RequiresContextIOResult.bind_io` allows to bind functions that return ``IO`` with just one call - :meth:`~RequiresContextIOResult.bind_ioresult` allows to bind functions that return ``IOResult`` with just one call - :meth:`~RequiresContextIOResult.bind_context` allows to bind functions that return ``RequiresContext`` easily - :meth:`~RequiresContextIOResult.bind_context_result` allows to bind functions that return ``RequiresContextResult`` easily - There are also several useful constructors from any possible type Use it when you work with impure context-related functions that might fail. This is basically **the main type** that is going to be used in most apps. ..
_requires_context_future_result: RequiresContextFutureResult container ------------------------------------- .. currentmodule:: returns.context.requires_context_future_result :class:`~RequiresContextFutureResult` container is a combination of ``RequiresContext[FutureResult[a, b], env]``. Which means that it is a wrapper around impure async function that might fail. Here's how it should be used: .. literalinclude:: ../../tests/test_examples/test_context/test_reader_future_result.py :linenos: This example illustrates the whole point of our actions: writing sync code that executes asynchronously without any magic at all! We also added a lot of useful methods for this container, so you can work easily with it. These methods are identical with ``RequiresContextIOResult``: - :meth:`~RequiresContextFutureResult.bind_result` allows to bind functions that return ``Result`` with just one call - :meth:`~RequiresContextFutureResult.bind_io` allows to bind functions that return ``IO`` with just one call - :meth:`~RequiresContextFutureResult.bind_ioresult` allows to bind functions that return ``IOResult`` with just one call - :meth:`~RequiresContextFutureResult.bind_future_result` allows to bind functions that return ``FutureResult`` with just one call - :meth:`~RequiresContextFutureResult.bind_context` allows to bind functions that return ``RequiresContext`` easily - :meth:`~RequiresContextFutureResult.bind_context_result` allows to bind functions that return ``RequiresContextResult`` easily There are new ones: - :meth:`~RequiresContextFutureResult.bind_future` allows to bind functions that return ``Future`` container - :meth:`~RequiresContextFutureResult.bind_future_result` allows to bind functions that return ``FutureResult`` container - :meth:`~RequiresContextFutureResult.bind_async_future` allows to bind async functions that return ``Future`` container - :meth:`~RequiresContextFutureResult.bind_async_future_result` allows to bind async functions that return ``FutureResult`` container - :meth:`~RequiresContextFutureResult.bind_context_ioresult` allows to bind functions that return ``RequiresContextIOResult`` - :meth:`~RequiresContextFutureResult.bind_async` allows to bind async functions that return ``RequiresContextFutureResult`` container - :meth:`~RequiresContextFutureResult.bind_awaitable` allows to bind async function that return raw values Use it when you work with impure context-related functions that might fail. This is basically **the main type** that is going to be used in most apps. Aliases ------- There are several useful aliases for ``RequiresContext`` and friends with some common values: .. currentmodule:: returns.context.requires_context - :attr:`~Reader` is an alias for ``RequiresContext[...]`` to save you some typing. Uses ``Reader`` because it is a native name for this concept from Haskell. .. currentmodule:: returns.context.requires_context_result - :attr:`~RequiresContextResultE` is an alias for ``RequiresContextResult[..., Exception]``, just use it when you want to work with ``RequiresContextResult`` containers that use exceptions as error type. It is named ``ResultE`` because it is ``ResultException`` and ``ResultError`` at the same time. - :attr:`~ReaderResult` is an alias for ``RequiresContextResult[...]`` to save you some typing. - :attr:`~ReaderResultE` is an alias for ``RequiresContextResult[..., Exception]`` .. 
currentmodule:: returns.context.requires_context_ioresult - :attr:`~RequiresContextIOResultE` is an alias for ``RequiresContextIOResult[..., Exception]`` - :attr:`~ReaderIOResult` is an alias for ``RequiresContextIOResult[...]`` to save you some typing. - :attr:`~ReaderIOResultE` is an alias for ``RequiresContextIOResult[..., Exception]`` .. currentmodule:: returns.context.requires_context_future_result - :attr:`~RequiresContextFutureResultE` is an alias for ``RequiresContextFutureResult[..., Exception]`` - :attr:`~ReaderFutureResult` is an alias for ``RequiresContextFutureResult[...]`` to save you some typing. - :attr:`~ReaderFutureResultE` is an alias for ``RequiresContextFutureResult[..., Exception]`` FAQ --- How to create unit objects? ~~~~~~~~~~~~~~~~~~~~~~~~~~~ ``RequiresContext`` requires you to use one of the following methods: - ``from_value`` when you have a raw value - ``from_requires_context_result`` when you have ``RequiresContextResult`` - ``from_requires_context_ioresult`` when you have ``RequiresContextIOResult`` ``RequiresContextResult`` requires you to use one of the following methods: - ``from_value`` when you want to mark some raw value as a ``Success`` - ``from_failure`` when you want to mark some raw value as a ``Failure`` - ``from_result`` when you already have ``Result`` container - ``from_context`` when you have successful ``RequiresContext`` - ``from_failed_context`` when you have failed ``RequiresContext`` - ``from_typecast`` when you have ``RequiresContext[..., Result]`` ``RequiresContextIOResult`` requires you to use one of the following methods: - ``from_value`` when you want to mark some raw value as a ``Success`` - ``from_failure`` when you want to mark some raw value as a ``Failure`` - ``from_result`` when you already have ``Result`` container - ``from_io`` when you have successful ``IO`` container - ``from_failed_io`` when you have failed ``IO`` container - ``from_ioresult`` when you already have ``IOResult`` container - ``from_context`` when you have successful ``RequiresContext`` container - ``from_failed_context`` when you have failed ``RequiresContext`` container - ``from_result_context`` when you have ``RequiresContextResult`` container - ``from_typecast`` when you have ``RequiresContext[..., IOResult]`` ``RequiresContextFutureResult`` requires you to use one of the following methods: - ``from_value`` when you want to mark some raw value as a ``Success`` - ``from_failure`` when you want to mark some raw value as a ``Failure`` - ``from_result`` when you already have ``Result`` container - ``from_io`` when you have successful ``IO`` container - ``from_failed_io`` when you have failed ``IO`` container - ``from_ioresult`` when you already have ``IOResult`` container - ``from_future`` when you already have successful ``Future`` container - ``from_failed_future`` when you already have failed ``Future`` container - ``from_future_result`` when you already have ``FutureResult`` container - ``from_context`` when you have successful ``RequiresContext`` - ``from_failed_context`` when you have failed ``RequiresContext`` - ``from_result_context`` when you have ``RequiresContextResult`` container - ``from_ioresult_context`` when you have ``RequiresContextIOResult`` container - ``from_typecast`` when you have ``RequiresContext[..., IOResult]`` How can I access dependencies inside the context? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Use ``.ask()`` method! See :ref:`this guide `. 
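For a quick, self-contained illustration (the ``deps_plus`` function below is made up just for this answer):

.. code:: python

    >>> from returns.context import RequiresContext

    >>> def deps_plus(addition: int) -> RequiresContext[int, int]:
    ...     # ``.ask()`` returns a container that holds the dependencies themselves:
    ...     return RequiresContext[int, int].ask().map(
    ...         lambda deps: deps + addition,
    ...     )

    >>> assert deps_plus(1)(2) == 3  # deps == 2, addition == 1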
RequiresContext looks like a decorator with arguments ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Yes, this container might remind you of a traditional decorator with arguments, let's see an example: .. code:: python >>> def example(print_result: bool): ... def decorator(function): ... def factory(*args, **kwargs): ... original = function(*args, **kwargs) ... if print_result: ... print(original) ... return original ... return factory ... return decorator And it can be used like so: .. code:: python >>> @example(print_result=True) ... def my_function(first: int, second: int) -> int: ... return first + second >>> assert my_function(2, 3) == 5 5 We can model a similar idea with ``RequiresContext``: .. code:: python >>> from returns.context import RequiresContext >>> def my_function(first: int, second: int) -> RequiresContext[int, bool]: ... def factory(print_result: bool) -> int: ... original = first + second ... if print_result: ... print(original) ... return original ... return RequiresContext(factory) >>> assert my_function(2, 3)(False) == 5 >>> assert my_function(2, 3)(True) == 5 5 As you can see, it is easier to change the behaviour of a function with ``RequiresContext``, while a decorator with arguments glues values to a function forever. Decide when you need which behaviour carefully. Why can’t we use RequiresContext[Result, e] instead of RequiresContextResult? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We actually can! But, it is harder to write. And ``RequiresContextResult`` is actually the very same thing as ``RequiresContext[Result, e]``, but has a nicer API: .. code:: python x: RequiresContext[Result[int, str], int] x.map(lambda result: result.map(lambda number: number + 1)) # Is the same as: y: RequiresContextResult[int, str, int] y.map(lambda number: number + 1) The second one looks better, doesn't it? The same applies for ``RequiresContextIOResult`` and ``RequiresContextFutureResult`` as well. Why do I have to use explicit type annotation for ask method? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Because ``mypy`` cannot possibly know the type of the current context. This is hard even for a plugin. So, using this technique is better: .. code:: python from returns.context import RequiresContext def some_context(*args, **kwargs) -> RequiresContext[str, int]: def factory(deps: int) -> RequiresContext[str, int]: ... return RequiresContext[str, int].ask().bind(factory) What is the difference between DI and RequiresContext? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The Dependency Injection pattern and the `Inversion of Control `_ principle form a lot of ideas and tooling that do pretty much the same as the ``RequiresContext`` container. What is the difference? Why do we need each of them? Let's find out! Tools like `dependencies `_ or `punq `_ try to: 1. Inspect (by name or type respectively) function or class that needs dependencies 2. Build the required dependency tree from the source defined in the service container There are other tools like ``inject`` that also invade your code with an ``@inject`` decorator. ``RequiresContext`` works completely differently. It respects your code and does not try to inspect it in any manner. It also does not care about building dependencies at all. All it does is provide a simple API to compose functions that need additional context (or dependencies) to run.
You can even use them together: ``RequiresContext`` will pass dependencies built by ``punq`` (or any other tool of your choice) as a ``deps`` parameter to ``RequiresContext`` instance. When to use which? Let's dig into it! - ``RequiresContext`` offers explicit context passing for the whole function stack inside your program. This means two things: you will have to pass it through all your code and use it everywhere inside your program explicitly, when you need to access the environment and dependencies - Traditional ``DI`` allows to leave a lot of code unaware of dependency injection. Because you don't have to maintain the context everywhere. You just need to adjust your API to meet the dependency injector requirements. On the other hand, you lose explicitness here. So when to use ``RequiresContext``? 1. When you write pure functional code 2. When you want to know which code relies on context and which is free from it, ``RequiresContext`` makes this explicit and typed 3. When you rely on types inside your program 4. When you want to rely on functions rather than magic When not to use ``RequiresContext`` and use traditional DI? 1. When you already have a lot of code written in a different approach: in OOP and/or imperative styles 2. When you need to pass dependencies into a very deep level of your call stack implicitly (without modifying the whole stack), this is called magic 3. When you not rely on types for dependencies. There are cases when DI is made by names or tags Here's an example that might give you a better understanding of how ``RequiresContext`` is used on real and rather big projects: .. code:: python from typing import Callable, Dict, Protocol, final from returns.io import IOResultE from returns.context import ReaderIOResultE class _SyncPermissionsDeps(Protocol): fetch_metadata: Callable[[], IOResultE['Metadata']] get_user_permissions: Callable[['Metadata'], Dict[int, str]] # pure update_bi_permissions: Callable[[Dict[int, str]], IOResultE['Payload']] def sync_permissions() -> ReaderIOResultE[_SyncPermissionsDeps, 'Payload']: """ This functions runs a scheduled task once a day. It syncs permissions from the metadata storage to our BI system. """ def factory(deps: _SyncPermissionsDeps) -> IOResultE['Payload']: return deps.fetch_metadata().map( deps.get_user_permissions, ).bind_ioresult( deps.update_bi_permissions, ) return ReaderIOResult(factory) And then it is called like so: .. code:: python # tasks.py from celery import shared_task from returns.functions import raise_exception from logic.usecases.sync_permissions import sync_permissions from infrastructure.implemented import Container from infrastructure.services import bi from infrastructure.repositories import db @shared_task(autoretry_for=(ConnectionError,), max_retries=3) def queue_sync_permissions(): # Building the container with dependencies to pass it into the context. # We also make sure that we don't forget to raise internal exceptions # and trigger celery retries. return sync_permissions().alt(raise_exception)(Container( fetch_metadata=db.select_user_metadata, get_user_permissions=bi.permissions_from_user, update_bi_permissions=bi.put_user_permissions, )) Further reading --------------- - `Enforcing Single Responsibility Principle in Python `_ - `Typed functional Dependency Injection in Python `_ - `Three-Useful-Monads: Reader `_ - `Getting started with fp-ts: Reader `_ - `Reader & Constructor-based Dependency Injection in Scala - friend or foe? `_ API Reference ------------- RequiresContext ~~~~~~~~~~~~~~~ .. 
autoclasstree:: returns.context.requires_context :strict: .. automodule:: returns.context.requires_context :members: RequiresContextResult ~~~~~~~~~~~~~~~~~~~~~ .. autoclasstree:: returns.context.requires_context_result :strict: .. automodule:: returns.context.requires_context_result :members: RequiresContextIOResult ~~~~~~~~~~~~~~~~~~~~~~~ .. autoclasstree:: returns.context.requires_context_ioresult :strict: .. automodule:: returns.context.requires_context_ioresult :members: RequiresContextFutureResult ~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. autoclasstree:: returns.context.requires_context_future_result :strict: .. automodule:: returns.context.requires_context_future_result :members: returns-0.24.0/docs/pages/contrib/000077500000000000000000000000001472312074000167665ustar00rootroot00000000000000returns-0.24.0/docs/pages/contrib/hypothesis_plugins.rst000066400000000000000000000112421472312074000234600ustar00rootroot00000000000000.. _hypothesis-plugins: hypothesis plugin ================= We provide several extra features for Hypothesis users. And encourage to use it together with ``returns``. Installation ------------ You will need to install ``hypothesis`` separately. It is not bundled with ``returns``. We also require ``anyio`` package for this plugin to work with async laws. hypothesis entrypoint --------------------- We support a ``hypothesis`` entrypoint that is executed on ``hypothesis`` import. There we are registering all our containers as strategies. So, you don't have to. Example: .. code:: python from returns.result import Result from hypothesis import strategies as st assert st.from_type(Result).example() This is a convenience thing only. strategy_from_container ----------------------- We provide a utility function to create ``hypothesis`` strategy from any container. You can use it to easily register your own containers. .. code:: python from hypothesis import strategies as st from returns.contrib.hypothesis.containers import strategy_from_container st.register_type_strategy( YourContainerClass, strategy_from_container(YourContainerClass), ) You can also pass ``use_init`` keyword argument if you wish to use ``__init__`` method to instantiate your containers. Turned off by default. Example: .. code:: python st.register_type_strategy( YourContainerClass, strategy_from_container(YourContainerClass, use_init=True), ) Or you can write your own ``hypothesis`` strategy. It is also fine. check_all_laws -------------- We also provide a very powerful mechanism of checking defined container laws. It works in a combination with "Laws as Values" feature we provide in the core. .. code:: python from returns.contrib.hypothesis.laws import check_all_laws from your.module import YourCustomContainer check_all_laws(YourCustomContainer) This one line of code will generate ~100 tests for all defined law in both ``YourCustomContainer`` and all its super types, including our internal ones. We also provide a way to configure the checking process with ``settings_kwargs``: .. code:: python check_all_laws(YourCustomContainer, settings_kwargs={'max_examples': 500}) This will increase the number of generated test to 500. We support all kwargs from ``@settings``, see `@settings docs `_. You can also change how ``hypothesis`` creates instances of your container. By default, we use ``.from_value``, ``.from_optional``, and ``.from_failure`` if we are able to find them. But, you can also pass types without these methods, but with ``__init__`` defined: .. 
code:: python from typing import Callable, TypeVar, final from returns.interfaces.mappable import Mappable1 from returns.primitives.container import BaseContainer from returns.primitives.hkt import SupportsKind1 _ValueType = TypeVar('_ValueType') _NewValueType = TypeVar('_NewValueType') @final class Number( BaseContainer, SupportsKind1['Number', _ValueType], Mappable1[_ValueType], ): def __init__(self, inner_value: _ValueType) -> None: super().__init__(inner_value) def map( self, function: Callable[[_ValueType], _NewValueType], ) -> 'Number[_NewValueType]': return Number(function(self._inner_value)) # We want to allow ``__init__`` method to be used: check_all_laws(Number, use_init=True) As you see, we don't support any ``from`` methods here. But, ``__init__`` would be used to generate values thanks to ``use_init=True``. By default, we don't allow to use ``__init__``, because there are different complex types like ``Future``, ``ReaderFutureResult``, etc that have complex ``__init__`` signatures. And we don't want to mess with them. Warning:: Checking laws is not compatible with ``pytest-xdist``, because we use a lot of global mutable state there. Please, use ``returns_lawful`` marker to exclude them from ``pytest-xdist`` execution plan. Further reading --------------- - `Projects Extending hypothesis `_ API Reference ------------- Types we have already registered for you ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. automodule:: returns.contrib.hypothesis._entrypoint :members: DSL to register custom containers ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. automodule:: returns.contrib.hypothesis.containers :members: DSL to define laws ~~~~~~~~~~~~~~~~~~ .. autoclasstree:: returns.primitives.laws :strict: .. automodule:: returns.primitives.laws :members: Plugin internals ~~~~~~~~~~~~~~~~ .. automodule:: returns.contrib.hypothesis.laws :members: returns-0.24.0/docs/pages/contrib/mypy_plugins.rst000066400000000000000000000056701472312074000222670ustar00rootroot00000000000000.. _mypy-plugins: mypy plugin =========== We provide a custom ``mypy`` plugin to fix existing issues, provide new awesome features, and improve type-safety of things developers commonly use. Installation ------------ ``returns`` has ``[compatible-mypy]`` extra to install the supported version. .. code:: bash pip install 'returns[compatible-mypy]' Or you can install ``mypy`` separately and check that version is supported. Enabling our mypy plugin ------------------------ To install our ``mypy`` plugin add it to the ``plugins`` section of the config file (``setup.cfg`` or ``mypy.ini``): .. code:: ini [mypy] plugins = returns.contrib.mypy.returns_plugin Or in ``pyproject.toml``: .. code:: toml [tool.mypy] plugins = ["returns.contrib.mypy.returns_plugin"] We recommend to always add our plugin as the first one in chain. Configuration ------------- You can have a look at the suggested ``mypy`` `configuration `_ in our own repository. You can also use `nitpick `_ tool to enforce the same ``mypy`` configuration for all your projects. We recommend to use our own setup. Add this to your ``pyproject.toml``: .. code:: toml [tool.nitpick] style = "https://raw.githubusercontent.com/wemake-services/wemake-python-styleguide/master/styles/mypy.toml" And use ``flake8`` to lint that configuration defined in the setup matches yours. This will allow to keep them in sync with the upstream. 
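Once ``nitpick`` is installed as a ``flake8`` plugin, no extra command is needed: a regular lint run will also flag a ``mypy`` configuration that drifted from the shared style (shown here only as a sketch of the workflow):

.. code:: bash

    flake8 .  # nitpick violations are reported together with other lint errors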
Supported features ------------------ - ``kind`` feature adds Higher Kinded Types (HKT) support - ``curry`` feature allows to write typed curried functions - ``partial`` feature allows to write typed partial application - ``flow`` feature allows to write better typed functional pipelines with ``flow`` function - ``pipe`` feature allows to write better typed functional pipelines with ``pipe`` function - ``do-notation`` feature allows using :ref:`do-notation` Further reading --------------- - `Mypy official docs `_ - `Mypy plugins docs `_ API Reference ------------- Plugin definition ~~~~~~~~~~~~~~~~~ .. automodule:: returns.contrib.mypy._consts :members: .. autoclasstree:: returns.contrib.mypy.returns_plugin :strict: .. automodule:: returns.contrib.mypy.returns_plugin :members: Kind ~~~~ .. automodule:: returns.contrib.mypy._features.kind :members: Curry ~~~~~ .. automodule:: returns.contrib.mypy._features.curry :members: Partial ~~~~~~~ .. automodule:: returns.contrib.mypy._features.partial :members: Flow ~~~~ .. automodule:: returns.contrib.mypy._features.flow :members: Pipe ~~~~ .. automodule:: returns.contrib.mypy._features.pipe :members: Do notation ~~~~~~~~~~~ .. automodule:: returns.contrib.mypy._features.do_notation :members: returns-0.24.0/docs/pages/contrib/pytest_plugins.rst000066400000000000000000000114631472312074000226160ustar00rootroot00000000000000.. _pytest-plugins: pytest plugin ============= We use special ``pytest`` plugin to improve the testing side of this project. For example: it is a popular request to ensure that your container does have its error pass handled. Because otherwise, developers might forget to do it properly. It is impossible to fix with types, but is really simple to check with tests. Installation ------------ You will need to install ``pytest`` separately. Usage ----- There's no need to install anything special. ``pytest`` will automatically find and use this plugin. To use it in your tests, request ``returns`` fixture like so: .. code:: python def test_my_container(returns): ... assert_equal ~~~~~~~~~~~~ We have a special helper to compare containers' equality. It might be an easy task for two ``Result`` or ``Maybe`` containers, but it is not very easy for two ``ReaderResult`` or ``FutureResult`` instances. Take a look: .. code:: python >>> from returns.result import Result >>> from returns.context import Reader >>> assert Result.from_value(1) == Result.from_value(1) >>> Reader.from_value(1) == Reader.from_value(1) False So, we can use :func:`~returns.primitives.asserts.assert_equal` or ``returns.assert_equal`` method from our ``pytest`` fixture: .. code:: python >>> from returns.result import Success >>> from returns.context import Reader >>> from returns.contrib.pytest import ReturnsAsserts >>> def test_container_equality(returns: ReturnsAsserts): ... returns.assert_equal(Success(1), Success(1)) ... returns.assert_equal(Reader.from_value(1), Reader.from_value(1)) >>> # We only run these tests manually, because it is a doc example: >>> returns_fixture = getfixture('returns') >>> test_container_equality(returns_fixture) is_error_handled ~~~~~~~~~~~~~~~~ The first helper we define is ``is_error_handled`` function. It tests that containers do handle error track. .. code:: python >>> from returns.result import Failure, Success >>> from returns.contrib.pytest import ReturnsAsserts >>> def test_error_handled(returns: ReturnsAsserts): ... assert not returns.is_error_handled(Failure(1)) ... assert returns.is_error_handled( ... 
Failure(1).lash(lambda _: Success('default value')), ... ) >>> # We only run these tests manually, because it is a doc example: >>> returns_fixture = getfixture('returns') >>> test_error_handled(returns_fixture) We recommend to unit test big chunks of code this way. This is helpful for big pipelines where you need at least one error handling at the very end. This is how it works internally: - Methods like ``fix`` and ``lash`` mark errors inside the container as handled - Methods like ``map`` and ``alt`` just copies the error handling state from the old container to a new one, so there's no need to re-handle the error after these methods - Methods like ``bind`` create new containers with unhandled errors .. note:: We use monkeypathing of containers inside tests to make this check possible. They are still purely functional inside. It does not affect production code. assert_trace ~~~~~~~~~~~~ Sometimes we have to know if a container is created correctly in a specific point of our flow. ``assert_trace`` helps us to check exactly this by identifying when a container is created and looking for the desired function. .. code:: python >>> from returns.result import Result, Success, Failure >>> from returns.contrib.pytest import ReturnsAsserts >>> def desired_function(arg: str) -> Result[int, str]: ... if arg.isnumeric(): ... return Success(int(arg)) ... return Failure('"{0}" is not a number'.format(arg)) >>> def test_if_failure_is_created_at_convert_function( ... returns: ReturnsAsserts, ... ): ... with returns.assert_trace(Failure, desired_function): ... Success('not a number').bind(desired_function) >>> def test_if_success_is_created_at_convert_function( ... returns: ReturnsAsserts, ... ): ... with returns.assert_trace(Success, desired_function): ... Success('42').bind(desired_function) >>> # We only run these tests manually, because it is a doc example: >>> returns_fixture = getfixture('returns') >>> test_if_failure_is_created_at_convert_function(returns_fixture) >>> test_if_success_is_created_at_convert_function(returns_fixture) markers ~~~~~~~ We also ship a bunch of pre-defined markers with ``returns``: - ``returns_lawful`` is used to mark all tests generated by our :ref:`hypothesis-plugins` Further reading --------------- - `pytest docs `_ API Reference ------------- .. autoclasstree:: returns.contrib.pytest.plugin :strict: .. automodule:: returns.contrib.pytest.plugin :members: .. automodule:: returns.primitives.asserts :members: returns-0.24.0/docs/pages/converters.rst000066400000000000000000000031251472312074000202530ustar00rootroot00000000000000.. _converters: Converters ========== We have several helpers to convert containers from one type to another and back again. Maybe and Result ---------------- We have two converters to work with ``Result <-> Maybe`` transformations: .. currentmodule:: returns.converters - :func:`~.maybe_to_result` that converts ``Maybe`` to ``Result`` - :func:`~.result_to_maybe` that converts ``Result`` to ``Maybe`` That's how they work: .. 
code:: python >>> from returns.converters import maybe_to_result, result_to_maybe >>> from returns.maybe import Maybe, Some, Nothing >>> from returns.result import Failure, Result, Success >>> result: Result[int, Exception] = Success(1) >>> maybe: Maybe[int] = result_to_maybe(result) >>> assert maybe == Some(1) >>> new_result: Result[int, None] = maybe_to_result(maybe) >>> assert new_result == Success(1) >>> failure_with_default: Result[int, str] = maybe_to_result(Nothing, 'abc') >>> assert failure_with_default == Failure('abc') Note that the type changes. Also note that ``Success(None)`` will be converted to ``Nothing``. flatten ------- You can also use :func:`flatten ` to merge nested containers together: .. code:: python >>> from returns.converters import flatten >>> from returns.maybe import Some >>> from returns.result import Success >>> from returns.io import IO >>> assert flatten(IO(IO(1))) == IO(1) >>> assert flatten(Some(Some(1))) == Some(1) >>> assert flatten(Success(Success(1))) == Success(1) API Reference ------------- .. automodule:: returns.converters :members: returns-0.24.0/docs/pages/create-your-own-container.rst000066400000000000000000000221751472312074000231070ustar00rootroot00000000000000.. _create-your-own-container: Create your own container ========================= This tutorial will guide you through the process of creating your own containers. Step 0: Motivation ------------------ First things first, why would anyone want to create a custom container? The great idea about "containers" in functional programming is that they can be literally anything. There are endless use-cases. You can create your own primitives for working with some language-or-framework specific problem, or just model your business domain. You can copy ideas from other languages or just compose existing containers for better usability (like ``IOResult`` is the composition of ``IO`` and ``Result``). .. rubric:: Example We are going to implement a ``Pair`` container for this example. What is a ``Pair``? Well, it is literally a pair of two values. No more, no less. Similar to a ``Tuple[FirstType, SecondType]``. But with extra goodies. .. note:: You can find all `code samples here `_. Step 1: Choosing right interfaces --------------------------------- After you came up with the idea, you will need to make a decision: what capabilities must my container have? Basically, you should decide what :ref:`interfaces` you will subtype and what methods and laws will be present in your type. You can create just a :class:`returns.interfaces.mappable.MappableN` or choose a full-featured :class:`returns.interfaces.container.ContainerN`. You can also choose some specific interfaces to use, like :class:`returns.interfaces.specific.result.ResultLikeN` or any other. Summing up, decide what laws and methods you need to solve your problem. And then subtype the interfaces that provide these methods and laws. .. rubric:: Example What interfaces does a ``Pair`` type need?
- :class:`returns.interfaces.equable.Equable`, because two ``Pair`` instances can be compared - :class:`returns.interfaces.mappable.MappableN`, because the first type can be composed with pure functions - :class:`returns.interfaces.bindable.BindableN`, because a ``Pair`` can be bound to a function returning a new ``Pair`` based on the first type - :class:`returns.interfaces.altable.AltableN`, because the second type can be composed with pure functions - :class:`returns.interfaces.lashable.LashableN`, because a ``Pair`` can be bound to a function returning a new ``Pair`` based on the second type Now, after we know about all interfaces we would need, let's find pre-defined aliases we can reuse. Turns out, there are some of them! - :class:`returns.interfaces.bimappable.BiMappableN` which combines ``MappableN`` and ``AltableN`` - :class:`returns.interfaces.swappable.SwappableN` is an alias for ``BiMappableN`` with a new method called ``.swap`` to change values order Let's look at the result: .. code:: python >>> from typing import Callable, TypeVar, Tuple, final >>> from returns.interfaces import bindable, equable, lashable, swappable >>> from returns.primitives.container import BaseContainer >>> from returns.primitives.hkt import SupportsKind2 >>> _FirstType = TypeVar('_FirstType') >>> _SecondType = TypeVar('_SecondType') >>> _NewFirstType = TypeVar('_NewFirstType') >>> _NewSecondType = TypeVar('_NewSecondType') >>> @final ... class Pair( ... BaseContainer, ... SupportsKind2['Pair', _FirstType, _SecondType], ... bindable.Bindable2[_FirstType, _SecondType], ... swappable.Swappable2[_FirstType, _SecondType], ... lashable.Lashable2[_FirstType, _SecondType], ... equable.Equable, ... ): ... def __init__( ... self, inner_value: Tuple[_FirstType, _SecondType], ... ) -> None: ... super().__init__(inner_value) .. note:: A special note on :class:`returns.primitives.container.BaseContainer`. It is a very useful class with lots of pre-defined features, like: immutability, better cloning, serialization, and comparison. You can skip it if you wish, but it is highly recommended. Later we will talk about an actual implementation of all required methods. Step 2: Initial implementation ------------------------------ So, let's start writing some code! We would need to implement all interface methods, otherwise ``mypy`` won't be happy. That's what it currently says on our type definition: .. code:: error: Final class test_pair1.Pair has abstract attributes "alt", "bind", "equals", "lash", "map", "swap" Looks like it already knows what methods should be there! Ok, let's drop some initial and straightforward implementation. We will later make it more complex step by step. .. literalinclude:: ../../tests/test_examples/test_your_container/test_pair1.py :linenos: You can check our resulting source with ``mypy``. It would be happy this time. Step 3: New interfaces ---------------------- As you can see, our existing interfaces do not cover everything. We can potentially want several extra things: 1. A method that takes two arguments and returns a new ``Pair`` instance 2. A named constructor to create a ``Pair`` from a single value 3. A named constructor to create a ``Pair`` from two values We can define an interface just for this! It would also be nice to add all other interfaces there as supertypes. That's how it is going to look: .. literalinclude:: ../../tests/test_examples/test_your_container/test_pair2.py :linenos: :pyobject: PairLikeN Awesome! Now we have a new interface to implement. Let's do that! ..
literalinclude:: ../../tests/test_examples/test_your_container/test_pair2.py :linenos: :pyobject: Pair.pair .. literalinclude:: ../../tests/test_examples/test_your_container/test_pair2.py :linenos: :pyobject: Pair.from_unpaired Looks like we are done! Step 4: Writing tests and docs ------------------------------- The best part about this type is that it is pure. So, we can write our tests inside the docs! We are going to use the `doctests `_ builtin module for that. This gives us several key benefits: - All our docs have usage examples - All our examples are correct, because they are executed and tested - We don't need to write regular boring tests Let's add docs and doctests! Let's use the ``map`` method as a short example: .. literalinclude:: ../../tests/test_examples/test_your_container/test_pair3.py :linenos: :pyobject: Pair.map By adding these simple tests we would already have 100% coverage. But what if we could completely skip writing tests and still have 100%? Let's discuss how we can achieve that with "Laws as values". Step 5: Checking laws --------------------- We already ship lots of laws with our interfaces. See our docs on :ref:`laws and checking them `. Moreover, you can also define your own laws! Let's add them to our ``PairLikeN`` interface. Let's start with the laws definition: .. literalinclude:: ../../tests/test_examples/test_your_container/test_pair4.py :linenos: :pyobject: _LawSpec And then let's add them to our ``PairLikeN`` interface: .. literalinclude:: ../../tests/test_examples/test_your_container/test_pair4.py :linenos: :pyobject: PairLikeN :emphasize-lines: 9-12 The last thing to do is to call ``check_all_laws(Pair, use_init=True)`` to generate 10 ``hypothesis`` test cases with hundreds of real test cases inside. Here's the final result of our brand new ``Pair`` type: .. literalinclude:: ../../tests/test_examples/test_your_container/test_pair4.py :linenos: Step 6: Writing type-tests --------------------------- .. note:: You can find all `type-tests here `_. The next thing we want is to write a type-test! What is a type-test? It is a special kind of test for your typing. We run ``mypy`` on top of tests and use snapshots to assert the result. We recommend using `pytest-mypy-plugins `_. `Read more `_ about how to use it. Let's start with a simple test to make sure our ``.pair`` function works correctly: .. warning:: Please don't use the ``env:`` property the way we do here. We need it since we store our example in the ``tests/`` folder. And we have to tell ``mypy`` how to find it. .. literalinclude:: ../../typesafety/test_examples/test_your_container/test_pair4_def.yml :linenos: OK, now let's try to raise an error by using it incorrectly: .. literalinclude:: ../../typesafety/test_examples/test_your_container/test_pair4_error.yml :linenos: Step 7: Reusing code -------------------- The last (but not least!) thing you need to know is that you can reuse all the code we already have for this new ``Pair`` type. This is because of our :ref:`hkt` feature. So, let's say we want to use the native :func:`~returns.pointfree.map.map_` pointfree function with our new ``Pair`` type. Let's test that it will work correctly: .. literalinclude:: ../../typesafety/test_examples/test_your_container/test_pair4_reuse.yml :linenos: Yes, it works! Now you have a fully working, typed, documented, lawful, and tested primitive. You can build any other primitive you need for your business logic or infrastructure. returns-0.24.0/docs/pages/curry.rst000066400000000000000000000161111472312074000172240ustar00rootroot00000000000000..
_curry: Curry ===== This module is dedicated to partial application. We support two types of partial application: ``@curry`` and ``partial``. ``@curry`` is a new concept for most Python developers, but Python already has a great tool to use partial application: `functools.partial `_ The only problem with it is the lack of typing. Let's see what problems do we solve with this module. .. warning:: This module requires :ref:`our mypy plugin ` to be present. Without it we will fallback to the original behaviour. .. _partial: Partial ------- Here's how typing works there: .. code:: python from functools import partial def some_function(first: int, second: int) -> float: return first / second reveal_type(partial(some_function, 1)) # => functools.partial[builtins.float*] # => Which is really: `def (*Any, **Any) -> builtins.float` And compare it with our solution: .. code:: python from returns.curry import partial def some_function(first: int, second: int) -> float: return first / second reveal_type(partial(some_function, 1)) # => def (second: builtins.int) -> builtins.float* # => Which is fair! .. note:: We still use ``functools.partial`` inside. We just improve the typings. Generics ~~~~~~~~ One more problem is generics support in ``functools.partial``. Here's the comparison: .. code:: python from functools import partial from typing import List, TypeVar T = TypeVar('T') x: List[int] def some_function(first: List[T], second: int) -> T: return first[second] reveal_type(partial(some_function, x)) # => functools.partial[T`-1] # => Which is broken! And our solution works fine: .. code:: python from returns.curry import partial reveal_type(partial(some_function, x)) # => def (second: builtins.int) -> builtins.int* We also work with complex generic with multiple arguments or with multiple generics. The only known problem is that passing explicit generic like ``[1, 2, 3]`` will resolve in ``List[Any]``. Because ``mypy`` won't be able to infer this type for some reason. The reasonable work-around is to pass annotated variables like in the example above. Types and Instances ~~~~~~~~~~~~~~~~~~~ We can also work with types and instances. Because they are callable too! .. code:: python from returns.curry import partial class Test(object): def __init__(self, arg: int) -> None: self.arg = arg def __call__(self, other: int) -> int: return self.arg + other reveal_type(partial(Test, 1)) # N: Revealed type is 'def () -> ex.Test' reveal_type(partial(Test(1), 1)) # N: Revealed type is 'def () -> builtins.int' No differences with regular callables at all. Overloads ~~~~~~~~~ We also support working with ``@overload`` definitions. It also looks the same way: .. code:: python from typing import overload from returns.curry import partial @overload def test(a: int, b: str) -> str: ... @overload def test(a: int) -> int: ... @overload def test(a: str) -> None: # won't match! ... def test(a, b=None): ... reveal_type(partial(test, 1)) # N: Revealed type is 'Overload(def (b: builtins.str) -> builtins.str, def () -> builtins.int)' From this return type you can see that we work with all matching cases and discriminate unmatching ones. @curry ------ ``curry`` allows to provide only a subset of arguments to a function. And it won't be called until all the required arguments are provided. In contrast to ``partial`` which works on the calling stage, ``@curry`` works best when defining a new function. .. code:: python >>> from returns.curry import curry >>> @curry ... def function(first: int, second: str) -> bool: ... 
return len(second) > first >>> assert function(1)('a') is False >>> assert function(1, 'a') is False >>> assert function(2)('abc') is True >>> assert function(2, 'abc') is True Note that providing invalid arguments will raise ``TypeError``: .. code:: pycon >>> function(1, 2, 3) Traceback (most recent call last): ... TypeError: too many positional arguments >>> function(a=1) Traceback (most recent call last): ... TypeError: got an unexpected keyword argument 'a' This is really helpful when working with the ``.apply()`` method of containers. .. warning:: We recommend using :ref:`partial ` instead of ``@curry`` when possible, because it's much faster. Typing ~~~~~~ ``@curry`` functions are also fully typed with our custom ``mypy`` plugin. Let's see what the types look like for a curried function: .. code:: python >>> from returns.curry import curry >>> @curry ... def zero(a: int, b: float, *, kw: bool) -> str: ... return str(a - b) if kw else '' >>> assert zero(1)(0.3)(kw=True) == '0.7' >>> assert zero(1)(0.3, kw=False) == '' # If we reveal the type, it would be quite big: reveal_type(zero) # Overload( # def (a: builtins.int) -> Overload( # def (b: builtins.float, *, kw: builtins.bool) -> builtins.str, # def (b: builtins.float) -> def (*, kw: builtins.bool) -> builtins.str # ), # def (a: builtins.int, b: builtins.float) -> def (*, kw: builtins.bool) # -> builtins.str, # def (a: builtins.int, b: builtins.float, *, kw: builtins.bool) # -> builtins.str # ) It reveals to us that there are 4 possible ways to call this function. And we type all of them with the `overload `_ type. When you provide any arguments, you discriminate some overloads and choose a more specific path: .. code:: python reveal_type(zero(1, 2.0)) # By providing this set of arguments we have chosen this path: # # def (a: builtins.int, b: builtins.float) -> def (*, kw: builtins.bool) # -> builtins.str, # # And the revealed type would be: # # def (*, kw: builtins.bool) -> builtins.str # It works with functions, instance, class, and static methods, including generics. See ``Limitations`` in the API Reference. FAQ --- Why don't you support `*` and `**` arguments? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ When you use ``partial(some, *my_args)`` or ``partial(some, **my_args)`` or both of them at the same time, we fall back to the default return type. The same happens with ``curry``. Why? There are several problems: - Because ``mypy`` cannot infer what arguments are there inside this ``my_args`` variable - Because ``curry`` cannot know when to stop accepting ``*args`` and ``**kwargs`` - And there are possibly other problems! Our advice is not to use ``*args`` and ``**kwargs`` with ``partial`` and ``curry``. It is still possible, but in this case we will fall back to ``Any``. Further reading --------------- - `functools.partial `_ - `Currying `_ - `@curry decorator `_ API Reference ------------- .. automodule:: returns.curry :members: returns-0.24.0/docs/pages/development.rst000066400000000000000000000056771472312074000204170ustar00rootroot00000000000000.. _development-tooling: Development tooling =================== Tracing Failures ---------------- Sometimes we want to trace where a ``Failure`` has occurred in our system; ``returns`` provides a way to trace those failures. By default tracing is disabled. The trace is accessible by the :meth:`trace ` property that is available for ``Result`` and ``IOResult`` containers.
It's basically a list containing all :class:`inspect.FrameInfo` objects from the call stack when the ``Failure`` was originally created. To enable it you can use :func:`collect_traces `. See examples below: You can use it as a context manager: .. code:: python >>> from inspect import FrameInfo >>> from returns.result import Failure, Result >>> from returns.primitives.tracing import collect_traces >>> def get_failure(argument: str) -> Result[str, str]: ... return Failure(argument) >>> non_traced_failure = get_failure('Normal Failure') >>> with collect_traces(): ... traced_failure = get_failure('Traced Failure') >>> assert non_traced_failure.trace is None >>> assert isinstance(traced_failure.trace, list) >>> assert all(isinstance(trace_line, FrameInfo) for trace_line in traced_failure.trace) >>> for trace_line in traced_failure.trace: ... print(f"{trace_line.filename}:{trace_line.lineno} in `{trace_line.function}`") # doctest: +SKIP ... /returns/returns/result.py:529 in `Failure` /example_folder/example.py:5 in `get_failure` /example_folder/example.py:1 in `` Or as a decorator: .. code:: python >>> from inspect import FrameInfo >>> from returns.io import IOFailure, IOResult >>> from returns.result import Failure, Result >>> from returns.primitives.tracing import collect_traces >>> @collect_traces ... def traced_function(value: str) -> IOResult[str, str]: ... return IOFailure(value) >>> non_traced_failure = Failure('Normal Failure') >>> traced_failure = traced_function('Traced Failure') >>> assert non_traced_failure.trace is None >>> assert isinstance(traced_failure.trace, list) >>> assert all(isinstance(trace_line, FrameInfo) for trace_line in traced_failure.trace) >>> for trace_line in traced_failure.trace: ... print(f"{trace_line.filename}:{trace_line.lineno} in `{trace_line.function}`") # doctest: +SKIP ... /returns/returns/result.py:525 in `Failure` /returns/returns/io.py:852 in `IOFailure` /example_folder/example.py:7: in `traced_function` /usr/lib/python3.8/contextlib.py:75 in `inner` /example_folder/example.py:1 in `` .. warning:: Activating trace can make your program noticeably slower if it has many points where ``Failure`` is often created. .. warning:: ``collect_traces`` is not thread safe, beware to use it with threading! .. warning:: Traces are meant to be used during development only. API Reference ------------- .. automodule:: returns.primitives.tracing :members: returns-0.24.0/docs/pages/do-notation.rst000066400000000000000000000130301472312074000203100ustar00rootroot00000000000000.. _do-notation: Do Notation =========== .. note:: Technical note: this feature requires :ref:`mypy plugin `. All containers can be easily composed with functions that can take a single argument. But, what if we need to compose two containers with a function with two arguments? That's not so easy. Of course, we can use :ref:`curry` and ``.apply`` or some imperative code. But, it is not very easy to write and read. This is why multiple functional languages have a concept of "do-notation". It allows you to write beautiful imperative code. Regular containers ------------------ Let's say we have a function called ``add`` which is defined like this: .. code:: python >>> def add(one: int, two: int) -> int: ... return one + two And we have two containers: ``IO(2)`` and ``IO(3)``. How can we easily get ``IO(5)`` in this case? Luckily, ``IO`` defines :meth:`returns.io.IO.do` which can help us: .. code:: python >>> from returns.io import IO >>> assert IO.do( ... add(first, second) ... for first in IO(2) ... 
for second in IO(3) ... ) == IO(5) Notice that you don't have to write any complicated code. Everything is pythonic and readable. However, we still need to explain what ``for`` does here. It uses Python's ``__iter__`` method which returns an iterable with strictly a single raw value inside. .. warning:: Please, don't use ``for x in container`` outside of do-notation. It does not make much sense. Basically, for ``IO(2)`` it will return just ``2``. Then, ``IO.do`` wraps it into ``IO`` once again. Errors ~~~~~~ Containers like ``Result`` and ``IOResult`` can sometimes represent errors. In this case, the do-notation expression will return the first error found. For example: .. code:: python >>> from returns.result import Success, Failure, Result >>> assert Result.do( ... first + second ... for first in Failure('a') ... for second in Success(3) ... ) == Failure('a') This behavior is consistent with ``.map`` and other methods. Async containers ---------------- We also support async containers like ``Future`` and ``FutureResult``. They work in a similar way to regular sync containers, but they require ``async for`` expressions instead of regular ``for`` ones. And because of that, they cannot be used outside of an ``async def`` context. Usage example: .. code:: python >>> import anyio >>> from returns.future import Future >>> from returns.io import IO >>> async def main() -> IO[int]: ... return await Future.do( ... first + second ... async for first in Future.from_value(1) ... async for second in Future.from_value(2) ... ) >>> assert anyio.run(main) == IO(3) FAQ --- Why don't we allow mixing different container types? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ One might ask, why don't we allow mixing multiple container types in a single do-notation expression? For example, this code will not do what you expect: .. code:: python >>> from returns.result import Result, Success >>> from returns.io import IOResult, IOSuccess >>> assert Result.do( ... first + second ... for first in Success(2) ... for second in IOSuccess(3) # Notice the IO part here ... ) == Success(5) This code will raise a mypy error at the ``for second in IOSuccess(3)`` part: .. code:: Invalid type supplied in do-notation: expected "returns.result.Result[Any, Any]", got "returns.io.IOSuccess[builtins.int*]" Notice that the ``IO`` part is gone in the final result. This is not right. And we can't track this in any manner. So, we require all containers to have the same type. The code above must be rewritten as: .. code:: python >>> from returns.result import Success >>> from returns.io import IOResult, IOSuccess >>> assert IOResult.do( ... first + second ... for first in IOResult.from_result(Success(2)) ... for second in IOSuccess(3) ... ) == IOSuccess(5) Now, it is correct. The ``IO`` part is safe, the final result is correct. And mypy is happy. Why don't we allow ``if`` conditions in generator expressions? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ At the moment, using ``if`` conditions inside generator expressions passed into the ``.do`` method is not allowed. Why? Because if the ``if`` condition returns ``False``, we will have an empty iterable and ``StopIteration`` will be thrown. .. code:: python >>> from returns.io import IO >>> IO.do( ... first + second ... for first in IO(2) ... for second in IO(3) ... if second > 10 ... ) Traceback (most recent call last): ... StopIteration It will raise: ..
code:: Using "if" conditions inside a generator is not allowed Instead, use conditions and checks inside your logic, not inside your generator. Why do we require a literal expression in do-notation? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ This code will work in runtime, but will raise a mypy error: .. code:: python >>> from returns.result import Result, Success >>> expr = ( ... first + second ... for first in Success(2) ... for second in Success(3) ... ) >>> >>> assert Result.do(expr) == Success(5) It raises: .. code:: Literal generator expression is required, not a variable or function call This happens, because of mypy's plugin API. We need the whole expression to make sure it is correct. We cannot use variables and function calls in its place. Further reading --------------- - `Do notation in Haskell `_ returns-0.24.0/docs/pages/functions.rst000066400000000000000000000074341472312074000201000ustar00rootroot00000000000000Helper functions ================ We feature several helper functions to make your developer experience better. compose ------- We also ship an utility function to compose two different functions together. .. code:: python >>> from returns.functions import compose >>> bool_after_int = compose(int, bool) >>> assert bool_after_int('1') is True >>> assert bool_after_int('0') is False Composition is also type-safe. The only limitation is that we only support functions with one argument and one return to be composed. Only works with regular functions (not async). .. warning:: ``compose`` might fail to infer types for some functions. There are several problems: ``lambda`` and generic functions. In these cases ``mypy`` will fail to infer the types of the resulting function. In this case, use :func:`pipe ` it does the same thing, but has pretty good type inference. Or use manual annotations with ``Callable`` type. identity -------- We also ship :func:`returns.functions.identity` function to help you with the composition. Identity function is a simple concept: it just returns its argument. If you wonder why do we need this function, please read below: - `Practical Usage of Identity Function `_ (JS) - `Using Identity Functions `_ (Scala) tap and untap ------------- We need ``tap()`` function to easily compose values with functions that does not return. For example you sometimes need to ``print()`` values inside your :ref:`pipe`: .. code:: python >>> from returns.functions import tap >>> result = tap(print)(1) # will print and return 1 1 >>> assert result == 1 You can also use the ``untap`` function to turn any function's return type to ``None`` and still do its thing: .. code:: python >>> from returns.functions import tap, untap >>> result = untap(tap(print))(1) # will print and return None 1 >>> assert result is None This is also sometimes helpful for a typed function composition. raise_exception --------------- Sometimes you really want to reraise an exception from ``Failure[Exception]`` due to some existing API (or a dirty hack). We allow you to do that with ease! .. code:: python from returns.functions import raise_exception def create_account_and_user(username: str) -> ...: """ Creates new Account-User pair. Imagine, that you need to reraise ValidationErrors due to existing API. """ return _validate_user( username, ).alt( # What happens here is interesting, since you do not let your # unwrap to fail with UnwrapFailedError, but instead # allows you to reraise a wrapped exception. 
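# (Under the hood, `raise_exception` simply takes the wrapped exception and raises it.)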
# In this case `ValidationError()` will be thrown # before `UnwrapFailedError` raise_exception, ) def _validate_user(username: str) -> Result['User', ValidationError]: ... Use this with caution. We try to remove exceptions from our code base. Original proposal is `here `_. not\_ ----- With the ``not_`` helper function we can easily negate what a function returns. It supports functions with one or more arguments. .. code:: python >>> from typing import List >>> from returns.functions import compose, not_ >>> def is_even(number: int) -> bool: ... return number % 2 == 0 >>> def number_is_in_list(number: int, list_: List[int]) -> bool: ... return number in list_ >>> assert not_(is_even)(2) is False >>> assert not_(number_is_in_list)(1, [2, 3, 4]) is True >>> assert compose(int, not_(is_even))("1") is True API Reference ------------- .. automodule:: returns.functions :members: returns-0.24.0/docs/pages/future.rst000066400000000000000000000161561472312074000174030ustar00rootroot00000000000000Future ====== A set of primitives to work with ``async`` functions. Can be used with ``asyncio``, ``trio``, and ``curio``. And any event loop! Tested with `anyio `_. What problems do we solve with these containers? Basically these ones: 1. You cannot call an async function from a sync one 2. Any unexpectedly thrown exception can ruin your whole event loop 3. Ugly composition with lots of `await` statements Future container ---------------- Without the ``Future`` container it is impossible to compose sync and async functions. You simply cannot ``await`` coroutines inside a sync context. It is even a ``SyntaxError``. .. code:: python def test(): await some() # SyntaxError: 'await' outside async function So, you have to turn your function into an async one. And all callers of this function into async functions. And all their callers too. This is really hard to model. When your code has two types of uncomposable functions, you increase your mental complexity to extreme levels. Instead, you can use the ``Future`` container; it allows you to model async interactions in a sync manner: .. code:: pycon >>> from returns.future import Future >>> async def first() -> int: ... return 1 >>> async def second(arg: int) -> int: ... return arg + 1 >>> def main() -> Future[int]: # sync function! ... return Future(first()).bind_awaitable(second) Now we can compose async functions while maintaining a sync context! It is also possible to run a ``Future`` with regular tools like ``asyncio.run`` or ``anyio.run``: .. code:: python >>> import anyio >>> from returns.io import IO >>> assert anyio.run(main().awaitable) == IO(2) One more very useful thing ``Future`` does behind the scenes is converting its result to ``IO``-based containers. This helps a lot when separating pure and impure (async functions are impure) code inside your app. FutureResult ------------ This container becomes very useful when working with an ``async`` function that can fail. It works in a similar way to the regular ``Result``. And it is literally a wrapper around the ``Future[Result[_V, _E]]`` type. Let's see how it can be used in a real program: .. literalinclude:: ../../tests/test_examples/test_future/test_future_result.py :linenos: What is different? 1. We can now easily make ``show_titles`` sync, we can also make ``_fetch_post`` sync, but we would need to use the ``ReaderFutureResult`` container with proper dependencies in this case 2. We now don't care about errors at all. In this example any error will cancel the whole pipeline 3. We now have the ``.map`` method to easily compose sync and async functions (see the short sketch after this list)
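As an illustration of that last point, here is a minimal sketch (the ``fetch_number`` coroutine is made up for this paragraph and is not part of the example above) of how ``.map`` lets a pure sync function post-process an async result:

.. code:: python

    >>> import anyio
    >>> from returns.future import future_safe
    >>> from returns.io import IOSuccess

    >>> @future_safe
    ... async def fetch_number() -> int:
    ...     return 41  # imagine a real async call here

    >>> # `.map` applies a plain sync function to the future success value:
    >>> assert anyio.run(fetch_number().map(lambda n: n + 1).awaitable) == IOSuccess(42)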
You can see the next example with :ref:`RequiresContextFutureResult ` and without a single ``async/await``. That example illustrates the whole point of our actions: writing sync code that executes asynchronously without any magic at all. Decorators ---------- future ~~~~~~ This decorator helps to easily transform ``async def`` into ``Future``: .. code:: python >>> import anyio >>> from returns.future import future, Future >>> from returns.io import IO >>> @future ... async def test(arg: int) -> float: ... return arg / 2 >>> future_instance = test(1) >>> assert isinstance(future_instance, Future) >>> assert anyio.run(future_instance.awaitable) == IO(0.5) Make sure that you decorate with ``@future`` only coroutines that do not throw exceptions. For ones that do, use ``future_safe``. future_safe ~~~~~~~~~~~ This decorator converts ``async def`` into ``FutureResult``, which means that it becomes: 1. A full-featured ``Future``-like container 2. Safe from any exceptions Let's dig into it: .. code:: pycon >>> import anyio >>> from returns.future import future_safe, FutureResult >>> from returns.io import IOSuccess, IOFailure >>> @future_safe ... async def test(arg: int) -> float: ... return 1 / arg >>> future_instance = test(2) >>> assert isinstance(future_instance, FutureResult) >>> assert anyio.run(future_instance.awaitable) == IOSuccess(0.5) >>> str(anyio.run(test(0).awaitable)) # this will fail '<IOResult: <Failure: division by zero>>' Never miss exceptions ever again! asyncify ~~~~~~~~ A helper decorator to transform regular sync functions into ``async`` ones. .. code:: python >>> import anyio >>> from inspect import iscoroutinefunction >>> from returns.future import asyncify >>> @asyncify ... def your_function(x: int) -> int: ... return x + 1 >>> assert iscoroutinefunction(your_function) is True >>> assert anyio.run(your_function, 1) == 2 Very important note: making your function ``async`` does not mean it will work asynchronously. It can still block if it uses blocking calls. Here's an example of what you **must not** do: .. code:: python import requests from returns.future import asyncify @asyncify def please_do_not_do_that(): return requests.get('...') # this will still block! Do not overuse this decorator. It is only useful for some basic composition with ``Future`` and ``FutureResult``. FAQ --- Is it somehow related to Future object from asyncio? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Nope, we just use the same naming as in other languages and platforms. Python happens to have its own meaning for this word. In our worldview, these two ``Future`` entities should never meet each other in a single codebase. It is also not related to `concurrent.Future `_. How to create unit objects?
~~~~~~~~~~~~~~~~~~~~~~~~~~~ For ``Future`` container: - ``from_value`` when you have a raw value - ``from_io`` when you have existing ``IO`` container - ``from_future_result`` when you have existing ``FutureResult`` For ``FutureResult`` container: - ``from_value`` when you want to mark some raw value as a ``Success`` - ``from_failure`` when you want to mark some raw value as a ``Failure`` - ``from_result`` when you already have ``Result`` container - ``from_io`` when you have successful ``IO`` - ``from_failed_io`` when you have failed ``IO`` - ``from_future`` when you have successful ``Future`` - ``from_failed_future`` when you have failed ``Future`` - ``from_typecast`` when you have existing ``Future[Result]`` What is the difference between Future[Result[a, b]] and FutureResult[a, b]? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ There's almost none. The only difference is that ``FutureResult[a, b]`` is a handy wrapper around ``Future[Result[a, b]]``, so you won't need to use methods like ``.map`` and ``.bind`` twice. You can always convert it with methods like ``.from_typecast`` and ``.from_future_result``. Further reading --------------- - `How Async Should Have Been `_ - `What Color is Your Function? `_ - `From Promises to Futures `_ API Reference ------------- .. autoclasstree:: returns.future :strict: .. automodule:: returns.future :members: returns-0.24.0/docs/pages/hkt.rst000066400000000000000000000214561472312074000166560ustar00rootroot00000000000000.. _hkt: Higher Kinded Types =================== Higher Kinded Types is a new concept for Python developers. But, it is totally not new in general! So, let's start with the detailed explanation: what Higher Kinded Types are? Regular types ------------- We can start with the very basic example. Let's say we have a function that transforms type ``A`` into a type ``B``. These types ``A`` and ``B`` can be some specific ones, for example: .. code:: python >>> def from_a_to_b(arg: int) -> str: ... return str(arg) >>> assert from_a_to_b(1) == '1' That's what we already know and use. Let's scale things up! Generics -------- The next thing we can do with types is to write generic types. What are generic types? Basically, they are some types that contain other types inside. Like ``List[int]`` is a list of integers: ``[1, 2, 3]``. We know that ``List[int]`` has a shape of a ``list`` and contents of ``int``. We can also write functions that work with generics: .. code:: python >>> from typing import List >>> def all_to_str(arg: List[int]) -> List[str]: ... return [str(item) for item in arg] >>> assert all_to_str([1, 2]) == ['1', '2'] There's one more thing about generics we want to notice at this point. Different generics do have different numbers of type arguments: - ``List`` has a single type argument: ``List[Value]`` or ``Maybe[Value]`` - ``Dict`` has two type arguments: ``Dict[Key, Value]`` or ``Result[Value, Error]`` - ``Generator`` has three type arguments: ``Generator[Yield, Send, Return]`` or ``RequiresContextResult[Value, Error, Env]`` That's what we call a kind. So, ``List`` and ``Maybe`` have a kind of ``1``, ``Dict`` and ``Result`` have kind of ``2``, ``Generator`` and ``RequiresContextResult`` have a kind of ``3``. So, let's go one level further. Operations on generics ---------------------- Let's say you have a function that copies all values of a passed argument. We can define this function as: .. 
code:: python >>> from typing import TypeVar >>> ValueType = TypeVar('ValueType') >>> def copy(arg: ValueType) -> ValueType: ... ... This function can work with any type. It receives something and then returns the same value back. That's the whole point of copying! But, there are different functions, that do different things with types. For example, we can write a function that converts a value inside any :class:`Container1 ` (a base class for all our containers) from ``int`` to ``str``: We can also write functions that work with generics: .. code:: python >>> from returns.interfaces.container import Container1 >>> def to_str(container: Container1[int]) -> Container1[str]: ... return container.map(str) And here's how it can be used: .. code:: python >>> from returns.maybe import Maybe >>> from returns.io import IO >>> assert to_str(Maybe.from_value(1)) == Maybe.from_value('1') >>> assert to_str(IO.from_value(1)) == IO.from_value('1') It works just fine! But! It has a very important thing inside. All calls to ``to_str`` will return ``Container1`` type, not something specific: .. code:: python reveal_type(to_str(Maybe.from_value(1))) # Container1[str] reveal_type(to_str(IO.from_value(1))) # Container1[str] But, we know that this is not true. When we pass a ``Maybe`` in - we get the ``Maybe`` back. When we pass a ``IO`` in - we get the ``IO`` back. How can we fix this problem? With ``@overload``! .. code:: python >>> from typing import overload >>> from returns.maybe import Maybe >>> from returns.io import IO >>> @overload ... def to_str(arg: Maybe[int]) -> Maybe[str]: ... ... >>> @overload ... def to_str(arg: IO[int]) -> IO[str]: ... ... We kinda fixed it! Now, our calls will reveal the correct types for these three examples: .. code:: python reveal_type(to_str(Maybe.from_value(1))) # Maybe[str] reveal_type(to_str(IO.from_value(1))) # IO[str] But, there's an important limitation with this solution: no other types are allowed in this function anymore. So, you will try to use it with any other type, it won't be possible. Current limitations ------------------- To overcome current ``@overload`` decorators limitations, we can imagine a syntax like this: .. code:: python from typing import TypeVar from returns.interfaces.container import Container1 T = TypeVar('T', bound=Container1) def all_to_str(arg: T[int]) -> T[str]: ... Sadly, this does not work. Because ``TypeVar`` cannot be used with ``[]``. We have to find some other way. Higher Kinded Types ------------------- So, that's where ``returns`` saves the day! .. note:: Technical note: this feature requires :ref:`mypy plugin `. The main idea is that we can rewrite ``T[int]`` as ``Kind1[T, int]``. Let's see how it works: .. code:: python >>> from returns.primitives.hkt import Kind1 >>> from returns.interfaces.container import ContainerN >>> from typing import TypeVar >>> T = TypeVar('T', bound=ContainerN) >>> def to_str(container: Kind1[T, int]) -> Kind1[T, str]: ... return container.map(str) Now, this will work almost correctly! Why almost? Because the revealed type will be ``Kind1``. .. code:: python reveal_type(to_str(Maybe.from_value(1))) # Kind1[Maybe, str] reveal_type(to_str(IO.from_value(1))) # Kind1[IO, str] That's not something we want. We don't need ``Kind1``, we need real ``Maybe`` or ``IO`` values. The final solution is to decorate ``to_str`` with ``@kinded``: .. code:: python >>> from returns.primitives.hkt import kinded >>> @kinded ... def to_str(container: Kind1[T, int]) -> Kind1[T, str]: ... 
return container.map(str) Now, it will be fully working: .. code:: python reveal_type(to_str(Maybe.from_value(1))) # Maybe[str] reveal_type(to_str(IO.from_value(1))) # IO[str] And the thing about this approach is that it will be: 1. Fully type-safe. It works with correct interface ``ContainerN``, returns the correct type, has correct type transformation 2. Is opened for further extension and even custom types Kinds ----- As it was said ``Maybe[int]``, ``Result[str, int]``, and ``RequiresContextResult[str, int, bool]`` are different in terms of a number of type arguments. We support different kinds: - ``Kind1[Maybe, int]`` is similar to ``Maybe[int]`` - ``Kind2[Result, str, int]`` is similar to ``Result[str, int]`` - ``Kind3[RequiresContextResult, str, int, bool]`` is similar to ``RequiresContextResult[str, int, bool]`` You can use any of them freely. Later you will learn how to :ref:`create your own types ` that support kinds! Further reading --------------- - `Higher Kinded Types in Python `_ FAQ --- Which types you can use with KindN? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The first position in all ``KindN`` types can be occupied by either ``Instance`` type or ``TypeVar`` with ``bound=``. Let's see an example: .. code:: python >>> from typing import TypeVar >>> from returns.primitives.hkt import KindN, kinded >>> from returns.interfaces.mappable import MappableN >>> _FirstType = TypeVar('_FirstType') >>> _SecondType = TypeVar('_SecondType') >>> _ThirdType = TypeVar('_ThirdType') >>> _MappableKind = TypeVar('_MappableKind', bound=MappableN) >>> @kinded ... def works_with_interface( ... container: KindN[_MappableKind, _FirstType, _SecondType, _ThirdType], ... ) -> KindN[_MappableKind, str, _SecondType, _ThirdType]: ... return container.map(str) This version of ``works_with_interface`` will work with any subtype of ``MappableN``. Because we use ``_MappableKind`` in its definition. And ``_MappableKind`` is a ``TypeVar`` bound to ``MappableN``. Arguments of non ``MappableN`` subtypes will be rejected by a type-checker: .. code:: python >>> from returns.maybe import Maybe >>> from returns.io import IO >>> from returns.result import Success >>> assert works_with_interface(Maybe.from_value(1)) == Maybe.from_value('1') >>> assert works_with_interface(IO.from_value(1)) == IO.from_value('1') >>> assert works_with_interface(Success(1)) == Success('1') In contrast, we can work directly with some specific type, let's say ``Maybe`` container: .. code:: python >>> from returns.maybe import Maybe >>> @kinded ... def works_with_maybe( ... container: KindN[Maybe, _FirstType, _SecondType, _ThirdType], ... ) -> KindN[Maybe, str, _SecondType, _ThirdType]: ... return container.map(str) >>> assert works_with_maybe(Maybe.from_value(1)) == Maybe.from_value('1') Function ``works_with_maybe`` will work correctly with ``Maybe`` instance. Other types will be rejected. So, choose wisely which mechanism you need. API Reference ------------- .. autoclasstree:: returns.primitives.hkt :strict: .. automodule:: returns.primitives.hkt :members: returns-0.24.0/docs/pages/interfaces.rst000066400000000000000000000614361472312074000202150ustar00rootroot00000000000000.. _interfaces: Interfaces ========== We provide a lot of generic interfaces to write our bundled and your own custom types. These interfaces are designed: 1. To be subclassed 2. To provide abstract methods to implement in your own types 3. To enforce correctness on final types 4. 
To attach critical laws to be checked We use :ref:`Higher Kinded Types ` to define abstract methods. Reading about interfaces will be the most useful if you plan to :ref:`create your own containers `. General information ------------------- All the non-specific interfaces (e.g. MappableN, BindableN, ApplicativeN) can have **Nth** types, at the maximum of three possible types. What does this mean? :class:`~returns.interfaces.mappable.MappableN` interface, for example, can have one, two or three possible types. See the example below: .. code:: python >>> from typing_extensions import Never >>> from returns.interfaces.mappable import ( ... MappableN, Mappable1, Mappable2, Mappable3, ... ) >>> one_type: MappableN[int, Never, Never] >>> two_types: MappableN[int, str, Never] >>> three_types: MappableN[int, str, bool] >>> # We have a shortcut for each amount of arguments to reduce the boilerplate >>> one_type: Mappable1[int] >>> two_types: Mappable2[int, str] >>> three_type: Mappable3[int, str, bool] .. note:: Useful links before you start here: * `Functors, Applicatives, And Monads In Pictures `_ * `Understanding Functor and Monad With a Bag of Peanuts `_ * `Variance of generic types `_ * `If you know map, I will teach you monads `_ Naming convention ~~~~~~~~~~~~~~~~~ We follow a very specific naming convention in our interface names. If interface does not depend on the number of types it works with and is always the same, we name it as is. For example, ``Equable`` is always the same and does not depend on the number of type arguments. We use adjectives to name these interfaces. Secondly, if interface depends on the number of type arguments, it is named with ``N`` suffix in the end. It would always have numeric aliases for each number of arguments supported. For example, ``MappableN``, ``Mappable1``, ``Mappable2``, and ``Mappable3``. The last criteria we have to decided on naming is "whether this interface always the same or it can have slight variations"? That's why we have ``ResultLikeN`` and ``ResultBasedN`` interfaces. Because ``ResultBasedN`` has two extra methods compared to ``ResultLikeN``. We use ``Like`` suffix for interfaces that describes some similar types. We use ``Based`` suffix for interfaces that describe almost concrete types. Laws ~~~~ Some interfaces define its laws as values. These laws can be viewed as tests that are attached to the specific interface. We are able to check them of any type that implements a given interfaces with laws by our own :ref:`check_all_laws hypothesis plugin `. In this docs we are going to describe each general interface and its laws. Mappable -------- .. currentmodule:: returns.interfaces.mappable Something is considered mappable if we can ``map`` it using a function, generally ``map`` is a method that accepts a function. An example in this library is :class:`~returns.maybe.Maybe`, that implements the ``Mappable`` interface: .. code:: python >>> from returns.maybe import Maybe, Some >>> def can_be_mapped(string: str) -> str: ... return string + '!' >>> maybe_str: Maybe[str] = Some('example') >>> assert maybe_str.map(can_be_mapped) == Some('example!') :class:`~MappableN` interface helps us to create our own mappable container like :class:`~returns.maybe.Maybe`. .. 
code:: python >>> from typing import Callable, TypeVar >>> from returns.interfaces.mappable import Mappable1 >>> from returns.primitives.hkt import SupportsKind1 >>> from returns.primitives.container import BaseContainer >>> _NumberType = TypeVar('_NumberType') >>> _NewNumberType = TypeVar('_NewNumberType') >>> class Number( ... BaseContainer, ... SupportsKind1['Number', _NumberType], ... Mappable1[_NumberType], ... ): ... def __init__(self, inner_value: _NumberType) -> None: ... super().__init__(inner_value) ... ... def map( # This method is required by Mappable ... self, ... function: Callable[[_NumberType], _NewNumberType] ... ) -> 'Number[_NewNumberType]': ... return Number(function(self._inner_value)) With our ``Number`` mappable class we can compose easily math functions with it. .. code:: python >>> def my_math_function(number: int) -> int: ... return number - 1 >>> number: Number[int] = Number(-41) >>> assert number.map(my_math_function).map(abs) == Number(42) Laws ~~~~ To make sure your ``Mappable`` implementation is right, you can apply the ``Mappable`` laws on it to test. 1. :func:`Identity Law <_LawSpec.identity_law>`: When we pass the identity function to the ``map`` method, the ``Mappable`` instance has to be the same, unchanged. .. code:: python >>> from returns.functions import identity >>> mappable_number: Number[int] = Number(1) >>> assert mappable_number.map(identity) == Number(1) 2. :func:`Associative Law <_LawSpec.associative_law>`: Given two functions, ``x`` and ``y``, calling the map method with ``x`` function and after that calling with ``y`` function must have the same result if we compose them together. .. code:: python >>> from returns.functions import compose >>> def add_one(number: int) -> int: ... return number + 1 >>> def multiply_by_ten(number: int) -> int: ... return number * 10 >>> mappable_number: Number[int] = Number(9) >>> assert mappable_number.map( ... add_one, ... ).map( ... multiply_by_ten, ... ) == mappable_number.map( ... compose(add_one, multiply_by_ten), ... ) Bindable -------- .. currentmodule:: returns.interfaces.bindable Bindable is something that we can bind with a function. Like :class:`~returns.maybe.Maybe`, so :class:`~BindableN` interface will help us to create our custom bindable. .. code:: python >>> from typing import Callable, TypeVar >>> from returns.interfaces.bindable import Bindable1 >>> from returns.primitives.hkt import SupportsKind1, Kind1, dekind >>> from returns.primitives.container import BaseContainer >>> _NumberType = TypeVar('_NumberType') >>> _NewNumberType = TypeVar('_NewNumberType') >>> class Number( ... BaseContainer, ... SupportsKind1['Number', _NumberType], ... Bindable1[_NumberType], ... ): ... def __init__(self, inner_value: _NumberType) -> None: ... super().__init__(inner_value) ... ... def bind( # This method is required by Bindable ... self, ... function: Kind1[ ... 'Number', ... Callable[[_NumberType], 'Number[_NewNumberType]'], ... ], ... ) -> 'Number[_NewNumberType]': ... return dekind(function(self._inner_value)) And here's how we can use it: .. code:: python >>> def double(arg: int) -> Number[int]: ... return Number(arg * 2) >>> number = Number(5) >>> assert number.bind(double) == Number(10) Applicative ----------- .. currentmodule:: returns.interfaces.applicative Something is considered applicative if it is a functor already and, moreover, we can ``apply`` another container to it and construct a new value with ``.from_value`` method. 
An example in this library is :class:`~returns.maybe.Maybe`, that implements the ``Mappable`` and ``Applicative`` interfaces: .. code:: python >>> from returns.maybe import Maybe, Some >>> maybe_str = Maybe.from_value('example') >>> maybe_func = Maybe.from_value(len) # we use function as a value! >>> assert maybe_str.apply(maybe_func) == Some(7) As you see, ``apply`` takes a container with a function inside and applies it to the current value inside the container. This way we really execute ``Maybe.from_value(len('example'))``. :class:`~ApplicativeN` which is a subtype of :class:`~returns.interfaces.mappable.MappableN` interface helps us to create our own applicative container like :class:`~returns.maybe.Maybe`. .. code:: python >>> from typing import Callable, TypeVar >>> from returns.interfaces.applicative import Applicative1 >>> from returns.primitives.hkt import SupportsKind1, Kind1, dekind >>> from returns.primitives.container import BaseContainer >>> _NumberType = TypeVar('_NumberType') >>> _NewNumberType = TypeVar('_NewNumberType') >>> class Number( ... BaseContainer, ... SupportsKind1['Number', _NumberType], ... Applicative1[_NumberType], ... ): ... def __init__(self, inner_value: _NumberType) -> None: ... super().__init__(inner_value) ... ... def map( # This method is required by Mappable ... self, ... function: Callable[[_NumberType], _NewNumberType] ... ) -> 'Number[_NewNumberType]': ... return Number(function(self._inner_value)) ... ... def apply( # This method is required by Applicative ... self, ... container: Kind1[ ... 'Number', ... Callable[[_NumberType], _NewNumberType], ... ], ... ) -> 'Number[_NewNumberType]': ... return Number.from_value( ... dekind(container._inner_value(self._inner_value)), ... ) ... ... @classmethod ... def from_value( # This method is required by Applicative ... cls, ... inner_value: _NewNumberType, ... ) -> 'Number[_NewNumberType]': ... return Number(inner_value) With our ``Number`` mappable class we can compose easily math functions with it. .. code:: python >>> def my_math_function(number: int) -> int: ... return number - 1 >>> number = Number(3) >>> number_function = Number.from_value(my_math_function) >>> assert number.apply(number_function) == Number(2) Laws ~~~~ To make sure your ``Applicative`` implementation is right, you can apply the ``Applicative`` laws on it to test. 1. :func:`Identity Law <_LawSpec.identity_law>`: When we pass an applicative instance with wrapped identity function to the ``apply`` method, the ``Applicative`` has to be the same, unchanged. .. code:: python >>> from returns.functions import identity >>> applicative_number: Number[int] = Number(1) >>> assert applicative_number.apply( ... applicative_number.from_value(identity), ... ) == Number(1) 2. :func:`Interchange Law <_LawSpec.interchange_law>`: We can start our composition with both raw value and a function. .. code:: python >>> def function(arg: int) -> int: ... return arg + 1 >>> raw_value = 5 >>> assert Number.from_value(raw_value).apply( ... Number.from_value(function), ... ) == Number.from_value(function).apply( ... Number.from_value(lambda inner: inner(raw_value)), ... ) 3. :func:`Homomorphism Law <_LawSpec.homomorphism_law>`: The homomorphism law says that applying a wrapped function to a wrapped value is the same as applying the function to the value in the normal way and then using ``.from_value`` on the result. .. code:: python >>> def function(arg: int) -> int: ... return arg + 1 >>> raw_value = 5 >>> assert Number.from_value( ... 
function(raw_value), ... ) == Number.from_value(raw_value).apply( ... Number.from_value(function), ... ) 4. :func:`Composition Law <_LawSpec.composition_law>`: Applying two functions twice is the same as applying their composition once. .. code:: python >>> from returns.functions import compose >>> def first(arg: int) -> int: ... return arg * 2 >>> def second(arg: int) -> int: ... return arg + 1 >>> instance = Number(5) >>> assert instance.apply( ... Number.from_value(compose(first, second)), ... ) == instance.apply( ... Number.from_value(first), ... ).apply( ... Number.from_value(second), ... ) Plus all laws from ``MappableN`` interface. Container --------- .. currentmodule:: returns.interfaces.container :class:`~ContainerN` is a central piece of our library. It is an interface that combines :class:`~returns.interfaces.applicative.ApplicativeN` and :class:`~returns.interfaces.bindable.BindableN` together. So, in other words: ``Container`` is an ``Apllicative`` that you can ``bind``! .. code:: python >>> from typing import Callable, TypeVar >>> from returns.interfaces.container import Container1 >>> from returns.primitives.hkt import SupportsKind1, Kind1, dekind >>> from returns.primitives.container import BaseContainer >>> _NumberType = TypeVar('_NumberType') >>> _NewNumberType = TypeVar('_NewNumberType') >>> class Number( ... BaseContainer, ... SupportsKind1['Number', _NumberType], ... Container1[_NumberType], ... ): ... def __init__(self, inner_value: _NumberType) -> None: ... super().__init__(inner_value) ... ... def map( # This method is required by Mappable ... self, ... function: Callable[[_NumberType], _NewNumberType] ... ) -> 'Number[_NewNumberType]': ... return Number(function(self._inner_value)) ... ... def bind( # This method is required by Bindable ... self, ... function: Kind1[ ... 'Number', ... Callable[[_NumberType], 'Number[_NewNumberType]'], ... ], ... ) -> 'Number[_NewNumberType]': ... return dekind(function(self._inner_value)) ... ... def apply( # This method is required by Applicative ... self, ... container: Kind1[ ... 'Number', ... Callable[[_NumberType], _NewNumberType], ... ], ... ) -> 'Number[_NewNumberType]': ... return Number.from_value( ... container._inner_value(self._inner_value), ... ) ... ... @classmethod ... def from_value( # This method is required by Applicative ... cls, ... inner_value: _NewNumberType, ... ) -> 'Number[_NewNumberType]': ... return Number(inner_value) This code gives us an opportunity to use ``Number`` with ``map``, ``apply``, and ``bind`` as we already did in the examples above. Laws ~~~~ To make sure other people will be able to use your implementation, it should respect three new laws. 1. :func:`Left Identity <_LawSpec.left_identity_law>`: If we ``bind`` a function to our bindable must have to be the same result as passing the value directly to the function. .. code:: python >>> def can_be_bound(value: int) -> Number[int]: ... return Number(value) >>> assert Number.from_value(5).bind(can_be_bound) == can_be_bound(5) 2. :func:`Right Identity <_LawSpec.right_identity_law>`: If we pass the bindable constructor through ``bind`` must have to be the same result as instantiating the bindable on our own. .. code:: python >>> number = Number(2) >>> assert number.bind(Number) == Number(2) 3. 
:func:`Associative Law <_LawSpec.associative_law>`: Given two functions, ``x`` and ``y``, calling the bind method with ``x`` function and after that calling with ``y`` function must have the same result if we bind with a function that passes the value to ``x`` and then bind the result with ``y``. .. code:: python >>> def minus_one(arg: int) -> Number[int]: ... return Number(arg - 1) >>> def half(arg: int) -> Number[int]: ... return Number(arg // 2) >>> number = Number(9) >>> assert number.bind(minus_one).bind(half) == number.bind( ... lambda value: minus_one(value).bind(half), ... ) Plus all laws from ``MappableN`` and ``ApplicativeN`` interfaces. More! ----- We have way more interfaces with different features! We have covered all of them in the technical docs. So, use them to enforce type-safety of your own containers. Specific interfaces ~~~~~~~~~~~~~~~~~~~ We also have a whole package of different specific interfaces that will help you to create containers based on our internal types, like ``Result``. FAQ --- Why do you have general and specific interfaces? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We have ``.interfaces.*`` types that can be applied to any possible type. There's nothing they know about other types or ``returns`` package. We also have a special ``.interfaces.specific`` package where we have types that know about other types in ``returns``. For example, ``MappableN`` from ``.interfaces`` only knows about ``.map`` method. It does not require anything else. But, ``ResultLikeN`` from ``.interfaces.specific.result`` does require to have ``.bind_result`` method which relies on our :class:`~returns.result.Result` type. That's the only difference. Build your own types with any of those interfaces. Why some interfaces do not have type alias for 1 or 2 type arguments? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Some types like :class:`~returns.interfaces.specific.result.ResultLikeN` do not have type aliases for one type argument in a form of ``ResultLike1``. Why does ``Mappable1`` exists and ``ResultLike1`` does not? Because ``Mappable1`` does make sense. But, ``ResultLike1`` requires at least two (value and error) types to exist. The same applies for ``ReaderLike1`` and ``ReaderResultLike1`` and ``ReaderResultLike2``. We don't support type aliases for types that won't make sense. What's the difference between ``MappableN`` and ``BindableN``? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ While ``MappableN`` you have to pass a pure function, like: .. code:: python >>> def can_be_mapped(string: str) -> str: ... return string with Bindable we have to pass a function that returns another container: .. code:: python >>> from returns.maybe import Maybe >>> def can_be_bound(string: str) -> Maybe[str]: ... return Some(string + '!') The main difference is the return type. The consequence of this is big! ``BindableN`` allows to change the container type. While ``MappableN`` cannot do that. So, ``Some.bind(function)`` can be evaluated to both ``Some`` and ``Nothing``. While ``Some.map(function)`` will always stay as ``Some``. What is the difference between ResultLikeN and ResultBasedN? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ``ResultLikeN`` is just an intention of having a result (e.g. :class:`~returns.future.FutureResult`), it's not the result yet. While ``ResultBasedN`` is a concrete result (e.g. :class:`~returns.io.IOResult`), it has the desired result value. 
Because of this difference between them is why we can't unwrap a ``ResultLikeN`` container, it does not have the real result yet. See the example below using ``FutureResult`` to get a ``IOResult``: .. code:: python >>> import anyio >>> from returns.future import FutureResult >>> from returns.interfaces.specific.future_result import FutureResultBasedN >>> from returns.interfaces.specific.ioresult import ( ... IOResultBasedN, ... IOResultLikeN, ... ) >>> from returns.interfaces.specific.result import ResultLikeN, ResultBasedN >>> from returns.io import IOSuccess, IOResult >>> from returns.result import Success, Result >>> async def coro(arg: int) -> Result[int, str]: ... return Success(arg + 1) >>> # `result_like` does not have the result we want (Result[int, str]) >>> # it's just the intention of having one, >>> # we have to await it to get the real result >>> result_like: FutureResult[int, str] = FutureResult(coro(1)) >>> assert isinstance(result_like, FutureResultBasedN) >>> assert isinstance(result_like, IOResultLikeN) >>> assert isinstance(result_like, ResultLikeN) >>> # `anyio.run(...)` will await our coroutine and give the real result to us >>> result: IOResult[int, str] = anyio.run(result_like.awaitable) >>> assert isinstance(result, IOResultBasedN) >>> assert isinstance(result, ResultLikeN) >>> # Compare it with the real result: >>> assert isinstance(Success(1), ResultBasedN) .. note:: The same difference applies to all ``*ResultLikeN`` vs ``*ResultBasedN`` (e.g. :class:`~returns.interfaces.specific.ioresult.IOResultLikeN` and :class:`~returns.interfaces.specific.ioresult.IOResultBasedN`) API Reference ------------- Overview ~~~~~~~~ Here's a full overview of all our interfaces: .. autoclasstree:: returns.interfaces.mappable returns.interfaces.bindable returns.interfaces.applicative returns.interfaces.lashable returns.interfaces.altable returns.interfaces.bimappable returns.interfaces.unwrappable returns.interfaces.container returns.interfaces.failable returns.interfaces.specific.maybe returns.interfaces.specific.result returns.interfaces.specific.io returns.interfaces.specific.ioresult returns.interfaces.specific.future returns.interfaces.specific.future_result returns.interfaces.specific.reader returns.interfaces.specific.reader_result returns.interfaces.specific.reader_ioresult returns.interfaces.specific.reader_future_result :strict: Let's review it one by one. Equable ~~~~~~~ .. autoclasstree:: returns.interfaces.equable :strict: .. automodule:: returns.interfaces.equable :members: :private-members: Mappable ~~~~~~~~ .. autoclasstree:: returns.interfaces.mappable :strict: .. automodule:: returns.interfaces.mappable :members: :private-members: Bindable ~~~~~~~~ .. autoclasstree:: returns.interfaces.bindable :strict: .. automodule:: returns.interfaces.bindable :members: :private-members: Applicative ~~~~~~~~~~~ .. autoclasstree:: returns.interfaces.applicative :strict: .. automodule:: returns.interfaces.applicative :members: :private-members: Altable ~~~~~~~ .. autoclasstree:: returns.interfaces.altable :strict: .. automodule:: returns.interfaces.altable :members: :private-members: BiMappable ~~~~~~~~~~ .. autoclasstree:: returns.interfaces.bimappable :strict: .. automodule:: returns.interfaces.bimappable :members: :private-members: Swappable ~~~~~~~~~ .. autoclasstree:: returns.interfaces.swappable :strict: .. automodule:: returns.interfaces.swappable :members: :private-members: Lashable ~~~~~~~~ .. autoclasstree:: returns.interfaces.lashable :strict: .. 
automodule:: returns.interfaces.lashable :members: :private-members: Unwrappable ~~~~~~~~~~~ .. autoclasstree:: returns.interfaces.unwrappable :strict: .. automodule:: returns.interfaces.unwrappable :members: :private-members: Container ~~~~~~~~~ .. autoclasstree:: returns.interfaces.container :strict: .. automodule:: returns.interfaces.container :members: :private-members: Failable ~~~~~~~~ .. autoclasstree:: returns.interfaces.failable :strict: .. automodule:: returns.interfaces.failable :members: :private-members: Maybe specific ~~~~~~~~~~~~~~ .. autoclasstree:: returns.interfaces.specific.maybe :strict: .. automodule:: returns.interfaces.specific.maybe :members: :private-members: Result specific ~~~~~~~~~~~~~~~ .. autoclasstree:: returns.interfaces.specific.result :strict: .. automodule:: returns.interfaces.specific.result :members: :private-members: IO specific ~~~~~~~~~~~ .. autoclasstree:: returns.interfaces.specific.io :strict: .. automodule:: returns.interfaces.specific.io :members: :private-members: IOResult specific ~~~~~~~~~~~~~~~~~ .. autoclasstree:: returns.interfaces.specific.ioresult :strict: .. automodule:: returns.interfaces.specific.ioresult :members: :private-members: Future specific ~~~~~~~~~~~~~~~ .. autoclasstree:: returns.interfaces.specific.future :strict: .. automodule:: returns.interfaces.specific.future :members: :private-members: FutureResult specific ~~~~~~~~~~~~~~~~~~~~~ .. autoclasstree:: returns.interfaces.specific.future_result :strict: .. automodule:: returns.interfaces.specific.future_result :members: :private-members: Reader specific ~~~~~~~~~~~~~~~ .. autoclasstree:: returns.interfaces.specific.reader :strict: .. automodule:: returns.interfaces.specific.reader :members: :private-members: ReaderResult specific ~~~~~~~~~~~~~~~~~~~~~ .. autoclasstree:: returns.interfaces.specific.reader_result :strict: .. automodule:: returns.interfaces.specific.reader_result :members: :private-members: ReaderIOResult specific ~~~~~~~~~~~~~~~~~~~~~~~ .. autoclasstree:: returns.interfaces.specific.reader_ioresult :strict: .. automodule:: returns.interfaces.specific.reader_ioresult :members: :private-members: ReaderFutureResult specific ~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. autoclasstree:: returns.interfaces.specific.reader_future_result :strict: .. automodule:: returns.interfaces.specific.reader_future_result :members: :private-members: returns-0.24.0/docs/pages/io.rst000066400000000000000000000317001472312074000164700ustar00rootroot00000000000000IO == Mathematicians dream in pure functions. Each of them only relies on its arguments and always produces the same result for the same input. That's not how a useful program works. We need to rely on the environment and we need to do side effects. Furthermore, there are several types of ``IO`` in our programs: - Some ``IO`` never fails, like: getting current date and time, random number, or OS name - Some ``IO`` might fail, like: sending network requests, accessing filesystem, or database There's a solution. IO container ------------ Once you have an ``IO`` operation you can mark it appropriately. We can use a simple class :class:`returns.io.IO` to mark impure parts of the program that do not fail. .. code:: python >>> import random >>> from returns.io import IO >>> def get_random_number() -> IO[int]: ... return IO(random.randint(1, 10)) ... >>> assert isinstance(get_random_number(), IO) And later we can work inside this ``IO`` context and do not break into our pure part of the program: .. 
code:: python >>> assert get_random_number().map(lambda number: number / number) == IO(1.0) And it infects all other functions that call it. .. code:: python >>> def modify_number(number: int) -> IO[float]: ... return get_random_number().map(lambda rnd: number / rnd) ... >>> assert isinstance(modify_number(1), IO) It is good enough to indicate that you are aware of side effects of the function. IOResult -------- On the other hand, we can have ``IO`` parts of the program that do fail. Imagine we have this beautiful pure function: .. code:: python def can_book_seats( number_of_seats: int, reservation: 'Reservation', ) -> bool: return reservation.capacity >= number_of_seats + reservation.booked What's good about it? We can test it easily. Even without setting up any testing framework, simple `doctests `_ will be enough. This code is **beautiful**, because it is **simple**. We can later use its result to process the result of the booking request: .. code:: python def process_booking_result(is_successful: bool) -> 'ProcessID': ... process_booking_result(is_successful) # works just fine! At this point we don't have ``IO`` in our program. Impure functions ~~~~~~~~~~~~~~~~ But, imagine that our requirements had changed. And now we have to grab the number of already booked tickets from some other microservice and fetch the maximum capacity from the database: .. code:: python import requests import db def can_book_seats( number_of_seats: int, place_id: int, ) -> bool: capacity = db.get_place_capacity(place_id) # sql query booked = requests('https://partner.com/api').json()['booked'] # http req return capacity >= number_of_seats + booked Now testing this code will become a nightmare! It will require to setup: - real database and tables - fixture data - ``requests`` mocks for different outcomes - and the whole Universe! Our complexity has sky-rocketed! And the most annoying part is that all other functions that call ``can_book_seats`` now also have to do the same setup. It seams like ``IO`` is indelible mark (some people also call it "effect"). And at some point it time we will start to mix pure and impure code together. Let's not forget that all of these operations can fail too! Separating two worlds ~~~~~~~~~~~~~~~~~~~~~ Well, ``IO`` mark is indeed indelible and should be respected. And then impurity becomes explicit: .. code:: python import requests import db from returns.io import IOResultE def can_book_seats( number_of_seats: int, place_id: int, ) -> IOResultE[bool]: ... Now this function returns ``IOResultE[bool]`` instead of a regular ``bool``. It means, that it cannot be used where regular ``bool`` can be: .. code:: python def process_booking_result(is_successful: bool) -> 'ProcessID': ... is_successful: IOResultE[bool] = can_book_seats(number_of_seats, place_id) process_booking_result(is_successful) # Boom! # => Argument 1 has incompatible type "IOResultE[bool]"; expected "bool" See? It is now impossible for a pure function to use ``IOResultE[bool]``. It is impossible to unwrap or get a raw value from this container. Once it is marked as ``IO`` it will never return to the pure state (well, there's a hack actually: :func:`unsafe_perform_io `). Now we have to work inside the ``IO`` context: .. code:: python message_id: IOResultE['ProcessID'] = can_book_seats( number_of_seats, place_id, ).map( process_booking_result, ) Or it can be annotated to work with impure results: .. code:: python def process_booking_result( is_successful: IOResultE[bool], ) -> IOResultE['ProcessID']: ... 
is_successful: IOResult[bool] = can_book_seats(number_of_seats, place_id) process_booking_result(is_successful) # Works! Now, all our impurity is explicit. We can track it, we can fight it, we can design it better. By saying that, it is assumed that you have a functional core and imperative shell. Lifting ~~~~~~~ You can also lift regular functions into one that works with ``IO`` or ``IOResult`` on both ends. It really helps you with the composition! .. code:: python >>> from returns.io import IO >>> from returns.pointfree import map_ >>> def regular_function(arg: int) -> float: ... return arg / 2 # not an `IO` operation >>> container = IO(1) >>> # When we need to compose `regular_function` with `IO`, >>> # we have two ways of doing it: >>> io = container.map(regular_function) >>> assert io == IO(0.5) >>> # or, it is the same as: >>> io = map_(regular_function)(container) >>> assert io == IO(0.5) ``IOResult`` can lift both regular functions and ones that return ``Result``: .. code:: python >>> from returns.io import IOResult, IOSuccess >>> from returns.pointfree import map_ >>> def regular_function(arg: int) -> float: ... return arg / 2 # not an `IO` operation >>> container: IOResult[int, str] = IOSuccess(1) >>> # When we need to compose `regular_function` with `IOResult`, >>> # we have two ways of doing it: >>> io = container.map(regular_function) >>> assert io == IOSuccess(0.5) >>> # or, it is the same as: >>> io = map_(regular_function)(container) >>> assert io == IOSuccess(0.5) And ``Result`` based functions: .. code:: python >>> from returns.io import IOResult, IOSuccess >>> from returns.result import Result, Success, Failure >>> from returns.pointfree import bind_result >>> def regular_function(arg: int) -> Result[float, str]: ... if arg > 0: ... return Success(arg / 2) ... return Failure('zero') >>> assert bind_result(regular_function)( ... IOSuccess(1), ... ) == IOResult.from_result(regular_function(1)) Lifting is useful when using :func:`returns.pipeline.pipe` and other different declarative tools. Pattern Matching ---------------- ``IOResult`` values can be matched using the new feature of Python 3.10, `Structural Pattern Matching `_, see the example below: .. literalinclude:: ../../tests/test_examples/test_io/test_ioresult_container/test_ioresult_pattern_matching.py Aliases ------- There are several useful aliases for ``IOResult`` type with some common values: - :attr:`returns.io.IOResultE` is an alias for ``IOResult[... Exception]``, just use it when you want to work with ``IOResult`` containers that use exceptions as error type. It is named ``IOResultE`` because it is ``IOResultException`` and ``IOResultError`` at the same time. Decorators ---------- Limitations ~~~~~~~~~~~ Typing will only work correctly if :ref:`our mypy plugin ` is used. This happens due to `mypy issue `_. impure ~~~~~~ We also have this handy decorator to help you with the existing impure things in Python: .. code:: python from returns.io import impure name: IO[str] = impure(input)('What is your name?') You can also decorate your own functions with ``@impure`` for better readability and clearness: .. code:: python import random from returns.io import impure @impure def get_user() -> 'User': return random.randint(1, 5) impure_safe ~~~~~~~~~~~ Similar to ``impure`` and ``safe`` decorators. Once applied, it transforms the return type to be ``IOResultE``: .. 
code:: python from returns.io import IOResultE, impure_safe @impure_safe def http_get(path: str) -> 'Response': return requests.get(path) container: IOResultE['Response'] = http_get('/home') Use it for impure operations that might fail. Helpers ------- Don't forget to check out :ref:`converters`. .. _unsafe_perform_io: unsafe_perform_io ~~~~~~~~~~~~~~~~~ Sometimes you really need to get the raw value from an ``IO`` container. For example: .. code:: python def index_view(request, user_id): user: IO[User] = get_user(user_id) return render('index.html', {'user': user}) # ??? In this case your web-framework will not render your user correctly, since it does not expect it to be wrapped inside an ``IO`` container. And we obviously cannot ``map`` or ``bind`` this function. What to do? Use :func:`unsafe_perform_io <returns.unsafe.unsafe_perform_io>`: .. code:: python from returns.unsafe import unsafe_perform_io def index_view(request, user_id): user: IO[User] = get_user(user_id) return render('index.html', {'user': unsafe_perform_io(user)}) # Ok We need it as an escape hatch and compatibility mechanism for our imperative shell. In other words: .. code:: python >>> from returns.unsafe import unsafe_perform_io >>> from returns.io import IO >>> assert unsafe_perform_io(IO('abc')) == 'abc' It is recommended to use `import-linter `_ to restrict imports from ``returns.unsafe`` except in the top-level modules. Inspired by Haskell's `unsafePerformIO `_ FAQ --- Why isn't IO lazy? ~~~~~~~~~~~~~~~~~~~ Please, note that our ``IO`` implementation is not lazy by design. This way, when you mark something as ``@impure``, it will work as before. The only thing that changes is the return type. Instead we offer to use :ref:`unsafe_perform_io` to work with ``IO`` and simulate laziness. But, you can always make your ``IO`` lazy: .. code:: python >>> from returns.io import IO >>> lazy = lambda: IO(1) >>> assert lazy() == IO(1) We have decided that it would be better and more familiar for Python devs. What is the difference between IO[T] and T? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ What kind of input parameter should my function accept, ``IO[T]`` or simple ``T``? It really depends on your domain / context. If the value is pure, then use raw unwrapped values. If the value is fetched, input, received, or selected, then use an ``IO`` or ``IOResult`` container: the first one for operations that never fail, the second one for operations that might fail. Most web applications are just fully covered with ``IO``. Why can't we use IO[Result] instead of IOResult? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We actually can! But, it is harder to write. And ``IOResult`` is actually the very same thing as ``IO[Result]``, but has a nicer API: .. code:: python x: IO[Result[int, str]] x.map(lambda io: io.map(lambda number: number + 1)) # Is the same as: y: IOResult[int, str] y.map(lambda number: number + 1) The second one looks better, doesn't it? How to create unit objects for IOResult? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ *TLDR*: you need to use ``IOSuccess`` and ``IOFailure`` functions or ``IOResult.from_value`` and ``IOResult.from_failure`` methods: .. code:: python >>> from returns.io import IOResult, IOSuccess, IOFailure >>> first: IOResult[int, str] = IOSuccess(1) >>> second: IOResult[float, int] = IOFailure(1) >>> assert IOResult.from_value(1) == IOSuccess(1) >>> assert IOResult.from_failure(2) == IOFailure(2) You can also annotate your variables properly. Otherwise, ``mypy`` will treat ``IOSuccess(1)`` as ``IOSuccess[int, Any]``. You can narrow the type in advance. 
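For instance, here is a small sketch of the difference an explicit annotation makes (the variable names are ours, just for illustration):

.. code:: python

  >>> from returns.io import IOResult, IOSuccess

  >>> # Without an annotation this value is treated as `IOSuccess[int, Any]`:
  >>> inferred = IOSuccess(1)

  >>> # Annotated in advance, the error type is narrowed to `str`:
  >>> annotated: IOResult[int, str] = IOSuccess(1)

  >>> assert inferred == annotated
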
See :ref:`result-units` for more details. Further reading --------------- - `Functional core, imperative shell `_ - `Functional architecture is Ports and Adapters `_ - `IO effect in Scala `_ - `Getting started with fp-ts: IO `_ - `IOEither `_ - `Effect Tracking Is Commercially Worthless `_ API Reference ------------- .. autoclasstree:: returns.io :strict: .. automodule:: returns.io :members: .. automodule:: returns.unsafe :members: returns-0.24.0/docs/pages/maybe.rst000066400000000000000000000174001472312074000171570ustar00rootroot00000000000000.. _maybe: Maybe ===== The ``Maybe`` container is used when a series of computations could return ``None`` at any point. Maybe container --------------- ``Maybe`` consists of two types: ``Some`` and ``Nothing``. We have a convenient method to create different ``Maybe`` types based on just a single value: .. code:: python >>> from returns.maybe import Maybe >>> assert str(Maybe.from_optional(1)) == '<Some: 1>' >>> assert str(Maybe.from_optional(None)) == '<Nothing>' We also have another method called ``.from_value`` that behaves a bit differently: .. code:: python >>> from returns.maybe import Maybe >>> assert str(Maybe.from_value(1)) == '<Some: 1>' >>> assert str(Maybe.from_value(None)) == '<Some: None>' Usage ~~~~~ It might be very useful for complex operations like the following one: .. code:: python >>> from attr import dataclass >>> from typing import Optional >>> from returns.maybe import Maybe, Nothing >>> @dataclass ... class Address(object): ... street: Optional[str] >>> @dataclass ... class User(object): ... address: Optional[Address] >>> @dataclass ... class Order(object): ... user: Optional[User] >>> def get_street_address(order: Order) -> Maybe[str]: ... return Maybe.from_optional(order.user).bind_optional( ... lambda user: user.address, ... ).bind_optional( ... lambda address: address.street, ... ) >>> with_address = Order(User(Address('Some street'))) >>> empty_user = Order(None) >>> empty_address = Order(User(None)) >>> empty_street = Order(User(Address(None))) >>> str(get_street_address(with_address)) # all fields are not None '<Some: Some street>' >>> assert get_street_address(empty_user) == Nothing >>> assert get_street_address(empty_address) == Nothing >>> assert get_street_address(empty_street) == Nothing Optional type ~~~~~~~~~~~~~ One may ask: "How is that different from the ``Optional[]`` type?" That's a really good question! Consider the same code to get the street name without ``Maybe`` and using raw ``Optional`` values: .. code:: python order: Order # some existing Order instance street: Optional[str] = None if order.user is not None: if order.user.address is not None: street = order.user.address.street It looks way uglier and will grow even more ugly and complex when new logic is introduced. Pattern Matching ---------------- ``Maybe`` values can be matched using the new feature of Python 3.10, `Structural Pattern Matching `_, see the example below: .. literalinclude:: ../../tests/test_examples/test_maybe/test_maybe_pattern_matching.py Decorators ---------- Limitations ~~~~~~~~~~~ Typing will only work correctly if :ref:`our mypy plugin ` is used. This happens due to `mypy issue `_. maybe ~~~~~ Sometimes we have to deal with functions that dare to return ``Optional`` values! We have to work with them carefully and write ``if x is not None:`` everywhere. Luckily, we have your back! The ``maybe`` function decorates any other function that returns ``Optional`` and converts it to return ``Maybe`` instead: .. 
code:: python >>> from typing import Optional >>> from returns.maybe import Maybe, Some, maybe >>> @maybe ... def number(num: int) -> Optional[int]: ... if num > 0: ... return num ... return None >>> result: Maybe[int] = number(1) >>> assert result == Some(1) FAQ --- How can I turn Maybe into Optional again? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ When working with regular Python, you might need regular ``Optional[a]`` values. You can easily get one from your ``Maybe`` container at any point in time: .. code:: python >>> from returns.maybe import Maybe >>> assert Maybe.from_optional(1).value_or(None) == 1 >>> assert Maybe.from_optional(None).value_or(None) == None As you can see, revealed type of ``.value_or(None)`` is ``Optional[a]``. Use it a fallback. How to model absence of value vs presence of None value? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Let's say you have this ``dict``: ``values = {'a': 1, 'b': None}`` So, you can have two types of ``None`` here: - ``values.get('b')`` - ``values.get('c')`` But, they are different! The first has explicit ``None`` value, the second one has no given key and ``None`` is used as a default. You might need to know exactly which case you are dealing with. For example, in validation. So, the first thing to remember is that: .. code:: python >>> assert Some(None) != Nothing There's a special way to work with a type like this: .. code:: python >>> values = {'a': 1, 'b': None} >>> assert Maybe.from_value(values).map(lambda d: d.get('a')) == Some(1) >>> assert Maybe.from_value(values).map(lambda d: d.get('b')) == Some(None) In contrast, you can ignore both ``None`` values easily: .. code:: python >>> assert Maybe.from_value(values).bind_optional( ... lambda d: d.get('a'), ... ) == Some(1) >>> assert Maybe.from_value(values).bind_optional( ... lambda d: d.get('b'), ... ) == Nothing So, how to write a complete check for a value: both present and missing? .. code:: python >>> from typing import Optional, Dict, TypeVar >>> from returns.maybe import Maybe, Some, Nothing >>> _Key = TypeVar('_Key') >>> _Value = TypeVar('_Value') >>> def check_key( ... heystack: Dict[_Key, _Value], ... needle: _Key, ... ) -> Maybe[_Value]: ... if needle not in heystack: ... return Nothing ... return Maybe.from_value(heystack[needle]) # try with `.from_optional` >>> real_values = {'a': 1} >>> opt_values = {'a': 1, 'b': None} >>> assert check_key(real_values, 'a') == Some(1) >>> assert check_key(real_values, 'b') == Nothing >>> # Type revealed: returns.maybe.Maybe[builtins.int] >>> assert check_key(opt_values, 'a') == Some(1) >>> assert check_key(opt_values, 'b') == Some(None) >>> assert check_key(opt_values, 'c') == Nothing >>> # Type revealed: returns.maybe.Maybe[Union[builtins.int, None]] Choose wisely between ``.from_value`` and ``.map``, and ``.from_optional`` and ``.bind_optional``. They are similar, but do different things. Note that you can also use :meth:`returns.pipeline.is_successful` to check if the value is present. See the `original issue about Some(None) `_ for more details and the full history. Why there's no IOMaybe? ~~~~~~~~~~~~~~~~~~~~~~~ We do have ``IOResult``, but we don't have ``IOMaybe``. Why? Because when dealing with ``IO`` there are a lot of possible errors. And ``Maybe`` represents just ``None`` and the value. It is not useful for ``IO`` related tasks. So, use ``Result`` instead, which can represent what happened to your ``IO``. You can convert ``Maybe`` to ``Result`` and back again with special :ref:`converters`. 
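Here is a quick sketch of that round-trip, assuming the ``maybe_to_result`` and ``result_to_maybe`` converters from ``returns.converters``:

.. code:: python

  >>> from returns.converters import maybe_to_result, result_to_maybe
  >>> from returns.maybe import Some, Nothing
  >>> from returns.result import Success, Failure

  >>> # `Nothing` carries no error details, so the failure value becomes `None`:
  >>> assert maybe_to_result(Some(1)) == Success(1)
  >>> assert maybe_to_result(Nothing) == Failure(None)

  >>> # Going back drops the error details entirely:
  >>> assert result_to_maybe(Success(1)) == Some(1)
  >>> assert result_to_maybe(Failure('oops')) == Nothing
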
Why Maybe does not have alt method? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Well, because ``Maybe`` only has a single failed value: ``Nothing`` and it cannot be altered. But, ``Maybe`` has :meth:`returns.maybe.Maybe.or_else_call` method to call a passed callback function with zero argument on failed container: .. code:: python >>> from returns.maybe import Some, Nothing >>> assert Some(1).or_else_call(lambda: 2) == 1 >>> assert Nothing.or_else_call(lambda: 2) == 2 This method is unique to ``Maybe`` container. Further reading --------------- - `Option Monads in Rust `_ - `Option overview in TypeScript `_ - `Maybe not - Rich Hickey `_ API Reference ------------- .. autoclasstree:: returns.maybe :strict: .. automodule:: returns.maybe :members: returns-0.24.0/docs/pages/methods.rst000066400000000000000000000053451472312074000175320ustar00rootroot00000000000000.. _methods: Methods ========= The following useful methods can be used to interact with interfaces. cond ---- .. note:: ``cond`` is also the name of a function in the :ref:`pointfree` module. Therefore we encourage to import the modules ``pointfree`` and ``methods`` directly instead of their functions. Reduce the boilerplate when choosing paths with ``DiverseFailableN``. Think of this method as a functional ``if`` alternative for successful or failed types. So, this code: .. code:: python >>> from returns.result import Failure, Result, Success >>> def is_numeric(string: str) -> Result[str, str]: ... if string.isnumeric(): ... return Success('It is a number') ... return Failure('It is not a number') Can be replaced with this: .. code:: python >>> from returns import methods >>> from returns.result import Failure, Result, Success >>> def is_numeric(string: str) -> Result[str, str]: ... return methods.cond( ... Result, ... string.isnumeric(), ... 'It is a number', ... 'It is not a number', ... ) >>> assert is_numeric('42') == Success('It is a number') >>> assert is_numeric('text') == Failure('It is not a number') Why is it helpful? Because ``cond`` can be easily added into a :ref:`pipelines` of functions. unwrap_or_failure ----------------- Unwraps either a successful or failed value. .. code:: python >>> from returns.io import IO, IOSuccess, IOFailure >>> from returns import methods >>> assert methods.unwrap_or_failure(IOSuccess(1)) == IO(1) >>> assert methods.unwrap_or_failure(IOFailure('a')) == IO('a') Useful when you have a ``ResultLike`` value with correctly handled error value, for example with :func:`~returns.pointfree.bimap.bimap`. Here's a full example: .. code:: python >>> from returns.result import Failure, Result, Success >>> from returns import methods, pointfree >>> instance: Result[int, str] = Success(1) >>> error_handled = pointfree.bimap(lambda inr: inr + 1, lambda _: 0)(instance) >>> assert isinstance(methods.unwrap_or_failure(error_handled), int) partition ~~~~~~~~~ :func:`partition ` is used to convert list of :class:`~returns.interfaces.Unwrappable` instances like :class:`~returns.result.Result`, :class:`~returns.io.IOResult`, and :class:`~returns.maybe.Maybe` to a tuple of two lists: successes and failures. .. code:: python >>> from returns.result import Failure, Success >>> from returns.methods import partition >>> results = [Success(1), Failure(2), Success(3), Failure(4)] >>> partition(results) ([1, 3], [2, 4]) API Reference ------------- .. autofunction:: returns.methods.cond .. 
autofunction:: returns.methods.unwrap_or_failure returns-0.24.0/docs/pages/pipeline.rst000066400000000000000000000201161472312074000176650ustar00rootroot00000000000000.. _pipelines: Pipelines ========= The main idea behind functional programming is functional composition. We provide several tools to make composition easy, readable, pythonic, and useful. .. note:: Make sure you are familiar with our :ref:`pointfree` tools, because pipelines and pointfree functions are best friends! flow ---- ``flow`` allows to easily compose multiple functions together into a pipeline. It is useful when you already have an instance to compose functions with. .. note:: ``flow`` is the recommended way to write your code with ``returns``! Let's see an example: .. code:: python >>> from returns.pipeline import flow >>> assert flow( ... [1, 2, 3], ... lambda collection: max(collection), ... lambda max_number: -max_number, ... ) == -3 This allows you to write declarative steps that should be performed on an existing value. .. note:: Technical note: ``flow`` has the best type inference mechanism among all other tools we provide here. This happens due to our :ref:`mypy plugins `. You can also use ``flow`` with pointfree functions and containers: .. code:: python >>> from returns.result import Result, Success, Failure >>> from returns.pointfree import bind >>> from returns.pipeline import flow >>> def regular_function(arg: int) -> float: ... return float(arg) >>> def returns_container(arg: float) -> Result[str, ValueError]: ... if arg != 0: ... return Success(str(arg)) ... return Failure(ValueError('Wrong arg')) >>> def also_returns_container(arg: str) -> Result[str, ValueError]: ... return Success(arg + '!') >>> assert flow( ... 1, # initial value ... regular_function, # composes easily ... returns_container, # also composes easily, but returns a container ... # So we need to `bind` the next function to allow it to consume ... # the container from the previous step. ... bind(also_returns_container), ... ) == Success('1.0!') >>> # And this will fail: >>> assert flow( ... 0, # initial value ... regular_function, # composes easily ... returns_container, # also composes easily, but returns a container ... # So we need to `bind` the next function to allow it to consume ... # the container from the previous step. ... bind(also_returns_container), ... ).failure().args == ('Wrong arg', ) And now let's get to know ``pipe``, it is very similar, but has different usage pattern. .. _pipe: pipe ---- ``pipe`` is an easy way to compose functions together. It is useful when you don't have an instance to compose functions with yet. .. note:: ``pipe`` requires to use our :ref:`mypy plugins `. Let's see an example. .. code:: python >>> from returns.pipeline import pipe >>> pipeline = pipe(str, lambda x: x + 'b', str.upper) >>> assert pipeline(1) == '1B' It might be later used with multiple values: .. code:: python >>> assert pipeline(2) == '2B' It also might be useful to compose containers together: .. code:: python >>> from returns.pipeline import pipe >>> from returns.result import Result, Success, Failure >>> from returns.pointfree import bind >>> def regular_function(arg: int) -> float: ... return float(arg) >>> def returns_container(arg: float) -> Result[str, ValueError]: ... if arg != 0: ... return Success(str(arg)) ... return Failure(ValueError('Wrong arg')) >>> def also_returns_container(arg: str) -> Result[str, ValueError]: ... return Success(arg + '!') >>> transaction = pipe( ... regular_function, # composes easily ... 
returns_container, # also composes easily, but returns a container ... # So we need to `bind` the next function to allow it to consume ... # the container from the previous step. ... bind(also_returns_container), ... ) >>> result = transaction(1) # running the pipeline >>> assert result == Success('1.0!') You might consider ``pipe()`` as :func:`returns.functions.compose` on steroids. The main difference is that ``compose`` takes strictly two arguments (or you might say that it has an arity of two), while ``pipe`` has infinite possible arguments. managed ------- A really common task is to work with something stateful, like database connections or files. First, you need to acquire some resource, then use it and do your thing, and clear things up and release the acquired resource. There are several rules here: 1. If the acquiring failed, then do nothing: do not try to use the resource or release it 2. If the resource is acquired, then try to use it and then release it despite of the usage result In other words, if you cannot open a file, then do nothing. If you opened it, then try to read it. And then always close it. Let's say you have to read a file's contents: .. code:: python >>> from typing import TextIO >>> from returns.pipeline import managed, is_successful >>> from returns.result import ResultE >>> from returns.io import IOResultE, impure_safe >>> def read_file(file_obj: TextIO) -> IOResultE[str]: ... return impure_safe(file_obj.read)() # this will be the final result >>> def close_file( ... file_obj: TextIO, ... file_contents: ResultE[str], ... ) -> IOResultE[None]: # sometimes might require to use `untap` ... return impure_safe(file_obj.close)() # this value will be dropped >>> managed_read = managed(read_file, close_file) >>> read_result = managed_read( ... impure_safe(lambda filename: open(filename, 'r'))('pyproject.toml'), ... ) >>> assert is_successful(read_result) # file content is inside `IOSuccess` And here's how we recommend to combine ``managed`` with other pipeline functions: .. code:: python >>> import tomlkit >>> from returns.pipeline import flow >>> from returns.pointfree import bind_result >>> from returns.result import safe >>> from returns.io import IOSuccess >>> @safe ... def parse_toml(file_contents: str) -> dict: ... return tomlkit.parse(file_contents) >>> @safe ... def get_project_name(parsed: dict) -> str: ... return parsed['tool']['poetry']['name'] >>> pipeline_result = flow( ... 'pyproject.toml', # filename we work with ... impure_safe(lambda filename: open(filename, 'r')), ... managed_read, ... bind_result(parse_toml), ... bind_result(get_project_name), ... ) >>> assert pipeline_result == IOSuccess('returns') Notice a few tricks here: 1. We use ``managed`` with and without ``flow`` here, both are fine! 2. We have created a ``managed_read`` managed function, so we don't need to specify it every time we want to read a file in a functional way 3. We are using impure and pure operations inside the pipeline: this helps us to understand how our app works. Which parts do access the file system and which just work However, you can still use the imperative approach with ``with:`` or ``try/finally`` wrapped into ``@impure_safe`` decorator, your choice! We don't recommend to mix these two. Stick to one you like the most. ``managed`` can be used with: - ``IOResult`` - ``FutureResult`` - ``RequiresContextIOResult`` - ``RequiresContextFutureResult`` is_successful ------------- :func:`is_successful ` is used to tell whether or not your result is a success. 
We treat only three types that do not throw as successful ones, basically: :func:`Success `, :func:`IOSuccess `, and :func:`Some ` .. code:: python >>> from returns.result import Success, Failure >>> from returns.pipeline import is_successful >>> assert is_successful(Success(1)) is True >>> assert is_successful(Failure('text')) is False Further reading --------------- - `fp-ts pipeable `_ - `ZIO Managed `_ API Reference ------------- .. autofunction:: returns.pipeline.flow .. autofunction:: returns.pipeline.pipe .. autofunction:: returns.pipeline.managed .. automodule:: returns.pipeline :members: returns-0.24.0/docs/pages/pointfree.rst000066400000000000000000000305271472312074000200620ustar00rootroot00000000000000.. _pointfree: Pointfree ========= This module provides a bunch of primitives to work with containers. It makes composing functions with containers easier. Sometimes using methods on containers is not very helpful. Container methods are difficult to compose with other functions or methods. Instead we can use functions that produce the same result but have the reverse semantics. Usually, this means changing something like ``x.f(y)`` to ``f(x)(y)``. Why would anyone need these functions when you can use methods? To create pipelines! .. code:: python from returns.pipeline import pipe from returns.result import ResultE def returns_result(arg: int) -> ResultE[int]: ... def works_with_result(arg: int) -> ResultE[int]: ... def finish_work(arg: int) -> ResultE[int]: ... pipe( returns_result, works_with_result, # does not compose! Needs a container for input finish_work, # does not compose either! ) Without pointfree functions you would probably have to write: .. code:: python returns_result().bind(works_with_result).bind(notifies_user) And you need a way to somehow do this in the pipeline syntax. Remember that pipeline syntax helps make composing functions more readable and pythonic. That's where pointfree functions become really useful. map_ ---- ``map_()`` is a pointfree alternative to the container method ``.map()``. It lifts a function to work from container to container. ``map_(f)`` would return f lifted to work on a container. In other words, it modifies the function's signature from: ``a -> b`` to: ``Container[a] -> Container[b]`` Doing this lets us compose regular functions and containers. .. code:: python >>> from returns import pointfree >>> from returns.maybe import Maybe, Some >>> def as_int(arg: str) -> int: ... return ord(arg) >>> container: Maybe[str] = Some('a') >>> # We now have two ways to compose container and as_int >>> # 1. Via ``.map()``: >>> assert container.map(as_int) == Some(97) >>> # 2. Or via ``map_()``, like above but in the reverse order: >>> assert pointfree.map_(as_int)(container) == Some(97) This means we can compose functions in a pipeline. .. code:: python >>> from returns import pointfree >>> from returns.pipeline import flow >>> from returns.maybe import Maybe, Some, Nothing >>> def index_of_7(arg: str) -> Maybe[int]: ... if '7' in arg: ... return Some(arg.index('7')) ... return Nothing >>> def double(num: int) -> int: ... return num * 2 >>> assert flow( ... '007', ... index_of_7, # Some(2) ... pointfree.map_(double), # Some(4) ... ) == Some(4) >>> # Still passes along Nothing >>> assert flow( ... '006', ... index_of_7, # Nothing ... pointfree.map_(double), # Nothing ... ) == Nothing bind ---- Pointfree ``bind()`` is an alternative to the container method ``.bind()``. 
It binds a function that returns a container so that is accepts the same container type as input. In other words, it modifies the function's signature from: ``a -> Container[b]`` to: ``Container[a] -> Container[b]`` Without ``bind()`` it would be very hard to declaratively compose two entities: 1. Existing containers 2. Existing functions that accept a regular value and return a container We can compose these entities with ``.bind()`` when calling it on a container, but how can we do it independently? .. code:: python >>> from returns import pointfree >>> from returns.maybe import Maybe, Some >>> def index_of_1(arg: str) -> Maybe[int]: ... if '1' in arg: ... return Some(arg.index('1')) ... return Nothing >>> container = Some('A1 Steak Sauce') >>> # We now have two way of composing these entities. >>> # 1. Via ``.bind``: >>> assert container.bind(index_of_1) == Some(1) >>> # 2. Or via the ``bind`` function. >>> assert pointfree.bind(index_of_1)(container) == Some(1) >>> # This produces the same result, but in a different order That's it! We also have a long list of other ``bind_*`` functions, like: - ``bind_io`` to bind functions returning ``IO`` container - ``bind_result`` to bind functions returning ``Result`` container - ``bind_ioresult`` to bind functions returning ``IOResult`` container - ``bind_future`` to bind functions returning ``Future`` container - ``bind_async_future`` to bind async functions returning ``Future`` container - ``bind_future_result`` to bind functions returning ``FutureResult`` container - ``bind_async_future_result`` to bind async functions returning ``FutureResult`` container - ``bind_context`` to bind functions returning ``RequiresContext`` container - ``bind_context_result`` to bind functions returning ``RequiresContextResult`` container - ``bind_context_ioresult`` to bind functions returning ``RequiresContextIOResult`` container - ``bind_async`` to bind async functions returning ``Future`` or ``FutureResult`` - ``bind_awaitable`` to bind async non-container functions alt ---- Pointfree ``alt()`` is an alternative to the container method ``.alt()``. It lifts a function to act on the error contents of a container. In other words, it modifies the function's signature from: ``a -> b`` to: ``Container[_, a] -> Container[_, b]`` You can think of it like ``map``, but for the second type of a container. .. code:: python >>> from returns.io import IOFailure, IOSuccess >>> from returns import pointfree >>> def half_as_bad(error_code: int) -> float: ... return error_code / 2 >>> # When acting on a successful state, nothing happens. >>> assert pointfree.alt(half_as_bad)(IOSuccess(1)) == IOSuccess(1) >>> # When acting on a failed state, the result changes >>> assert pointfree.alt(half_as_bad)(IOFailure(4)) == IOFailure(2.0) >>> # This is equivalent to IOFailure(4).alt(half_as_bad) >>> assert pointfree.alt(half_as_bad)(IOFailure(4)) == IOFailure(4).alt(half_as_bad) This inverse syntax lets us easily compose functions in a pipeline .. code:: python >>> from returns.io import IOFailure, IOSuccess, IOResult >>> from returns import pointfree >>> def always_errors(user_input: str) -> IOResult: ... return IOFailure(len(user_input)) >>> def twice_as_bad(exit_code: int) -> int: ... return exit_code * 2 >>> def make_error_message(exit_code: int) -> str: ... return 'Badness level: {0}'.format(exit_code) >>> assert flow( ... '12345', ... always_errors, ... pointfree.alt(twice_as_bad), ... pointfree.alt(make_error_message) ... 
) == IOFailure('Badness level: 10') lash ---- Pointfree ``lash()`` function is an alternative to ``.lash()`` container method. It allows better composition by lifting a function that returns a container to act on the failed state of a container. You can think of it like ``bind``, but for the second type of a container. .. code:: python >>> from returns import pointfree >>> from returns.result import Success, Failure, Result >>> def always_succeeds(arg: str) -> Result[int, str]: ... return Success(1) >>> failed: Result[int, str] = Failure('a') >>> # We now have two way of composing these entities. >>> # 1. Via ``.lash``: >>> assert failed.lash(always_succeeds) == Success(1) >>> # 2. Or via ``lash`` function, the same but in the inverse way: >>> assert pointfree.lash(always_succeeds)(failed) == Success(1) apply ----- Pointfree ``apply`` function allows to use ``.apply()`` container method like a function: .. code:: python >>> from returns import pointfree >>> from returns.maybe import Some, Nothing >>> def wow(arg: int) -> str: ... return chr(arg) + '!' >>> assert pointfree.apply(Some(wow))(Some(97)) == Some('a!') >>> assert pointfree.apply(Some(wow))(Some(98)) == Some('b!') >>> assert pointfree.apply(Some(wow))(Nothing) == Nothing >>> assert pointfree.apply(Nothing)(Nothing) == Nothing If you wish to use ``apply`` inside a pipeline here's how it might look: .. code:: python >>> from returns import pointfree >>> from returns.pipeline import flow >>> from returns.maybe import Some, Nothing, Maybe >>> from typing import Callable >>> def wow(arg: int) -> str: ... return chr(arg) + '!' >>> def my_response(is_excited: bool) -> Maybe[Callable[[int], str]]: ... if is_excited: ... return Some(wow) ... return Nothing >>> assert flow( ... Some(97), ... pointfree.apply(my_response(True)), ... ) == Some('a!') >>> assert flow( ... Nothing, ... pointfree.apply(my_response(False)), ... ) == Nothing Or with a function as the first parameter: .. code:: python >>> from returns.pipeline import flow >>> from returns.curry import curry >>> from returns.maybe import Some >>> @curry ... def add_curried(first: int, second: int) -> int: ... return first + second >>> assert flow( ... Some(add_curried), ... Some(2).apply, ... Some(3).apply, ... ) == Some(5) compose_result -------------- Sometimes we need to manipulate the inner ``Result`` of some containers like ``IOResult`` or ``FutureResult``. With ``compose_result`` we can do this kind of manipulation. .. code:: python >>> from returns import pointfree >>> from returns.io import IOResult, IOSuccess, IOFailure >>> from returns.result import Result >>> def cast_to_str(container: Result[float, int]) -> IOResult[str, int]: ... return IOResult.from_result(container.map(str)) >>> assert pointfree.compose_result(cast_to_str)(IOSuccess(42.0)) == IOSuccess('42.0') >>> assert pointfree.compose_result(cast_to_str)(IOFailure(1)) == IOFailure(1) cond ---- .. note:: ``cond`` is also the name of a function in the :ref:`methods` module. Therefore we encourage to import the modules ``pointfree`` and ``methods`` directly instead of their functions. Sometimes we need to create ``SingleFailableN`` or ``DiverseFailableN`` containers (e.g. ``Maybe``, ``ResultLikeN``) based on a boolean expression, ``cond`` can help us. Consider ``cond`` to be a functional ``if``. See the example below: .. code:: python >>> from returns.pipeline import flow >>> from returns import pointfree >>> from returns.result import Result, Failure, Success >>> def returns_boolean(arg: int) -> bool: ... 
return bool(arg) >>> assert flow( ... returns_boolean(1), ... pointfree.cond(Result, 'success', 'failure') ... ) == Success('success') >>> assert flow( ... returns_boolean(0), ... pointfree.cond(Result, 'success', 'failure') ... ) == Failure('failure') Example using ``cond`` with the ``Maybe`` container: .. code:: python >>> from returns.pipeline import flow >>> from returns import pointfree >>> from returns.maybe import Maybe, Some, Nothing >>> assert flow( ... returns_boolean(1), ... pointfree.cond(Maybe, 'success') ... ) == Some('success') >>> assert flow( ... returns_boolean(0), ... pointfree.cond(Maybe, 'success') ... ) == Nothing Further reading --------------- - `Tacit programming or point-free style `_ - `Pointfree in Haskell `_ API Reference ------------- .. autofunction:: returns.pointfree.map_ .. autofunction:: returns.pointfree.bind .. autofunction:: returns.pointfree.bind_result .. autofunction:: returns.pointfree.bind_io .. autofunction:: returns.pointfree.bind_ioresult .. autofunction:: returns.pointfree.bind_future .. autofunction:: returns.pointfree.bind_async_future .. autofunction:: returns.pointfree.bind_future_result .. autofunction:: returns.pointfree.bind_async_future_result .. autofunction:: returns.pointfree.bind_context2 .. autofunction:: returns.pointfree.bind_context3 .. autofunction:: returns.pointfree.bind_context .. autofunction:: returns.pointfree.modify_env2 .. autofunction:: returns.pointfree.modify_env3 .. autofunction:: returns.pointfree.modify_env .. autofunction:: returns.pointfree.bind_context_result .. autofunction:: returns.pointfree.bind_context_ioresult .. autofunction:: returns.pointfree.bind_async .. autofunction:: returns.pointfree.bind_awaitable .. autofunction:: returns.pointfree.bind_optional .. autofunction:: returns.pointfree.compose_result .. autofunction:: returns.pointfree.cond .. autofunction:: returns.pointfree.alt .. autofunction:: returns.pointfree.lash .. autofunction:: returns.pointfree.unify .. autofunction:: returns.pointfree.apply returns-0.24.0/docs/pages/quickstart.rst000066400000000000000000000060541472312074000202570ustar00rootroot00000000000000Quickstart ========== Starting is really fast! You can integrate ``returns`` into any project at any stage. You can use it fully or partially. With or without types. ``returns`` is a very flexible library! You can even just start using it without any deep theory around this project. But, you can always address our learning materials which will unravel all parts of functional programming with useful examples and simple terms. Why --- One of the most frequent questions Python developers ask: why would we need this? Basically, the answer is that ``returns`` provides useful abstractions that solve some problems every developer has: 1. :class:`~returns.maybe.Maybe` helps to work with ``None`` in a type-safe way 2. :class:`~returns.result.Result` helps to work with exceptions in a type-safe way 3. :class:`~returns.io.IO` helps to separate pure code from impure code to make your architecture better 4. :class:`~returns.future.Future` helps to write ``await`` free code 5. :class:`~returns.context.requires_context.RequiresContext` helps to inject dependencies in a very readable, explicit, type-safe, and clean way 6. :ref:`pipelines` can be used independently or together with the types above to create complex, declarative, and type-safe data pipelines On top of that we provide useful interfaces that allows you to switch implementation on the fly. 
For example, you can write code that works the same way for sync and async execution flows. While being fully type-safe at the same time. And you can write your own primitives that will solve any other problem you can possible have based on our existing or your custom interfaces. In other words, ``returns`` unlocks insane powers of typed-functional programming to a regular Python developer. Installation ------------ ``returns`` is a pure Python library. Install it as usual: .. code:: bash pip install returns # or better use poetry Typechecking and other integrations ----------------------------------- This step is optional. If you use ``mypy`` for type-checking, than you will need to configure it. We really recommend using ``mypy`` with this project though. Because we have put a lot of efforts into the typing part. Check out our docs on :ref:`mypy `. We also have built-in integrations with :ref:`pytest ` and :ref:`hypothesis `. Also, there is :ref:`developer tooling ` you might enjoy. Theory ------ Do you want to learn new awesome concepts? Then, start reading our "Userguide"! It has everything you need! Reading order matters. However, this is optional. You can still use ``returns`` without a deep-dive into theory. Building your own stuff ----------------------- You can extend ``returns`` and build your own stuff! Particularly, you can add new interfaces, new containers, and new integrations. See :ref:`this guide `. |Telegram chat| .. |Telegram chat| image:: https://img.shields.io/badge/chat-join-blue?logo=telegram :target: https://t.me/drypython Join our chat to get help or advice. returns-0.24.0/docs/pages/railway.rst000066400000000000000000000137201472312074000175330ustar00rootroot00000000000000.. _railway: Railway oriented programming ============================ Containers can serve many different purposes (while still serving the main one: composition) for example, some of them (:class:`~returns.result.Result` and :class:`~returns.maybe.Maybe`) are used to work with different types of errors starting with ``NullPointerException`` to arbitrary user-defined ones. Error handling -------------- When talking about error handling we use a concept of `Railway oriented programming `_. It means that flow of our program has two tracks: 1. Successful one: where everything goes perfectly: HTTP requests work, database is always serving us data, parsing values does not fail 2. Failed one: where something went wrong We can switch from track to track: we can fail something or we can fix the situation. .. mermaid:: :caption: Railway oriented programming. graph LR S1 -- bind --> S3 S1 -- bind --> F2 S3 -- map --> S5 S5 -- bind --> S7 S5 -- bind --> F6 F2 -- alt --> F4 F4 -- lash --> F6 F4 -- lash --> S5 F6 -- lash --> F8 F6 -- lash --> S7 style S1 fill:green style S3 fill:green style S5 fill:green style S7 fill:green style F2 fill:red style F4 fill:red style F6 fill:red style F8 fill:red Returning execution to the right track ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We also support two special methods to work with "failed" values: - :func:`returns.interfaces.altable.AltableN.alt` transforms error to another error that works only when container is in failed state, is the opposite of :func:`returns.interfaces.mappable.MappableN.map` method - :func:`returns.interfaces.lashable.LashableN.lash` is the opposite of :func:`returns.interfaces.bindable.BindableN.bind` method that works only when container is in failed state Let's start from the first one: ``alt`` method allows to change your error type. .. 
mermaid:: :caption: Illustration of ``alt`` method. graph LR F1["Container[A]"] -- "alt(function)" --> F2["Container[B]"] style F1 fill:red style F2 fill:red .. code:: python >>> from returns.result import Failure >>> assert Failure(1).alt(str) == Failure('1') The second method is ``lash``. It is a bit different. We pass a function that returns another container to it. :func:`returns.interfaces.lashable.LashableN.lash` is used to literally bind two different containers together. It can also lash your flow and get on the successful track again: .. mermaid:: :caption: Illustration of ``lash`` method. graph LR F1["Container[A]"] -- "lash(function)" --> F2["Container[B]"] F1["Container[A]"] -- "lash(function)" --> F3["Container[C]"] style F1 fill:red style F2 fill:green style F3 fill:red .. code:: python >>> from returns.result import Result, Failure, Success >>> def tolerate_exception(state: Exception) -> Result[int, Exception]: ... if isinstance(state, ZeroDivisionError): ... return Success(0) ... return Failure(state) >>> value: Result[int, Exception] = Failure(ZeroDivisionError()) >>> result: Result[int, Exception] = value.lash(tolerate_exception) >>> assert result == Success(0) >>> value2: Result[int, Exception] = Failure(ValueError()) >>> result2: Result[int, Exception] = value2.lash(tolerate_exception) >>> # => Failure(ValueError()) From typing perspective ``.alt`` and ``.lash`` are exactly the same as ``.map`` and ``.bind`` but only work with the second type argument instead of the first one: .. code:: python from returns.result import Result first: Result[int, int] second: Result[int, int] reveal_type(first.map(str)) # => Result[str, int] reveal_type(second.alt(str)) # => Result[int, str] .. note:: Not all containers support these methods, only containers that implement :class:`returns.interfaces.lashable.LashableN` and :class:`returns.interfaces.altable.AltableN` For example, :class:`~returns.io.IO` based containers and :class:`~returns.context.requires_context.RequiresContext` cannot be alted or lashed. Unwrapping values ----------------- And we have two more functions to unwrap inner state of containers into a regular types: - :func:`.unwrap ` returns a value if it is possible, raises :class:`returns.primitives.exceptions.UnwrapFailedError` otherwise .. code:: python >>> from returns.result import Failure, Success >>> from returns.maybe import Some, Nothing >>> assert Success(1).value_or(None) == 1 >>> assert Some(0).unwrap() == 0 .. code:: pycon >>> Failure(1).unwrap() Traceback (most recent call last): ... returns.primitives.exceptions.UnwrapFailedError >>> Nothing.unwrap() Traceback (most recent call last): ... returns.primitives.exceptions.UnwrapFailedError For failing containers you can use :meth:`returns.interfaces.unwrappable.Unwrapable.failure` to unwrap the failed state: .. code:: pycon >>> assert Failure(1).failure() == 1 >>> Success(1).failure() Traceback (most recent call last): ... returns.primitives.exceptions.UnwrapFailedError Be careful, since this method will raise an exception when you try to ``.failure()`` a successful container. .. note:: Not all containers support these methods, only containers that implement :class:`returns.interfaces.unwrappable.Unwrappable`. For example, :class:`~returns.io.IO` based containers and :class:`~returns.context.requires_context.RequiresContext` cannot be unwrapped. .. note:: Some containers also have ``.value_or()`` helper method. Example: .. 
code:: python >>> from returns.result import Success, Failure >>> assert Success(1).value_or(None) == 1 >>> assert Failure(1).value_or(None) is None Further reading --------------- - `Railway oriented programming in F# `_ - `Against Railway-Oriented Programming `_ returns-0.24.0/docs/pages/result.rst000066400000000000000000000165141472312074000174050ustar00rootroot00000000000000.. _result: Result ====== Make sure to get familiar with :ref:`Railway oriented programming <railway>`. ``Result`` is obviously a result of some series of computations. It might succeed with some resulting value. Or it might return an error with some extra details. ``Result`` consists of two types: ``Success`` and ``Failure``. ``Success`` represents a successful operation result and ``Failure`` indicates that something has failed. .. code:: python from returns.result import Result, Success, Failure def find_user(user_id: int) -> Result['User', str]: user = User.objects.filter(id=user_id) if user.exists(): return Success(user[0]) return Failure('User was not found') user_search_result = find_user(1) # => Success(User{id: 1, ...}) user_search_result = find_user(0) # id 0 does not exist! # => Failure('User was not found') When is it useful? When you do not want to use exceptions to break your execution scope. Or when you do not want to use ``None`` to represent empty values, since it will raise ``TypeError`` somewhere, together with other ``None``-related surprises. Composition ----------- Make sure to check out how to compose containers with ``flow`` or :ref:`pipe`! Read more about them if you want to compose your containers easily. Pattern Matching ---------------- ``Result`` values can be matched using the new feature of Python 3.10, `Structural Pattern Matching `_, see the example below: .. literalinclude:: ../../tests/test_examples/test_result/test_result_pattern_matching.py Aliases ------- There are several useful aliases for ``Result`` type with some common values: - :attr:`returns.result.ResultE` is an alias for ``Result[... Exception]``, just use it when you want to work with ``Result`` containers that use exceptions as error type. It is named ``ResultE`` because it is ``ResultException`` and ``ResultError`` at the same time. Decorators ---------- Limitations ~~~~~~~~~~~ Typing will only work correctly if :ref:`our mypy plugin ` is used. This happens due to `mypy issue `_. safe ~~~~ :func:`safe <returns.result.safe>` is used to convert regular functions that can throw exceptions to functions that return :class:`Result <returns.result.Result>` type. It supports only regular functions. If you need to mark ``async`` functions as ``safe``, use :func:`future_safe <returns.future.future_safe>` instead. .. code:: python >>> from returns.result import Success, safe >>> @safe # Will convert type to: Callable[[int], Result[float, Exception]] ... def divide(number: int) -> float: ... return number / number >>> assert divide(1) == Success(1.0) >>> str(divide(0)) '<Failure: division by zero>' If you want ``safe`` to handle only a specific set of exceptions: .. code:: python >>> @safe(exceptions=(ZeroDivisionError,)) # Other exceptions will be raised ... def divide(number: int) -> float: ... if number > 10: ... raise ValueError('Too big') ... return number / number >>> assert divide(5) == Success(1.0) >>> assert divide(0).failure() >>> divide(15) Traceback (most recent call last): ... ValueError: Too big attempt ~~~~~~~ Similar to the :func:`safe <returns.result.safe>` function, but instead of wrapping the exception error in a :class:`Failure <returns.result.Failure>` container it'll wrap the argument that led to that exception. .. 
code:: python >>> from returns.result import Failure, Success, attempt >>> @attempt ... def divide_itself(number: int) -> float: ... return number / number >>> assert divide_itself(2) == Success(1.0) >>> assert divide_itself(0) == Failure(0) .. warning:: This decorator works only with functions that has just one argument. FAQ --- .. _result-units: How to create unit objects? ~~~~~~~~~~~~~~~~~~~~~~~~~~~ Use ``Success`` or ``Failure``. Alternatively :meth:`returns.result.Result.from_value` or :meth:`returns.result.Result.from_failure`. It might be a good idea to use unit functions together with the explicit annotation. Python's type system does not allow us to do much, so this is required: .. code:: python >>> from returns.result import Result, Success >>> def callback(arg: int) -> Result[float, int]: ... return Success(float(arg)) >>> first: Result[int, int] = Success(1) >>> assert first.bind(callback) == Success(1.0) Otherwise ``first`` will have ``Result[int, Any]`` type. Which is okay in some situations. How to compose error types? ~~~~~~~~~~~~~~~~~~~~~~~~~~~ You might want to sometimes use ``unify`` :ref:`pointfree` functions instead of ``.bind`` to compose error types together. While ``.bind`` enforces error type to stay the same, ``unify`` is designed to return a ``Union`` of a previous error type and a new one. It gives an extra flexibility, but also provokes more thinking and can be problematic in some cases. Like so: .. code:: python >>> from returns.result import Result, Success, Failure >>> from returns.pointfree import unify >>> def div(number: int) -> Result[float, ZeroDivisionError]: ... if number: ... return Success(1 / number) ... return Failure(ZeroDivisionError('division by zero')) >>> container: Result[int, ValueError] = Success(1) >>> assert unify(div)(container) == Success(1.0) >>> # => Revealed type is: >>> # Result[float, Union[ValueError, ZeroDivisionError]] So, that's a way to go, if you need this composition. map vs bind ~~~~~~~~~~~ We use the ``map`` method when we're working with pure functions, a function is pure if it doesn't produce any side-effect (e.g. Exceptions). On the other hand, we use the ``bind`` method if a function returns a ``Result`` instance which translates its potential side-effect into a raw value. See the example below: .. code:: python >>> import json >>> from typing import Dict >>> from returns.result import Failure, Result, Success, safe >>> # `cast_to_bool` doesn't produce any side-effect >>> def cast_to_bool(arg: int) -> bool: ... return bool(arg) >>> # `parse_json` can produce Exceptions, so we use the `safe` decorator >>> # to prevent any kind of exceptions >>> @safe ... def parse_json(arg: str) -> Dict[str, str]: ... return json.loads(arg) >>> assert Success(1).map(cast_to_bool) == Success(True) >>> assert Success('{"example": "example"}').bind(parse_json) == Success({"example": "example"}) >>> assert Success('').bind(parse_json).alt(str) == Failure('Expecting value: line 1 column 1 (char 0)') How to check if your result is a success or failure? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ``Result`` is a container and you can use :meth:`returns.pipeline.is_successful` like so: .. 
code:: python >>> from returns.result import Success, Failure >>> from returns.pipeline import is_successful >>> assert is_successful(Success(1)) is True >>> assert is_successful(Failure('text')) is False Further reading --------------- - `Railway Oriented Programming `_ - `Recoverable Errors with Result in Rust `_ - `Either overview in TypeScript `_ API Reference ------------- .. autoclasstree:: returns.result :strict: .. automodule:: returns.result :members: returns-0.24.0/docs/pages/trampolines.rst000066400000000000000000000023751472312074000204240ustar00rootroot00000000000000.. _trampolines: Trampolines =========== Python does not support TCO (tail call optimization), so recursion-based algorithms become dangerous. We cannot be sure that they won't cause ``RecursionError`` on deeply nested data. Here's why we need trampolines: they allow us to replicate tail call optimization by wrapping function calls into :class:`returns.trampolines.Trampoline` objects, making recursion-based functions *always* safe. Example: .. code:: python >>> from typing import Union, List >>> from returns.trampolines import Trampoline, trampoline >>> @trampoline ... def accumulate( ... numbers: List[int], ... acc: int = 0, ... ) -> Union[int, Trampoline[int]]: ... if not numbers: ... return acc ... number = numbers.pop() ... return Trampoline(accumulate, numbers, acc + number) >>> assert accumulate([1, 2]) == 3 >>> assert accumulate([1, 2, 3]) == 6 The resulting function is still fully type-safe: - The ``Trampoline`` object uses ``ParamSpec`` to make sure that the passed arguments are correct - The final return type of the function is narrowed to contain only the original type (without the ``Trampoline`` implementation detail) API Reference ------------- .. automodule:: returns.trampolines :members: returns-0.24.0/docs/pages/types.rst000066400000000000000000000055161472312074000172310ustar00rootroot00000000000000.. _primitive-types: Primitive types =============== We have several utility types that we use for our containers; they can help end users as well. Fold ---- You can use all the power of declarative loops in your app with ``Fold``. .. code:: python >>> from returns.iterables import Fold >>> from returns.io import IO >>> items = [IO(1), IO(2), IO(3)] >>> assert Fold.loop( ... items, ... IO(''), ... lambda num: lambda text: text + str(num), ... ) == IO('123') There are other helpful methods as well. See :class:`returns.iterables.AbstractFold`. We also ship :class:`~returns.iterables.AbstractFold`, where you can change how the ``loop`` (or any other) method works, for example, for performance reasons. Let's say you have a big number of :class:`~returns.context.requires_context.RequiresContext` instances and you want to do the same string concatenation we have shown above. You might face recursion problems with it: .. code:: python >>> import sys >>> from returns.context import Reader >>> from returns.iterables import Fold >>> items = [Reader.from_value(num) for num in range(sys.getrecursionlimit())] >>> Fold.loop(items, Reader.from_value(0), lambda x: lambda y: x + y)(...) Traceback (most recent call last): ... RecursionError: ... So, let's change how it works for this specific type: .. code:: python >>> from returns.iterables import AbstractFold >>> class ContextAwareFold(AbstractFold): ... @classmethod ... def _loop(cls, iterable, acc, function, concat, deps=None): ... wrapped = acc.from_value(function) ... for current in iterable: ... assert isinstance(current, Reader) ... 
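# evaluate each step eagerly: apply the Reader to deps right away and re-wrap the plain value, so no deep lazy chain (and no recursion) builds up ... 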
acc = Reader.from_value(concat(current, acc, wrapped)(deps)) ... return acc .. note:: Don't forget to add typing annotations to your real code! This is just an example. And now let's test that it works without recursion: .. code:: python >>> items = [Reader.from_value(num) for num in range(sys.getrecursionlimit())] >>> assert ContextAwareFold.loop( ... items, Reader.from_value(0), lambda x: lambda y: x + y, ... )(...) == sum(range(sys.getrecursionlimit())) And no error will be produced! We now don't use recursion inside. Consider this way of doing things as a respected hack. Immutable --------- This class is useful when you need to make some instances immutable (like :ref:`our containers are immutable `). API Reference ------------- Iterables ~~~~~~~~~ .. autoclasstree:: returns.iterables :strict: .. automodule:: returns.iterables :members: Types ~~~~~ .. autoclasstree:: returns.primitives.types .. automodule:: returns.primitives.types :members: Exceptions ~~~~~~~~~~ .. autoclasstree:: returns.primitives.exceptions .. automodule:: returns.primitives.exceptions :members: returns-0.24.0/poetry.lock000066400000000000000000005242641472312074000155100ustar00rootroot00000000000000# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. [[package]] name = "alabaster" version = "0.7.16" description = "A light, configurable Sphinx theme" optional = false python-versions = ">=3.9" files = [ {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, ] [[package]] name = "anyio" version = "4.6.2.post1" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" files = [ {file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"}, {file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"}, ] [package.dependencies] exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] trio = ["trio (>=0.26.1)"] [[package]] name = "astor" version = "0.8.1" description = "Read/rewrite/write Python ASTs" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" files = [ {file = "astor-0.8.1-py2.py3-none-any.whl", hash = "sha256:070a54e890cefb5b3739d19f30f5a5ec840ffc9c50ffa7d23cc9fc1a38ebbfc5"}, {file = "astor-0.8.1.tar.gz", hash = "sha256:6a6effda93f4e1ce9f618779b2dd1d9d84f1e32812c23a29b3fff6fd7f63fa5e"}, ] [[package]] name = "attrs" version = "24.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [package.extras] benchmark = 
["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "autorepr" version = "0.3.0" description = "Makes civilized __repr__, __str__, and __unicode__ methods" optional = false python-versions = "*" files = [ {file = "autorepr-0.3.0-py2-none-any.whl", hash = "sha256:c34567e4073630feb52d9c788fc198085e9e9de4817e3b93b7c4c534fc689f11"}, {file = "autorepr-0.3.0-py2.py3-none-any.whl", hash = "sha256:1d9010d14fb325d3961e3aa73692685563f97d6ba4a2f0f735329fb37422599c"}, {file = "autorepr-0.3.0.tar.gz", hash = "sha256:ef770b84793d5433e6bb893054973b8c7ce6b487274f9c3f734f678cae11e85e"}, ] [[package]] name = "babel" version = "2.16.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" files = [ {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, ] [package.extras] dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "bandit" version = "1.8.0" description = "Security oriented static analyser for python code." optional = false python-versions = ">=3.9" files = [ {file = "bandit-1.8.0-py3-none-any.whl", hash = "sha256:b1a61d829c0968aed625381e426aa378904b996529d048f8d908fa28f6b13e38"}, {file = "bandit-1.8.0.tar.gz", hash = "sha256:b5bfe55a095abd9fe20099178a7c6c060f844bfd4fe4c76d28e35e4c52b9d31e"}, ] [package.dependencies] colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} PyYAML = ">=5.3.1" rich = "*" stevedore = ">=1.20.0" [package.extras] baseline = ["GitPython (>=3.1.30)"] sarif = ["jschema-to-python (>=1.2.3)", "sarif-om (>=1.0.4)"] test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)"] toml = ["tomli (>=1.1.0)"] yaml = ["PyYAML"] [[package]] name = "beautifulsoup4" version = "4.12.3" description = "Screen-scraping library" optional = false python-versions = ">=3.6.0" files = [ {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, ] [package.dependencies] soupsieve = ">1.2" [package.extras] cchardet = ["cchardet"] chardet = ["chardet"] charset-normalizer = ["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] [[package]] name = "cattrs" version = "24.1.2" description = "Composable complex class support for attrs and dataclasses." 
optional = false python-versions = ">=3.8" files = [ {file = "cattrs-24.1.2-py3-none-any.whl", hash = "sha256:67c7495b760168d931a10233f979b28dc04daf853b30752246f4f8471c6d68d0"}, {file = "cattrs-24.1.2.tar.gz", hash = "sha256:8028cfe1ff5382df59dd36474a86e02d817b06eaf8af84555441bac915d2ef85"}, ] [package.dependencies] attrs = ">=23.1.0" exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} [package.extras] bson = ["pymongo (>=4.4.0)"] cbor2 = ["cbor2 (>=5.4.6)"] msgpack = ["msgpack (>=1.0.5)"] msgspec = ["msgspec (>=0.18.5)"] orjson = ["orjson (>=3.9.2)"] pyyaml = ["pyyaml (>=6.0)"] tomlkit = ["tomlkit (>=0.11.8)"] ujson = ["ujson (>=5.7.0)"] [[package]] name = "certifi" version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] name = "cffi" version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, {file = 
"cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, {file = 
"cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] pycparser = "*" [[package]] name = "charset-normalizer" version = "3.4.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = 
"sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, {file = 
"charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash 
= "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, {file = 
"charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, ] [[package]] name = "click" version = "8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "codespell" version = "2.3.0" description = "Codespell" optional = false python-versions = ">=3.8" files = [ {file = "codespell-2.3.0-py3-none-any.whl", hash = "sha256:a9c7cef2501c9cfede2110fd6d4e5e62296920efe9abfb84648df866e47f58d1"}, {file = "codespell-2.3.0.tar.gz", hash = "sha256:360c7d10f75e65f67bad720af7007e1060a5d395670ec11a7ed1fed9dd17471f"}, ] [package.extras] dev = ["Pygments", "build", "chardet", "pre-commit", "pytest", "pytest-cov", "pytest-dependency", "ruff", "tomli", "twine"] hard-encoding-detection = ["chardet"] toml = ["tomli"] types = ["chardet (>=5.1.0)", "mypy", "pytest", "pytest-cov", "pytest-dependency"] [[package]] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] [[package]] name = "configupdater" version = "3.2" description = "Parser like ConfigParser but for updating configuration files" optional = false python-versions = ">=3.6" files = [ {file = "ConfigUpdater-3.2-py2.py3-none-any.whl", hash = "sha256:0f65a041627d7693840b4dd743581db4c441c97195298a29d075f91b79539df2"}, {file = "ConfigUpdater-3.2.tar.gz", hash = "sha256:9fdac53831c1b062929bf398b649b87ca30e7f1a735f3fbf482072804106306b"}, ] [package.extras] testing = ["flake8", "pytest", "pytest-cov", "pytest-randomly", "pytest-xdist", "sphinx"] [[package]] name = "coverage" version = "7.6.8" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" files = [ {file = "coverage-7.6.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b39e6011cd06822eb964d038d5dff5da5d98652b81f5ecd439277b32361a3a50"}, {file = "coverage-7.6.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63c19702db10ad79151a059d2d6336fe0c470f2e18d0d4d1a57f7f9713875dcf"}, {file = "coverage-7.6.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3985b9be361d8fb6b2d1adc9924d01dec575a1d7453a14cccd73225cb79243ee"}, {file = "coverage-7.6.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644ec81edec0f4ad17d51c838a7d01e42811054543b76d4ba2c5d6af741ce2a6"}, {file = "coverage-7.6.8-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f188a2402f8359cf0c4b1fe89eea40dc13b52e7b4fd4812450da9fcd210181d"}, {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e19122296822deafce89a0c5e8685704c067ae65d45e79718c92df7b3ec3d331"}, {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:13618bed0c38acc418896005732e565b317aa9e98d855a0e9f211a7ffc2d6638"}, {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:193e3bffca48ad74b8c764fb4492dd875038a2f9925530cb094db92bb5e47bed"}, {file = "coverage-7.6.8-cp310-cp310-win32.whl", hash = "sha256:3988665ee376abce49613701336544041f2117de7b7fbfe91b93d8ff8b151c8e"}, {file = "coverage-7.6.8-cp310-cp310-win_amd64.whl", hash = "sha256:f56f49b2553d7dd85fd86e029515a221e5c1f8cb3d9c38b470bc38bde7b8445a"}, {file = "coverage-7.6.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:86cffe9c6dfcfe22e28027069725c7f57f4b868a3f86e81d1c62462764dc46d4"}, {file = "coverage-7.6.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d82ab6816c3277dc962cfcdc85b1efa0e5f50fb2c449432deaf2398a2928ab94"}, {file = "coverage-7.6.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13690e923a3932e4fad4c0ebfb9cb5988e03d9dcb4c5150b5fcbf58fd8bddfc4"}, {file = "coverage-7.6.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4be32da0c3827ac9132bb488d331cb32e8d9638dd41a0557c5569d57cf22c9c1"}, {file = "coverage-7.6.8-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44e6c85bbdc809383b509d732b06419fb4544dca29ebe18480379633623baafb"}, {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:768939f7c4353c0fac2f7c37897e10b1414b571fd85dd9fc49e6a87e37a2e0d8"}, {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e44961e36cb13c495806d4cac67640ac2866cb99044e210895b506c26ee63d3a"}, {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3ea8bb1ab9558374c0ab591783808511d135a833c3ca64a18ec927f20c4030f0"}, {file = "coverage-7.6.8-cp311-cp311-win32.whl", hash = "sha256:629a1ba2115dce8bf75a5cce9f2486ae483cb89c0145795603d6554bdc83e801"}, {file = "coverage-7.6.8-cp311-cp311-win_amd64.whl", hash = "sha256:fb9fc32399dca861584d96eccd6c980b69bbcd7c228d06fb74fe53e007aa8ef9"}, {file = "coverage-7.6.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e683e6ecc587643f8cde8f5da6768e9d165cd31edf39ee90ed7034f9ca0eefee"}, {file = "coverage-7.6.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1defe91d41ce1bd44b40fabf071e6a01a5aa14de4a31b986aa9dfd1b3e3e414a"}, {file = "coverage-7.6.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7ad66e8e50225ebf4236368cc43c37f59d5e6728f15f6e258c8639fa0dd8e6d"}, {file = "coverage-7.6.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fe47da3e4fda5f1abb5709c156eca207eacf8007304ce3019eb001e7a7204cb"}, {file = "coverage-7.6.8-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:202a2d645c5a46b84992f55b0a3affe4f0ba6b4c611abec32ee88358db4bb649"}, {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4674f0daa1823c295845b6a740d98a840d7a1c11df00d1fd62614545c1583787"}, {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:74610105ebd6f33d7c10f8907afed696e79c59e3043c5f20eaa3a46fddf33b4c"}, {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37cda8712145917105e07aab96388ae76e787270ec04bcb9d5cc786d7cbb8443"}, {file = "coverage-7.6.8-cp312-cp312-win32.whl", hash = "sha256:9e89d5c8509fbd6c03d0dd1972925b22f50db0792ce06324ba069f10787429ad"}, {file = "coverage-7.6.8-cp312-cp312-win_amd64.whl", hash = "sha256:379c111d3558272a2cae3d8e57e6b6e6f4fe652905692d54bad5ea0ca37c5ad4"}, {file = "coverage-7.6.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0b0c69f4f724c64dfbfe79f5dfb503b42fe6127b8d479b2677f2b227478db2eb"}, {file = "coverage-7.6.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c15b32a7aca8038ed7644f854bf17b663bc38e1671b5d6f43f9a2b2bd0c46f63"}, {file = "coverage-7.6.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63068a11171e4276f6ece913bde059e77c713b48c3a848814a6537f35afb8365"}, {file = "coverage-7.6.8-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f4548c5ead23ad13fb7a2c8ea541357474ec13c2b736feb02e19a3085fac002"}, {file = "coverage-7.6.8-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4b4299dd0d2c67caaaf286d58aef5e75b125b95615dda4542561a5a566a1e3"}, {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c9ebfb2507751f7196995142f057d1324afdab56db1d9743aab7f50289abd022"}, {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c1b4474beee02ede1eef86c25ad4600a424fe36cff01a6103cb4533c6bf0169e"}, {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d9fd2547e6decdbf985d579cf3fc78e4c1d662b9b0ff7cc7862baaab71c9cc5b"}, {file = "coverage-7.6.8-cp313-cp313-win32.whl", hash = 
"sha256:8aae5aea53cbfe024919715eca696b1a3201886ce83790537d1c3668459c7146"}, {file = "coverage-7.6.8-cp313-cp313-win_amd64.whl", hash = "sha256:ae270e79f7e169ccfe23284ff5ea2d52a6f401dc01b337efb54b3783e2ce3f28"}, {file = "coverage-7.6.8-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:de38add67a0af869b0d79c525d3e4588ac1ffa92f39116dbe0ed9753f26eba7d"}, {file = "coverage-7.6.8-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b07c25d52b1c16ce5de088046cd2432b30f9ad5e224ff17c8f496d9cb7d1d451"}, {file = "coverage-7.6.8-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62a66ff235e4c2e37ed3b6104d8b478d767ff73838d1222132a7a026aa548764"}, {file = "coverage-7.6.8-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09b9f848b28081e7b975a3626e9081574a7b9196cde26604540582da60235fdf"}, {file = "coverage-7.6.8-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:093896e530c38c8e9c996901858ac63f3d4171268db2c9c8b373a228f459bbc5"}, {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9a7b8ac36fd688c8361cbc7bf1cb5866977ece6e0b17c34aa0df58bda4fa18a4"}, {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:38c51297b35b3ed91670e1e4efb702b790002e3245a28c76e627478aa3c10d83"}, {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2e4e0f60cb4bd7396108823548e82fdab72d4d8a65e58e2c19bbbc2f1e2bfa4b"}, {file = "coverage-7.6.8-cp313-cp313t-win32.whl", hash = "sha256:6535d996f6537ecb298b4e287a855f37deaf64ff007162ec0afb9ab8ba3b8b71"}, {file = "coverage-7.6.8-cp313-cp313t-win_amd64.whl", hash = "sha256:c79c0685f142ca53256722a384540832420dff4ab15fec1863d7e5bc8691bdcc"}, {file = "coverage-7.6.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ac47fa29d8d41059ea3df65bd3ade92f97ee4910ed638e87075b8e8ce69599e"}, {file = "coverage-7.6.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:24eda3a24a38157eee639ca9afe45eefa8d2420d49468819ac5f88b10de84f4c"}, {file = "coverage-7.6.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4c81ed2820b9023a9a90717020315e63b17b18c274a332e3b6437d7ff70abe0"}, {file = "coverage-7.6.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd55f8fc8fa494958772a2a7302b0354ab16e0b9272b3c3d83cdb5bec5bd1779"}, {file = "coverage-7.6.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f39e2f3530ed1626c66e7493be7a8423b023ca852aacdc91fb30162c350d2a92"}, {file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:716a78a342679cd1177bc8c2fe957e0ab91405bd43a17094324845200b2fddf4"}, {file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:177f01eeaa3aee4a5ffb0d1439c5952b53d5010f86e9d2667963e632e30082cc"}, {file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:912e95017ff51dc3d7b6e2be158dedc889d9a5cc3382445589ce554f1a34c0ea"}, {file = "coverage-7.6.8-cp39-cp39-win32.whl", hash = "sha256:4db3ed6a907b555e57cc2e6f14dc3a4c2458cdad8919e40b5357ab9b6db6c43e"}, {file = "coverage-7.6.8-cp39-cp39-win_amd64.whl", hash = "sha256:428ac484592f780e8cd7b6b14eb568f7c85460c92e2a37cb0c0e5186e1a0d076"}, {file = "coverage-7.6.8-pp39.pp310-none-any.whl", hash = "sha256:5c52a036535d12590c32c49209e79cabaad9f9ad8aa4cbd875b68c4d67a9cbce"}, {file = "coverage-7.6.8.tar.gz", hash = 
"sha256:8b2b8503edb06822c86d82fa64a4a5cb0760bb8f31f26e138ec743f422f37cfc"}, ] [package.dependencies] tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] toml = ["tomli"] [[package]] name = "darglint" version = "1.8.1" description = "A utility for ensuring Google-style docstrings stay up to date with the source code." optional = false python-versions = ">=3.6,<4.0" files = [ {file = "darglint-1.8.1-py3-none-any.whl", hash = "sha256:5ae11c259c17b0701618a20c3da343a3eb98b3bc4b5a83d31cdd94f5ebdced8d"}, {file = "darglint-1.8.1.tar.gz", hash = "sha256:080d5106df149b199822e7ee7deb9c012b49891538f14a11be681044f0bb20da"}, ] [[package]] name = "decorator" version = "5.1.1" description = "Decorators for Humans" optional = false python-versions = ">=3.5" files = [ {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] [[package]] name = "dictdiffer" version = "0.9.0" description = "Dictdiffer is a library that helps you to diff and patch dictionaries." optional = false python-versions = "*" files = [ {file = "dictdiffer-0.9.0-py2.py3-none-any.whl", hash = "sha256:442bfc693cfcadaf46674575d2eba1c53b42f5e404218ca2c2ff549f2df56595"}, {file = "dictdiffer-0.9.0.tar.gz", hash = "sha256:17bacf5fbfe613ccf1b6d512bd766e6b21fb798822a133aa86098b8ac9997578"}, ] [package.extras] all = ["Sphinx (>=3)", "check-manifest (>=0.42)", "mock (>=1.3.0)", "numpy (>=1.13.0)", "numpy (>=1.15.0)", "numpy (>=1.18.0)", "numpy (>=1.20.0)", "pytest (==5.4.3)", "pytest (>=6)", "pytest-cov (>=2.10.1)", "pytest-isort (>=1.2.0)", "pytest-pycodestyle (>=2)", "pytest-pycodestyle (>=2.2.0)", "pytest-pydocstyle (>=2)", "pytest-pydocstyle (>=2.2.0)", "sphinx (>=3)", "sphinx-rtd-theme (>=0.2)", "tox (>=3.7.0)"] docs = ["Sphinx (>=3)", "sphinx-rtd-theme (>=0.2)"] numpy = ["numpy (>=1.13.0)", "numpy (>=1.15.0)", "numpy (>=1.18.0)", "numpy (>=1.20.0)"] tests = ["check-manifest (>=0.42)", "mock (>=1.3.0)", "pytest (==5.4.3)", "pytest (>=6)", "pytest-cov (>=2.10.1)", "pytest-isort (>=1.2.0)", "pytest-pycodestyle (>=2)", "pytest-pycodestyle (>=2.2.0)", "pytest-pydocstyle (>=2)", "pytest-pydocstyle (>=2.2.0)", "sphinx (>=3)", "tox (>=3.7.0)"] [[package]] name = "docutils" version = "0.21.2" description = "Docutils -- Python Documentation Utilities" optional = false python-versions = ">=3.9" files = [ {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, ] [[package]] name = "dpath" version = "2.2.0" description = "Filesystem-like pathing and searching for dictionaries" optional = false python-versions = ">=3.7" files = [ {file = "dpath-2.2.0-py3-none-any.whl", hash = "sha256:b330a375ded0a0d2ed404440f6c6a715deae5313af40bbb01c8a41d891900576"}, {file = "dpath-2.2.0.tar.gz", hash = "sha256:34f7e630dc55ea3f219e555726f5da4b4b25f2200319c8e6902c394258dd6a3e"}, ] [[package]] name = "eradicate" version = "2.3.0" description = "Removes commented-out code." 
optional = false python-versions = "*" files = [ {file = "eradicate-2.3.0-py3-none-any.whl", hash = "sha256:2b29b3dd27171f209e4ddd8204b70c02f0682ae95eecb353f10e8d72b149c63e"}, {file = "eradicate-2.3.0.tar.gz", hash = "sha256:06df115be3b87d0fc1c483db22a2ebb12bcf40585722810d809cc770f5031c37"}, ] [[package]] name = "exceptiongroup" version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] test = ["pytest (>=6)"] [[package]] name = "flake8" version = "7.1.1" description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.8.1" files = [ {file = "flake8-7.1.1-py2.py3-none-any.whl", hash = "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213"}, {file = "flake8-7.1.1.tar.gz", hash = "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38"}, ] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" pycodestyle = ">=2.12.0,<2.13.0" pyflakes = ">=3.2.0,<3.3.0" [[package]] name = "flake8-bandit" version = "4.1.1" description = "Automated security testing with bandit and flake8." optional = false python-versions = ">=3.6" files = [ {file = "flake8_bandit-4.1.1-py3-none-any.whl", hash = "sha256:4c8a53eb48f23d4ef1e59293657181a3c989d0077c9952717e98a0eace43e06d"}, {file = "flake8_bandit-4.1.1.tar.gz", hash = "sha256:068e09287189cbfd7f986e92605adea2067630b75380c6b5733dab7d87f9a84e"}, ] [package.dependencies] bandit = ">=1.7.3" flake8 = ">=5.0.0" [[package]] name = "flake8-broken-line" version = "1.0.0" description = "Flake8 plugin to forbid backslashes for line breaks" optional = false python-versions = ">=3.8,<4.0" files = [ {file = "flake8_broken_line-1.0.0-py3-none-any.whl", hash = "sha256:96c964336024a5030dc536a9f6fb02aa679e2d2a6b35b80a558b5136c35832a9"}, {file = "flake8_broken_line-1.0.0.tar.gz", hash = "sha256:e2c6a17f8d9a129e99c1320fce89b33843e2963871025c4c2bb7b8b8d8732a85"}, ] [package.dependencies] flake8 = ">5" [[package]] name = "flake8-bugbear" version = "24.10.31" description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." optional = false python-versions = ">=3.8.1" files = [ {file = "flake8_bugbear-24.10.31-py3-none-any.whl", hash = "sha256:cccf786ccf9b2e1052b1ecfa80fb8f80832d0880425bcbd4cd45d3c8128c2683"}, {file = "flake8_bugbear-24.10.31.tar.gz", hash = "sha256:435b531c72b27f8eff8d990419697956b9fd25c6463c5ba98b3991591de439db"}, ] [package.dependencies] attrs = ">=22.2.0" flake8 = ">=6.0.0" [package.extras] dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "pytest", "tox"] [[package]] name = "flake8-commas" version = "2.1.0" description = "Flake8 lint for trailing commas." optional = false python-versions = "*" files = [ {file = "flake8-commas-2.1.0.tar.gz", hash = "sha256:940441ab8ee544df564ae3b3f49f20462d75d5c7cac2463e0b27436e2050f263"}, {file = "flake8_commas-2.1.0-py2.py3-none-any.whl", hash = "sha256:ebb96c31e01d0ef1d0685a21f3f0e2f8153a0381430e748bf0bbbb5d5b453d54"}, ] [package.dependencies] flake8 = ">=2" [[package]] name = "flake8-comprehensions" version = "3.16.0" description = "A flake8 plugin to help you write better list/set/dict comprehensions." 
optional = false python-versions = ">=3.9" files = [ {file = "flake8_comprehensions-3.16.0-py3-none-any.whl", hash = "sha256:7c1eadc9d22e765f39857798febe7766b4d9c519793c6c149e3e13bf99693f70"}, {file = "flake8_comprehensions-3.16.0.tar.gz", hash = "sha256:9cbf789905a8f03f9d350fb82b17b264d9a16c7ce3542b2a7b871ef568cafabe"}, ] [package.dependencies] flake8 = ">=3,<3.2 || >3.2" [[package]] name = "flake8-debugger" version = "4.1.2" description = "ipdb/pdb statement checker plugin for flake8" optional = false python-versions = ">=3.7" files = [ {file = "flake8-debugger-4.1.2.tar.gz", hash = "sha256:52b002560941e36d9bf806fca2523dc7fb8560a295d5f1a6e15ac2ded7a73840"}, {file = "flake8_debugger-4.1.2-py3-none-any.whl", hash = "sha256:0a5e55aeddcc81da631ad9c8c366e7318998f83ff00985a49e6b3ecf61e571bf"}, ] [package.dependencies] flake8 = ">=3.0" pycodestyle = "*" [[package]] name = "flake8-docstrings" version = "1.7.0" description = "Extension for flake8 which uses pydocstyle to check docstrings" optional = false python-versions = ">=3.7" files = [ {file = "flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75"}, {file = "flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af"}, ] [package.dependencies] flake8 = ">=3" pydocstyle = ">=2.1" [[package]] name = "flake8-eradicate" version = "1.5.0" description = "Flake8 plugin to find commented out code" optional = false python-versions = ">=3.8,<4.0" files = [ {file = "flake8_eradicate-1.5.0-py3-none-any.whl", hash = "sha256:18acc922ad7de623f5247c7d5595da068525ec5437dd53b22ec2259b96ce9d22"}, {file = "flake8_eradicate-1.5.0.tar.gz", hash = "sha256:aee636cb9ecb5594a7cd92d67ad73eb69909e5cc7bd81710cf9d00970f3983a6"}, ] [package.dependencies] attrs = "*" eradicate = ">=2.0,<3.0" flake8 = ">5" [[package]] name = "flake8-isort" version = "6.1.1" description = "flake8 plugin that integrates isort" optional = false python-versions = ">=3.8" files = [ {file = "flake8_isort-6.1.1-py3-none-any.whl", hash = "sha256:0fec4dc3a15aefbdbe4012e51d5531a2eb5fa8b981cdfbc882296a59b54ede12"}, {file = "flake8_isort-6.1.1.tar.gz", hash = "sha256:c1f82f3cf06a80c13e1d09bfae460e9666255d5c780b859f19f8318d420370b3"}, ] [package.dependencies] flake8 = "*" isort = ">=5.0.0,<6" [package.extras] test = ["pytest"] [[package]] name = "flake8-plugin-utils" version = "1.3.3" description = "The package provides base classes and utils for flake8 plugin writing" optional = false python-versions = ">=3.6,<4.0" files = [ {file = "flake8-plugin-utils-1.3.3.tar.gz", hash = "sha256:39f6f338d038b301c6fd344b06f2e81e382b68fa03c0560dff0d9b1791a11a2c"}, {file = "flake8_plugin_utils-1.3.3-py3-none-any.whl", hash = "sha256:e4848c57d9d50f19100c2d75fa794b72df068666a9041b4b0409be923356a3ed"}, ] [[package]] name = "flake8-pyi" version = "24.9.0" description = "A plugin for flake8 to enable linting .pyi stub files." 
optional = false python-versions = ">=3.9" files = [ {file = "flake8_pyi-24.9.0-py3-none-any.whl", hash = "sha256:c199b21e5a00b509d337bf376310437f16987e03bff4b1055d8b579d7bb3c01a"}, {file = "flake8_pyi-24.9.0.tar.gz", hash = "sha256:64fb735022958147afedd52781f263de337a34d118f3294ab2deb184f1c20cd9"}, ] [package.dependencies] flake8 = ">=6.0.0,<8.0.0" pyflakes = ">=2.1.1" [package.extras] dev = ["black (==24.8.0)", "flake8-bugbear (==24.8.19)", "flake8-noqa (==1.4.0)", "isort (==5.13.2)", "mypy (==1.11.2)", "pre-commit-hooks (==4.6.0)", "pytest (==8.3.3)", "pytest-xdist (==3.6.1)", "types-pyflakes (<4)"] [[package]] name = "flake8-pytest-style" version = "2.0.0" description = "A flake8 plugin checking common style issues or inconsistencies with pytest-based tests." optional = false python-versions = "<4.0.0,>=3.8.1" files = [ {file = "flake8_pytest_style-2.0.0-py3-none-any.whl", hash = "sha256:abcb9f56f277954014b749e5a0937fae215be01a21852e9d05e7600c3de6aae5"}, {file = "flake8_pytest_style-2.0.0.tar.gz", hash = "sha256:919c328cacd4bc4f873ea61ab4db0d8f2c32e0db09a3c73ab46b1de497556464"}, ] [package.dependencies] flake8-plugin-utils = ">=1.3.2,<2.0.0" [[package]] name = "flake8-quotes" version = "3.4.0" description = "Flake8 lint for quotes." optional = false python-versions = "*" files = [ {file = "flake8-quotes-3.4.0.tar.gz", hash = "sha256:aad8492fb710a2d3eabe68c5f86a1428de650c8484127e14c43d0504ba30276c"}, ] [package.dependencies] flake8 = "*" setuptools = "*" [[package]] name = "flake8-rst-docstrings" version = "0.3.0" description = "Python docstring reStructuredText (RST) validator for flake8" optional = false python-versions = ">=3.7" files = [ {file = "flake8-rst-docstrings-0.3.0.tar.gz", hash = "sha256:d1ce22b4bd37b73cd86b8d980e946ef198cfcc18ed82fedb674ceaa2f8d1afa4"}, {file = "flake8_rst_docstrings-0.3.0-py3-none-any.whl", hash = "sha256:f8c3c6892ff402292651c31983a38da082480ad3ba253743de52989bdc84ca1c"}, ] [package.dependencies] flake8 = ">=3" pygments = "*" restructuredtext-lint = "*" [package.extras] develop = ["build", "twine"] [[package]] name = "flake8-string-format" version = "0.3.0" description = "string format checker, plugin for flake8" optional = false python-versions = "*" files = [ {file = "flake8-string-format-0.3.0.tar.gz", hash = "sha256:65f3da786a1461ef77fca3780b314edb2853c377f2e35069723348c8917deaa2"}, {file = "flake8_string_format-0.3.0-py2.py3-none-any.whl", hash = "sha256:812ff431f10576a74c89be4e85b8e075a705be39bc40c4b4278b5b13e2afa9af"}, ] [package.dependencies] flake8 = "*" [[package]] name = "flatten-dict" version = "0.4.2" description = "A flexible utility for flattening and unflattening dict-like objects in Python." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ {file = "flatten-dict-0.4.2.tar.gz", hash = "sha256:506a96b6e6f805b81ae46a0f9f31290beb5fa79ded9d80dbe1b7fa236ab43076"}, {file = "flatten_dict-0.4.2-py2.py3-none-any.whl", hash = "sha256:7e245b20c4c718981212210eec4284a330c9f713e632e98765560e05421e48ad"}, ] [package.dependencies] six = ">=1.12,<2.0" [[package]] name = "furl" version = "2.1.3" description = "URL manipulation made simple." 
optional = false python-versions = "*" files = [ {file = "furl-2.1.3-py2.py3-none-any.whl", hash = "sha256:9ab425062c4217f9802508e45feb4a83e54324273ac4b202f1850363309666c0"}, {file = "furl-2.1.3.tar.gz", hash = "sha256:5a6188fe2666c484a12159c18be97a1977a71d632ef5bb867ef15f54af39cc4e"}, ] [package.dependencies] orderedmultidict = ">=1.0.1" six = ">=1.8.0" [[package]] name = "furo" version = "2024.8.6" description = "A clean customisable Sphinx documentation theme." optional = false python-versions = ">=3.8" files = [ {file = "furo-2024.8.6-py3-none-any.whl", hash = "sha256:6cd97c58b47813d3619e63e9081169880fbe331f0ca883c871ff1f3f11814f5c"}, {file = "furo-2024.8.6.tar.gz", hash = "sha256:b63e4cee8abfc3136d3bc03a3d45a76a850bada4d6374d24c1716b0e01394a01"}, ] [package.dependencies] beautifulsoup4 = "*" pygments = ">=2.7" sphinx = ">=6.0,<9.0" sphinx-basic-ng = ">=1.0.0.beta2" [[package]] name = "gitignore-parser" version = "0.1.11" description = "A spec-compliant gitignore parser for Python 3.5+" optional = false python-versions = "*" files = [ {file = "gitignore_parser-0.1.11.tar.gz", hash = "sha256:fa10fde48b44888eeefac096f53bcdad9b87a4ffd7db788558dbdf71ff3bc9db"}, ] [[package]] name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.7" files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, ] [[package]] name = "httpcore" version = "1.0.7" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" files = [ {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, ] [package.dependencies] certifi = "*" h11 = ">=0.13,<0.15" [package.extras] asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httpx" version = "0.28.0" description = "The next generation HTTP client." 
optional = false python-versions = ">=3.8" files = [ {file = "httpx-0.28.0-py3-none-any.whl", hash = "sha256:dc0b419a0cfeb6e8b34e85167c0da2671206f5095f1baa9663d23bcfd6b535fc"}, {file = "httpx-0.28.0.tar.gz", hash = "sha256:0858d3bab51ba7e386637f22a61d8ccddaeec5f3fe4209da3a6168dbb91573e0"}, ] [package.dependencies] anyio = "*" certifi = "*" httpcore = "==1.*" idna = "*" [package.extras] brotli = ["brotli", "brotlicffi"] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "hypothesis" version = "6.122.0" description = "A library for property-based testing" optional = true python-versions = ">=3.9" files = [ {file = "hypothesis-6.122.0-py3-none-any.whl", hash = "sha256:523451a187cb0e861074bc560c2d27382b7872c28b57c6e14e98bb2152fe3a0e"}, {file = "hypothesis-6.122.0.tar.gz", hash = "sha256:8f1675a62f70e2821b347f550e6d3b5478ec25470b6e0281c974b57ab53f5dc7"}, ] [package.dependencies] attrs = ">=22.2.0" exceptiongroup = {version = ">=1.0.0", markers = "python_version < \"3.11\""} sortedcontainers = ">=2.1.0,<3.0.0" [package.extras] all = ["black (>=19.10b0)", "click (>=7.0)", "crosshair-tool (>=0.0.78)", "django (>=4.2)", "dpcontracts (>=0.4)", "hypothesis-crosshair (>=0.0.18)", "lark (>=0.10.1)", "libcst (>=0.3.16)", "numpy (>=1.19.3)", "pandas (>=1.1)", "pytest (>=4.6)", "python-dateutil (>=1.4)", "pytz (>=2014.1)", "redis (>=3.0.0)", "rich (>=9.0.0)", "tzdata (>=2024.2)"] cli = ["black (>=19.10b0)", "click (>=7.0)", "rich (>=9.0.0)"] codemods = ["libcst (>=0.3.16)"] crosshair = ["crosshair-tool (>=0.0.78)", "hypothesis-crosshair (>=0.0.18)"] dateutil = ["python-dateutil (>=1.4)"] django = ["django (>=4.2)"] dpcontracts = ["dpcontracts (>=0.4)"] ghostwriter = ["black (>=19.10b0)"] lark = ["lark (>=0.10.1)"] numpy = ["numpy (>=1.19.3)"] pandas = ["pandas (>=1.1)"] pytest = ["pytest (>=4.6)"] pytz = ["pytz (>=2014.1)"] redis = ["redis (>=3.0.0)"] zoneinfo = ["tzdata (>=2024.2)"] [[package]] name = "identify" version = "2.6.3" description = "File identification library for Python" optional = false python-versions = ">=3.9" files = [ {file = "identify-2.6.3-py2.py3-none-any.whl", hash = "sha256:9edba65473324c2ea9684b1f944fe3191db3345e50b6d04571d10ed164f8d7bd"}, {file = "identify-2.6.3.tar.gz", hash = "sha256:62f5dae9b5fef52c84cc188514e9ea4f3f636b1d8799ab5ebc475471f9e47a02"}, ] [package.extras] license = ["ukkonen"] [[package]] name = "idna" version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] [[package]] name = "imagesize" version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, ] [[package]] name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = 
">=3.7" files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] [[package]] name = "isort" version = "5.13.2" description = "A Python utility / library to sort Python imports." optional = false python-versions = ">=3.8.0" files = [ {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, ] [package.extras] colors = ["colorama (>=0.4.6)"] [[package]] name = "jinja2" version = "3.1.4" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, ] [package.dependencies] MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] [[package]] name = "jmespath" version = "1.0.1" description = "JSON Matching Expressions" optional = false python-versions = ">=3.7" files = [ {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, ] [[package]] name = "jsonschema" version = "4.23.0" description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" files = [ {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, ] [package.dependencies] attrs = ">=22.2.0" jsonschema-specifications = ">=2023.03.6" referencing = ">=0.28.4" rpds-py = ">=0.7.1" [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"] [[package]] name = "jsonschema-specifications" version = "2024.10.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" optional = false python-versions = ">=3.9" files = [ {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, ] [package.dependencies] referencing = ">=0.31.0" [[package]] name = "loguru" version = "0.7.2" description = "Python logging made (stupidly) simple" optional = false python-versions = ">=3.5" files = [ {file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"}, {file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"}, ] [package.dependencies] colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} win32-setctime = {version = ">=1.0.0", markers = "sys_platform == 
\"win32\""} [package.extras] dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"] [[package]] name = "markdown-it-py" version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" optional = false python-versions = ">=3.8" files = [ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, ] [package.dependencies] mdurl = ">=0.1,<1.0" [package.extras] benchmarking = ["psutil", "pytest", "pytest-benchmark"] code-style = ["pre-commit (>=3.0,<4.0)"] compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] linkify = ["linkify-it-py (>=1,<3)"] plugins = ["mdit-py-plugins"] profiling = ["gprof2dot"] rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.9" files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, {file = 
"MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = 
"sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] [[package]] name = "marshmallow" version = "3.23.1" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." optional = false python-versions = ">=3.9" files = [ {file = "marshmallow-3.23.1-py3-none-any.whl", hash = "sha256:fece2eb2c941180ea1b7fcbd4a83c51bfdd50093fdd3ad2585ee5e1df2508491"}, {file = "marshmallow-3.23.1.tar.gz", hash = "sha256:3a8dfda6edd8dcdbf216c0ede1d1e78d230a6dc9c5a088f58c4083b974a0d468"}, ] [package.dependencies] packaging = ">=17.0" [package.extras] dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"] docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.14)", "sphinx (==8.1.3)", "sphinx-issues (==5.0.0)", "sphinx-version-warning (==1.1.2)"] tests = ["pytest", "simplejson"] [[package]] name = "marshmallow-polyfield" version = "5.11" description = "An unofficial extension to Marshmallow to allow for polymorphic fields" optional = false python-versions = ">=3.5" files = [ {file = "marshmallow-polyfield-5.11.tar.gz", hash = "sha256:8075a9cc490da4af58b902b4a40a99882dd031adb7aaa96abd147a4fcd53415f"}, ] [package.dependencies] marshmallow = ">=3.0.0b10" [[package]] name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" optional = false python-versions = ">=3.6" files = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] [[package]] name = "mdit-py-plugins" version = "0.4.2" description = "Collection of plugins for markdown-it-py" optional = false python-versions = ">=3.8" files = [ {file = "mdit_py_plugins-0.4.2-py3-none-any.whl", hash = "sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636"}, {file = "mdit_py_plugins-0.4.2.tar.gz", hash = "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5"}, ] [package.dependencies] markdown-it-py = ">=1.0.0,<4.0.0" [package.extras] code-style = ["pre-commit"] rtd = ["myst-parser", "sphinx-book-theme"] testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" optional = false python-versions = ">=3.7" files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] [[package]] name = "more-itertools" version = "10.5.0" description = "More routines for operating on iterables, beyond itertools" optional = false python-versions = ">=3.8" files = [ {file = "more-itertools-10.5.0.tar.gz", hash = "sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6"}, {file = "more_itertools-10.5.0-py3-none-any.whl", hash = 
"sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef"}, ] [[package]] name = "mypy" version = "1.13.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, {file = 
"mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, ] [package.dependencies] mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing-extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] faster-cache = ["orjson"] install-types = ["pip"] mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
optional = false python-versions = ">=3.5" files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] [[package]] name = "myst-parser" version = "4.0.0" description = "An extended [CommonMark](https://spec.commonmark.org/) compliant parser," optional = false python-versions = ">=3.10" files = [ {file = "myst_parser-4.0.0-py3-none-any.whl", hash = "sha256:b9317997552424448c6096c2558872fdb6f81d3ecb3a40ce84a7518798f3f28d"}, {file = "myst_parser-4.0.0.tar.gz", hash = "sha256:851c9dfb44e36e56d15d05e72f02b80da21a9e0d07cba96baf5e2d476bb91531"}, ] [package.dependencies] docutils = ">=0.19,<0.22" jinja2 = "*" markdown-it-py = ">=3.0,<4.0" mdit-py-plugins = ">=0.4.1,<1.0" pyyaml = "*" sphinx = ">=7,<9" [package.extras] code-style = ["pre-commit (>=3.0,<4.0)"] linkify = ["linkify-it-py (>=2.0,<3.0)"] rtd = ["ipython", "sphinx (>=7)", "sphinx-autodoc2 (>=0.5.0,<0.6.0)", "sphinx-book-theme (>=1.1,<2.0)", "sphinx-copybutton", "sphinx-design", "sphinx-pyscript", "sphinx-tippy (>=0.4.3)", "sphinx-togglebutton", "sphinxext-opengraph (>=0.9.0,<0.10.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"] testing = ["beautifulsoup4", "coverage[toml]", "defusedxml", "pytest (>=8,<9)", "pytest-cov", "pytest-param-files (>=0.6.0,<0.7.0)", "pytest-regressions", "sphinx-pytest"] testing-docutils = ["pygments", "pytest (>=8,<9)", "pytest-param-files (>=0.6.0,<0.7.0)"] [[package]] name = "nitpick" version = "0.35.0" description = "Enforce the same settings across multiple language-independent projects" optional = false python-versions = ">=3.8,<4.0" files = [ {file = "nitpick-0.35.0-py3-none-any.whl", hash = "sha256:9911d32c2d488b41914aa1a6d230531fa92bbe3d6610e4a468a5cb5e30fab907"}, {file = "nitpick-0.35.0.tar.gz", hash = "sha256:098167a4c65655aca52c0ea3876b1e71cf634a27d0e17b971bce9bfcc1f3febe"}, ] [package.dependencies] attrs = ">=20.1.0" autorepr = "*" click = "*" ConfigUpdater = "*" dictdiffer = "*" dpath = "*" flake8 = ">=3.0.0" flatten-dict = "*" furl = "*" gitignore_parser = "*" identify = "*" jmespath = "*" loguru = "*" marshmallow = ">=3.0.0b10" marshmallow-polyfield = ">=5.10,<6.0" more-itertools = "*" packaging = "*" pluggy = "*" python-slugify = "*" requests = "*" requests-cache = ">=1.0.0" "ruamel.yaml" = "*" sortedcontainers = "*" StrEnum = "*" toml = "*" tomlkit = ">=0.8.0" [package.extras] doc = ["sphinx", "sphinx-gitref", "sphinx_rtd_theme", "sphobjinv"] lint = ["pylint"] test = ["freezegun", "pytest", "pytest-cov", "pytest-datadir", "pytest-socket", "pytest-testmon", "pytest-watch", "responses", "testfixtures"] [[package]] name = "orderedmultidict" version = "1.0.1" description = "Ordered Multivalue Dictionary" optional = false python-versions = "*" files = [ {file = "orderedmultidict-1.0.1-py2.py3-none-any.whl", hash = "sha256:43c839a17ee3cdd62234c47deca1a8508a3f2ca1d0678a3bf791c87cf84adbf3"}, {file = "orderedmultidict-1.0.1.tar.gz", hash = "sha256:04070bbb5e87291cc9bfa51df413677faf2141c73c61d2a5f7b26bea3cd882ad"}, ] [package.dependencies] six = ">=1.8.0" [[package]] name = "outcome" version = "1.3.0.post0" description = "Capture the outcome of Python function calls." 
optional = false python-versions = ">=3.7" files = [ {file = "outcome-1.3.0.post0-py2.py3-none-any.whl", hash = "sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b"}, {file = "outcome-1.3.0.post0.tar.gz", hash = "sha256:9dcf02e65f2971b80047b377468e72a268e15c0af3cf1238e6ff14f7f91143b8"}, ] [package.dependencies] attrs = ">=19.2.0" [[package]] name = "packaging" version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] name = "pbr" version = "6.1.0" description = "Python Build Reasonableness" optional = false python-versions = ">=2.6" files = [ {file = "pbr-6.1.0-py2.py3-none-any.whl", hash = "sha256:a776ae228892d8013649c0aeccbb3d5f99ee15e005a4cbb7e61d55a067b28a2a"}, {file = "pbr-6.1.0.tar.gz", hash = "sha256:788183e382e3d1d7707db08978239965e8b9e4e5ed42669bf4758186734d5f24"}, ] [[package]] name = "pep8-naming" version = "0.13.3" description = "Check PEP-8 naming conventions, plugin for flake8" optional = false python-versions = ">=3.7" files = [ {file = "pep8-naming-0.13.3.tar.gz", hash = "sha256:1705f046dfcd851378aac3be1cd1551c7c1e5ff363bacad707d43007877fa971"}, {file = "pep8_naming-0.13.3-py3-none-any.whl", hash = "sha256:1a86b8c71a03337c97181917e2b472f0f5e4ccb06844a0d6f0a33522549e7a80"}, ] [package.dependencies] flake8 = ">=5.0.0" [[package]] name = "platformdirs" version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" files = [ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] [[package]] name = "pycodestyle" version = "2.12.1" description = "Python style guide checker" optional = false python-versions = ">=3.8" files = [ {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, {file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, ] [[package]] name = "pycparser" version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = 
"sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] [[package]] name = "pydocstyle" version = "6.3.0" description = "Python docstring style checker" optional = false python-versions = ">=3.6" files = [ {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, ] [package.dependencies] snowballstemmer = ">=2.2.0" [package.extras] toml = ["tomli (>=1.2.3)"] [[package]] name = "pyflakes" version = "3.2.0" description = "passive checker of Python programs" optional = false python-versions = ">=3.8" files = [ {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, ] [[package]] name = "pygments" version = "2.18.0" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" files = [ {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, ] [package.extras] windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pytest" version = "8.3.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, ] [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" pluggy = ">=1.5,<2" tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-cov" version = "6.0.0" description = "Pytest plugin for measuring coverage." 
optional = false python-versions = ">=3.9" files = [ {file = "pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0"}, {file = "pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35"}, ] [package.dependencies] coverage = {version = ">=7.5", extras = ["toml"]} pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] [[package]] name = "pytest-mypy-plugins" version = "3.1.2" description = "pytest plugin for writing tests for mypy plugins" optional = false python-versions = ">=3.8" files = [ {file = "pytest-mypy-plugins-3.1.2.tar.gz", hash = "sha256:14fa9b00e59713a6fdb88fcf04e8139b9467e117b98d61fc12038c60afb7febe"}, {file = "pytest_mypy_plugins-3.1.2-py3-none-any.whl", hash = "sha256:3478ccb68e26a159e1287c4614c60f84cd0720e3895f174365c7440498234b9f"}, ] [package.dependencies] decorator = "*" Jinja2 = "*" jsonschema = "*" mypy = ">=1.3" packaging = "*" pytest = ">=7.0.0" pyyaml = "*" regex = "*" tomlkit = ">=0.11" [[package]] name = "pytest-randomly" version = "3.16.0" description = "Pytest plugin to randomly order tests and control random.seed." optional = false python-versions = ">=3.9" files = [ {file = "pytest_randomly-3.16.0-py3-none-any.whl", hash = "sha256:8633d332635a1a0983d3bba19342196807f6afb17c3eef78e02c2f85dade45d6"}, {file = "pytest_randomly-3.16.0.tar.gz", hash = "sha256:11bf4d23a26484de7860d82f726c0629837cf4064b79157bd18ec9d41d7feb26"}, ] [package.dependencies] pytest = "*" [[package]] name = "pytest-shard" version = "0.1.2" description = "" optional = false python-versions = ">=3.6" files = [ {file = "pytest-shard-0.1.2.tar.gz", hash = "sha256:b86a967fbfd1c8e50295095ccda031b7e890862ee06531d5142844f4c1d1cd67"}, {file = "pytest_shard-0.1.2-py3-none-any.whl", hash = "sha256:407a1df385cebe1feb9b4d2e7eeee8b044f8a24f0919421233159a17c59be2b9"}, ] [package.dependencies] pytest = "*" [[package]] name = "pytest-subtests" version = "0.13.1" description = "unittest subTest() support and subtests fixture" optional = false python-versions = ">=3.7" files = [ {file = "pytest_subtests-0.13.1-py3-none-any.whl", hash = "sha256:ab616a22f64cd17c1aee65f18af94dbc30c444f8683de2b30895c3778265e3bd"}, {file = "pytest_subtests-0.13.1.tar.gz", hash = "sha256:989e38f0f1c01bc7c6b2e04db7d9fd859db35d77c2c1a430c831a70cbf3fde2d"}, ] [package.dependencies] attrs = ">=19.2.0" pytest = ">=7.0" [[package]] name = "python-slugify" version = "8.0.4" description = "A Python slugify application that also handles Unicode" optional = false python-versions = ">=3.7" files = [ {file = "python-slugify-8.0.4.tar.gz", hash = "sha256:59202371d1d05b54a9e7720c5e038f928f45daaffe41dd10822f3907b937c856"}, {file = "python_slugify-8.0.4-py2.py3-none-any.whl", hash = "sha256:276540b79961052b66b7d116620b36518847f52d5fd9e3a70164fc8c50faa6b8"}, ] [package.dependencies] text-unidecode = ">=1.3" [package.extras] unidecode = ["Unidecode (>=1.1.1)"] [[package]] name = "pyyaml" version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = 
"sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = 
"sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] name = "referencing" version = "0.35.1" description = "JSON Referencing + Python" optional = false python-versions = ">=3.8" files = [ {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, ] [package.dependencies] attrs = ">=22.2.0" rpds-py = ">=0.7.0" [[package]] name = "regex" version = "2024.11.6" description = "Alternative regular expression module, to replace re." optional = false python-versions = ">=3.8" files = [ {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}, {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}, {file = "regex-2024.11.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:164d8b7b3b4bcb2068b97428060b2a53be050085ef94eca7f240e7947f1b080e"}, {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3660c82f209655a06b587d55e723f0b813d3a7db2e32e5e7dc64ac2a9e86fde"}, {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d22326fcdef5e08c154280b71163ced384b428343ae16a5ab2b3354aed12436e"}, {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1ac758ef6aebfc8943560194e9fd0fa18bcb34d89fd8bd2af18183afd8da3a2"}, {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:997d6a487ff00807ba810e0f8332c18b4eb8d29463cfb7c820dc4b6e7562d0cf"}, {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02a02d2bb04fec86ad61f3ea7f49c015a0681bf76abb9857f945d26159d2968c"}, {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f02f93b92358ee3f78660e43b4b0091229260c5d5c408d17d60bf26b6c900e86"}, {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:06eb1be98df10e81ebaded73fcd51989dcf534e3c753466e4b60c4697a003b67"}, {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:040df6fe1a5504eb0f04f048e6d09cd7c7110fef851d7c567a6b6e09942feb7d"}, {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabbfc59f2c6edba2a6622c647b716e34e8e3867e0ab975412c5c2f79b82da2"}, {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8447d2d39b5abe381419319f942de20b7ecd60ce86f16a23b0698f22e1b70008"}, {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:da8f5fc57d1933de22a9e23eec290a0d8a5927a5370d24bda9a6abe50683fe62"}, {file = "regex-2024.11.6-cp310-cp310-win32.whl", hash = "sha256:b489578720afb782f6ccf2840920f3a32e31ba28a4b162e13900c3e6bd3f930e"}, {file = "regex-2024.11.6-cp310-cp310-win_amd64.whl", hash = "sha256:5071b2093e793357c9d8b2929dfc13ac5f0a6c650559503bb81189d0a3814519"}, {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:5478c6962ad548b54a591778e93cd7c456a7a29f8eca9c49e4f9a806dcc5d638"}, {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c89a8cc122b25ce6945f0423dc1352cb9593c68abd19223eebbd4e56612c5b7"}, {file = "regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94d87b689cdd831934fa3ce16cc15cd65748e6d689f5d2b8f4f4df2065c9fa20"}, {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114"}, {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3"}, {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f"}, {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0"}, {file = "regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55"}, {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89"}, {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d"}, {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34"}, {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d"}, {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45"}, {file = "regex-2024.11.6-cp311-cp311-win32.whl", hash = "sha256:c36f9b6f5f8649bb251a5f3f66564438977b7ef8386a52460ae77e6070d309d9"}, {file = "regex-2024.11.6-cp311-cp311-win_amd64.whl", hash = "sha256:02e28184be537f0e75c1f9b2f8847dc51e08e6e171c6bde130b2687e0c33cf60"}, {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a"}, {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9"}, {file = "regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2"}, {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4"}, {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577"}, {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3"}, {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e"}, {file = "regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe"}, {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e"}, {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29"}, {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39"}, {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51"}, {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad"}, {file = "regex-2024.11.6-cp312-cp312-win32.whl", hash = "sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54"}, {file = "regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b"}, {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a6ba92c0bcdf96cbf43a12c717eae4bc98325ca3730f6b130ffa2e3c3c723d84"}, {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:525eab0b789891ac3be914d36893bdf972d483fe66551f79d3e27146191a37d4"}, {file = "regex-2024.11.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:086a27a0b4ca227941700e0b31425e7a28ef1ae8e5e05a33826e17e47fbfdba0"}, {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde01f35767c4a7899b7eb6e823b125a64de314a8ee9791367c9a34d56af18d0"}, {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b583904576650166b3d920d2bcce13971f6f9e9a396c673187f49811b2769dc7"}, {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c4de13f06a0d54fa0d5ab1b7138bfa0d883220965a29616e3ea61b35d5f5fc7"}, {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cde6e9f2580eb1665965ce9bf17ff4952f34f5b126beb509fee8f4e994f143c"}, {file = "regex-2024.11.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d7f453dca13f40a02b79636a339c5b62b670141e63efd511d3f8f73fba162b3"}, {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59dfe1ed21aea057a65c6b586afd2a945de04fc7db3de0a6e3ed5397ad491b07"}, {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b97c1e0bd37c5cd7902e65f410779d39eeda155800b65fc4d04cc432efa9bc6e"}, {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d1e379028e0fc2ae3654bac3cbbef81bf3fd571272a42d56c24007979bafb6"}, {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:13291b39131e2d002a7940fb176e120bec5145f3aeb7621be6534e46251912c4"}, {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f51f88c126370dcec4908576c5a627220da6c09d0bff31cfa89f2523843316d"}, {file = "regex-2024.11.6-cp313-cp313-win32.whl", hash = "sha256:63b13cfd72e9601125027202cad74995ab26921d8cd935c25f09c630436348ff"}, {file = "regex-2024.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:2b3361af3198667e99927da8b84c1b010752fa4b1115ee30beaa332cabc3ef1a"}, {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3a51ccc315653ba012774efca4f23d1d2a8a8f278a6072e29c7147eee7da446b"}, {file = 
"regex-2024.11.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ad182d02e40de7459b73155deb8996bbd8e96852267879396fb274e8700190e3"}, {file = "regex-2024.11.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba9b72e5643641b7d41fa1f6d5abda2c9a263ae835b917348fc3c928182ad467"}, {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40291b1b89ca6ad8d3f2b82782cc33807f1406cf68c8d440861da6304d8ffbbd"}, {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdf58d0e516ee426a48f7b2c03a332a4114420716d55769ff7108c37a09951bf"}, {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a36fdf2af13c2b14738f6e973aba563623cb77d753bbbd8d414d18bfaa3105dd"}, {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1cee317bfc014c2419a76bcc87f071405e3966da434e03e13beb45f8aced1a6"}, {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50153825ee016b91549962f970d6a4442fa106832e14c918acd1c8e479916c4f"}, {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea1bfda2f7162605f6e8178223576856b3d791109f15ea99a9f95c16a7636fb5"}, {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:df951c5f4a1b1910f1a99ff42c473ff60f8225baa1cdd3539fe2819d9543e9df"}, {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:072623554418a9911446278f16ecb398fb3b540147a7828c06e2011fa531e773"}, {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f654882311409afb1d780b940234208a252322c24a93b442ca714d119e68086c"}, {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:89d75e7293d2b3e674db7d4d9b1bee7f8f3d1609428e293771d1a962617150cc"}, {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:f65557897fc977a44ab205ea871b690adaef6b9da6afda4790a2484b04293a5f"}, {file = "regex-2024.11.6-cp38-cp38-win32.whl", hash = "sha256:6f44ec28b1f858c98d3036ad5d7d0bfc568bdd7a74f9c24e25f41ef1ebfd81a4"}, {file = "regex-2024.11.6-cp38-cp38-win_amd64.whl", hash = "sha256:bb8f74f2f10dbf13a0be8de623ba4f9491faf58c24064f32b65679b021ed0001"}, {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5704e174f8ccab2026bd2f1ab6c510345ae8eac818b613d7d73e785f1310f839"}, {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:220902c3c5cc6af55d4fe19ead504de80eb91f786dc102fbd74894b1551f095e"}, {file = "regex-2024.11.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7e351589da0850c125f1600a4c4ba3c722efefe16b297de54300f08d734fbf"}, {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5056b185ca113c88e18223183aa1a50e66507769c9640a6ff75859619d73957b"}, {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e34b51b650b23ed3354b5a07aab37034d9f923db2a40519139af34f485f77d0"}, {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5670bce7b200273eee1840ef307bfa07cda90b38ae56e9a6ebcc9f50da9c469b"}, {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08986dce1339bc932923e7d1232ce9881499a0e02925f7402fb7c982515419ef"}, {file = 
"regex-2024.11.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93c0b12d3d3bc25af4ebbf38f9ee780a487e8bf6954c115b9f015822d3bb8e48"}, {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:764e71f22ab3b305e7f4c21f1a97e1526a25ebdd22513e251cf376760213da13"}, {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f056bf21105c2515c32372bbc057f43eb02aae2fda61052e2f7622c801f0b4e2"}, {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:69ab78f848845569401469da20df3e081e6b5a11cb086de3eed1d48f5ed57c95"}, {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:86fddba590aad9208e2fa8b43b4c098bb0ec74f15718bb6a704e3c63e2cef3e9"}, {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:684d7a212682996d21ca12ef3c17353c021fe9de6049e19ac8481ec35574a70f"}, {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a03e02f48cd1abbd9f3b7e3586d97c8f7a9721c436f51a5245b3b9483044480b"}, {file = "regex-2024.11.6-cp39-cp39-win32.whl", hash = "sha256:41758407fc32d5c3c5de163888068cfee69cb4c2be844e7ac517a52770f9af57"}, {file = "regex-2024.11.6-cp39-cp39-win_amd64.whl", hash = "sha256:b2837718570f95dd41675328e111345f9b7095d821bac435aac173ac80b19983"}, {file = "regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519"}, ] [[package]] name = "requests" version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] certifi = ">=2017.4.17" charset-normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" version = "1.2.1" description = "A persistent cache for python requests" optional = false python-versions = ">=3.8" files = [ {file = "requests_cache-1.2.1-py3-none-any.whl", hash = "sha256:1285151cddf5331067baa82598afe2d47c7495a1334bfe7a7d329b43e9fd3603"}, {file = "requests_cache-1.2.1.tar.gz", hash = "sha256:68abc986fdc5b8d0911318fbb5f7c80eebcd4d01bfacc6685ecf8876052511d1"}, ] [package.dependencies] attrs = ">=21.2" cattrs = ">=22.2" platformdirs = ">=2.5" requests = ">=2.22" url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "restructuredtext-lint" version = "1.4.0" description = "reStructuredText linter" optional = false python-versions = "*" files = [ {file = 
"restructuredtext_lint-1.4.0.tar.gz", hash = "sha256:1b235c0c922341ab6c530390892eb9e92f90b9b75046063e047cacfb0f050c45"}, ] [package.dependencies] docutils = ">=0.11,<1.0" [[package]] name = "rich" version = "13.9.4" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.8.0" files = [ {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, ] [package.dependencies] markdown-it-py = ">=2.2.0" pygments = ">=2.13.0,<3.0.0" typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""} [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rpds-py" version = "0.21.0" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.9" files = [ {file = "rpds_py-0.21.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a017f813f24b9df929674d0332a374d40d7f0162b326562daae8066b502d0590"}, {file = "rpds_py-0.21.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:20cc1ed0bcc86d8e1a7e968cce15be45178fd16e2ff656a243145e0b439bd250"}, {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad116dda078d0bc4886cb7840e19811562acdc7a8e296ea6ec37e70326c1b41c"}, {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:808f1ac7cf3b44f81c9475475ceb221f982ef548e44e024ad5f9e7060649540e"}, {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de552f4a1916e520f2703ec474d2b4d3f86d41f353e7680b597512ffe7eac5d0"}, {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:efec946f331349dfc4ae9d0e034c263ddde19414fe5128580f512619abed05f1"}, {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b80b4690bbff51a034bfde9c9f6bf9357f0a8c61f548942b80f7b66356508bf5"}, {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:085ed25baac88953d4283e5b5bd094b155075bb40d07c29c4f073e10623f9f2e"}, {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:daa8efac2a1273eed2354397a51216ae1e198ecbce9036fba4e7610b308b6153"}, {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:95a5bad1ac8a5c77b4e658671642e4af3707f095d2b78a1fdd08af0dfb647624"}, {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3e53861b29a13d5b70116ea4230b5f0f3547b2c222c5daa090eb7c9c82d7f664"}, {file = "rpds_py-0.21.0-cp310-none-win32.whl", hash = "sha256:ea3a6ac4d74820c98fcc9da4a57847ad2cc36475a8bd9683f32ab6d47a2bd682"}, {file = "rpds_py-0.21.0-cp310-none-win_amd64.whl", hash = "sha256:b8f107395f2f1d151181880b69a2869c69e87ec079c49c0016ab96860b6acbe5"}, {file = "rpds_py-0.21.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5555db3e618a77034954b9dc547eae94166391a98eb867905ec8fcbce1308d95"}, {file = "rpds_py-0.21.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:97ef67d9bbc3e15584c2f3c74bcf064af36336c10d2e21a2131e123ce0f924c9"}, {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ab2c2a26d2f69cdf833174f4d9d86118edc781ad9a8fa13970b527bf8236027"}, {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", 
hash = "sha256:4e8921a259f54bfbc755c5bbd60c82bb2339ae0324163f32868f63f0ebb873d9"}, {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a7ff941004d74d55a47f916afc38494bd1cfd4b53c482b77c03147c91ac0ac3"}, {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5145282a7cd2ac16ea0dc46b82167754d5e103a05614b724457cffe614f25bd8"}, {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de609a6f1b682f70bb7163da745ee815d8f230d97276db049ab447767466a09d"}, {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40c91c6e34cf016fa8e6b59d75e3dbe354830777fcfd74c58b279dceb7975b75"}, {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d2132377f9deef0c4db89e65e8bb28644ff75a18df5293e132a8d67748397b9f"}, {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0a9e0759e7be10109645a9fddaaad0619d58c9bf30a3f248a2ea57a7c417173a"}, {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e20da3957bdf7824afdd4b6eeb29510e83e026473e04952dca565170cd1ecc8"}, {file = "rpds_py-0.21.0-cp311-none-win32.whl", hash = "sha256:f71009b0d5e94c0e86533c0b27ed7cacc1239cb51c178fd239c3cfefefb0400a"}, {file = "rpds_py-0.21.0-cp311-none-win_amd64.whl", hash = "sha256:e168afe6bf6ab7ab46c8c375606298784ecbe3ba31c0980b7dcbb9631dcba97e"}, {file = "rpds_py-0.21.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:30b912c965b2aa76ba5168fd610087bad7fcde47f0a8367ee8f1876086ee6d1d"}, {file = "rpds_py-0.21.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ca9989d5d9b1b300bc18e1801c67b9f6d2c66b8fd9621b36072ed1df2c977f72"}, {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f54e7106f0001244a5f4cf810ba8d3f9c542e2730821b16e969d6887b664266"}, {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fed5dfefdf384d6fe975cc026886aece4f292feaf69d0eeb716cfd3c5a4dd8be"}, {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:590ef88db231c9c1eece44dcfefd7515d8bf0d986d64d0caf06a81998a9e8cab"}, {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f983e4c2f603c95dde63df633eec42955508eefd8d0f0e6d236d31a044c882d7"}, {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b229ce052ddf1a01c67d68166c19cb004fb3612424921b81c46e7ea7ccf7c3bf"}, {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ebf64e281a06c904a7636781d2e973d1f0926a5b8b480ac658dc0f556e7779f4"}, {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:998a8080c4495e4f72132f3d66ff91f5997d799e86cec6ee05342f8f3cda7dca"}, {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:98486337f7b4f3c324ab402e83453e25bb844f44418c066623db88e4c56b7c7b"}, {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a78d8b634c9df7f8d175451cfeac3810a702ccb85f98ec95797fa98b942cea11"}, {file = "rpds_py-0.21.0-cp312-none-win32.whl", hash = "sha256:a58ce66847711c4aa2ecfcfaff04cb0327f907fead8945ffc47d9407f41ff952"}, {file = "rpds_py-0.21.0-cp312-none-win_amd64.whl", hash = "sha256:e860f065cc4ea6f256d6f411aba4b1251255366e48e972f8a347cf88077b24fd"}, {file = "rpds_py-0.21.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = 
"sha256:ee4eafd77cc98d355a0d02f263efc0d3ae3ce4a7c24740010a8b4012bbb24937"}, {file = "rpds_py-0.21.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:688c93b77e468d72579351a84b95f976bd7b3e84aa6686be6497045ba84be560"}, {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c38dbf31c57032667dd5a2f0568ccde66e868e8f78d5a0d27dcc56d70f3fcd3b"}, {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2d6129137f43f7fa02d41542ffff4871d4aefa724a5fe38e2c31a4e0fd343fb0"}, {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520ed8b99b0bf86a176271f6fe23024323862ac674b1ce5b02a72bfeff3fff44"}, {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaeb25ccfb9b9014a10eaf70904ebf3f79faaa8e60e99e19eef9f478651b9b74"}, {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af04ac89c738e0f0f1b913918024c3eab6e3ace989518ea838807177d38a2e94"}, {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b9b76e2afd585803c53c5b29e992ecd183f68285b62fe2668383a18e74abe7a3"}, {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5afb5efde74c54724e1a01118c6e5c15e54e642c42a1ba588ab1f03544ac8c7a"}, {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:52c041802a6efa625ea18027a0723676a778869481d16803481ef6cc02ea8cb3"}, {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee1e4fc267b437bb89990b2f2abf6c25765b89b72dd4a11e21934df449e0c976"}, {file = "rpds_py-0.21.0-cp313-none-win32.whl", hash = "sha256:0c025820b78817db6a76413fff6866790786c38f95ea3f3d3c93dbb73b632202"}, {file = "rpds_py-0.21.0-cp313-none-win_amd64.whl", hash = "sha256:320c808df533695326610a1b6a0a6e98f033e49de55d7dc36a13c8a30cfa756e"}, {file = "rpds_py-0.21.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:2c51d99c30091f72a3c5d126fad26236c3f75716b8b5e5cf8effb18889ced928"}, {file = "rpds_py-0.21.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cbd7504a10b0955ea287114f003b7ad62330c9e65ba012c6223dba646f6ffd05"}, {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6dcc4949be728ede49e6244eabd04064336012b37f5c2200e8ec8eb2988b209c"}, {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f414da5c51bf350e4b7960644617c130140423882305f7574b6cf65a3081cecb"}, {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9afe42102b40007f588666bc7de82451e10c6788f6f70984629db193849dced1"}, {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b929c2bb6e29ab31f12a1117c39f7e6d6450419ab7464a4ea9b0b417174f044"}, {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8404b3717da03cbf773a1d275d01fec84ea007754ed380f63dfc24fb76ce4592"}, {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e12bb09678f38b7597b8346983d2323a6482dcd59e423d9448108c1be37cac9d"}, {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:58a0e345be4b18e6b8501d3b0aa540dad90caeed814c515e5206bb2ec26736fd"}, {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c3761f62fcfccf0864cc4665b6e7c3f0c626f0380b41b8bd1ce322103fa3ef87"}, {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:c2b2f71c6ad6c2e4fc9ed9401080badd1469fa9889657ec3abea42a3d6b2e1ed"}, {file = "rpds_py-0.21.0-cp39-none-win32.whl", hash = "sha256:b21747f79f360e790525e6f6438c7569ddbfb1b3197b9e65043f25c3c9b489d8"}, {file = "rpds_py-0.21.0-cp39-none-win_amd64.whl", hash = "sha256:0626238a43152918f9e72ede9a3b6ccc9e299adc8ade0d67c5e142d564c9a83d"}, {file = "rpds_py-0.21.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6b4ef7725386dc0762857097f6b7266a6cdd62bfd209664da6712cb26acef035"}, {file = "rpds_py-0.21.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:6bc0e697d4d79ab1aacbf20ee5f0df80359ecf55db33ff41481cf3e24f206919"}, {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da52d62a96e61c1c444f3998c434e8b263c384f6d68aca8274d2e08d1906325c"}, {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:98e4fe5db40db87ce1c65031463a760ec7906ab230ad2249b4572c2fc3ef1f9f"}, {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30bdc973f10d28e0337f71d202ff29345320f8bc49a31c90e6c257e1ccef4333"}, {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:faa5e8496c530f9c71f2b4e1c49758b06e5f4055e17144906245c99fa6d45356"}, {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32eb88c30b6a4f0605508023b7141d043a79b14acb3b969aa0b4f99b25bc7d4a"}, {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a89a8ce9e4e75aeb7fa5d8ad0f3fecdee813802592f4f46a15754dcb2fd6b061"}, {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:241e6c125568493f553c3d0fdbb38c74babf54b45cef86439d4cd97ff8feb34d"}, {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:3b766a9f57663396e4f34f5140b3595b233a7b146e94777b97a8413a1da1be18"}, {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:af4a644bf890f56e41e74be7d34e9511e4954894d544ec6b8efe1e21a1a8da6c"}, {file = "rpds_py-0.21.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3e30a69a706e8ea20444b98a49f386c17b26f860aa9245329bab0851ed100677"}, {file = "rpds_py-0.21.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:031819f906bb146561af051c7cef4ba2003d28cff07efacef59da973ff7969ba"}, {file = "rpds_py-0.21.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b876f2bc27ab5954e2fd88890c071bd0ed18b9c50f6ec3de3c50a5ece612f7a6"}, {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc5695c321e518d9f03b7ea6abb5ea3af4567766f9852ad1560f501b17588c7b"}, {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b4de1da871b5c0fd5537b26a6fc6814c3cc05cabe0c941db6e9044ffbb12f04a"}, {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:878f6fea96621fda5303a2867887686d7a198d9e0f8a40be100a63f5d60c88c9"}, {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8eeec67590e94189f434c6d11c426892e396ae59e4801d17a93ac96b8c02a6c"}, {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ff2eba7f6c0cb523d7e9cff0903f2fe1feff8f0b2ceb6bd71c0e20a4dcee271"}, {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:a429b99337062877d7875e4ff1a51fe788424d522bd64a8c0a20ef3021fdb6ed"}, {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:d167e4dbbdac48bd58893c7e446684ad5d425b407f9336e04ab52e8b9194e2ed"}, {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:4eb2de8a147ffe0626bfdc275fc6563aa7bf4b6db59cf0d44f0ccd6ca625a24e"}, {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e78868e98f34f34a88e23ee9ccaeeec460e4eaf6db16d51d7a9b883e5e785a5e"}, {file = "rpds_py-0.21.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4991ca61656e3160cdaca4851151fd3f4a92e9eba5c7a530ab030d6aee96ec89"}, {file = "rpds_py-0.21.0.tar.gz", hash = "sha256:ed6378c9d66d0de903763e7706383d60c33829581f0adff47b6535f1802fa6db"}, ] [[package]] name = "ruamel-yaml" version = "0.18.6" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" optional = false python-versions = ">=3.7" files = [ {file = "ruamel.yaml-0.18.6-py3-none-any.whl", hash = "sha256:57b53ba33def16c4f3d807c0ccbc00f8a6081827e81ba2491691b76882d0c636"}, {file = "ruamel.yaml-0.18.6.tar.gz", hash = "sha256:8b27e6a217e786c6fbe5634d8f3f11bc63e0f80f6a5890f28863d9c45aac311b"}, ] [package.dependencies] "ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""} [package.extras] docs = ["mercurial (>5.7)", "ryd"] jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] [[package]] name = "ruamel-yaml-clib" version = "0.2.12" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" optional = false python-versions = ">=3.9" files = [ {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:11f891336688faf5156a36293a9c362bdc7c88f03a8a027c2c1d8e0bcde998e5"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:a606ef75a60ecf3d924613892cc603b154178ee25abb3055db5062da811fd969"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd5415dded15c3822597455bc02bcd66e81ef8b7a48cb71a33628fc9fdde39df"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:d84318609196d6bd6da0edfa25cedfbabd8dbde5140a0a23af29ad4b8f91fb1e"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb43a269eb827806502c7c8efb7ae7e9e9d0573257a46e8e952f4d4caba4f31e"}, {file = 
"ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:943f32bc9dedb3abff9879edc134901df92cfce2c3d5c9348f172f62eb2d771d"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95c3829bb364fdb8e0332c9931ecf57d9be3519241323c5274bd82f709cebc0c"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:e7e3736715fbf53e9be2a79eb4db68e4ed857017344d697e8b9749444ae57475"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b7e75b4965e1d4690e93021adfcecccbca7d61c7bddd8e22406ef2ff20d74ef"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"}, {file = 
"ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bc5f1e1c28e966d61d2519f2a3d451ba989f9ea0f2307de7bc45baa526de9e45"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a0e060aace4c24dcaf71023bbd7d42674e3b230f7e7b97317baf1e953e5b519"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"}, {file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"}, ] [[package]] name = "setuptools" version = "75.6.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" files = [ {file = "setuptools-75.6.0-py3-none-any.whl", hash = "sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d"}, {file = "setuptools-75.6.0.tar.gz", hash = "sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6"}, ] [package.extras] check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"] core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"] [[package]] name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] [[package]] name = "slotscheck" version = "0.19.1" description = "Ensure your __slots__ are working properly." 
optional = false python-versions = ">=3.8.1" files = [ {file = "slotscheck-0.19.1-py3-none-any.whl", hash = "sha256:bff9926f8d6408ea21b6c6bbaa4389cea1682962e73ee4f30084b6d2b89260ee"}, {file = "slotscheck-0.19.1.tar.gz", hash = "sha256:6146b7747f8db335a00a66b782f86011b74b995f61746dc5b36a9e77d5326013"}, ] [package.dependencies] click = ">=8.0,<9.0" tomli = {version = ">=0.2.6,<3.0.0", markers = "python_version < \"3.11\""} [[package]] name = "sniffio" version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] [[package]] name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." optional = false python-versions = "*" files = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] [[package]] name = "sortedcontainers" version = "2.4.0" description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" optional = false python-versions = "*" files = [ {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, ] [[package]] name = "soupsieve" version = "2.6" description = "A modern CSS selector implementation for Beautiful Soup." 
optional = false python-versions = ">=3.8" files = [ {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, ] [[package]] name = "sphinx" version = "7.4.7" description = "Python documentation generator" optional = false python-versions = ">=3.9" files = [ {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"}, {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"}, ] [package.dependencies] alabaster = ">=0.7.14,<0.8.0" babel = ">=2.13" colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\""} docutils = ">=0.20,<0.22" imagesize = ">=1.3" Jinja2 = ">=3.1" packaging = ">=23.0" Pygments = ">=2.17" requests = ">=2.30.0" snowballstemmer = ">=2.2" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" sphinxcontrib-htmlhelp = ">=2.0.0" sphinxcontrib-jsmath = "*" sphinxcontrib-qthelp = "*" sphinxcontrib-serializinghtml = ">=1.1.9" tomli = {version = ">=2", markers = "python_version < \"3.11\""} [package.extras] docs = ["sphinxcontrib-websupport"] lint = ["flake8 (>=6.0)", "importlib-metadata (>=6.0)", "mypy (==1.10.1)", "pytest (>=6.0)", "ruff (==0.5.2)", "sphinx-lint (>=0.9)", "tomli (>=2)", "types-docutils (==0.21.0.20240711)", "types-requests (>=2.30.0)"] test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"] [[package]] name = "sphinx-autodoc-typehints" version = "2.3.0" description = "Type hints (PEP 484) support for the Sphinx autodoc extension" optional = false python-versions = ">=3.9" files = [ {file = "sphinx_autodoc_typehints-2.3.0-py3-none-any.whl", hash = "sha256:3098e2c6d0ba99eacd013eb06861acc9b51c6e595be86ab05c08ee5506ac0c67"}, {file = "sphinx_autodoc_typehints-2.3.0.tar.gz", hash = "sha256:535c78ed2d6a1bad393ba9f3dfa2602cf424e2631ee207263e07874c38fde084"}, ] [package.dependencies] sphinx = ">=7.3.5" [package.extras] docs = ["furo (>=2024.1.29)"] numpy = ["nptyping (>=2.5)"] testing = ["covdefaults (>=2.3)", "coverage (>=7.4.4)", "defusedxml (>=0.7.1)", "diff-cover (>=9)", "pytest (>=8.1.1)", "pytest-cov (>=5)", "sphobjinv (>=2.3.1)", "typing-extensions (>=4.11)"] [[package]] name = "sphinx-basic-ng" version = "1.0.0b2" description = "A modern skeleton for Sphinx themes." 
optional = false python-versions = ">=3.7" files = [ {file = "sphinx_basic_ng-1.0.0b2-py3-none-any.whl", hash = "sha256:eb09aedbabfb650607e9b4b68c9d240b90b1e1be221d6ad71d61c52e29f7932b"}, {file = "sphinx_basic_ng-1.0.0b2.tar.gz", hash = "sha256:9ec55a47c90c8c002b5960c57492ec3021f5193cb26cebc2dc4ea226848651c9"}, ] [package.dependencies] sphinx = ">=4.0" [package.extras] docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-tabs"] [[package]] name = "sphinxcontrib-applehelp" version = "2.0.0" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = false python-versions = ">=3.9" files = [ {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, ] [package.extras] lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-devhelp" version = "2.0.0" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" optional = false python-versions = ">=3.9" files = [ {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, ] [package.extras] lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-htmlhelp" version = "2.1.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = false python-versions = ">=3.9" files = [ {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, ] [package.extras] lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["html5lib", "pytest"] [[package]] name = "sphinxcontrib-jsmath" version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" optional = false python-versions = ">=3.5" files = [ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, ] [package.extras] test = ["flake8", "mypy", "pytest"] [[package]] name = "sphinxcontrib-mermaid" version = "1.0.0" description = "Mermaid diagrams in yours Sphinx powered docs" optional = false python-versions = ">=3.8" files = [ {file = "sphinxcontrib_mermaid-1.0.0-py3-none-any.whl", hash = "sha256:60b72710ea02087f212028feb09711225fbc2e343a10d34822fe787510e1caa3"}, {file = "sphinxcontrib_mermaid-1.0.0.tar.gz", hash = "sha256:2e8ab67d3e1e2816663f9347d026a8dee4a858acdd4ad32dd1c808893db88146"}, ] [package.dependencies] pyyaml = "*" sphinx = "*" [package.extras] test = ["defusedxml", "myst-parser", "pytest", "ruff", "sphinx"] [[package]] name = "sphinxcontrib-qthelp" version = "2.0.0" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" optional = false python-versions = ">=3.9" 
files = [ {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, ] [package.extras] lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["defusedxml (>=0.7.1)", "pytest"] [[package]] name = "sphinxcontrib-serializinghtml" version = "2.0.0" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" optional = false python-versions = ">=3.9" files = [ {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, ] [package.extras] lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "stevedore" version = "5.4.0" description = "Manage dynamic plugins for Python applications" optional = false python-versions = ">=3.9" files = [ {file = "stevedore-5.4.0-py3-none-any.whl", hash = "sha256:b0be3c4748b3ea7b854b265dcb4caa891015e442416422be16f8b31756107857"}, {file = "stevedore-5.4.0.tar.gz", hash = "sha256:79e92235ecb828fe952b6b8b0c6c87863248631922c8e8e0fa5b17b232c4514d"}, ] [package.dependencies] pbr = ">=2.0.0" [[package]] name = "strenum" version = "0.4.15" description = "An Enum that inherits from str." optional = false python-versions = "*" files = [ {file = "StrEnum-0.4.15-py3-none-any.whl", hash = "sha256:a30cda4af7cc6b5bf52c8055bc4bf4b2b6b14a93b574626da33df53cf7740659"}, {file = "StrEnum-0.4.15.tar.gz", hash = "sha256:878fb5ab705442070e4dd1929bb5e2249511c0bcf2b0eeacf3bcd80875c82eff"}, ] [package.extras] docs = ["myst-parser[linkify]", "sphinx", "sphinx-rtd-theme"] release = ["twine"] test = ["pylint", "pytest", "pytest-black", "pytest-cov", "pytest-pylint"] [[package]] name = "text-unidecode" version = "1.3" description = "The most basic Text::Unidecode port" optional = false python-versions = "*" files = [ {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, ] [[package]] name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] [[package]] name = "tomli" version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, {file = 
"tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", 
hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] [[package]] name = "tomlkit" version = "0.13.2" description = "Style preserving TOML library" optional = false python-versions = ">=3.8" files = [ {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, ] [[package]] name = "trio" version = "0.27.0" description = "A friendly Python library for async concurrency and I/O" optional = false python-versions = ">=3.8" files = [ {file = "trio-0.27.0-py3-none-any.whl", hash = "sha256:68eabbcf8f457d925df62da780eff15ff5dc68fd6b367e2dde59f7aaf2a0b884"}, {file = "trio-0.27.0.tar.gz", hash = "sha256:1dcc95ab1726b2da054afea8fd761af74bad79bd52381b84eae408e983c76831"}, ] [package.dependencies] attrs = ">=23.2.0" cffi = {version = ">=1.14", markers = "os_name == \"nt\" and implementation_name != \"pypy\""} exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} idna = "*" outcome = "*" sniffio = ">=1.3.0" sortedcontainers = "*" [[package]] name = "typing-extensions" version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] name = "url-normalize" version = "1.4.3" description = "URL normalization for Python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, ] [package.dependencies] six = "*" [[package]] name = "urllib3" version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" files = [ {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "wemake-python-styleguide" version = "0.19.2" description = "The strictest and most opinionated python linter ever" optional = false python-versions = "<4.0,>=3.9" files = [ {file = "wemake_python_styleguide-0.19.2-py3-none-any.whl", hash = "sha256:d53205dbb629755026d853d15fb3ca03ebb2717c97de4198b5676b9bdc0663bd"}, {file = "wemake_python_styleguide-0.19.2.tar.gz", hash = "sha256:850fe70e6d525fd37ac51778e552a121a489f1bd057184de96ffd74a09aef414"}, ] [package.dependencies] astor = ">=0.8,<0.9" attrs = "*" darglint = ">=1.2,<2.0" flake8 = ">=7.0,<8.0" flake8-bandit = ">=4.1,<5.0" flake8-broken-line = ">=1.0,<2.0" flake8-bugbear = ">=24.2,<25.0" flake8-commas = ">=2.0,<3.0" flake8-comprehensions = ">=3.1,<4.0" flake8-debugger = ">=4.0,<5.0" flake8-docstrings = ">=1.3,<2.0" flake8-eradicate = ">=1.5,<2.0" flake8-isort = ">=6.0,<7.0" flake8-quotes = ">=3.0,<4.0" flake8-rst-docstrings = ">=0.3,<0.4" flake8-string-format = ">=0.3,<0.4" pep8-naming = ">=0.13,<0.14" pygments = ">=2.4,<3.0" setuptools = "*" typing_extensions = ">=4.0,<5.0" [[package]] name = "win32-setctime" version = "1.1.0" description = "A small Python utility to set file creation time on Windows" optional = false python-versions = ">=3.5" files = [ {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"}, {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"}, ] [package.extras] dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [extras] check-laws = ["hypothesis", "pytest"] compatible-mypy = ["mypy"] [metadata] lock-version = "2.0" python-versions = "^3.10" content-hash = "59456d59aa27ddfd7d37c30bf28d55d27dffa990fc74d1d5ec4c24708db55c71" returns-0.24.0/pyproject.toml000066400000000000000000000044621472312074000162210ustar00rootroot00000000000000[tool.poetry] name = "returns" version = "0.24.0" description = "Make your functions return something meaningful, typed, and safe!" 
license = "BSD-3-Clause" authors = [ "sobolevn " ] readme = "README.md" repository = "https://github.com/dry-python/returns" homepage = "https://returns.readthedocs.io" keywords = [ "functional programming", "fp", "monads", "monad", "monad transformers", "composition", "type-safety", "mypy", "railway-oriented-programming" ] classifiers = [ "Development Status :: 4 - Beta", "Intended Audience :: Developers", "Operating System :: OS Independent", "Framework :: Hypothesis", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Utilities", "Typing :: Typed" ] [tool.poetry.urls] "Funding" = "https://github.com/sponsors/dry-python" [tool.poetry.plugins.pytest11] returns = "returns.contrib.pytest.plugin" [tool.poetry.plugins.hypothesis] _ = "returns.contrib.hypothesis._entrypoint:_setup_hook" [tool.poetry.dependencies] python = "^3.10" typing-extensions = ">=4.0,<5.0" pytest = { version = "^8.0", optional = true } hypothesis = { version = "^6.98", optional = true } mypy = {version = ">=1.12,<1.14", optional = true} [tool.poetry.group.dev.dependencies] anyio = "^4.3" trio = "^0.27" attrs = "^24.2" httpx = ">=0.27,<0.29" wemake-python-styleguide = "^0.19" flake8-pytest-style = "^2.0" flake8-pyi = "^24.1" nitpick = "^0.35" codespell = "^2.2" slotscheck = "^0.19" pytest-cov = "^6.0" pytest-randomly = "^3.12" pytest-mypy-plugins = "^3.1" pytest-subtests = "^0.13" pytest-shard = "^0.1" [tool.poetry.group.docs] optional = true [tool.poetry.group.docs.dependencies] sphinx = "^7.3" sphinx-autodoc-typehints = "^2.3" sphinxcontrib-mermaid = "^1.0" furo = "^2024.5" myst-parser = "^4.0" tomli = "^2.0" [tool.poetry.extras] compatible-mypy = ["mypy"] check-laws = ["pytest", "hypothesis"] [build-system] requires = ["poetry-core>=1.9.0"] build-backend = "poetry.core.masonry.api" [tool.nitpick] style = "https://raw.githubusercontent.com/wemake-services/wemake-python-styleguide/master/styles/nitpick-style-wemake.toml" [tool.slotscheck] strict-imports = true require-subclass = true require-superclass = true exclude-modules = 'returns\.contrib\.' exclude-classes = 'returns\.primitives\.exceptions:UnwrapFailedError' returns-0.24.0/returns/000077500000000000000000000000001472312074000150015ustar00rootroot00000000000000returns-0.24.0/returns/__init__.py000066400000000000000000000000001472312074000171000ustar00rootroot00000000000000returns-0.24.0/returns/_internal/000077500000000000000000000000001472312074000167545ustar00rootroot00000000000000returns-0.24.0/returns/_internal/__init__.py000066400000000000000000000007001472312074000210620ustar00rootroot00000000000000""" This package contains code that was "generated" via metaprogramming. This happens, because Python is not flexible enough to do most tasks common in typed functional programming. Policy: 1. We store implementations in regular ``.py`` files. 2. We store generated type annotations in ``.pyi`` files. 3. We re-export these functions into regular modules as public values. Please, do not touch this code unless you know what you are doing. 
""" returns-0.24.0/returns/_internal/futures/000077500000000000000000000000001472312074000204515ustar00rootroot00000000000000returns-0.24.0/returns/_internal/futures/__init__.py000066400000000000000000000000001472312074000225500ustar00rootroot00000000000000returns-0.24.0/returns/_internal/futures/_future.py000066400000000000000000000036271472312074000225040ustar00rootroot00000000000000from collections.abc import Awaitable, Callable from typing import TYPE_CHECKING, TypeVar from returns.io import IO from returns.primitives.hkt import Kind1, dekind if TYPE_CHECKING: from returns.future import Future # noqa: F401 _ValueType = TypeVar('_ValueType', covariant=True) _NewValueType = TypeVar('_NewValueType') async def async_map( function: Callable[[_ValueType], _NewValueType], inner_value: Awaitable[_ValueType], ) -> _NewValueType: """Async maps a function over a value.""" return function(await inner_value) async def async_apply( container: 'Future[Callable[[_ValueType], _NewValueType]]', inner_value: Awaitable[_ValueType], ) -> _NewValueType: """Async applies a container with function over a value.""" return (await container)._inner_value(await inner_value) async def async_bind( function: Callable[[_ValueType], Kind1['Future', _NewValueType]], inner_value: Awaitable[_ValueType], ) -> _NewValueType: """Async binds a container over a value.""" return (await dekind(function(await inner_value)))._inner_value async def async_bind_awaitable( function: Callable[[_ValueType], Awaitable[_NewValueType]], inner_value: Awaitable[_ValueType], ) -> _NewValueType: """Async binds a coroutine over a value.""" return await function(await inner_value) async def async_bind_async( function: Callable[ [_ValueType], Awaitable[Kind1['Future', _NewValueType]], ], inner_value: Awaitable[_ValueType], ) -> _NewValueType: """Async binds a coroutine with container over a value.""" inner_io = dekind(await function(await inner_value))._inner_value return await inner_io async def async_bind_io( function: Callable[[_ValueType], IO[_NewValueType]], inner_value: Awaitable[_ValueType], ) -> _NewValueType: """Async binds a container over a value.""" return function(await inner_value)._inner_value returns-0.24.0/returns/_internal/futures/_future_result.py000066400000000000000000000145571472312074000241060ustar00rootroot00000000000000from collections.abc import Awaitable, Callable from typing import TYPE_CHECKING, Any, TypeVar from returns.io import IO, IOResult from returns.primitives.hkt import Kind2, dekind from returns.result import Failure, Result, Success if TYPE_CHECKING: from returns.future import Future, FutureResult # noqa: F401 _ValueType = TypeVar('_ValueType', covariant=True) _NewValueType = TypeVar('_NewValueType') _ErrorType = TypeVar('_ErrorType', covariant=True) _NewErrorType = TypeVar('_NewErrorType') async def async_swap( inner_value: Awaitable[Result[_ValueType, _ErrorType]], ) -> Result[_ErrorType, _ValueType]: """Swaps value and error types in ``Result``.""" return (await inner_value).swap() async def async_map( function: Callable[[_ValueType], _NewValueType], inner_value: Awaitable[Result[_ValueType, _ErrorType]], ) -> Result[_NewValueType, _ErrorType]: """Async maps a function over a value.""" return (await inner_value).map(function) async def async_apply( container: 'FutureResult[Callable[[_ValueType], _NewValueType], _ErrorType]', inner_value: Awaitable[Result[_ValueType, _ErrorType]], ) -> Result[_NewValueType, _ErrorType]: """Async maps a function over a value.""" return (await inner_value).apply((await 
container)._inner_value) async def async_bind( function: Callable[ [_ValueType], Kind2['FutureResult', _NewValueType, _ErrorType], ], inner_value: Awaitable[Result[_ValueType, _ErrorType]], ) -> Result[_NewValueType, _ErrorType]: """Async binds a container over a value.""" container = await inner_value if isinstance(container, Success): return (await dekind(function(container.unwrap())))._inner_value return container # type: ignore[return-value] async def async_bind_awaitable( function: Callable[[_ValueType], Awaitable[_NewValueType]], inner_value: Awaitable[Result[_ValueType, _ErrorType]], ) -> Result[_NewValueType, _ErrorType]: """Async binds a coroutine over a value.""" container = await inner_value if isinstance(container, Success): return Result.from_value(await function(container.unwrap())) return container # type: ignore[return-value] async def async_bind_async( function: Callable[ [_ValueType], Awaitable[Kind2['FutureResult', _NewValueType, _ErrorType]], ], inner_value: Awaitable[Result[_ValueType, _ErrorType]], ) -> Result[_NewValueType, _ErrorType]: """Async binds a coroutine with container over a value.""" container = await inner_value if isinstance(container, Success): return await dekind(await function(container.unwrap()))._inner_value return container # type: ignore[return-value] async def async_bind_result( function: Callable[[_ValueType], Result[_NewValueType, _ErrorType]], inner_value: Awaitable[Result[_ValueType, _ErrorType]], ) -> Result[_NewValueType, _ErrorType]: """Async binds a container returning ``Result`` over a value.""" return (await inner_value).bind(function) async def async_bind_ioresult( function: Callable[[_ValueType], IOResult[_NewValueType, _ErrorType]], inner_value: Awaitable[Result[_ValueType, _ErrorType]], ) -> Result[_NewValueType, _ErrorType]: """Async binds a container returning ``IOResult`` over a value.""" container = await inner_value if isinstance(container, Success): return function(container.unwrap())._inner_value return container # type: ignore[return-value] async def async_bind_io( function: Callable[[_ValueType], IO[_NewValueType]], inner_value: Awaitable[Result[_ValueType, _ErrorType]], ) -> Result[_NewValueType, _ErrorType]: """Async binds a container returning ``IO`` over a value.""" container = await inner_value if isinstance(container, Success): return Success(function(container.unwrap())._inner_value) return container # type: ignore[return-value] async def async_bind_future( function: Callable[[_ValueType], 'Future[_NewValueType]'], inner_value: Awaitable[Result[_ValueType, _ErrorType]], ) -> Result[_NewValueType, _ErrorType]: """Async binds a container returning ``IO`` over a value.""" container = await inner_value if isinstance(container, Success): return await async_from_success(function(container.unwrap())) return container # type: ignore[return-value] async def async_bind_async_future( function: Callable[[_ValueType], Awaitable['Future[_NewValueType]']], inner_value: Awaitable[Result[_ValueType, _ErrorType]], ) -> Result[_NewValueType, _ErrorType]: """Async binds a container returning ``IO`` over a value.""" container = await inner_value if isinstance(container, Success): return await async_from_success(await function(container.unwrap())) return container # type: ignore[return-value] async def async_alt( function: Callable[[_ErrorType], _NewErrorType], inner_value: Awaitable[Result[_ValueType, _ErrorType]], ) -> Result[_ValueType, _NewErrorType]: """Async alts a function over a value.""" container = await inner_value if 
isinstance(container, Success): return container return Failure(function(container.failure())) async def async_lash( function: Callable[ [_ErrorType], Kind2['FutureResult', _ValueType, _NewErrorType], ], inner_value: Awaitable[Result[_ValueType, _ErrorType]], ) -> Result[_ValueType, _NewErrorType]: """Async lashes a function returning a container over a value.""" container = await inner_value if isinstance(container, Success): return container return (await dekind(function(container.failure())))._inner_value async def async_from_success( container: 'Future[_NewValueType]', ) -> Result[_NewValueType, Any]: """Async success unit factory.""" return Success((await container)._inner_value) async def async_from_failure( container: 'Future[_NewErrorType]', ) -> Result[Any, _NewErrorType]: """Async failure unit factory.""" return Failure((await container)._inner_value) async def async_compose_result( function: Callable[ [Result[_ValueType, _ErrorType]], Kind2['FutureResult', _NewValueType, _ErrorType], ], inner_value: Awaitable[Result[_ValueType, _ErrorType]], ) -> Result[_NewValueType, _ErrorType]: """Async composes ``Result`` based function.""" return (await dekind(function(await inner_value)))._inner_value returns-0.24.0/returns/_internal/futures/_reader_future_result.py000066400000000000000000000033341472312074000254170ustar00rootroot00000000000000from collections.abc import Awaitable, Callable from typing import TYPE_CHECKING, TypeVar from returns.primitives.hkt import Kind3, dekind from returns.result import Result, Success if TYPE_CHECKING: from returns.context import RequiresContextFutureResult # noqa: F401 _ValueType = TypeVar('_ValueType', covariant=True) _NewValueType = TypeVar('_NewValueType') _ErrorType = TypeVar('_ErrorType', covariant=True) _EnvType = TypeVar('_EnvType') async def async_bind_async( function: Callable[ [_ValueType], Awaitable[ Kind3[ 'RequiresContextFutureResult', _NewValueType, _ErrorType, _EnvType, ], ], ], container: 'RequiresContextFutureResult[_ValueType, _ErrorType, _EnvType]', deps: _EnvType, ) -> Result[_NewValueType, _ErrorType]: """Async binds a coroutine with container over a value.""" inner_value = await container(deps)._inner_value if isinstance(inner_value, Success): return await dekind( await function(inner_value.unwrap()), )(deps)._inner_value return inner_value # type: ignore[return-value] async def async_compose_result( function: Callable[ [Result[_ValueType, _ErrorType]], Kind3[ 'RequiresContextFutureResult', _NewValueType, _ErrorType, _EnvType, ], ], container: 'RequiresContextFutureResult[_ValueType, _ErrorType, _EnvType]', deps: _EnvType, ) -> Result[_NewValueType, _ErrorType]: """Async composes ``Result`` based function.""" new_container = dekind(function((await container(deps))._inner_value)) return (await new_container(deps))._inner_value returns-0.24.0/returns/_internal/pipeline/000077500000000000000000000000001472312074000205615ustar00rootroot00000000000000returns-0.24.0/returns/_internal/pipeline/__init__.py000066400000000000000000000000001472312074000226600ustar00rootroot00000000000000returns-0.24.0/returns/_internal/pipeline/flow.py000066400000000000000000000031401472312074000221000ustar00rootroot00000000000000from functools import reduce from typing import TypeVar _InstanceType = TypeVar('_InstanceType') _PipelineStepType = TypeVar('_PipelineStepType') _ReturnType = TypeVar('_ReturnType') def flow( instance: _InstanceType, *functions: _PipelineStepType, ) -> _ReturnType: # type: ignore[type-var] """ Allows to compose a value and up to 
multiple functions that use this value. All starts with the value itself. Each next function uses the previous result as an input parameter. We use a custom ``mypy`` plugin to make sure types are correct. Otherwise, it is currently impossible to properly type this function. Currently, ``flow`` has a hard limit of 21 steps. Because, it is not possible to type it otherwise. We need a hard limit. See: https://github.com/dry-python/returns/issues/461 Here's how it should be used: .. code:: python >>> from returns.pipeline import flow >>> # => executes: str(float(int('1'))) >>> assert flow('1', int, float, str) == '1.0' This function is closely related to :func:`pipe `: .. code:: python >>> from returns.pipeline import pipe >>> assert flow('1', int, float, str) == pipe(int, float, str)('1') See also: - https://stackoverflow.com/a/41585450/4842742 - https://github.com/gcanti/fp-ts/blob/master/src/pipeable.ts Requires our :ref:`mypy plugin `. """ return reduce( # type: ignore lambda composed, function: function(composed), # type: ignore functions, instance, ) returns-0.24.0/returns/_internal/pipeline/managed.py000066400000000000000000000111261472312074000225300ustar00rootroot00000000000000from collections.abc import Callable from typing import TypeVar from returns.interfaces.specific.ioresult import IOResultLikeN from returns.primitives.hkt import Kinded, KindN, kinded from returns.result import Result _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _IOResultLikeType = TypeVar('_IOResultLikeType', bound=IOResultLikeN) def managed( use: Callable[ [_FirstType], KindN[_IOResultLikeType, _UpdatedType, _SecondType, _ThirdType], ], release: Callable[ [_FirstType, Result[_UpdatedType, _SecondType]], KindN[_IOResultLikeType, None, _SecondType, _ThirdType], ], ) -> Kinded[Callable[ [KindN[_IOResultLikeType, _FirstType, _SecondType, _ThirdType]], KindN[_IOResultLikeType, _UpdatedType, _SecondType, _ThirdType], ]]: """ Allows to run managed computation. Managed computations consist of three steps: 1. ``acquire`` when we get some initial resource to work with 2. ``use`` when the main logic is done 3. ``release`` when we release acquired resource Let's look at the example: 1. We need to acquire an opened file to read it later 2. We need to use acquired file to read its content 3. We need to release the acquired file in the end Here's a code example: .. code:: python >>> from returns.pipeline import managed >>> from returns.io import IOSuccess, IOFailure, impure_safe >>> class Lock(object): ... '''Example class to emulate state to acquire and release.''' ... def __init__(self, default: bool = False) -> None: ... self.set = default ... def __eq__(self, lock) -> bool: # we need this for testing ... return self.set == lock.set ... def release(self) -> None: ... self.set = False >>> pipeline = managed( ... lambda lock: IOSuccess(lock) if lock.set else IOFailure(False), ... lambda lock, use_result: impure_safe(lock.release)(), ... ) >>> assert pipeline(IOSuccess(Lock(True))) == IOSuccess(Lock(False)) >>> assert pipeline(IOSuccess(Lock())) == IOFailure(False) >>> assert pipeline(IOFailure('no lock')) == IOFailure('no lock') See also: - https://github.com/gcanti/fp-ts/blob/master/src/IOEither.ts - https://zio.dev/docs/datatypes/datatypes_managed .. rubric:: Implementation This class requires some explanation. First of all, we modeled this function as a class, so it can be partially applied easily. 
Secondly, we used an imperative approach of programming inside this class. The functional approach was 2 times slower and way more complex to read and understand. Lastly, we try to hide these two things from the end user. We pretend that this is not a class, but a function. We also do not break a functional abstraction for the end user. It is just an implementation detail. Type inference does not work so well with ``lambda`` functions. However, we do not recommend using this function with ``lambda`` functions. """ @kinded def factory( acquire: KindN[_IOResultLikeType, _FirstType, _SecondType, _ThirdType], ) -> KindN[_IOResultLikeType, _UpdatedType, _SecondType, _ThirdType]: return acquire.bind(_use(acquire, use, release)) return factory def _use( acquire: KindN[_IOResultLikeType, _FirstType, _SecondType, _ThirdType], use: Callable[ [_FirstType], KindN[_IOResultLikeType, _UpdatedType, _SecondType, _ThirdType], ], release: Callable[ [_FirstType, Result[_UpdatedType, _SecondType]], KindN[_IOResultLikeType, None, _SecondType, _ThirdType], ], ) -> Callable[ [_FirstType], KindN[_IOResultLikeType, _UpdatedType, _SecondType, _ThirdType], ]: """Uses the resource after it is acquired successfully.""" return lambda initial: use(initial).compose_result( _release(acquire, initial, release), ) def _release( acquire: KindN[_IOResultLikeType, _FirstType, _SecondType, _ThirdType], initial: _FirstType, release: Callable[ [_FirstType, Result[_UpdatedType, _SecondType]], KindN[_IOResultLikeType, None, _SecondType, _ThirdType], ], ) -> Callable[ [Result[_UpdatedType, _SecondType]], KindN[_IOResultLikeType, _UpdatedType, _SecondType, _ThirdType], ]: """Release handler. Does its job after resource is acquired and used.""" return lambda updated: release(initial, updated).bind( lambda _: acquire.from_result(updated), # noqa: WPS430 ) returns-0.24.0/returns/_internal/pipeline/pipe.py000066400000000000000000000017701472312074000220750ustar00rootroot00000000000000from returns._internal.pipeline.flow import flow def pipe(*functions): """ Allows to compose a value and up to 20 functions that use this value. We use a custom ``mypy`` plugin to make sure types are correct. Otherwise, it is currently impossible to properly type this function. Each next function uses the previous result as an input parameter. Here's how it should be used: .. code:: python >>> from returns.pipeline import pipe >>> # => executes: str(float(int('1'))) >>> assert pipe(int, float, str)('1') == '1.0' This function is closely related to :func:`flow`: ..
code:: python >>> from returns.pipeline import flow >>> assert pipe(int, float, str)('1') == flow('1', int, float, str) See also: - https://stackoverflow.com/a/41585450/4842742 - https://github.com/gcanti/fp-ts/blob/master/src/pipeable.ts """ return lambda instance: flow(instance, *functions) returns-0.24.0/returns/_internal/pipeline/pipe.pyi000066400000000000000000000101161472312074000222400ustar00rootroot00000000000000from typing import Generic, TypeVar _InstanceType = TypeVar('_InstanceType') _ReturnType = TypeVar('_ReturnType') _PipelineStepType1 = TypeVar('_PipelineStepType1') _PipelineStepType2 = TypeVar('_PipelineStepType2') _PipelineStepType3 = TypeVar('_PipelineStepType3') _PipelineStepType4 = TypeVar('_PipelineStepType4') _PipelineStepType5 = TypeVar('_PipelineStepType5') _PipelineStepType6 = TypeVar('_PipelineStepType6') _PipelineStepType7 = TypeVar('_PipelineStepType7') _PipelineStepType8 = TypeVar('_PipelineStepType8') _PipelineStepType9 = TypeVar('_PipelineStepType9') _PipelineStepType10 = TypeVar('_PipelineStepType10') _PipelineStepType11 = TypeVar('_PipelineStepType11') _PipelineStepType12 = TypeVar('_PipelineStepType12') _PipelineStepType13 = TypeVar('_PipelineStepType13') _PipelineStepType14 = TypeVar('_PipelineStepType14') _PipelineStepType15 = TypeVar('_PipelineStepType15') _PipelineStepType16 = TypeVar('_PipelineStepType16') _PipelineStepType17 = TypeVar('_PipelineStepType17') _PipelineStepType18 = TypeVar('_PipelineStepType18') _PipelineStepType19 = TypeVar('_PipelineStepType19') _PipelineStepType20 = TypeVar('_PipelineStepType20') class _Pipe(Generic[ _InstanceType, _ReturnType, _PipelineStepType1, _PipelineStepType2, _PipelineStepType3, _PipelineStepType4, _PipelineStepType5, _PipelineStepType6, _PipelineStepType7, _PipelineStepType8, _PipelineStepType9, _PipelineStepType10, _PipelineStepType11, _PipelineStepType12, _PipelineStepType13, _PipelineStepType14, _PipelineStepType15, _PipelineStepType16, _PipelineStepType17, _PipelineStepType18, _PipelineStepType19, _PipelineStepType20, ]): """ Internal type to make ``mypy`` plugin work correctly. We need this to be able to check ``__call__`` signature. See docs on ``pipe`` feature in ``mypy`` plugin. """ def __init__( # noqa: WPS451 self, functions: tuple[ _PipelineStepType1, _PipelineStepType2, _PipelineStepType3, _PipelineStepType4, _PipelineStepType5, _PipelineStepType6, _PipelineStepType7, _PipelineStepType8, _PipelineStepType9, _PipelineStepType10, _PipelineStepType11, _PipelineStepType12, _PipelineStepType13, _PipelineStepType14, _PipelineStepType15, _PipelineStepType16, _PipelineStepType17, _PipelineStepType18, _PipelineStepType19, _PipelineStepType20, ], /, ) -> None: ... def __call__(self, instance: _InstanceType) -> _ReturnType: ... 
def pipe( # noqa: WPS451 function1: _PipelineStepType1, function2: _PipelineStepType2 = ..., function3: _PipelineStepType3 = ..., function4: _PipelineStepType4 = ..., function5: _PipelineStepType5 = ..., function6: _PipelineStepType6 = ..., function7: _PipelineStepType7 = ..., function8: _PipelineStepType8 = ..., function9: _PipelineStepType9 = ..., function10: _PipelineStepType10 = ..., function11: _PipelineStepType11 = ..., function12: _PipelineStepType12 = ..., function13: _PipelineStepType13 = ..., function14: _PipelineStepType14 = ..., function15: _PipelineStepType15 = ..., function16: _PipelineStepType16 = ..., function17: _PipelineStepType17 = ..., function18: _PipelineStepType18 = ..., function19: _PipelineStepType19 = ..., function20: _PipelineStepType20 = ..., /, ) -> _Pipe[ _InstanceType, _ReturnType, _PipelineStepType1, _PipelineStepType2, _PipelineStepType3, _PipelineStepType4, _PipelineStepType5, _PipelineStepType6, _PipelineStepType7, _PipelineStepType8, _PipelineStepType9, _PipelineStepType10, _PipelineStepType11, _PipelineStepType12, _PipelineStepType13, _PipelineStepType14, _PipelineStepType15, _PipelineStepType16, _PipelineStepType17, _PipelineStepType18, _PipelineStepType19, _PipelineStepType20, ]: ... returns-0.24.0/returns/context/000077500000000000000000000000001472312074000164655ustar00rootroot00000000000000returns-0.24.0/returns/context/__init__.py000066400000000000000000000030241472312074000205750ustar00rootroot00000000000000"""This module was quite a big one, so we have split it.""" from returns.context.requires_context import NoDeps as NoDeps from returns.context.requires_context import Reader as Reader from returns.context.requires_context import RequiresContext as RequiresContext from returns.context.requires_context_future_result import ( ReaderFutureResult as ReaderFutureResult, ) from returns.context.requires_context_future_result import ( ReaderFutureResultE as ReaderFutureResultE, ) from returns.context.requires_context_future_result import ( RequiresContextFutureResult as RequiresContextFutureResult, ) from returns.context.requires_context_future_result import ( RequiresContextFutureResultE as RequiresContextFutureResultE, ) from returns.context.requires_context_ioresult import ( ReaderIOResult as ReaderIOResult, ) from returns.context.requires_context_ioresult import ( ReaderIOResultE as ReaderIOResultE, ) from returns.context.requires_context_ioresult import ( RequiresContextIOResult as RequiresContextIOResult, ) from returns.context.requires_context_ioresult import ( RequiresContextIOResultE as RequiresContextIOResultE, ) from returns.context.requires_context_result import ReaderResult as ReaderResult from returns.context.requires_context_result import ( ReaderResultE as ReaderResultE, ) from returns.context.requires_context_result import ( RequiresContextResult as RequiresContextResult, ) from returns.context.requires_context_result import ( RequiresContextResultE as RequiresContextResultE, ) returns-0.24.0/returns/context/requires_context.py000066400000000000000000000344661472312074000224570ustar00rootroot00000000000000from __future__ import annotations from collections.abc import Callable from typing import TYPE_CHECKING, Any, ClassVar, TypeAlias, TypeVar, final from returns.functions import identity from returns.future import FutureResult from returns.interfaces.specific import reader from returns.io import IOResult from returns.primitives.container import BaseContainer from returns.primitives.hkt import Kind2, SupportsKind2, dekind from 
returns.result import Result if TYPE_CHECKING: # We need this condition to make sure Python can solve cycle imports. # But, since we only use these values in types, it is not important. from returns.context.requires_context_future_result import ( RequiresContextFutureResult, ) from returns.context.requires_context_ioresult import ( RequiresContextIOResult, ) from returns.context.requires_context_result import RequiresContextResult # Context: _EnvType = TypeVar('_EnvType', contravariant=True) _NewEnvType = TypeVar('_NewEnvType') _ReturnType = TypeVar('_ReturnType', covariant=True) _NewReturnType = TypeVar('_NewReturnType') _ValueType = TypeVar('_ValueType') _ErrorType = TypeVar('_ErrorType') # Helpers: _FirstType = TypeVar('_FirstType') # Type Aliases: #: Sometimes ``RequiresContext`` and other similar types might be used with #: no explicit dependencies so we need to have this type alias for Any. NoDeps = Any @final class RequiresContext( # type: ignore[type-var] BaseContainer, SupportsKind2['RequiresContext', _ReturnType, _EnvType], reader.ReaderBased2[_ReturnType, _EnvType], ): """ The ``RequiresContext`` container. Its main purpose is to wrap some specific function and to provide tools to compose other functions around it without actually calling it. The ``RequiresContext`` container passes the state you want to share between functions. Functions may read that state, but can't change it. The ``RequiresContext`` container lets us access shared immutable state within a specific context. It can be used for lazy evaluation and typed dependency injection. ``RequiresContext`` is used with functions that never fail. If you want to use ``RequiresContext`` with returns ``Result`` then consider using ``RequiresContextResult`` instead. Note: This container does not wrap ANY value. It wraps only functions. You won't be able to supply arbitrary types! See also: - https://dev.to/gcanti/getting-started-with-fp-ts-reader-1ie5 - https://en.wikipedia.org/wiki/Lazy_evaluation - https://bit.ly/2R8l4WK """ __slots__ = () #: This field has an extra 'RequiresContext' just because `mypy` needs it. _inner_value: Callable[[_EnvType], _ReturnType] #: A convenient placeholder to call methods created by `.from_value()`: no_args: ClassVar[NoDeps] = object() def __init__( self, inner_value: Callable[[_EnvType], _ReturnType], ) -> None: """ Public constructor for this type. Also required for typing. Only allows functions of kind ``* -> *``. .. code:: python >>> from returns.context import RequiresContext >>> str(RequiresContext(lambda deps: deps + 1)) '<RequiresContext: <function <lambda> at ...>>' """ super().__init__(inner_value) def __call__(self, deps: _EnvType) -> _ReturnType: """ Evaluates the wrapped function. .. code:: python >>> from returns.context import RequiresContext >>> def first(lg: bool) -> RequiresContext[int, float]: ... # `deps` has `float` type here: ... return RequiresContext( ... lambda deps: deps if lg else -deps, ... ) >>> instance = first(False) # creating `RequiresContext` instance >>> assert instance(3.5) == -3.5 # calling it with `__call__` >>> # Example with another logic: >>> assert first(True)(3.5) == 3.5 Otherwise, it is a regular Python magic method. """ return self._inner_value(deps) def map( self, function: Callable[[_ReturnType], _NewReturnType], ) -> RequiresContext[_NewReturnType, _EnvType]: """ Allows to compose functions inside the wrapped container. Here's how it works: .. code:: python >>> from returns.context import RequiresContext >>> def first(lg: bool) -> RequiresContext[int, float]: ...
# `deps` has `float` type here: ... return RequiresContext( ... lambda deps: deps if lg else -deps, ... ) >>> assert first(True).map(lambda number: number * 10)(2.5) == 25.0 >>> assert first(False).map(lambda number: number * 10)(0.1) == -1.0 """ return RequiresContext(lambda deps: function(self(deps))) def apply( self, container: Kind2[ RequiresContext, Callable[[_ReturnType], _NewReturnType], _EnvType, ], ) -> RequiresContext[_NewReturnType, _EnvType]: """ Calls a wrapped function in a container on this container. .. code:: python >>> from returns.context import RequiresContext >>> assert RequiresContext.from_value('a').apply( ... RequiresContext.from_value(lambda inner: inner + 'b') ... )(...) == 'ab' """ return RequiresContext( lambda deps: self.map(dekind(container)(deps))(deps), ) def bind( self, function: Callable[ [_ReturnType], Kind2[RequiresContext, _NewReturnType, _EnvType], ], ) -> RequiresContext[_NewReturnType, _EnvType]: """ Composes a container with a function returning another container. This is useful when you do several computations that rely on the same context. .. code:: python >>> from returns.context import RequiresContext >>> def first(lg: bool) -> RequiresContext[int, float]: ... # `deps` has `float` type here: ... return RequiresContext( ... lambda deps: deps if lg else -deps, ... ) >>> def second(number: int) -> RequiresContext[str, float]: ... # `deps` has `float` type here: ... return RequiresContext( ... lambda deps: '>=' if number >= deps else '<', ... ) >>> assert first(True).bind(second)(1) == '>=' >>> assert first(False).bind(second)(2) == '<' """ return RequiresContext(lambda deps: dekind(function(self(deps)))(deps)) #: Alias for `bind_context` method, it is the same as `bind` here. bind_context = bind def modify_env( self, function: Callable[[_NewEnvType], _EnvType], ) -> RequiresContext[_ReturnType, _NewEnvType]: """ Allows to modify the environment type. .. code:: python >>> from returns.context import RequiresContext >>> def mul(arg: int) -> RequiresContext[float, int]: ... return RequiresContext(lambda deps: arg * deps) >>> assert mul(3).modify_env(int)('2') == 6 """ return RequiresContext(lambda deps: self(function(deps))) @classmethod def ask(cls) -> RequiresContext[_EnvType, _EnvType]: """ Get current context to use the dependencies. It is a common scenario when you need to use the environment. For example, you want to do some context-related computation, but you don't have the context instance at your disposal. That's where ``.ask()`` becomes useful! .. code:: python >>> from typing_extensions import TypedDict >>> class Deps(TypedDict): ... message: str >>> def first(lg: bool) -> RequiresContext[int, Deps]: ... # `deps` has `Deps` type here: ... return RequiresContext( ... lambda deps: deps['message'] if lg else 'error', ... ) >>> def second(text: str) -> RequiresContext[int, Deps]: ... return first(len(text) > 3) >>> assert second('abc')({'message': 'ok'}) == 'error' >>> assert second('abcd')({'message': 'ok'}) == 'ok' And now imagine that you have to change this ``3`` limit. And you want to be able to set it via environment as well. Ok, let's fix it with the power of ``RequiresContext.ask()``! .. code:: python >>> from typing_extensions import TypedDict >>> class Deps(TypedDict): ... message: str ... limit: int # note this new field! >>> def new_first(lg: bool) -> RequiresContext[int, Deps]: ... # `deps` has `Deps` type here: ... return RequiresContext( ... lambda deps: deps['message'] if lg else 'err', ...
) >>> def new_second(text: str) -> RequiresContext[int, Deps]: ... return RequiresContext[int, Deps].ask().bind( ... lambda deps: new_first(len(text) > deps.get('limit', 3)), ... ) >>> assert new_second('abc')({'message': 'ok', 'limit': 2}) == 'ok' >>> assert new_second('abcd')({'message': 'ok'}) == 'ok' >>> assert new_second('abcd')({'message': 'ok', 'limit': 5}) == 'err' That's how ``ask`` works. This class contains methods that require to explicitly set type annotations. Why? Because it is impossible to figure out the type without them. So, here's how you should use them: .. code:: python RequiresContext[int, Dict[str, str]].ask() Otherwise, your ``.ask()`` method will return ``RequiresContext[Never, Never]``, which is unusable: .. code:: python env = RequiresContext.ask() env(some_deps) And ``mypy`` will warn you: ``error: Need type annotation for '...'`` See also: - https://dev.to/gcanti/getting-started-with-fp-ts-reader-1ie5 """ # noqa: F811 return RequiresContext(identity) @classmethod def from_value( cls, inner_value: _FirstType, ) -> RequiresContext[_FirstType, NoDeps]: """ Used to return some specific value from the container. Consider this method as some kind of factory. Passed value will be a return type. Make sure to use :attr:`~RequiresContext.no_args` for getting the unit value. .. code:: python >>> from returns.context import RequiresContext >>> unit = RequiresContext.from_value(5) >>> assert unit(RequiresContext.no_args) == 5 Might be used with or without direct type hint. """ return RequiresContext(lambda _: inner_value) @classmethod def from_context( cls, inner_value: RequiresContext[_NewReturnType, _NewEnvType], ) -> RequiresContext[_NewReturnType, _NewEnvType]: """ Used to create new containers from existing ones. Used as a part of ``ReaderBased2`` interface. .. code:: python >>> from returns.context import RequiresContext >>> unit = RequiresContext.from_value(5) >>> assert RequiresContext.from_context(unit)(...) == unit(...) """ return inner_value @classmethod def from_requires_context_result( cls, inner_value: RequiresContextResult[_ValueType, _ErrorType, _EnvType], ) -> RequiresContext[Result[_ValueType, _ErrorType], _EnvType]: """ Typecasts ``RequiresContextResult`` to ``RequiresContext`` instance. Breaks ``RequiresContextResult[a, b, e]`` into ``RequiresContext[Result[a, b], e]``. .. code:: python >>> from returns.context import RequiresContext >>> from returns.context import RequiresContextResult >>> from returns.result import Success >>> assert RequiresContext.from_requires_context_result( ... RequiresContextResult.from_value(1), ... )(...) == Success(1) Can be reverted with ``RequiresContextResult.from_typecast``. """ return RequiresContext(inner_value) @classmethod def from_requires_context_ioresult( cls, inner_value: RequiresContextIOResult[_ValueType, _ErrorType, _EnvType], ) -> RequiresContext[IOResult[_ValueType, _ErrorType], _EnvType]: """ Typecasts ``RequiresContextIOResult`` to ``RequiresContext`` instance. Breaks ``RequiresContextIOResult[a, b, e]`` into ``RequiresContext[IOResult[a, b], e]``. .. code:: python >>> from returns.context import RequiresContext >>> from returns.context import RequiresContextIOResult >>> from returns.io import IOSuccess >>> assert RequiresContext.from_requires_context_ioresult( ... RequiresContextIOResult.from_value(1), ... )(...) == IOSuccess(1) Can be reverted with ``RequiresContextIOResult.from_typecast``. 
""" return RequiresContext(inner_value) @classmethod def from_requires_context_future_result( cls, inner_value: RequiresContextFutureResult[ _ValueType, _ErrorType, _EnvType, ], ) -> RequiresContext[FutureResult[_ValueType, _ErrorType], _EnvType]: """ Typecasts ``RequiresContextIOResult`` to ``RequiresContext`` instance. Breaks ``RequiresContextIOResult[a, b, e]`` into ``RequiresContext[IOResult[a, b], e]``. .. code:: python >>> import anyio >>> from returns.context import RequiresContext >>> from returns.context import RequiresContextFutureResult >>> from returns.io import IOSuccess >>> container = RequiresContext.from_requires_context_future_result( ... RequiresContextFutureResult.from_value(1), ... ) >>> assert anyio.run( ... container, RequiresContext.no_args, ... ) == IOSuccess(1) Can be reverted with ``RequiresContextFutureResult.from_typecast``. """ return RequiresContext(inner_value) # Aliases #: Sometimes `RequiresContext` is too long to type. Reader: TypeAlias = RequiresContext returns-0.24.0/returns/context/requires_context_future_result.py000066400000000000000000001335351472312074000254440ustar00rootroot00000000000000from __future__ import annotations from collections.abc import Awaitable, Callable from typing import TYPE_CHECKING, Any, ClassVar, TypeAlias, TypeVar, final from returns._internal.futures import _reader_future_result from returns.context import NoDeps from returns.future import Future, FutureResult from returns.interfaces.specific import future_result, reader_future_result from returns.io import IO, IOResult from returns.primitives.container import BaseContainer from returns.primitives.hkt import Kind3, SupportsKind3, dekind from returns.result import Result if TYPE_CHECKING: from returns.context.requires_context import RequiresContext from returns.context.requires_context_ioresult import ( ReaderIOResult, RequiresContextIOResult, ) from returns.context.requires_context_result import RequiresContextResult # Context: _EnvType = TypeVar('_EnvType', contravariant=True) _NewEnvType = TypeVar('_NewEnvType') # Result: _ValueType = TypeVar('_ValueType', covariant=True) _NewValueType = TypeVar('_NewValueType') _ErrorType = TypeVar('_ErrorType', covariant=True) _NewErrorType = TypeVar('_NewErrorType') # Helpers: _FirstType = TypeVar('_FirstType') @final class RequiresContextFutureResult( # type: ignore[type-var] BaseContainer, SupportsKind3[ 'RequiresContextFutureResult', _ValueType, _ErrorType, _EnvType, ], reader_future_result.ReaderFutureResultBasedN[ _ValueType, _ErrorType, _EnvType, ], future_result.FutureResultLike3[_ValueType, _ErrorType, _EnvType], ): """ The ``RequiresContextFutureResult`` combinator. This probably the main type people are going to use in ``async`` programs. See :class:`returns.context.requires_context.RequiresContext`, :class:`returns.context.requires_context_result.RequiresContextResult`, and :class:`returns.context.requires_context_result.RequiresContextIOResult` for more docs. This is just a handy wrapper around ``RequiresContext[FutureResult[a, b], env]`` which represents a context-dependent impure async operation that might fail. So, this is a thin wrapper, without any changes in logic. Why do we need this wrapper? That's just for better usability! 
This way ``RequiresContextIOResult`` allows to simply work with: - raw values and pure functions - ``RequiresContext`` values and pure functions returning it - ``RequiresContextResult`` values and pure functions returning it - ``RequiresContextIOResult`` values and pure functions returning it - ``Result`` and pure functions returning it - ``IOResult`` and functions returning it - ``FutureResult`` and functions returning it - other ``RequiresContextFutureResult`` related functions and values This is a complex type for complex tasks! .. rubric:: Important implementation details Due to its meaning, ``RequiresContextFutureResult`` cannot have ``Success`` and ``Failure`` subclasses. We have just one type. That's by design. Different converters are also not supported for this type. Use converters inside the ``RequiresContext`` context, not outside. See also: - https://dev.to/gcanti/getting-started-with-fp-ts-reader-1ie5 - https://en.wikipedia.org/wiki/Lazy_evaluation - https://bit.ly/2R8l4WK - https://bit.ly/2RwP4fp """ __slots__ = () #: Inner value of `RequiresContext` #: is just a function that returns `FutureResult`. #: This field has an extra 'RequiresContext' just because `mypy` needs it. _inner_value: Callable[[_EnvType], FutureResult[_ValueType, _ErrorType]] #: A convenient placeholder to call methods created by `.from_value()`. no_args: ClassVar[NoDeps] = object() def __init__( self, inner_value: Callable[[_EnvType], FutureResult[_ValueType, _ErrorType]], ) -> None: """ Public constructor for this type. Also required for typing. Only allows functions of kind ``* -> *`` and returning :class:`returns.future.FutureResult` instances. .. code:: python >>> from returns.context import RequiresContextFutureResult >>> from returns.future import FutureResult >>> instance = RequiresContextFutureResult( ... lambda deps: FutureResult.from_value(1), ... ) >>> str(instance) '<RequiresContextFutureResult: <function <lambda> at ...>>' """ super().__init__(inner_value) def __call__(self, deps: _EnvType) -> FutureResult[_ValueType, _ErrorType]: """ Evaluates the wrapped function. .. code:: python >>> import anyio >>> from returns.context import RequiresContextFutureResult >>> from returns.future import FutureResult >>> from returns.io import IOSuccess >>> def first(lg: bool) -> RequiresContextFutureResult[int, str, int]: ... # `deps` has `int` type here: ... return RequiresContextFutureResult( ... lambda deps: FutureResult.from_value( ... deps if lg else -deps, ... ), ... ) >>> instance = first(False) >>> assert anyio.run(instance(3).awaitable) == IOSuccess(-3) >>> instance = first(True) >>> assert anyio.run(instance(3).awaitable) == IOSuccess(3) Otherwise, it is a regular Python magic method. """ return self._inner_value(deps) def swap( self, ) -> RequiresContextFutureResult[_ErrorType, _ValueType, _EnvType]: """ Swaps value and error types. So, values become errors and errors become values. It is useful when you have to work with errors a lot. And since we have a lot of ``.bind_`` related methods and only a single ``.lash`` - it is easier to work with values. .. code:: python >>> import anyio >>> from returns.context import RequiresContextFutureResult >>> from returns.io import IOSuccess, IOFailure >>> success = RequiresContextFutureResult.from_value(1) >>> failure = RequiresContextFutureResult.from_failure(1) >>> assert anyio.run(success.swap(), ...) == IOFailure(1) >>> assert anyio.run(failure.swap(), ...)
== IOSuccess(1) """ return RequiresContextFutureResult(lambda deps: self(deps).swap()) def map( self, function: Callable[[_ValueType], _NewValueType], ) -> RequiresContextFutureResult[_NewValueType, _ErrorType, _EnvType]: """ Composes successful container with a pure function. .. code:: python >>> import anyio >>> from returns.context import RequiresContextFutureResult >>> from returns.io import IOSuccess, IOFailure >>> assert anyio.run(RequiresContextFutureResult.from_value(1).map( ... lambda x: x + 1, ... )(...).awaitable) == IOSuccess(2) >>> assert anyio.run(RequiresContextFutureResult.from_failure(1).map( ... lambda x: x + 1, ... )(...).awaitable) == IOFailure(1) """ return RequiresContextFutureResult( lambda deps: self(deps).map(function), ) def apply( self, container: Kind3[ RequiresContextFutureResult, Callable[[_ValueType], _NewValueType], _ErrorType, _EnvType, ], ) -> RequiresContextFutureResult[_NewValueType, _ErrorType, _EnvType]: """ Calls a wrapped function in a container on this container. .. code:: python >>> import anyio >>> from returns.context import RequiresContextFutureResult >>> from returns.io import IOSuccess, IOFailure >>> def transform(arg: str) -> str: ... return arg + 'b' >>> assert anyio.run( ... RequiresContextFutureResult.from_value('a').apply( ... RequiresContextFutureResult.from_value(transform), ... ), ... RequiresContextFutureResult.no_args, ... ) == IOSuccess('ab') >>> assert anyio.run( ... RequiresContextFutureResult.from_failure('a').apply( ... RequiresContextFutureResult.from_value(transform), ... ), ... RequiresContextFutureResult.no_args, ... ) == IOFailure('a') """ return RequiresContextFutureResult( lambda deps: self(deps).apply(dekind(container)(deps)), ) def bind( self, function: Callable[ [_ValueType], Kind3[ RequiresContextFutureResult, _NewValueType, _ErrorType, _EnvType, ], ], ) -> RequiresContextFutureResult[_NewValueType, _ErrorType, _EnvType]: """ Composes this container with a function returning the same type. .. code:: python >>> import anyio >>> from returns.context import RequiresContextFutureResult >>> from returns.future import FutureResult >>> from returns.io import IOSuccess, IOFailure >>> def function( ... number: int, ... ) -> RequiresContextFutureResult[str, int, int]: ... # `deps` has `int` type here: ... return RequiresContextFutureResult( ... lambda deps: FutureResult.from_value(str(number + deps)), ... ) >>> assert anyio.run( ... RequiresContextFutureResult.from_value(2).bind(function), ... 3, ... ) == IOSuccess('5') >>> assert anyio.run( ... RequiresContextFutureResult.from_failure(2).bind(function), ... 3, ... ) == IOFailure(2) """ return RequiresContextFutureResult( lambda deps: self(deps).bind( lambda inner: dekind( # type: ignore[misc] function(inner), )(deps), ), ) #: Alias for `bind_context_future_result` method, #: it is the same as `bind` here. bind_context_future_result = bind def bind_async( self, function: Callable[ [_ValueType], Awaitable[ Kind3[ RequiresContextFutureResult, _NewValueType, _ErrorType, _EnvType, ], ], ], ) -> RequiresContextFutureResult[_NewValueType, _ErrorType, _EnvType]: """ Composes this container with a async function returning the same type. .. code:: python >>> import anyio >>> from returns.context import RequiresContextFutureResult >>> from returns.io import IOSuccess, IOFailure >>> async def function( ... number: int, ... ) -> RequiresContextFutureResult[str, int, int]: ... return RequiresContextFutureResult.from_value(number + 1) >>> assert anyio.run( ... 
RequiresContextFutureResult.from_value(1).bind_async( ... function, ... ), ... RequiresContextFutureResult.no_args, ... ) == IOSuccess(2) >>> assert anyio.run( ... RequiresContextFutureResult.from_failure(1).bind_async( ... function, ... ), ... RequiresContextFutureResult.no_args, ... ) == IOFailure(1) """ return RequiresContextFutureResult( lambda deps: FutureResult(_reader_future_result.async_bind_async( function, self, deps, )), ) #: Alias for `bind_async_context_future_result` method, #: it is the same as `bind_async` here. bind_async_context_future_result = bind_async def bind_awaitable( self, function: Callable[[_ValueType], Awaitable[_NewValueType]], ) -> RequiresContextFutureResult[_NewValueType, _ErrorType, _EnvType]: """ Allows to compose a container and a regular ``async`` function. This function should return plain, non-container value. See :meth:`~RequiresContextFutureResult.bind_async` to bind ``async`` function that returns a container. .. code:: python >>> import anyio >>> from returns.context import RequiresContextFutureResult >>> from returns.io import IOSuccess, IOFailure >>> async def coroutine(x: int) -> int: ... return x + 1 >>> assert anyio.run( ... RequiresContextFutureResult.from_value(1).bind_awaitable( ... coroutine, ... ), ... RequiresContextFutureResult.no_args, ... ) == IOSuccess(2) >>> assert anyio.run( ... RequiresContextFutureResult.from_failure(1).bind_awaitable( ... coroutine, ... ), ... RequiresContextFutureResult.no_args, ... ) == IOFailure(1) """ return RequiresContextFutureResult( lambda deps: self(deps).bind_awaitable(function), ) def bind_result( self, function: Callable[[_ValueType], Result[_NewValueType, _ErrorType]], ) -> RequiresContextFutureResult[_NewValueType, _ErrorType, _EnvType]: """ Binds ``Result`` returning function to the current container. .. code:: python >>> import anyio >>> from returns.context import RequiresContextFutureResult >>> from returns.result import Success, Result >>> from returns.io import IOSuccess, IOFailure >>> def function(num: int) -> Result[int, str]: ... return Success(num + 1) >>> assert anyio.run( ... RequiresContextFutureResult.from_value(1).bind_result( ... function, ... ), ... RequiresContextFutureResult.no_args, ... ) == IOSuccess(2) >>> assert anyio.run( ... RequiresContextFutureResult.from_failure(':(').bind_result( ... function, ... ), ... RequiresContextFutureResult.no_args, ... ) == IOFailure(':(') """ return RequiresContextFutureResult( lambda deps: self(deps).bind_result(function), ) def bind_context( self, function: Callable[ [_ValueType], RequiresContext[_NewValueType, _EnvType], ], ) -> RequiresContextFutureResult[_NewValueType, _ErrorType, _EnvType]: """ Binds ``RequiresContext`` returning function to current container. .. code:: python >>> import anyio >>> from returns.context import RequiresContext >>> from returns.io import IOSuccess, IOFailure >>> def function(arg: int) -> RequiresContext[int, str]: ... return RequiresContext(lambda deps: len(deps) + arg) >>> assert anyio.run( ... RequiresContextFutureResult.from_value(2).bind_context( ... function, ... ), ... 'abc', ... ) == IOSuccess(5) >>> assert anyio.run( ... RequiresContextFutureResult.from_failure(0).bind_context( ... function, ... ), ... 'abc', ... 
) == IOFailure(0) """ return RequiresContextFutureResult( lambda deps: self(deps).map( lambda inner: function(inner)(deps), # type: ignore[misc] ), ) def bind_context_result( self, function: Callable[ [_ValueType], RequiresContextResult[_NewValueType, _ErrorType, _EnvType], ], ) -> RequiresContextFutureResult[_NewValueType, _ErrorType, _EnvType]: """ Binds ``RequiresContextResult`` returning function to the current one. .. code:: python >>> import anyio >>> from returns.context import RequiresContextResult >>> from returns.io import IOSuccess, IOFailure >>> from returns.result import Success >>> def function(arg: int) -> RequiresContextResult[int, int, str]: ... return RequiresContextResult( ... lambda deps: Success(len(deps) + arg), ... ) >>> instance = RequiresContextFutureResult.from_value( ... 2, ... ).bind_context_result( ... function, ... )('abc') >>> assert anyio.run(instance.awaitable) == IOSuccess(5) >>> instance = RequiresContextFutureResult.from_failure( ... 2, ... ).bind_context_result( ... function, ... )('abc') >>> assert anyio.run(instance.awaitable) == IOFailure(2) """ return RequiresContextFutureResult( lambda deps: self(deps).bind_result( lambda inner: function(inner)(deps), # type: ignore[misc] ), ) def bind_context_ioresult( self, function: Callable[ [_ValueType], RequiresContextIOResult[_NewValueType, _ErrorType, _EnvType], ], ) -> RequiresContextFutureResult[_NewValueType, _ErrorType, _EnvType]: """ Binds ``RequiresContextIOResult`` returning function to the current one. .. code:: python >>> import anyio >>> from returns.context import RequiresContextIOResult >>> from returns.io import IOSuccess, IOFailure >>> def function(arg: int) -> RequiresContextIOResult[int, int, str]: ... return RequiresContextIOResult( ... lambda deps: IOSuccess(len(deps) + arg), ... ) >>> instance = RequiresContextFutureResult.from_value( ... 2, ... ).bind_context_ioresult( ... function, ... )('abc') >>> assert anyio.run(instance.awaitable) == IOSuccess(5) >>> instance = RequiresContextFutureResult.from_failure( ... 2, ... ).bind_context_ioresult( ... function, ... )('abc') >>> assert anyio.run(instance.awaitable) == IOFailure(2) """ return RequiresContextFutureResult( lambda deps: self(deps).bind_ioresult( lambda inner: function(inner)(deps), # type: ignore[misc] ), ) def bind_io( self, function: Callable[[_ValueType], IO[_NewValueType]], ) -> RequiresContextFutureResult[_NewValueType, _ErrorType, _EnvType]: """ Binds ``IO`` returning function to the current container. .. code:: python >>> import anyio >>> from returns.context import RequiresContextFutureResult >>> from returns.io import IO, IOSuccess, IOFailure >>> def do_io(number: int) -> IO[str]: ... return IO(str(number)) # not IO operation actually >>> assert anyio.run( ... RequiresContextFutureResult.from_value(1).bind_io(do_io), ... RequiresContextFutureResult.no_args, ... ) == IOSuccess('1') >>> assert anyio.run( ... RequiresContextFutureResult.from_failure(1).bind_io(do_io), ... RequiresContextFutureResult.no_args, ... ) == IOFailure(1) """ return RequiresContextFutureResult( lambda deps: self(deps).bind_io(function), ) def bind_ioresult( self, function: Callable[[_ValueType], IOResult[_NewValueType, _ErrorType]], ) -> RequiresContextFutureResult[_NewValueType, _ErrorType, _EnvType]: """ Binds ``IOResult`` returning function to the current container. .. 
code:: python >>> import anyio >>> from returns.context import RequiresContextFutureResult >>> from returns.io import IOResult, IOSuccess, IOFailure >>> def function(num: int) -> IOResult[int, str]: ... return IOSuccess(num + 1) >>> assert anyio.run( ... RequiresContextFutureResult.from_value(1).bind_ioresult( ... function, ... ), ... RequiresContextFutureResult.no_args, ... ) == IOSuccess(2) >>> assert anyio.run( ... RequiresContextFutureResult.from_failure(':(').bind_ioresult( ... function, ... ), ... RequiresContextFutureResult.no_args, ... ) == IOFailure(':(') """ return RequiresContextFutureResult( lambda deps: self(deps).bind_ioresult(function), ) def bind_future( self, function: Callable[[_ValueType], Future[_NewValueType]], ) -> RequiresContextFutureResult[_NewValueType, _ErrorType, _EnvType]: """ Binds ``Future`` returning function to the current container. .. code:: python >>> import anyio >>> from returns.context import RequiresContextFutureResult >>> from returns.future import Future >>> from returns.io import IOSuccess, IOFailure >>> def function(num: int) -> Future[int]: ... return Future.from_value(num + 1) >>> assert anyio.run( ... RequiresContextFutureResult.from_value(1).bind_future( ... function, ... ), ... RequiresContextFutureResult.no_args, ... ) == IOSuccess(2) >>> failed = RequiresContextFutureResult.from_failure(':(') >>> assert anyio.run( ... failed.bind_future(function), ... RequiresContextFutureResult.no_args, ... ) == IOFailure(':(') """ return RequiresContextFutureResult( lambda deps: self(deps).bind_future(function), ) def bind_future_result( self, function: Callable[ [_ValueType], FutureResult[_NewValueType, _ErrorType], ], ) -> RequiresContextFutureResult[_NewValueType, _ErrorType, _EnvType]: """ Binds ``FutureResult`` returning function to the current container. .. code:: python >>> import anyio >>> from returns.context import RequiresContextFutureResult >>> from returns.future import FutureResult >>> from returns.io import IOSuccess, IOFailure >>> def function(num: int) -> FutureResult[int, str]: ... return FutureResult.from_value(num + 1) >>> assert anyio.run( ... RequiresContextFutureResult.from_value(1).bind_future_result( ... function, ... ), ... RequiresContextFutureResult.no_args, ... ) == IOSuccess(2) >>> failed = RequiresContextFutureResult.from_failure(':(') >>> assert anyio.run( ... failed.bind_future_result(function), ... RequiresContextFutureResult.no_args, ... ) == IOFailure(':(') """ return RequiresContextFutureResult( lambda deps: self(deps).bind(function), ) def bind_async_future( self, function: Callable[[_ValueType], Awaitable[Future[_NewValueType]]], ) -> RequiresContextFutureResult[_NewValueType, _ErrorType, _EnvType]: """ Binds ``Future`` returning async function to the current container. .. code:: python >>> import anyio >>> from returns.context import RequiresContextFutureResult >>> from returns.future import Future >>> from returns.io import IOSuccess, IOFailure >>> async def function(num: int) -> Future[int]: ... return Future.from_value(num + 1) >>> assert anyio.run( ... RequiresContextFutureResult.from_value(1).bind_async_future( ... function, ... ), ... RequiresContextFutureResult.no_args, ... ) == IOSuccess(2) >>> failed = RequiresContextFutureResult.from_failure(':(') >>> assert anyio.run( ... failed.bind_async_future(function), ... RequiresContextFutureResult.no_args, ... 
) == IOFailure(':(') """ return RequiresContextFutureResult( lambda deps: self(deps).bind_async_future(function), ) def bind_async_future_result( self, function: Callable[ [_ValueType], Awaitable[FutureResult[_NewValueType, _ErrorType]], ], ) -> RequiresContextFutureResult[_NewValueType, _ErrorType, _EnvType]: """ Bind ``FutureResult`` returning async function to the current container. .. code:: python >>> import anyio >>> from returns.context import RequiresContextFutureResult >>> from returns.future import FutureResult >>> from returns.io import IOSuccess, IOFailure >>> async def function(num: int) -> FutureResult[int, str]: ... return FutureResult.from_value(num + 1) >>> assert anyio.run( ... RequiresContextFutureResult.from_value( ... 1, ... ).bind_async_future_result( ... function, ... ), ... RequiresContextFutureResult.no_args, ... ) == IOSuccess(2) >>> failed = RequiresContextFutureResult.from_failure(':(') >>> assert anyio.run( ... failed.bind_async_future_result(function), ... RequiresContextFutureResult.no_args, ... ) == IOFailure(':(') """ return RequiresContextFutureResult( lambda deps: self(deps).bind_async(function), ) def alt( self, function: Callable[[_ErrorType], _NewErrorType], ) -> RequiresContextFutureResult[_ValueType, _NewErrorType, _EnvType]: """ Composes failed container with a pure function. .. code:: python >>> import anyio >>> from returns.context import RequiresContextFutureResult >>> from returns.io import IOSuccess, IOFailure >>> assert anyio.run( ... RequiresContextFutureResult.from_value(1).alt( ... lambda x: x + 1, ... ), ... RequiresContextFutureResult.no_args, ... ) == IOSuccess(1) >>> assert anyio.run( ... RequiresContextFutureResult.from_failure(1).alt( ... lambda x: x + 1, ... ), ... RequiresContextFutureResult.no_args, ... ) == IOFailure(2) """ return RequiresContextFutureResult( lambda deps: self(deps).alt(function), ) def lash( self, function: Callable[ [_ErrorType], Kind3[ RequiresContextFutureResult, _ValueType, _NewErrorType, _EnvType, ], ], ) -> RequiresContextFutureResult[_ValueType, _NewErrorType, _EnvType]: """ Composes this container with a function returning the same type. .. code:: python >>> import anyio >>> from returns.context import RequiresContextFutureResult >>> from returns.future import FutureResult >>> from returns.io import IOSuccess >>> def lashable( ... arg: str, ... ) -> RequiresContextFutureResult[str, str, str]: ... return RequiresContextFutureResult( ... lambda deps: FutureResult.from_value( ... deps + arg, ... ), ... ) >>> assert anyio.run( ... RequiresContextFutureResult.from_value('a').lash(lashable), ... 'c', ... ) == IOSuccess('a') >>> assert anyio.run( ... RequiresContextFutureResult.from_failure('aa').lash( ... lashable, ... ), ... 'b', ... ) == IOSuccess('baa') """ return RequiresContextFutureResult( lambda deps: self(deps).lash( lambda inner: function(inner)(deps), # type: ignore ), ) def compose_result( self, function: Callable[ [Result[_ValueType, _ErrorType]], Kind3[ RequiresContextFutureResult, _NewValueType, _ErrorType, _EnvType, ], ], ) -> RequiresContextFutureResult[_NewValueType, _ErrorType, _EnvType]: """ Composes inner ``Result`` with ``ReaderFutureResult`` returning func. Can be useful when you need an access to both states of the result. .. code:: python >>> import anyio >>> from returns.context import ReaderFutureResult, NoDeps >>> from returns.io import IOSuccess, IOFailure >>> from returns.result import Result >>> def count( ... container: Result[int, int], ... 
) -> ReaderFutureResult[int, int, NoDeps]: ... return ReaderFutureResult.from_result( ... container.map(lambda x: x + 1).alt(abs), ... ) >>> success = ReaderFutureResult.from_value(1) >>> failure = ReaderFutureResult.from_failure(-1) >>> assert anyio.run( ... success.compose_result(count), ReaderFutureResult.no_args, ... ) == IOSuccess(2) >>> assert anyio.run( ... failure.compose_result(count), ReaderFutureResult.no_args, ... ) == IOFailure(1) """ return RequiresContextFutureResult( lambda deps: FutureResult( _reader_future_result.async_compose_result( function, self, deps, ), ), ) def modify_env( self, function: Callable[[_NewEnvType], _EnvType], ) -> RequiresContextFutureResult[_ValueType, _ErrorType, _NewEnvType]: """ Allows to modify the environment type. .. code:: python >>> import anyio >>> from returns.future import future_safe, asyncify >>> from returns.context import RequiresContextFutureResultE >>> from returns.io import IOSuccess >>> def div(arg: int) -> RequiresContextFutureResultE[float, int]: ... return RequiresContextFutureResultE( ... future_safe(asyncify(lambda deps: arg / deps)), ... ) >>> assert anyio.run(div(3).modify_env(int), '2') == IOSuccess(1.5) >>> assert anyio.run(div(3).modify_env(int), '0').failure() """ return RequiresContextFutureResult(lambda deps: self(function(deps))) @classmethod def ask( cls, ) -> RequiresContextFutureResult[_EnvType, _ErrorType, _EnvType]: """ Is used to get the current dependencies inside the call stack. Similar to :meth:`returns.context.requires_context.RequiresContext.ask`, but returns ``FutureResult`` instead of a regular value. Please, refer to the docs there to learn how to use it. One important note that is worth duplicating here: you might need to provide type annotations explicitly, so ``mypy`` will know about it statically. .. code:: python >>> import anyio >>> from returns.context import RequiresContextFutureResultE >>> from returns.io import IOSuccess >>> assert anyio.run( ... RequiresContextFutureResultE[int, int].ask().map(str), ... 1, ... ) == IOSuccess('1') """ return RequiresContextFutureResult(FutureResult.from_value) @classmethod def from_result( cls, inner_value: Result[_NewValueType, _NewErrorType], ) -> RequiresContextFutureResult[_NewValueType, _NewErrorType, NoDeps]: """ Creates new container with ``Result`` as a unit value. .. code:: python >>> import anyio >>> from returns.context import RequiresContextFutureResult >>> from returns.result import Success, Failure >>> from returns.io import IOSuccess, IOFailure >>> assert anyio.run( ... RequiresContextFutureResult.from_result(Success(1)), ... RequiresContextFutureResult.no_args, ... ) == IOSuccess(1) >>> assert anyio.run( ... RequiresContextFutureResult.from_result(Failure(1)), ... RequiresContextFutureResult.no_args, ... ) == IOFailure(1) """ return RequiresContextFutureResult( lambda _: FutureResult.from_result(inner_value), ) @classmethod def from_io( cls, inner_value: IO[_NewValueType], ) -> RequiresContextFutureResult[_NewValueType, Any, NoDeps]: """ Creates new container from successful ``IO`` value. .. code:: python >>> import anyio >>> from returns.io import IO, IOSuccess >>> from returns.context import RequiresContextFutureResult >>> assert anyio.run( ... RequiresContextFutureResult.from_io(IO(1)), ... RequiresContextFutureResult.no_args, ... 
) == IOSuccess(1) """ return RequiresContextFutureResult( lambda deps: FutureResult.from_io(inner_value), ) @classmethod def from_failed_io( cls, inner_value: IO[_NewErrorType], ) -> RequiresContextFutureResult[Any, _NewErrorType, NoDeps]: """ Creates a new container from failed ``IO`` value. .. code:: python >>> import anyio >>> from returns.io import IO, IOFailure >>> from returns.context import RequiresContextFutureResult >>> assert anyio.run( ... RequiresContextFutureResult.from_failed_io(IO(1)), ... RequiresContextFutureResult.no_args, ... ) == IOFailure(1) """ return RequiresContextFutureResult( lambda deps: FutureResult.from_failed_io(inner_value), ) @classmethod def from_ioresult( cls, inner_value: IOResult[_NewValueType, _NewErrorType], ) -> RequiresContextFutureResult[_NewValueType, _NewErrorType, NoDeps]: """ Creates new container with ``IOResult`` as a unit value. .. code:: python >>> import anyio >>> from returns.context import RequiresContextFutureResult >>> from returns.io import IOSuccess, IOFailure >>> assert anyio.run( ... RequiresContextFutureResult.from_ioresult(IOSuccess(1)), ... RequiresContextFutureResult.no_args, ... ) == IOSuccess(1) >>> assert anyio.run( ... RequiresContextFutureResult.from_ioresult(IOFailure(1)), ... RequiresContextFutureResult.no_args, ... ) == IOFailure(1) """ return RequiresContextFutureResult( lambda _: FutureResult.from_ioresult(inner_value), ) @classmethod def from_future( cls, inner_value: Future[_NewValueType], ) -> RequiresContextFutureResult[_NewValueType, Any, NoDeps]: """ Creates new container with successful ``Future`` as a unit value. .. code:: python >>> import anyio >>> from returns.context import RequiresContextFutureResult >>> from returns.future import Future >>> from returns.io import IOSuccess >>> assert anyio.run( ... RequiresContextFutureResult.from_future(Future.from_value(1)), ... RequiresContextFutureResult.no_args, ... ) == IOSuccess(1) """ return RequiresContextFutureResult( lambda _: FutureResult.from_future(inner_value), ) @classmethod def from_failed_future( cls, inner_value: Future[_NewErrorType], ) -> RequiresContextFutureResult[Any, _NewErrorType, NoDeps]: """ Creates new container with failed ``Future`` as a unit value. .. code:: python >>> import anyio >>> from returns.context import RequiresContextFutureResult >>> from returns.future import Future >>> from returns.io import IOFailure >>> assert anyio.run( ... RequiresContextFutureResult.from_failed_future( ... Future.from_value(1), ... ), ... RequiresContextFutureResult.no_args, ... ) == IOFailure(1) """ return RequiresContextFutureResult( lambda _: FutureResult.from_failed_future(inner_value), ) @classmethod def from_future_result_context( cls, inner_value: ReaderFutureResult[_NewValueType, _NewErrorType, _NewEnvType], ) -> ReaderFutureResult[_NewValueType, _NewErrorType, _NewEnvType]: """ Creates new container with ``ReaderFutureResult`` as a unit value. .. code:: python >>> import anyio >>> from returns.context import RequiresContextFutureResult >>> from returns.io import IOSuccess, IOFailure >>> assert anyio.run( ... RequiresContextFutureResult.from_future_result_context( ... RequiresContextFutureResult.from_value(1), ... ), ... RequiresContextFutureResult.no_args, ... ) == IOSuccess(1) >>> assert anyio.run( ... RequiresContextFutureResult.from_future_result_context( ... RequiresContextFutureResult.from_failure(1), ... ), ... RequiresContextFutureResult.no_args, ... 
) == IOFailure(1) """ return inner_value @classmethod def from_future_result( cls, inner_value: FutureResult[_NewValueType, _NewErrorType], ) -> RequiresContextFutureResult[_NewValueType, _NewErrorType, NoDeps]: """ Creates new container with ``FutureResult`` as a unit value. .. code:: python >>> import anyio >>> from returns.context import RequiresContextFutureResult >>> from returns.future import FutureResult >>> from returns.io import IOSuccess, IOFailure >>> assert anyio.run( ... RequiresContextFutureResult.from_future_result( ... FutureResult.from_value(1), ... ), ... RequiresContextFutureResult.no_args, ... ) == IOSuccess(1) >>> assert anyio.run( ... RequiresContextFutureResult.from_future_result( ... FutureResult.from_failure(1), ... ), ... RequiresContextFutureResult.no_args, ... ) == IOFailure(1) """ return RequiresContextFutureResult(lambda _: inner_value) @classmethod def from_typecast( cls, inner_value: RequiresContext[ FutureResult[_NewValueType, _NewErrorType], _EnvType, ], ) -> RequiresContextFutureResult[_NewValueType, _NewErrorType, _EnvType]: """ You might end up with ``RequiresContext[FutureResult]`` as a value. This method is designed to turn it into ``RequiresContextFutureResult``. It will save all the typing information. It is just more useful! .. code:: python >>> import anyio >>> from returns.context import RequiresContext >>> from returns.future import FutureResult >>> from returns.io import IOSuccess, IOFailure >>> assert anyio.run( ... RequiresContextFutureResult.from_typecast( ... RequiresContext.from_value(FutureResult.from_value(1)), ... ), ... RequiresContextFutureResult.no_args, ... ) == IOSuccess(1) >>> assert anyio.run( ... RequiresContextFutureResult.from_typecast( ... RequiresContext.from_value(FutureResult.from_failure(1)), ... ), ... RequiresContextFutureResult.no_args, ... ) == IOFailure(1) """ return RequiresContextFutureResult(inner_value) @classmethod def from_context( cls, inner_value: RequiresContext[_NewValueType, _NewEnvType], ) -> RequiresContextFutureResult[_NewValueType, Any, _NewEnvType]: """ Creates new container from ``RequiresContext`` as a success unit. .. code:: python >>> import anyio >>> from returns.context import RequiresContext >>> from returns.io import IOSuccess >>> assert anyio.run( ... RequiresContextFutureResult.from_context( ... RequiresContext.from_value(1), ... ), ... RequiresContextFutureResult.no_args, ... ) == IOSuccess(1) """ return RequiresContextFutureResult( lambda deps: FutureResult.from_value(inner_value(deps)), ) @classmethod def from_failed_context( cls, inner_value: RequiresContext[_NewValueType, _NewEnvType], ) -> RequiresContextFutureResult[Any, _NewValueType, _NewEnvType]: """ Creates new container from ``RequiresContext`` as a failure unit. .. code:: python >>> import anyio >>> from returns.context import RequiresContext >>> from returns.io import IOFailure >>> assert anyio.run( ... RequiresContextFutureResult.from_failed_context( ... RequiresContext.from_value(1), ... ), ... RequiresContextFutureResult.no_args, ... ) == IOFailure(1) """ return RequiresContextFutureResult( lambda deps: FutureResult.from_failure(inner_value(deps)), ) @classmethod def from_result_context( cls, inner_value: RequiresContextResult[ _NewValueType, _NewErrorType, _NewEnvType, ], ) -> ReaderFutureResult[_NewValueType, _NewErrorType, _NewEnvType]: """ Creates new container from ``RequiresContextResult`` as a unit value. .. 
code:: python >>> import anyio >>> from returns.context import RequiresContextResult >>> from returns.io import IOSuccess, IOFailure >>> assert anyio.run( ... RequiresContextFutureResult.from_result_context( ... RequiresContextResult.from_value(1), ... ), ... RequiresContextFutureResult.no_args, ... ) == IOSuccess(1) >>> assert anyio.run( ... RequiresContextFutureResult.from_result_context( ... RequiresContextResult.from_failure(1), ... ), ... RequiresContextFutureResult.no_args, ... ) == IOFailure(1) """ return RequiresContextFutureResult( lambda deps: FutureResult.from_result(inner_value(deps)), ) @classmethod def from_ioresult_context( cls, inner_value: ReaderIOResult[_NewValueType, _NewErrorType, _NewEnvType], ) -> ReaderFutureResult[_NewValueType, _NewErrorType, _NewEnvType]: """ Creates new container from ``RequiresContextIOResult`` as a unit value. .. code:: python >>> import anyio >>> from returns.context import RequiresContextIOResult >>> from returns.io import IOSuccess, IOFailure >>> assert anyio.run( ... RequiresContextFutureResult.from_ioresult_context( ... RequiresContextIOResult.from_value(1), ... ), ... RequiresContextFutureResult.no_args, ... ) == IOSuccess(1) >>> assert anyio.run( ... RequiresContextFutureResult.from_ioresult_context( ... RequiresContextIOResult.from_failure(1), ... ), ... RequiresContextFutureResult.no_args, ... ) == IOFailure(1) """ return RequiresContextFutureResult( lambda deps: FutureResult.from_ioresult(inner_value(deps)), ) @classmethod def from_value( cls, inner_value: _FirstType, ) -> RequiresContextFutureResult[_FirstType, Any, NoDeps]: """ Creates new container with successful ``FutureResult`` as a unit value. .. code:: python >>> import anyio >>> from returns.context import RequiresContextFutureResult >>> from returns.io import IOSuccess >>> assert anyio.run(RequiresContextFutureResult.from_value(1)( ... RequiresContextFutureResult.no_args, ... ).awaitable) == IOSuccess(1) """ return RequiresContextFutureResult( lambda _: FutureResult.from_value(inner_value), ) @classmethod def from_failure( cls, inner_value: _FirstType, ) -> RequiresContextFutureResult[Any, _FirstType, NoDeps]: """ Creates new container with failed ``FutureResult`` as a unit value. .. code:: python >>> import anyio >>> from returns.context import RequiresContextFutureResult >>> from returns.io import IOFailure >>> assert anyio.run(RequiresContextFutureResult.from_failure(1)( ... RequiresContextFutureResult.no_args, ... ).awaitable) == IOFailure(1) """ return RequiresContextFutureResult( lambda _: FutureResult.from_failure(inner_value), ) # Aliases: #: Alias for a popular case when ``Result`` has ``Exception`` as error type. RequiresContextFutureResultE: TypeAlias = RequiresContextFutureResult[ _ValueType, Exception, _EnvType, ] #: Sometimes `RequiresContextFutureResult` is too long to type. ReaderFutureResult: TypeAlias = RequiresContextFutureResult #: Alias to save you some typing. Uses ``Exception`` as error type. 
ReaderFutureResultE: TypeAlias = RequiresContextFutureResult[ _ValueType, Exception, _EnvType, ] returns-0.24.0/returns/context/requires_context_ioresult.py000066400000000000000000000734141472312074000244010ustar00rootroot00000000000000from __future__ import annotations from collections.abc import Callable from typing import TYPE_CHECKING, Any, ClassVar, TypeAlias, TypeVar, final from returns.context import NoDeps from returns.interfaces.specific import reader_ioresult from returns.io import IO, IOFailure, IOResult, IOSuccess from returns.primitives.container import BaseContainer from returns.primitives.hkt import Kind3, SupportsKind3, dekind from returns.result import Result if TYPE_CHECKING: from returns.context.requires_context import RequiresContext from returns.context.requires_context_result import RequiresContextResult # Context: _EnvType = TypeVar('_EnvType', contravariant=True) _NewEnvType = TypeVar('_NewEnvType') # Result: _ValueType = TypeVar('_ValueType', covariant=True) _NewValueType = TypeVar('_NewValueType') _ErrorType = TypeVar('_ErrorType') _NewErrorType = TypeVar('_NewErrorType') # Helpers: _FirstType = TypeVar('_FirstType') @final class RequiresContextIOResult( # type: ignore[type-var] BaseContainer, SupportsKind3['RequiresContextIOResult', _ValueType, _ErrorType, _EnvType], reader_ioresult.ReaderIOResultBasedN[_ValueType, _ErrorType, _EnvType], ): """ The ``RequiresContextIOResult`` combinator. See :class:`returns.context.requires_context.RequiresContext` and :class:`returns.context.requires_context_result.RequiresContextResult` for more docs. This is just a handy wrapper around ``RequiresContext[IOResult[a, b], env]`` which represents a context-dependent impure operation that might fail. It has several important differences from the regular ``Result`` classes. It does not have ``Success`` and ``Failure`` subclasses. Because, the computation is not yet performed. And we cannot know the type in advance. So, this is a thin wrapper, without any changes in logic. Why do we need this wrapper? That's just for better usability! .. code:: python >>> from returns.context import RequiresContext >>> from returns.io import IOSuccess, IOResult >>> def function(arg: int) -> IOResult[int, str]: ... return IOSuccess(arg + 1) >>> # Without wrapper: >>> assert RequiresContext.from_value(IOSuccess(1)).map( ... lambda ioresult: ioresult.bind(function), ... )(...) == IOSuccess(2) >>> # With wrapper: >>> assert RequiresContextIOResult.from_value(1).bind_ioresult( ... function, ... )(...) == IOSuccess(2) This way ``RequiresContextIOResult`` allows to simply work with: - raw values and pure functions - ``RequiresContext`` values and pure functions returning it - ``RequiresContextResult`` values and pure functions returning it - ``Result`` and pure functions returning it - ``IOResult`` and functions returning it - other ``RequiresContextIOResult`` related functions and values This is a complex type for complex tasks! .. rubric:: Important implementation details Due it is meaning, ``RequiresContextIOResult`` cannot have ``Success`` and ``Failure`` subclasses. We only have just one type. That's by design. Different converters are also not supported for this type. Use converters inside the ``RequiresContext`` context, not outside. 
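    For instance, here is a minimal sketch of how a failure
    short-circuits pure composition (``from_failure``, ``map`` and
    ``no_args`` are all documented further below in this class;
    nothing new is assumed):

    .. code:: python

      >>> from returns.io import IOFailure
      >>> assert RequiresContextIOResult.from_failure('boom').map(
      ...     lambda x: x + 1,
      ... )(RequiresContextIOResult.no_args) == IOFailure('boom')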
See also: - https://dev.to/gcanti/getting-started-with-fp-ts-reader-1ie5 - https://en.wikipedia.org/wiki/Lazy_evaluation - https://bit.ly/2R8l4WK - https://bit.ly/2RwP4fp """ __slots__ = () #: Inner value of `RequiresContext` #: is just a function that returns `IOResult`. #: This field has an extra 'RequiresContext' just because `mypy` needs it. _inner_value: Callable[[_EnvType], IOResult[_ValueType, _ErrorType]] #: A convenient placeholder to call methods created by `.from_value()`. no_args: ClassVar[NoDeps] = object() def __init__( self, inner_value: Callable[[_EnvType], IOResult[_ValueType, _ErrorType]], ) -> None: """ Public constructor for this type. Also required for typing. Only allows functions of kind ``* -> *`` and returning :class:`returns.result.Result` instances. .. code:: python >>> from returns.context import RequiresContextIOResult >>> from returns.io import IOSuccess >>> str(RequiresContextIOResult(lambda deps: IOSuccess(deps + 1))) ' at ...>>' """ super().__init__(inner_value) def __call__(self, deps: _EnvType) -> IOResult[_ValueType, _ErrorType]: """ Evaluates the wrapped function. .. code:: python >>> from returns.context import RequiresContextIOResult >>> from returns.io import IOSuccess >>> def first(lg: bool) -> RequiresContextIOResult[int, str, float]: ... # `deps` has `float` type here: ... return RequiresContextIOResult( ... lambda deps: IOSuccess(deps if lg else -deps), ... ) >>> instance = first(False) >>> assert instance(3.5) == IOSuccess(-3.5) In other things, it is a regular Python magic method. """ return self._inner_value(deps) def swap( self, ) -> RequiresContextIOResult[_ErrorType, _ValueType, _EnvType]: """ Swaps value and error types. So, values become errors and errors become values. It is useful when you have to work with errors a lot. And since we have a lot of ``.bind_`` related methods and only a single ``.lash`` - it is easier to work with values. .. code:: python >>> from returns.context import RequiresContextIOResult >>> from returns.io import IOSuccess, IOFailure >>> success = RequiresContextIOResult.from_value(1) >>> failure = RequiresContextIOResult.from_failure(1) >>> assert success.swap()(...) == IOFailure(1) >>> assert failure.swap()(...) == IOSuccess(1) """ return RequiresContextIOResult(lambda deps: self(deps).swap()) def map( self, function: Callable[[_ValueType], _NewValueType], ) -> RequiresContextIOResult[_NewValueType, _ErrorType, _EnvType]: """ Composes successful container with a pure function. .. code:: python >>> from returns.context import RequiresContextIOResult >>> from returns.io import IOSuccess, IOFailure >>> assert RequiresContextIOResult.from_value(1).map( ... lambda x: x + 1, ... )(...) == IOSuccess(2) >>> assert RequiresContextIOResult.from_failure(1).map( ... lambda x: x + 1, ... )(...) == IOFailure(1) """ return RequiresContextIOResult(lambda deps: self(deps).map(function)) def apply( self, container: Kind3[ RequiresContextIOResult, Callable[[_ValueType], _NewValueType], _ErrorType, _EnvType, ], ) -> RequiresContextIOResult[_NewValueType, _ErrorType, _EnvType]: """ Calls a wrapped function in a container on this container. .. code:: python >>> from returns.context import RequiresContextIOResult >>> from returns.io import IOSuccess, IOFailure >>> def transform(arg: str) -> str: ... return arg + 'b' >>> assert RequiresContextIOResult.from_value('a').apply( ... RequiresContextIOResult.from_value(transform), ... )(...) == IOSuccess('ab') >>> assert RequiresContextIOResult.from_value('a').apply( ... 
RequiresContextIOResult.from_failure(1), ... )(...) == IOFailure(1) >>> assert RequiresContextIOResult.from_failure('a').apply( ... RequiresContextIOResult.from_value(transform), ... )(...) == IOFailure('a') >>> assert RequiresContextIOResult.from_failure('a').apply( ... RequiresContextIOResult.from_failure('b'), ... )(...) == IOFailure('a') """ return RequiresContextIOResult( lambda deps: self(deps).apply(dekind(container)(deps)), ) def bind( self, function: Callable[ [_ValueType], Kind3[ RequiresContextIOResult, _NewValueType, _ErrorType, _EnvType, ], ], ) -> RequiresContextIOResult[_NewValueType, _ErrorType, _EnvType]: """ Composes this container with a function returning the same type. .. code:: python >>> from returns.context import RequiresContextIOResult >>> from returns.io import IOSuccess, IOFailure >>> def first(lg: bool) -> RequiresContextIOResult[int, int, float]: ... # `deps` has `float` type here: ... return RequiresContextIOResult( ... lambda deps: IOSuccess(deps) if lg else IOFailure(-deps), ... ) >>> def second( ... number: int, ... ) -> RequiresContextIOResult[str, int, float]: ... # `deps` has `float` type here: ... return RequiresContextIOResult( ... lambda deps: IOSuccess('>=' if number >= deps else '<'), ... ) >>> assert first(True).bind(second)(1) == IOSuccess('>=') >>> assert first(False).bind(second)(2) == IOFailure(-2) """ return RequiresContextIOResult( lambda deps: self(deps).bind( lambda inner: dekind( # type: ignore[misc] function(inner), )(deps), ), ) #: Alias for `bind_context_ioresult` method, it is the same as `bind` here. bind_context_ioresult = bind def bind_result( self, function: Callable[[_ValueType], Result[_NewValueType, _ErrorType]], ) -> RequiresContextIOResult[_NewValueType, _ErrorType, _EnvType]: """ Binds ``Result`` returning function to the current container. .. code:: python >>> from returns.context import RequiresContextIOResult >>> from returns.result import Failure, Result, Success >>> from returns.io import IOSuccess, IOFailure >>> def function(num: int) -> Result[int, str]: ... return Success(num + 1) if num > 0 else Failure('<0') >>> assert RequiresContextIOResult.from_value(1).bind_result( ... function, ... )(RequiresContextIOResult.no_args) == IOSuccess(2) >>> assert RequiresContextIOResult.from_value(0).bind_result( ... function, ... )(RequiresContextIOResult.no_args) == IOFailure('<0') >>> assert RequiresContextIOResult.from_failure(':(').bind_result( ... function, ... )(RequiresContextIOResult.no_args) == IOFailure(':(') """ return RequiresContextIOResult( lambda deps: self(deps).bind_result(function), ) def bind_context( self, function: Callable[ [_ValueType], RequiresContext[_NewValueType, _EnvType], ], ) -> RequiresContextIOResult[_NewValueType, _ErrorType, _EnvType]: """ Binds ``RequiresContext`` returning function to current container. .. code:: python >>> from returns.context import RequiresContext >>> from returns.io import IOSuccess, IOFailure >>> def function(arg: int) -> RequiresContext[int, str]: ... return RequiresContext(lambda deps: len(deps) + arg) >>> assert function(2)('abc') == 5 >>> assert RequiresContextIOResult.from_value(2).bind_context( ... function, ... )('abc') == IOSuccess(5) >>> assert RequiresContextIOResult.from_failure(2).bind_context( ... function, ... 
)('abc') == IOFailure(2) """ return RequiresContextIOResult( lambda deps: self(deps).map( lambda inner: function(inner)(deps), # type: ignore[misc] ), ) def bind_context_result( self, function: Callable[ [_ValueType], RequiresContextResult[_NewValueType, _ErrorType, _EnvType], ], ) -> RequiresContextIOResult[_NewValueType, _ErrorType, _EnvType]: """ Binds ``RequiresContextResult`` returning function to the current one. .. code:: python >>> from returns.context import RequiresContextResult >>> from returns.io import IOSuccess, IOFailure >>> from returns.result import Success, Failure >>> def function(arg: int) -> RequiresContextResult[int, int, str]: ... if arg > 0: ... return RequiresContextResult( ... lambda deps: Success(len(deps) + arg), ... ) ... return RequiresContextResult( ... lambda deps: Failure(len(deps) + arg), ... ) >>> assert function(2)('abc') == Success(5) >>> assert function(-1)('abc') == Failure(2) >>> assert RequiresContextIOResult.from_value( ... 2, ... ).bind_context_result( ... function, ... )('abc') == IOSuccess(5) >>> assert RequiresContextIOResult.from_value( ... -1, ... ).bind_context_result( ... function, ... )('abc') == IOFailure(2) >>> assert RequiresContextIOResult.from_failure( ... 2, ... ).bind_context_result( ... function, ... )('abc') == IOFailure(2) """ return RequiresContextIOResult( lambda deps: self(deps).bind_result( lambda inner: function(inner)(deps), # type: ignore[misc] ), ) def bind_io( self, function: Callable[[_ValueType], IO[_NewValueType]], ) -> RequiresContextIOResult[_NewValueType, _ErrorType, _EnvType]: """ Binds ``IO`` returning function to the current container. .. code:: python >>> from returns.context import RequiresContextIOResult >>> from returns.io import IO, IOSuccess, IOFailure >>> def function(number: int) -> IO[str]: ... return IO(str(number)) >>> assert RequiresContextIOResult.from_value(1).bind_io( ... function, ... )(RequiresContextIOResult.no_args) == IOSuccess('1') >>> assert RequiresContextIOResult.from_failure(1).bind_io( ... function, ... )(RequiresContextIOResult.no_args) == IOFailure(1) """ return RequiresContextIOResult( lambda deps: self(deps).bind_io(function), ) def bind_ioresult( self, function: Callable[[_ValueType], IOResult[_NewValueType, _ErrorType]], ) -> RequiresContextIOResult[_NewValueType, _ErrorType, _EnvType]: """ Binds ``IOResult`` returning function to the current container. .. code:: python >>> from returns.context import RequiresContextIOResult >>> from returns.io import IOResult, IOSuccess, IOFailure >>> def function(num: int) -> IOResult[int, str]: ... return IOSuccess(num + 1) if num > 0 else IOFailure('<0') >>> assert RequiresContextIOResult.from_value(1).bind_ioresult( ... function, ... )(RequiresContextIOResult.no_args) == IOSuccess(2) >>> assert RequiresContextIOResult.from_value(0).bind_ioresult( ... function, ... )(RequiresContextIOResult.no_args) == IOFailure('<0') >>> assert RequiresContextIOResult.from_failure(':(').bind_ioresult( ... function, ... )(RequiresContextIOResult.no_args) == IOFailure(':(') """ return RequiresContextIOResult( lambda deps: self(deps).bind(function), ) def alt( self, function: Callable[[_ErrorType], _NewErrorType], ) -> RequiresContextIOResult[_ValueType, _NewErrorType, _EnvType]: """ Composes failed container with a pure function. .. code:: python >>> from returns.context import RequiresContextIOResult >>> from returns.io import IOSuccess, IOFailure >>> assert RequiresContextIOResult.from_value(1).alt( ... lambda x: x + 1, ... )(...) 
== IOSuccess(1) >>> assert RequiresContextIOResult.from_failure(1).alt( ... lambda x: x + 1, ... )(...) == IOFailure(2) """ return RequiresContextIOResult(lambda deps: self(deps).alt(function)) def lash( self, function: Callable[ [_ErrorType], Kind3[ RequiresContextIOResult, _ValueType, _NewErrorType, _EnvType, ], ], ) -> RequiresContextIOResult[_ValueType, _NewErrorType, _EnvType]: """ Composes this container with a function returning the same type. .. code:: python >>> from returns.context import RequiresContextIOResult >>> from returns.io import IOSuccess, IOFailure >>> def lashable( ... arg: str, ... ) -> RequiresContextIOResult[str, str, str]: ... if len(arg) > 1: ... return RequiresContextIOResult( ... lambda deps: IOSuccess(deps + arg), ... ) ... return RequiresContextIOResult( ... lambda deps: IOFailure(arg + deps), ... ) >>> assert RequiresContextIOResult.from_value('a').lash( ... lashable, ... )('c') == IOSuccess('a') >>> assert RequiresContextIOResult.from_failure('a').lash( ... lashable, ... )('c') == IOFailure('ac') >>> assert RequiresContextIOResult.from_failure('aa').lash( ... lashable, ... )('b') == IOSuccess('baa') """ return RequiresContextIOResult( lambda deps: self(deps).lash( lambda inner: function(inner)(deps), # type: ignore ), ) def compose_result( self, function: Callable[ [Result[_ValueType, _ErrorType]], Kind3[ RequiresContextIOResult, _NewValueType, _ErrorType, _EnvType, ], ], ) -> RequiresContextIOResult[_NewValueType, _ErrorType, _EnvType]: """ Composes inner ``Result`` with ``ReaderIOResult`` returning function. Can be useful when you need an access to both states of the result. .. code:: python >>> from returns.context import ReaderIOResult, NoDeps >>> from returns.io import IOSuccess, IOFailure >>> from returns.result import Result >>> def count( ... container: Result[int, int], ... ) -> ReaderIOResult[int, int, NoDeps]: ... return ReaderIOResult.from_result( ... container.map(lambda x: x + 1).alt(abs), ... ) >>> success = ReaderIOResult.from_value(1) >>> failure = ReaderIOResult.from_failure(-1) >>> assert success.compose_result(count)(...) == IOSuccess(2) >>> assert failure.compose_result(count)(...) == IOFailure(1) """ return RequiresContextIOResult( lambda deps: dekind( function(self(deps)._inner_value), # noqa: WPS437 )(deps), ) def modify_env( self, function: Callable[[_NewEnvType], _EnvType], ) -> RequiresContextIOResult[_ValueType, _ErrorType, _NewEnvType]: """ Allows to modify the environment type. .. code:: python >>> from returns.context import RequiresContextIOResultE >>> from returns.io import IOSuccess, impure_safe >>> def div(arg: int) -> RequiresContextIOResultE[float, int]: ... return RequiresContextIOResultE( ... impure_safe(lambda deps: arg / deps), ... ) >>> assert div(3).modify_env(int)('2') == IOSuccess(1.5) >>> assert div(3).modify_env(int)('0').failure() """ return RequiresContextIOResult(lambda deps: self(function(deps))) @classmethod def ask(cls) -> RequiresContextIOResult[_EnvType, _ErrorType, _EnvType]: """ Is used to get the current dependencies inside the call stack. Similar to :meth:`returns.context.requires_context.RequiresContext.ask`, but returns ``IOResult`` instead of a regular value. Please, refer to the docs there to learn how to use it. One important note that is worth duplicating here: you might need to provide ``_EnvType`` explicitly, so ``mypy`` will know about it statically. .. 
code:: python >>> from returns.context import RequiresContextIOResultE >>> from returns.io import IOSuccess >>> assert RequiresContextIOResultE[int, int].ask().map( ... str, ... )(1) == IOSuccess('1') """ return RequiresContextIOResult(IOSuccess) @classmethod def from_result( cls, inner_value: Result[_NewValueType, _NewErrorType], ) -> RequiresContextIOResult[_NewValueType, _NewErrorType, NoDeps]: """ Creates new container with ``Result`` as a unit value. .. code:: python >>> from returns.context import RequiresContextIOResult >>> from returns.result import Success, Failure >>> from returns.io import IOSuccess, IOFailure >>> deps = RequiresContextIOResult.no_args >>> assert RequiresContextIOResult.from_result( ... Success(1), ... )(deps) == IOSuccess(1) >>> assert RequiresContextIOResult.from_result( ... Failure(1), ... )(deps) == IOFailure(1) """ return RequiresContextIOResult( lambda _: IOResult.from_result(inner_value), ) @classmethod def from_io( cls, inner_value: IO[_NewValueType], ) -> RequiresContextIOResult[_NewValueType, Any, NoDeps]: """ Creates new container from successful ``IO`` value. .. code:: python >>> from returns.io import IO, IOSuccess >>> from returns.context import RequiresContextIOResult >>> assert RequiresContextIOResult.from_io(IO(1))( ... RequiresContextIOResult.no_args, ... ) == IOSuccess(1) """ return RequiresContextIOResult( lambda deps: IOResult.from_io(inner_value), ) @classmethod def from_failed_io( cls, inner_value: IO[_NewErrorType], ) -> RequiresContextIOResult[Any, _NewErrorType, NoDeps]: """ Creates a new container from failed ``IO`` value. .. code:: python >>> from returns.io import IO, IOFailure >>> from returns.context import RequiresContextIOResult >>> assert RequiresContextIOResult.from_failed_io(IO(1))( ... RequiresContextIOResult.no_args, ... ) == IOFailure(1) """ return RequiresContextIOResult( lambda deps: IOResult.from_failed_io(inner_value), ) @classmethod def from_ioresult( cls, inner_value: IOResult[_NewValueType, _NewErrorType], ) -> RequiresContextIOResult[_NewValueType, _NewErrorType, NoDeps]: """ Creates new container with ``IOResult`` as a unit value. .. code:: python >>> from returns.context import RequiresContextIOResult >>> from returns.io import IOSuccess, IOFailure >>> deps = RequiresContextIOResult.no_args >>> assert RequiresContextIOResult.from_ioresult( ... IOSuccess(1), ... )(deps) == IOSuccess(1) >>> assert RequiresContextIOResult.from_ioresult( ... IOFailure(1), ... )(deps) == IOFailure(1) """ return RequiresContextIOResult(lambda _: inner_value) @classmethod def from_ioresult_context( cls, inner_value: ReaderIOResult[_NewValueType, _NewErrorType, _NewEnvType], ) -> ReaderIOResult[_NewValueType, _NewErrorType, _NewEnvType]: """ Creates new container with ``ReaderIOResult`` as a unit value. .. code:: python >>> from returns.context import RequiresContextIOResult >>> from returns.io import IOSuccess, IOFailure >>> assert RequiresContextIOResult.from_ioresult_context( ... RequiresContextIOResult.from_value(1), ... )(...) == IOSuccess(1) >>> assert RequiresContextIOResult.from_ioresult_context( ... RequiresContextIOResult.from_failure(1), ... )(...) == IOFailure(1) """ return inner_value @classmethod def from_typecast( cls, inner_value: RequiresContext[ IOResult[_NewValueType, _NewErrorType], _EnvType, ], ) -> RequiresContextIOResult[_NewValueType, _NewErrorType, _EnvType]: """ You might end up with ``RequiresContext[IOResult]`` as a value. This method is designed to turn it into ``RequiresContextIOResult``. 
It will save all the typing information. It is just more useful! .. code:: python >>> from returns.context import RequiresContext >>> from returns.io import IOSuccess, IOFailure >>> assert RequiresContextIOResult.from_typecast( ... RequiresContext.from_value(IOSuccess(1)), ... )(RequiresContextIOResult.no_args) == IOSuccess(1) >>> assert RequiresContextIOResult.from_typecast( ... RequiresContext.from_value(IOFailure(1)), ... )(RequiresContextIOResult.no_args) == IOFailure(1) """ return RequiresContextIOResult(inner_value) @classmethod def from_context( cls, inner_value: RequiresContext[_NewValueType, _NewEnvType], ) -> RequiresContextIOResult[_NewValueType, Any, _NewEnvType]: """ Creates new container from ``RequiresContext`` as a success unit. .. code:: python >>> from returns.context import RequiresContext >>> from returns.io import IOSuccess >>> assert RequiresContextIOResult.from_context( ... RequiresContext.from_value(1), ... )(...) == IOSuccess(1) """ return RequiresContextIOResult( lambda deps: IOSuccess(inner_value(deps)), ) @classmethod def from_failed_context( cls, inner_value: RequiresContext[_NewValueType, _NewEnvType], ) -> RequiresContextIOResult[Any, _NewValueType, _NewEnvType]: """ Creates new container from ``RequiresContext`` as a failure unit. .. code:: python >>> from returns.context import RequiresContext >>> from returns.io import IOFailure >>> assert RequiresContextIOResult.from_failed_context( ... RequiresContext.from_value(1), ... )(...) == IOFailure(1) """ return RequiresContextIOResult( lambda deps: IOFailure(inner_value(deps)), ) @classmethod def from_result_context( cls, inner_value: RequiresContextResult[ _NewValueType, _NewErrorType, _NewEnvType, ], ) -> RequiresContextIOResult[_NewValueType, _NewErrorType, _NewEnvType]: """ Creates new container from ``RequiresContextResult`` as a unit value. .. code:: python >>> from returns.context import RequiresContextResult >>> from returns.io import IOSuccess, IOFailure >>> assert RequiresContextIOResult.from_result_context( ... RequiresContextResult.from_value(1), ... )(...) == IOSuccess(1) >>> assert RequiresContextIOResult.from_result_context( ... RequiresContextResult.from_failure(1), ... )(...) == IOFailure(1) """ return RequiresContextIOResult( lambda deps: IOResult.from_result(inner_value(deps)), ) @classmethod def from_value( cls, inner_value: _NewValueType, ) -> RequiresContextIOResult[_NewValueType, Any, NoDeps]: """ Creates new container with ``IOSuccess(inner_value)`` as a unit value. .. code:: python >>> from returns.context import RequiresContextIOResult >>> from returns.io import IOSuccess >>> assert RequiresContextIOResult.from_value(1)( ... RequiresContextIOResult.no_args, ... ) == IOSuccess(1) """ return RequiresContextIOResult(lambda _: IOSuccess(inner_value)) @classmethod def from_failure( cls, inner_value: _NewErrorType, ) -> RequiresContextIOResult[Any, _NewErrorType, NoDeps]: """ Creates new container with ``IOFailure(inner_value)`` as a unit value. .. code:: python >>> from returns.context import RequiresContextIOResult >>> from returns.io import IOFailure >>> assert RequiresContextIOResult.from_failure(1)( ... RequiresContextIOResult.no_args, ... ) == IOFailure(1) """ return RequiresContextIOResult(lambda _: IOFailure(inner_value)) # Aliases: #: Alias for a popular case when ``Result`` has ``Exception`` as error type. RequiresContextIOResultE: TypeAlias = RequiresContextIOResult[ _ValueType, Exception, _EnvType, ] #: Alias to save you some typing. Uses original name from Haskell. 
ReaderIOResult: TypeAlias = RequiresContextIOResult #: Alias to save you some typing. Uses ``Exception`` as error type. ReaderIOResultE: TypeAlias = RequiresContextIOResult[ _ValueType, Exception, _EnvType, ] returns-0.24.0/returns/context/requires_context_result.py000066400000000000000000000502061472312074000240430ustar00rootroot00000000000000from __future__ import annotations from collections.abc import Callable from typing import TYPE_CHECKING, Any, ClassVar, TypeAlias, TypeVar, final from returns.context import NoDeps from returns.interfaces.specific import reader_result from returns.primitives.container import BaseContainer from returns.primitives.hkt import Kind3, SupportsKind3, dekind from returns.result import Failure, Result, Success if TYPE_CHECKING: from returns.context.requires_context import RequiresContext # Context: _EnvType = TypeVar('_EnvType', contravariant=True) _NewEnvType = TypeVar('_NewEnvType') # Result: _ValueType = TypeVar('_ValueType', covariant=True) _NewValueType = TypeVar('_NewValueType') _ErrorType = TypeVar('_ErrorType', covariant=True) _NewErrorType = TypeVar('_NewErrorType') # Helpers: _FirstType = TypeVar('_FirstType') @final class RequiresContextResult( # type: ignore[type-var] BaseContainer, SupportsKind3['RequiresContextResult', _ValueType, _ErrorType, _EnvType], reader_result.ReaderResultBasedN[_ValueType, _ErrorType, _EnvType], ): """ The ``RequiresContextResult`` combinator. See :class:`returns.context.requires_context.RequiresContext` for more docs. This is just a handy wrapper around ``RequiresContext[Result[a, b], env]`` which represents a context-dependent pure operation that might fail and return :class:`returns.result.Result`. It has several important differences from the regular ``Result`` classes. It does not have ``Success`` and ``Failure`` subclasses. Because, the computation is not yet performed. And we cannot know the type in advance. So, this is a thin wrapper, without any changes in logic. Why do we need this wrapper? That's just for better usability! .. code:: python >>> from returns.context import RequiresContext >>> from returns.result import Success, Result >>> def function(arg: int) -> Result[int, str]: ... return Success(arg + 1) >>> # Without wrapper: >>> assert RequiresContext.from_value(Success(1)).map( ... lambda result: result.bind(function), ... )(...) == Success(2) >>> # With wrapper: >>> assert RequiresContextResult.from_value(1).bind_result( ... function, ... )(...) == Success(2) This way ``RequiresContextResult`` allows to simply work with: - raw values and pure functions - ``RequiresContext`` values and pure functions returning it - ``Result`` and functions returning it .. rubric:: Important implementation details Due it is meaning, ``RequiresContextResult`` cannot have ``Success`` and ``Failure`` subclasses. We only have just one type. That's by design. Different converters are also not supported for this type. Use converters inside the ``RequiresContext`` context, not outside. See also: - https://dev.to/gcanti/getting-started-with-fp-ts-reader-1ie5 - https://en.wikipedia.org/wiki/Lazy_evaluation - https://bit.ly/2R8l4WK - https://bit.ly/2RwP4fp """ __slots__ = () #: This field has an extra 'RequiresContext' just because `mypy` needs it. _inner_value: Callable[[_EnvType], Result[_ValueType, _ErrorType]] #: A convenient placeholder to call methods created by `.from_value()`. 
no_args: ClassVar[NoDeps] = object() def __init__( self, inner_value: Callable[[_EnvType], Result[_ValueType, _ErrorType]], ) -> None: """ Public constructor for this type. Also required for typing. Only allows functions of kind ``* -> *`` and returning :class:`returns.result.Result` instances. .. code:: python >>> from returns.context import RequiresContextResult >>> from returns.result import Success >>> str(RequiresContextResult(lambda deps: Success(deps + 1))) ' at ...>>' """ super().__init__(inner_value) def __call__(self, deps: _EnvType) -> Result[_ValueType, _ErrorType]: """ Evaluates the wrapped function. .. code:: python >>> from returns.context import RequiresContextResult >>> from returns.result import Success >>> def first(lg: bool) -> RequiresContextResult[int, str, float]: ... # `deps` has `float` type here: ... return RequiresContextResult( ... lambda deps: Success(deps if lg else -deps), ... ) >>> instance = first(False) >>> assert instance(3.5) == Success(-3.5) In other things, it is a regular Python magic method. """ return self._inner_value(deps) def swap(self) -> RequiresContextResult[_ErrorType, _ValueType, _EnvType]: """ Swaps value and error types. So, values become errors and errors become values. It is useful when you have to work with errors a lot. And since we have a lot of ``.bind_`` related methods and only a single ``.lash`` - it is easier to work with values. .. code:: python >>> from returns.context import RequiresContextResult >>> from returns.result import Failure, Success >>> success = RequiresContextResult.from_value(1) >>> failure = RequiresContextResult.from_failure(1) >>> assert success.swap()(...) == Failure(1) >>> assert failure.swap()(...) == Success(1) """ return RequiresContextResult(lambda deps: self(deps).swap()) def map( self, function: Callable[[_ValueType], _NewValueType], ) -> RequiresContextResult[_NewValueType, _ErrorType, _EnvType]: """ Composes successful container with a pure function. .. code:: python >>> from returns.context import RequiresContextResult >>> from returns.result import Success, Failure >>> assert RequiresContextResult.from_value(1).map( ... lambda x: x + 1, ... )(...) == Success(2) >>> assert RequiresContextResult.from_failure(1).map( ... lambda x: x + 1, ... )(...) == Failure(1) """ return RequiresContextResult(lambda deps: self(deps).map(function)) def apply( self, container: Kind3[ RequiresContextResult, Callable[[_ValueType], _NewValueType], _ErrorType, _EnvType, ], ) -> RequiresContextResult[_NewValueType, _ErrorType, _EnvType]: """ Calls a wrapped function in a container on this container. .. code:: python >>> from returns.context import RequiresContextResult >>> from returns.result import Failure, Success >>> def transform(arg: str) -> str: ... return arg + 'b' >>> assert RequiresContextResult.from_value('a').apply( ... RequiresContextResult.from_value(transform), ... )(...) == Success('ab') >>> assert RequiresContextResult.from_failure('a').apply( ... RequiresContextResult.from_value(transform), ... )(...) == Failure('a') >>> assert isinstance(RequiresContextResult.from_value('a').apply( ... RequiresContextResult.from_failure(transform), ... )(...), Failure) is True """ return RequiresContextResult( lambda deps: self(deps).apply(dekind(container)(deps)), ) def bind( self, function: Callable[ [_ValueType], Kind3[ RequiresContextResult, _NewValueType, _ErrorType, _EnvType, ], ], ) -> RequiresContextResult[_NewValueType, _ErrorType, _EnvType]: """ Composes this container with a function returning the same type. .. 
code:: python >>> from returns.context import RequiresContextResult >>> from returns.result import Success, Failure >>> def first(lg: bool) -> RequiresContextResult[int, int, float]: ... # `deps` has `float` type here: ... return RequiresContextResult( ... lambda deps: Success(deps) if lg else Failure(-deps), ... ) >>> def second( ... number: int, ... ) -> RequiresContextResult[str, int, float]: ... # `deps` has `float` type here: ... return RequiresContextResult( ... lambda deps: Success('>=' if number >= deps else '<'), ... ) >>> assert first(True).bind(second)(1) == Success('>=') >>> assert first(False).bind(second)(2) == Failure(-2) """ return RequiresContextResult( lambda deps: self(deps).bind( lambda inner: function(inner)(deps), # type: ignore ), ) #: Alias for `bind_context_result` method, it is the same as `bind` here. bind_context_result = bind def bind_result( self, function: Callable[[_ValueType], Result[_NewValueType, _ErrorType]], ) -> RequiresContextResult[_NewValueType, _ErrorType, _EnvType]: """ Binds ``Result`` returning function to current container. .. code:: python >>> from returns.context import RequiresContextResult >>> from returns.result import Success, Failure, Result >>> def function(num: int) -> Result[int, str]: ... return Success(num + 1) if num > 0 else Failure('<0') >>> assert RequiresContextResult.from_value(1).bind_result( ... function, ... )(RequiresContextResult.no_args) == Success(2) >>> assert RequiresContextResult.from_value(0).bind_result( ... function, ... )(RequiresContextResult.no_args) == Failure('<0') >>> assert RequiresContextResult.from_failure(':(').bind_result( ... function, ... )(RequiresContextResult.no_args) == Failure(':(') """ return RequiresContextResult(lambda deps: self(deps).bind(function)) def bind_context( self, function: Callable[ [_ValueType], RequiresContext[_NewValueType, _EnvType], ], ) -> RequiresContextResult[_NewValueType, _ErrorType, _EnvType]: """ Binds ``RequiresContext`` returning function to current container. .. code:: python >>> from returns.context import RequiresContext >>> from returns.result import Success, Failure >>> def function(arg: int) -> RequiresContext[int, str]: ... return RequiresContext(lambda deps: len(deps) + arg) >>> assert function(2)('abc') == 5 >>> assert RequiresContextResult.from_value(2).bind_context( ... function, ... )('abc') == Success(5) >>> assert RequiresContextResult.from_failure(2).bind_context( ... function, ... )('abc') == Failure(2) """ return RequiresContextResult( lambda deps: self(deps).map( lambda inner: function(inner)(deps), # type: ignore[misc] ), ) def alt( self, function: Callable[[_ErrorType], _NewErrorType], ) -> RequiresContextResult[_ValueType, _NewErrorType, _EnvType]: """ Composes failed container with a pure function. .. code:: python >>> from returns.context import RequiresContextResult >>> from returns.result import Success, Failure >>> assert RequiresContextResult.from_value(1).alt( ... lambda x: x + 1, ... )(...) == Success(1) >>> assert RequiresContextResult.from_failure(1).alt( ... lambda x: x + 1, ... )(...) == Failure(2) """ return RequiresContextResult(lambda deps: self(deps).alt(function)) def lash( self, function: Callable[ [_ErrorType], Kind3[ RequiresContextResult, _ValueType, _NewErrorType, _EnvType, ], ], ) -> RequiresContextResult[_ValueType, _NewErrorType, _EnvType]: """ Composes this container with a function returning the same type. .. 
code:: python >>> from returns.context import RequiresContextResult >>> from returns.result import Success, Failure >>> def lashable(arg: str) -> RequiresContextResult[str, str, str]: ... if len(arg) > 1: ... return RequiresContextResult( ... lambda deps: Success(deps + arg), ... ) ... return RequiresContextResult( ... lambda deps: Failure(arg + deps), ... ) >>> assert RequiresContextResult.from_value('a').lash( ... lashable, ... )('c') == Success('a') >>> assert RequiresContextResult.from_failure('a').lash( ... lashable, ... )('c') == Failure('ac') >>> assert RequiresContextResult.from_failure('aa').lash( ... lashable, ... )('b') == Success('baa') """ return RequiresContextResult( lambda deps: self(deps).lash( lambda inner: function(inner)(deps), # type: ignore ), ) def modify_env( self, function: Callable[[_NewEnvType], _EnvType], ) -> RequiresContextResult[_ValueType, _ErrorType, _NewEnvType]: """ Allows to modify the environment type. .. code:: python >>> from returns.context import RequiresContextResultE >>> from returns.result import Success, safe >>> def div(arg: int) -> RequiresContextResultE[float, int]: ... return RequiresContextResultE( ... safe(lambda deps: arg / deps), ... ) >>> assert div(3).modify_env(int)('2') == Success(1.5) >>> assert div(3).modify_env(int)('0').failure() """ return RequiresContextResult(lambda deps: self(function(deps))) @classmethod def ask(cls) -> RequiresContextResult[_EnvType, _ErrorType, _EnvType]: """ Is used to get the current dependencies inside the call stack. Similar to :meth:`returns.context.requires_context.RequiresContext.ask`, but returns ``Result`` instead of a regular value. Please, refer to the docs there to learn how to use it. One important note that is worth duplicating here: you might need to provide ``_EnvType`` explicitly, so ``mypy`` will know about it statically. .. code:: python >>> from returns.context import RequiresContextResultE >>> from returns.result import Success >>> assert RequiresContextResultE[int, int].ask().map( ... str, ... )(1) == Success('1') """ return RequiresContextResult(Success) @classmethod def from_result( cls, inner_value: Result[_NewValueType, _NewErrorType], ) -> RequiresContextResult[_NewValueType, _NewErrorType, NoDeps]: """ Creates new container with ``Result`` as a unit value. .. code:: python >>> from returns.context import RequiresContextResult >>> from returns.result import Success, Failure >>> deps = RequiresContextResult.no_args >>> assert RequiresContextResult.from_result( ... Success(1), ... )(deps) == Success(1) >>> assert RequiresContextResult.from_result( ... Failure(1), ... )(deps) == Failure(1) """ return RequiresContextResult(lambda _: inner_value) @classmethod def from_typecast( cls, inner_value: RequiresContext[ Result[_NewValueType, _NewErrorType], _EnvType, ], ) -> RequiresContextResult[_NewValueType, _NewErrorType, _EnvType]: """ You might end up with ``RequiresContext[Result[...]]`` as a value. This method is designed to turn it into ``RequiresContextResult``. It will save all the typing information. It is just more useful! .. code:: python >>> from returns.context import RequiresContext >>> from returns.result import Success, Failure >>> assert RequiresContextResult.from_typecast( ... RequiresContext.from_value(Success(1)), ... )(RequiresContextResult.no_args) == Success(1) >>> assert RequiresContextResult.from_typecast( ... RequiresContext.from_value(Failure(1)), ... 
)(RequiresContextResult.no_args) == Failure(1) """ return RequiresContextResult(inner_value) @classmethod def from_context( cls, inner_value: RequiresContext[_NewValueType, _NewEnvType], ) -> RequiresContextResult[_NewValueType, Any, _NewEnvType]: """ Creates new container from ``RequiresContext`` as a success unit. .. code:: python >>> from returns.context import RequiresContext >>> from returns.result import Success >>> assert RequiresContextResult.from_context( ... RequiresContext.from_value(1), ... )(...) == Success(1) """ return RequiresContextResult(lambda deps: Success(inner_value(deps))) @classmethod def from_failed_context( cls, inner_value: RequiresContext[_NewValueType, _NewEnvType], ) -> RequiresContextResult[Any, _NewValueType, _NewEnvType]: """ Creates new container from ``RequiresContext`` as a failure unit. .. code:: python >>> from returns.context import RequiresContext >>> from returns.result import Failure >>> assert RequiresContextResult.from_failed_context( ... RequiresContext.from_value(1), ... )(...) == Failure(1) """ return RequiresContextResult(lambda deps: Failure(inner_value(deps))) @classmethod def from_result_context( cls, inner_value: RequiresContextResult[ _NewValueType, _NewErrorType, _NewEnvType, ], ) -> RequiresContextResult[_NewValueType, _NewErrorType, _NewEnvType]: """ Creates ``RequiresContextResult`` from another instance of it. .. code:: python >>> from returns.context import ReaderResult >>> from returns.result import Success, Failure >>> assert ReaderResult.from_result_context( ... ReaderResult.from_value(1), ... )(...) == Success(1) >>> assert ReaderResult.from_result_context( ... ReaderResult.from_failure(1), ... )(...) == Failure(1) """ return inner_value @classmethod def from_value( cls, inner_value: _FirstType, ) -> RequiresContextResult[_FirstType, Any, NoDeps]: """ Creates new container with ``Success(inner_value)`` as a unit value. .. code:: python >>> from returns.context import RequiresContextResult >>> from returns.result import Success >>> assert RequiresContextResult.from_value(1)(...) == Success(1) """ return RequiresContextResult(lambda _: Success(inner_value)) @classmethod def from_failure( cls, inner_value: _FirstType, ) -> RequiresContextResult[Any, _FirstType, NoDeps]: """ Creates new container with ``Failure(inner_value)`` as a unit value. .. code:: python >>> from returns.context import RequiresContextResult >>> from returns.result import Failure >>> assert RequiresContextResult.from_failure(1)(...) == Failure(1) """ return RequiresContextResult(lambda _: Failure(inner_value)) # Aliases: #: Alias for a popular case when ``Result`` has ``Exception`` as error type. RequiresContextResultE: TypeAlias = RequiresContextResult[ _ValueType, Exception, _EnvType, ] #: Alias to save you some typing. Uses original name from Haskell. ReaderResult: TypeAlias = RequiresContextResult #: Alias to save you some typing. Has ``Exception`` as error type. 
ReaderResultE: TypeAlias = RequiresContextResult[ _ValueType, Exception, _EnvType, ] returns-0.24.0/returns/contrib/000077500000000000000000000000001472312074000164415ustar00rootroot00000000000000returns-0.24.0/returns/contrib/__init__.py000066400000000000000000000000001472312074000205400ustar00rootroot00000000000000returns-0.24.0/returns/contrib/hypothesis/000077500000000000000000000000001472312074000206405ustar00rootroot00000000000000returns-0.24.0/returns/contrib/hypothesis/__init__.py000066400000000000000000000000001472312074000227370ustar00rootroot00000000000000returns-0.24.0/returns/contrib/hypothesis/_entrypoint.py000066400000000000000000000032561472312074000235720ustar00rootroot00000000000000""" Used to register all our types as hypothesis strategies. See: https://hypothesis.readthedocs.io/en/latest/strategies.html But, beware that we only register concrete types here, interfaces won't be registered! """ from __future__ import annotations from collections.abc import Callable, Sequence from typing import TYPE_CHECKING, Any, TypeVar if TYPE_CHECKING: from returns.primitives.laws import Lawful _Inst = TypeVar('_Inst', bound='Lawful') def _setup_hook() -> None: from hypothesis import strategies as st from returns.context import ( RequiresContext, RequiresContextFutureResult, RequiresContextIOResult, RequiresContextResult, ) from returns.future import Future, FutureResult from returns.io import IO, IOResult from returns.maybe import Maybe from returns.result import Result def factory( container_type: type[_Inst], ) -> Callable[[Any], st.SearchStrategy[_Inst]]: def decorator(thing: Any) -> st.SearchStrategy[_Inst]: from returns.contrib.hypothesis.containers import ( strategy_from_container, ) return strategy_from_container(container_type)(thing) return decorator #: Our types that we register in hypothesis #: to be working with ``st.from_type`` registered_types: Sequence[type[Lawful]] = ( Result, Maybe, IO, IOResult, Future, FutureResult, RequiresContext, RequiresContextResult, RequiresContextIOResult, RequiresContextFutureResult, ) for type_ in registered_types: st.register_type_strategy(type_, factory(type_)) returns-0.24.0/returns/contrib/hypothesis/containers.py000066400000000000000000000043341472312074000233630ustar00rootroot00000000000000from __future__ import annotations from collections.abc import Callable from typing import TYPE_CHECKING, Any, TypeVar from hypothesis import strategies as st if TYPE_CHECKING: from returns.primitives.laws import Lawful def strategy_from_container( container_type: type[Lawful], *, use_init: bool = False, ) -> Callable[[type], st.SearchStrategy]: """ Creates a strategy from a container type. Basically, containers should not support ``__init__`` even when they have one. Because, that can be very complex: for example ``FutureResult`` requires ``Awaitable[Result[a, b]]`` as an ``__init__`` value. But, custom containers pass ``use_init`` if they are not an instance of ``ApplicativeN`` and do not have a working ``.from_value`` method. For example, pure ``MappableN`` can do that. We also try to resolve generic arguments. So, ``Result[_ValueType, Exception]`` will produce any value for success cases and only exceptions for failure cases. 
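    For example, a minimal registration sketch (it mirrors what
    ``_entrypoint.py`` does for every container; ``Result`` is used
    purely as an illustration here):

    .. code:: python

      from hypothesis import strategies as st

      from returns.contrib.hypothesis.containers import (
          strategy_from_container,
      )
      from returns.result import Result

      # After this call `st.from_type(Result[int, str])` builds values
      # through `.from_value` for successes and `.from_failure` for errors:
      st.register_type_strategy(Result, strategy_from_container(Result))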
""" from returns.interfaces.applicative import ApplicativeN from returns.interfaces.specific import maybe, result def factory(type_: type) -> st.SearchStrategy: value_type, error_type = _get_type_vars(type_) strategies: list[st.SearchStrategy[Any]] = [] if use_init and getattr(container_type, '__init__', None): strategies.append(st.builds(container_type)) if issubclass(container_type, ApplicativeN): strategies.append(st.builds( container_type.from_value, st.from_type(value_type), )) if issubclass(container_type, result.ResultLikeN): strategies.append(st.builds( container_type.from_failure, st.from_type(error_type), )) if issubclass(container_type, maybe.MaybeLikeN): strategies.append(st.builds( container_type.from_optional, st.from_type(value_type), )) return st.one_of(*strategies) return factory _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') def _get_type_vars(thing: type): return getattr(thing, '__args__', (_FirstType, _SecondType))[:2] returns-0.24.0/returns/contrib/hypothesis/laws.py000066400000000000000000000175601472312074000221710ustar00rootroot00000000000000import inspect from collections.abc import Callable, Iterator from contextlib import ExitStack, contextmanager from typing import Any, NamedTuple, TypeVar, final import pytest from hypothesis import given from hypothesis import settings as hypothesis_settings from hypothesis import strategies as st from hypothesis.strategies._internal import types from returns.contrib.hypothesis.containers import strategy_from_container from returns.primitives.laws import Law, Lawful @final class _Settings(NamedTuple): """Settings that we provide to an end user.""" settings_kwargs: dict[str, Any] use_init: bool def check_all_laws( container_type: type[Lawful], *, settings_kwargs: dict[str, Any] | None = None, use_init: bool = False, ) -> None: """ Function to check all defined mathematical laws in a specified container. Should be used like so: .. code:: python from returns.contrib.hypothesis.laws import check_all_laws from returns.io import IO check_all_laws(IO) You can also pass different ``hypothesis`` settings inside: .. code:: python check_all_laws(IO, settings_kwargs={'max_examples': 100}) Note: Cannot be used inside doctests because of the magic we use inside. See also: - https://sobolevn.me/2021/02/make-tests-a-part-of-your-app - https://mmhaskell.com/blog/2017/3/13/obey-the-type-laws """ settings = _Settings( settings_kwargs if settings_kwargs is not None else {}, use_init, ) for interface, laws in container_type.laws().items(): for law in laws: _create_law_test_case( container_type, interface, law, settings=settings, ) @contextmanager def container_strategies( container_type: type[Lawful], *, settings: _Settings, ) -> Iterator[None]: """ Registers all types inside a container to resolve to a correct strategy. For example, let's say we have ``Result`` type. It is a subtype of ``ContainerN``, ``MappableN``, ``BindableN``, etc. When we check this type, we need ``MappableN`` to resolve to ``Result``. Can be used independently from other functions. 
""" our_interfaces = { base_type for base_type in container_type.__mro__ if ( getattr(base_type, '__module__', '').startswith('returns.') and # We don't register `Lawful` type, it is not a container: base_type != Lawful and # We will register the container itself later with # `maybe_register_container` function: base_type != container_type ) } for interface in our_interfaces: st.register_type_strategy( interface, strategy_from_container( container_type, use_init=settings.use_init, ), ) try: yield finally: for interface in our_interfaces: types._global_type_lookup.pop(interface) _clean_caches() @contextmanager def register_container( container_type: type['Lawful'], *, use_init: bool, ) -> Iterator[None]: """Temporary registers a container if it is not registered yet.""" used = types._global_type_lookup.pop(container_type, None) st.register_type_strategy( container_type, strategy_from_container( container_type, use_init=use_init, ), ) try: yield finally: types._global_type_lookup.pop(container_type) if used: st.register_type_strategy(container_type, used) else: _clean_caches() @contextmanager def pure_functions() -> Iterator[None]: """ Context manager to resolve all ``Callable`` as pure functions. It is not a default in ``hypothesis``. """ def factory(thing) -> st.SearchStrategy: like = ( (lambda: None) if len(thing.__args__) == 1 else (lambda *args, **kwargs: None) ) return_type = thing.__args__[-1] return st.functions( like=like, returns=st.from_type( return_type if return_type is not None else type(None), ), pure=True, ) used = types._global_type_lookup[Callable] # type: ignore[index] st.register_type_strategy(Callable, factory) # type: ignore[arg-type] try: yield finally: types._global_type_lookup.pop(Callable) # type: ignore[call-overload] st.register_type_strategy(Callable, used) # type: ignore[arg-type] @contextmanager def type_vars() -> Iterator[None]: """ Our custom ``TypeVar`` handling. There are several noticeable differences: 1. We add mutable types to the tests: like ``list`` and ``dict`` 2. We ensure that values inside strategies are self-equal, for example, ``nan`` does not work for us """ def factory(thing): return types.resolve_TypeVar(thing).filter( lambda inner: inner == inner, # noqa: WPS312 ) used = types._global_type_lookup.pop(TypeVar) st.register_type_strategy(TypeVar, factory) try: yield finally: types._global_type_lookup.pop(TypeVar) st.register_type_strategy(TypeVar, used) @contextmanager def clean_plugin_context() -> Iterator[None]: """ We register a lot of types in `_entrypoint.py`, we need to clean them. Otherwise, some types might be messed up. 
""" saved_stategies = {} for strategy_key, strategy in types._global_type_lookup.items(): if isinstance(strategy_key, type): if strategy_key.__module__.startswith('returns.'): saved_stategies.update({strategy_key: strategy}) for key_to_remove in saved_stategies: types._global_type_lookup.pop(key_to_remove) _clean_caches() try: yield finally: for saved_state in saved_stategies.items(): st.register_type_strategy(*saved_state) def _clean_caches() -> None: st.from_type.__clear_cache() # type: ignore[attr-defined] def _run_law( container_type: type[Lawful], law: Law, *, settings: _Settings, ) -> Callable[[st.DataObject], None]: def factory(source: st.DataObject) -> None: with ExitStack() as stack: stack.enter_context(clean_plugin_context()) stack.enter_context(type_vars()) stack.enter_context(pure_functions()) stack.enter_context( container_strategies(container_type, settings=settings), ) stack.enter_context( register_container(container_type, use_init=settings.use_init), ) source.draw(st.builds(law.definition)) return factory def _create_law_test_case( container_type: type[Lawful], interface: type[Lawful], law: Law, *, settings: _Settings, ) -> None: test_function = given(st.data())( hypothesis_settings(**settings.settings_kwargs)( _run_law(container_type, law, settings=settings), ), ) called_from = inspect.stack()[2] module = inspect.getmodule(called_from[0]) template = 'test_{container}_{interface}_{name}' test_function.__name__ = template.format( # noqa: WPS125 container=container_type.__qualname__.lower(), interface=interface.__qualname__.lower(), name=law.name, ) setattr( module, test_function.__name__, pytest.mark.filterwarnings( # We ignore multiple warnings about unused coroutines and stuff: 'ignore::pytest.PytestUnraisableExceptionWarning', )( # We mark all tests with `returns_lawful` marker, # so users can easily skip them if needed. pytest.mark.returns_lawful(test_function), ), ) returns-0.24.0/returns/contrib/mypy/000077500000000000000000000000001472312074000174375ustar00rootroot00000000000000returns-0.24.0/returns/contrib/mypy/__init__.py000066400000000000000000000000001472312074000215360ustar00rootroot00000000000000returns-0.24.0/returns/contrib/mypy/_consts.py000066400000000000000000000022771472312074000214710ustar00rootroot00000000000000from typing import Final # Constant fullnames for typechecking # =================================== #: Used for typed ``partial`` function. TYPED_PARTIAL_FUNCTION: Final = 'returns.curry.partial' #: Used for typed ``curry`` decorator. TYPED_CURRY_FUNCTION: Final = 'returns.curry.curry' #: Used for typed ``flow`` call. TYPED_FLOW_FUNCTION: Final = 'returns._internal.pipeline.flow.flow' #: Used for typed ``pipe`` call. TYPED_PIPE_FUNCTION: Final = 'returns._internal.pipeline.pipe.pipe' TYPED_PIPE_METHOD: Final = 'returns._internal.pipeline.pipe._Pipe.__call__' #: Used for HKT emulation. TYPED_KINDN: Final = 'returns.primitives.hkt.KindN' TYPED_KINDN_ACCESS: Final = '{0}.'.format(TYPED_KINDN) TYPED_KIND_DEKIND: Final = 'returns.primitives.hkt.dekind' TYPED_KIND_KINDED_CALL: Final = 'returns.primitives.hkt.Kinded.__call__' TYPED_KIND_KINDED_GET: Final = 'returns.primitives.hkt.Kinded.__get__' #: Used for :ref:`do-notation`. 
DO_NOTATION_METHODS: Final = ( # Just validation: 'returns.io.IO.do', 'returns.maybe.Maybe.do', 'returns.future.Future.do', # Also infer error types: 'returns.result.Result.do', 'returns.io.IOResult.do', 'returns.future.FutureResult.do', ) returns-0.24.0/returns/contrib/mypy/_features/000077500000000000000000000000001472312074000214145ustar00rootroot00000000000000returns-0.24.0/returns/contrib/mypy/_features/__init__.py000066400000000000000000000000001472312074000235130ustar00rootroot00000000000000returns-0.24.0/returns/contrib/mypy/_features/curry.py000066400000000000000000000145711472312074000231420ustar00rootroot00000000000000from collections.abc import Iterator from itertools import groupby, product from operator import itemgetter from typing import cast, final from mypy.nodes import ARG_STAR, ARG_STAR2 from mypy.plugin import FunctionContext from mypy.types import AnyType, CallableType, FunctionLike, Overloaded from mypy.types import Type as MypyType from mypy.types import TypeOfAny, get_proper_type from returns.contrib.mypy._structures.args import FuncArg from returns.contrib.mypy._typeops.transform_callable import ( Intermediate, proper_type, ) #: Raw material to build `_ArgTree`. _RawArgTree = list[list[list[FuncArg]]] def analyze(ctx: FunctionContext) -> MypyType: """Returns proper type for curried functions.""" default_return = get_proper_type(ctx.default_return_type) arg_type = get_proper_type(ctx.arg_types[0][0]) if not isinstance(arg_type, CallableType): return default_return if not isinstance(default_return, CallableType): return default_return return _CurryFunctionOverloads(arg_type, ctx).build_overloads() @final class _ArgTree: """Represents a node in tree of arguments.""" def __init__(self, case: CallableType | None) -> None: self.case = case self.children: list['_ArgTree'] = [] @final class _CurryFunctionOverloads: """ Implementation of ``@curry`` decorator typings. Basically does just two things: 1. Creates all possible ordered combitations of arguments 2. Creates ``Overload`` instances for functions' return types """ def __init__(self, original: CallableType, ctx: FunctionContext) -> None: """ Saving the things we need. Args: original: original function that was passed to ``@curry``. ctx: function context. """ self._original = original self._ctx = ctx self._overloads: list[CallableType] = [] self._args = FuncArg.from_callable(self._original) # We need to get rid of generics here. # Because, otherwise `detach_callable` with add # unused variables to intermediate callables. self._default = cast( CallableType, self._ctx.default_return_type, ).copy_modified( ret_type=AnyType(TypeOfAny.implementation_artifact), ) def build_overloads(self) -> MypyType: """ Builds lots of possible overloads for a given function. Inside we try to repsent all functions as sequence of arguments, grouped by the similar ones and returning one more overload instance. """ if not self._args: # There's nothing to do, function has 0 args. return self._original if any(arg.kind in {ARG_STAR, ARG_STAR2} for arg in self._args): # We don't support `*args` and `**kwargs`. # Because it is very complex. It might be fixes in the future. return self._default.ret_type # Any argtree = self._build_argtree( _ArgTree(None), # starting from root node list(self._slices(self._args)), ) self._build_overloads_from_argtree(argtree) return proper_type(self._overloads) def _build_argtree( self, node: _ArgTree, source: _RawArgTree, ) -> '_ArgTree': """ Builds argument tree. 
Each argument can point to zero, one, or more other nodes. Arguments that have zero children are treated as bottom (last) ones. Arguments that have just one child are meant to be regular functions. Arguments that have more than one child are treated as overloads. """ def factory( args: _RawArgTree, ) -> Iterator[tuple[list[FuncArg], _RawArgTree]]: if not args or not args[0]: return # we have reached an end of arguments yield from ( (case, [group[1:] for group in grouped]) for case, grouped in groupby(args, itemgetter(0)) ) for case, rest in factory(source): new_node = _ArgTree( Intermediate(self._default).with_signature(case), ) node.children.append(new_node) self._build_argtree(source=rest, node=new_node) return node def _build_overloads_from_argtree(self, argtree: _ArgTree) -> None: """Generates functions from argument tree.""" for child in argtree.children: self._build_overloads_from_argtree(child) assert child.case # mypy is not happy # noqa: S101 if not child.children: child.case = Intermediate(child.case).with_ret_type( self._original.ret_type, ) if argtree.case is not None: # We need to go backwards and to replace the return types # of the previous functions. Like so: # 1. `def x -> A` # 2. `def y -> A` # Will take `2` and apply its type to the previous function `1`. # Will result in `def x -> y -> A` # We also overloadify existing return types. ret_type = get_proper_type(argtree.case.ret_type) temp_any = isinstance( ret_type, AnyType, ) and ret_type.type_of_any == TypeOfAny.implementation_artifact argtree.case = Intermediate(argtree.case).with_ret_type( child.case if temp_any else Overloaded( [child.case, *cast(FunctionLike, ret_type).items], ), ) else: # Root is reached, we need to save the result: self._overloads.append(child.case) def _slices(self, source: list[FuncArg]) -> Iterator[list[list[FuncArg]]]: """ Generate all possible slices of a source list. Example:: _slices("AB") -> "AB" "A" "B" _slices("ABC") -> "ABC" "AB" "C" "A" "BC" "A" "B" "C" """ for doslice in product([True, False], repeat=len(source) - 1): slices = [] start = 0 for index, slicehere in enumerate(doslice, 1): if slicehere: slices.append(source[start:index]) start = index slices.append(source[start:]) yield slices returns-0.24.0/returns/contrib/mypy/_features/do_notation.py000066400000000000000000000103321472312074000243020ustar00rootroot00000000000000from typing import Final from mypy.maptype import map_instance_to_supertype from mypy.nodes import Expression, GeneratorExpr, TypeInfo from mypy.plugin import MethodContext from mypy.subtypes import is_subtype from mypy.typeops import make_simplified_union from mypy.types import AnyType, CallableType, Instance from mypy.types import Type as MypyType from mypy.types import TypeOfAny, UnionType, get_proper_type _INVALID_DO_NOTATION_SOURCE: Final = ( 'Invalid type supplied in do-notation: expected "{0}", got "{1}"' ) _LITERAL_GENERATOR_EXPR_REQUIRED: Final = ( 'Literal generator expression is required, not a variable or function call' ) _IF_CONDITIONS_ARE_NOT_ALLOWED: Final = ( 'Using "if" conditions inside a generator is not allowed' ) def analyze(ctx: MethodContext) -> MypyType: """ Used to handle validation and error types in :ref:`do-notation`. What it does? 1. For all types we ensure that only a single container type is used in a single do-notation. We don't allow mixing them. 2. For types with error types (like ``Result``), it inferes what possible errors types can we have. The result is a ``Union`` of all possible errors. 3. 
Ensures that expression passed into ``.do`` method is literal. 4. Checks that default value is provided if generator expression has ``if`` conditions inside. """ default_return = get_proper_type(ctx.default_return_type) if not ctx.args or not ctx.args[0]: return default_return expr = ctx.args[0][0] if not isinstance(expr, GeneratorExpr): ctx.api.fail(_LITERAL_GENERATOR_EXPR_REQUIRED, expr) return default_return if not isinstance(ctx.type, CallableType): return default_return if not isinstance(default_return, Instance): return default_return return _do_notation( expr=expr, type_info=ctx.type.type_object(), default_return_type=default_return, ctx=ctx, ) def _do_notation( expr: GeneratorExpr, type_info: TypeInfo, default_return_type: Instance, ctx: MethodContext, ) -> MypyType: types = [] for seq in expr.sequences: error_type = _try_fetch_error_type(type_info, seq, ctx) if error_type is not None: types.append(error_type) _check_if_conditions(expr, ctx) if types: return default_return_type.copy_modified( args=[ default_return_type.args[0], make_simplified_union(types), *default_return_type.args[2:], ], ) return default_return_type def _try_fetch_error_type( type_info: TypeInfo, seq: Expression, ctx: MethodContext, ) -> MypyType | None: inst = Instance( type_info, [ AnyType(TypeOfAny.implementation_artifact) for _ in type_info.type_vars ], ) typ = ctx.api.expr_checker.accept(seq) # type: ignore if is_subtype(typ, inst, ignore_type_params=True): is_success, error_type = _extract_error_type(typ, type_info) if is_success: return error_type ctx.api.fail( _INVALID_DO_NOTATION_SOURCE.format(inst, typ), seq, ) return None def _extract_error_type( typ: MypyType, type_info: TypeInfo, ) -> tuple[bool, MypyType | None]: typ = get_proper_type(typ) if isinstance(typ, Instance): return True, _decide_error_type( map_instance_to_supertype(typ, type_info), ) if isinstance(typ, UnionType): types = [] is_success = True for type_item in typ.items: is_success, error_type = _extract_error_type(type_item, type_info) if error_type is not None: types.append(error_type) if is_success: return True, make_simplified_union(types) return False, None def _decide_error_type(typ: Instance) -> MypyType | None: if len(typ.args) < 2: return None if isinstance(get_proper_type(typ.args[1]), AnyType): return None return typ.args[1] def _check_if_conditions( expr: GeneratorExpr, ctx: MethodContext, ) -> None: if any(cond for cond in expr.condlists): ctx.api.fail(_IF_CONDITIONS_ARE_NOT_ALLOWED, expr) returns-0.24.0/returns/contrib/mypy/_features/flow.py000066400000000000000000000032471472312074000227430ustar00rootroot00000000000000from mypy.plugin import FunctionContext from mypy.types import Type as MypyType from mypy.types import get_proper_type from returns.contrib.mypy._typeops.inference import PipelineInference def analyze(ctx: FunctionContext) -> MypyType: """ Helps to analyze ``flow`` function calls. By default, ``mypy`` cannot infer and check this function call: .. code:: python >>> from returns.pipeline import flow >>> assert flow( ... 1, ... lambda x: x + 1, ... lambda y: y / 2, ... ) == 1.0 But, this plugin can! It knows all the types for all ``lambda`` functions in the pipeline. How? 1. We use the first passed parameter as the first argument to the first passed function 2. We use parameter + function to check the call and reveal types of current pipeline step 3. 
We iterate through all passed function and use previous return type as a new parameter to call current function """ if not ctx.arg_types[0]: return ctx.default_return_type if not ctx.arg_types[1]: # We do require to pass `*functions` arg. ctx.api.fail('Too few arguments for "flow"', ctx.context) return ctx.default_return_type # We use custom argument type inference here, # because for some reason, `mypy` does not do it correctly. # It inferes `covariant` types incorrectly. real_arg_types = tuple( ctx.api.expr_checker.accept(arg) # type: ignore for arg in ctx.args[1] ) return PipelineInference( get_proper_type(ctx.arg_types[0][0]), ).from_callable_sequence( real_arg_types, ctx.arg_kinds[1], ctx, ) returns-0.24.0/returns/contrib/mypy/_features/kind.py000066400000000000000000000122631472312074000227170ustar00rootroot00000000000000from collections.abc import Sequence from enum import Enum, unique from mypy.checkmember import analyze_member_access from mypy.plugin import ( AttributeContext, FunctionContext, MethodContext, MethodSigContext, ) from mypy.typeops import bind_self from mypy.types import AnyType, CallableType, FunctionLike, Instance, Overloaded from mypy.types import Type as MypyType from mypy.types import TypeOfAny, TypeType, TypeVarType, get_proper_type from returns.contrib.mypy._typeops.fallback import asserts_fallback_to_any from returns.contrib.mypy._typeops.visitor import translate_kind_instance # TODO: probably we can validate `KindN[]` creation during `get_analtype` @asserts_fallback_to_any def attribute_access(ctx: AttributeContext) -> MypyType: """ Ensures that attribute access to ``KindN`` is correct. In other words: .. code:: python from typing import TypeVar from returns.primitives.hkt import KindN from returns.interfaces.mappable import MappableN _MappableType = TypeVar('_MappableType', bound=MappableN) kind: KindN[_MappableType, int, int, int] reveal_type(kind.map) # will work correctly! """ assert isinstance(ctx.type, Instance) instance = get_proper_type(ctx.type.args[0]) if isinstance(instance, TypeVarType): bound = get_proper_type(instance.upper_bound) assert isinstance(bound, Instance) accessed = bound.copy_modified( args=_crop_kind_args(ctx.type, bound.args), ) elif isinstance(instance, Instance): accessed = instance.copy_modified(args=_crop_kind_args(ctx.type)) else: return ctx.default_attr_type exprchecker = ctx.api.expr_checker # type: ignore return analyze_member_access( ctx.context.name, # type: ignore accessed, ctx.context, is_lvalue=False, is_super=False, is_operator=False, msg=ctx.api.msg, original_type=instance, chk=ctx.api, # type: ignore in_literal_context=exprchecker.is_literal_context(), ) def dekind(ctx: FunctionContext) -> MypyType: """ Infers real type behind ``Kind`` form. Basically, it turns ``Kind[IO, int]`` into ``IO[int]``. The only limitation is that it works with only ``Instance`` type in the first type argument position. So, ``dekind(KindN[T, int])`` will fail. """ kind = get_proper_type(ctx.arg_types[0][0]) assert isinstance(kind, Instance) # mypy requires these lines kind_inst = get_proper_type(kind.args[0]) if not isinstance(kind_inst, Instance): ctx.api.fail(_KindErrors.dekind_not_instance, ctx.context) return AnyType(TypeOfAny.from_error) return kind_inst.copy_modified(args=_crop_kind_args(kind)) @asserts_fallback_to_any def kinded_signature(ctx: MethodSigContext) -> CallableType: """ Returns the internal function wrapped as ``Kinded[def]``. Works for ``Kinded`` class when ``__call__`` magic method is used. 
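    A rough sketch of the user-facing code this hook supports
    (the names below are illustrative, assuming ``kinded`` and ``KindN``
    from ``returns.primitives.hkt``):

    .. code:: python

        @kinded
        def to_str(
            container: KindN[_MappableType, int, int, int],
        ) -> KindN[_MappableType, str, int, int]:
            ...
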
See :class:`returns.primitives.hkt.Kinded` for more information. """ assert isinstance(ctx.type, Instance) wrapped_method = get_proper_type(ctx.type.args[0]) assert isinstance(wrapped_method, FunctionLike) if isinstance(wrapped_method, Overloaded): return ctx.default_signature assert isinstance(wrapped_method, CallableType) return wrapped_method # TODO: we should raise an error if bound type does not have any `KindN` # instances, because that's not how `@kinded` and `Kinded[]` should be used. def kinded_call(ctx: MethodContext) -> MypyType: """ Reveals the correct return type of ``Kinded.__call__`` method. Turns ``-> KindN[I, t1, t2, t3]`` into ``-> I[t1, t2, t3]``. Also strips unused type arguments for ``KindN``, so: - ``KindN[IO, int, Never, Never]`` will be ``IO[int]`` - ``KindN[Result, int, str, Never]`` will be ``Result[int, str]`` It also processes nested ``KindN`` with recursive strategy. See :class:`returns.primitives.hkt.Kinded` for more information. """ return translate_kind_instance(ctx.default_return_type) @asserts_fallback_to_any def kinded_get_descriptor(ctx: MethodContext) -> MypyType: """ Used to analyze ``@kinded`` method calls. We do this due to ``__get__`` descriptor magic. """ assert isinstance(ctx.type, Instance) wrapped_method = get_proper_type(ctx.type.args[0]) assert isinstance(wrapped_method, CallableType) self_type = get_proper_type(wrapped_method.arg_types[0]) signature = bind_self( wrapped_method, is_classmethod=isinstance(self_type, TypeType), ) return ctx.type.copy_modified(args=[signature]) @unique # noqa: WPS600 class _KindErrors(str, Enum): # noqa: WPS600 """Represents a set of possible errors we can throw during typechecking.""" dekind_not_instance = ( 'dekind must be used with Instance as the first type argument' ) def _crop_kind_args( kind: Instance, limit: Sequence[MypyType] | None = None, ) -> tuple[MypyType, ...]: """Returns the correct amount of type arguments for a kind.""" if limit is None: limit = kind.args[0].args # type: ignore return kind.args[1:len(limit) + 1] returns-0.24.0/returns/contrib/mypy/_features/partial.py000066400000000000000000000213511472312074000234240ustar00rootroot00000000000000from collections.abc import Iterator from typing import Final, final from mypy.nodes import ARG_STAR, ARG_STAR2 from mypy.plugin import FunctionContext from mypy.types import ( CallableType, FunctionLike, Instance, Overloaded, ProperType, TypeType, get_proper_type, ) from returns.contrib.mypy._structures.args import FuncArg from returns.contrib.mypy._typeops.analtype import ( analyze_call, safe_translate_to_function, ) from returns.contrib.mypy._typeops.inference import CallableInference from returns.contrib.mypy._typeops.transform_callable import ( Functions, Intermediate, detach_callable, proper_type, ) _SUPPORTED_TYPES: Final = ( CallableType, Instance, TypeType, Overloaded, ) def analyze(ctx: FunctionContext) -> ProperType: """ This hook is used to make typed curring a thing in `returns` project. This plugin is a temporary solution to the problem. It should be later replaced with the official way of doing things. One day functions will have better API and we plan to submit this plugin into ``mypy`` core plugins, so it would not be required. Internally we just reduce the original function's argument count. And drop some of them from function's signature. 
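    A quick sketch of what this enables for end users (the revealed type
    below is approximate):

    .. code:: python

        from returns.curry import partial

        def example(first: int, second: str) -> bool:
            ...

        reveal_type(partial(example, 1))
        # => def (second: builtins.str) -> builtins.bool
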
""" default_return = get_proper_type(ctx.default_return_type) if not isinstance(default_return, CallableType): return default_return function_def = get_proper_type(ctx.arg_types[0][0]) func_args = _AppliedArgs(ctx) if len(list(filter(len, ctx.arg_types))) == 1: return function_def # this means, that `partial(func)` is called elif not isinstance(function_def, _SUPPORTED_TYPES): return default_return elif isinstance(function_def, (Instance, TypeType)): # We force `Instance` and similar types to coercse to callable: function_def = func_args.get_callable_from_context() is_valid, applied_args = func_args.build_from_context() if not isinstance(function_def, (CallableType, Overloaded)) or not is_valid: return default_return return _PartialFunctionReducer( default_return, function_def, applied_args, ctx, ).new_partial() @final class _PartialFunctionReducer: """ Helper object to work with curring. Here's a quick overview of things that is going on inside: 1. Firstly we create intermediate callable that represents a subset of argument that are passed with the ``curry`` call 2. Then, we run typechecking on this intermediate function and passed arguments to make sure that everything is correct 3. Then, we subtract intermediate arguments from the passed function 4. Finally we run type substitution on newly created final function to replace generic vars we already know to make sure that everything still works and the number of type vars is reduced This plugin requires several things: - One should now how ``ExpressionChecker`` from ``mypy`` works - What ``FunctionLike`` is - How kinds work in type checking - What ``map_actuals_to_formals`` is - How constraints work That's not an easy plugin to work with. """ def __init__( self, default_return_type: FunctionLike, original: FunctionLike, applied_args: list[FuncArg], ctx: FunctionContext, ) -> None: """ Saving the things we need. Args: default_return_type: default callable type got by ``mypy``. original: passed function to be curried. applied_args: arguments that are already provided in the definition. ctx: plugin hook context provided by ``mypy``. """ self._default_return_type = default_return_type self._original = original self._applied_args = applied_args self._ctx = ctx self._case_functions: list[CallableType] = [] self._fallbacks: list[CallableType] = [] def new_partial(self) -> ProperType: """ Creates new partial functions. Splits passed functions into ``case_function`` where each overloaded spec is processed inducidually. Then we combine everything back together removing unfit parts. """ for case_function in self._original.items: fallback, intermediate = self._create_intermediate(case_function) self._fallbacks.append(fallback) if intermediate: partial = self._create_partial_case( case_function, intermediate, fallback, ) self._case_functions.append(partial) return self._create_new_partial() def _create_intermediate( self, case_function: CallableType, ) -> tuple[CallableType, CallableType | None]: intermediate = Intermediate(case_function).with_applied_args( self._applied_args, ) return intermediate, analyze_call( intermediate, self._applied_args, self._ctx, show_errors=False, ) def _create_partial_case( self, case_function: CallableType, intermediate: CallableType, fallback: CallableType, ) -> CallableType: partial = CallableInference( Functions(case_function, intermediate).diff(), self._ctx, fallback=fallback, ).from_usage(self._applied_args) if case_function.is_generic(): # We can deal with really different `case_function` over here. 
# The first one is regular `generic` function # that has variables and typevars in its spec. # In this case, we process `partial` the same way. # It should be generic also. # # The second possible type of `case_function` is pseudo-generic. # These are functions that contain typevars in its spec, # but variables are empty. # Probably these functions are already used in a generic context. # So, we ignore them and do not add variables back. # # Regular functions are also untouched by this. return detach_callable(partial) return partial.copy_modified(variables=[]) def _create_new_partial(self) -> ProperType: """ Creates a new partial function-like from set of callables. We also need fallbacks here, because sometimes there are no possible ways to create at least a single partial case. In this scenario we analyze the set of fallbacks and tell user what went wrong. """ if not self._case_functions: analyze_call( proper_type(self._fallbacks), self._applied_args, self._ctx, show_errors=True, ) return self._default_return_type return proper_type(self._case_functions) @final class _AppliedArgs: """Builds applied args that were partially applied.""" def __init__(self, function_ctx: FunctionContext) -> None: """ We need the function default context. The first arguments of ``partial`` is skipped: it is the applied function itself. """ self._function_ctx = function_ctx self._parts = zip( self._function_ctx.arg_names[1:], self._function_ctx.arg_types[1:], self._function_ctx.arg_kinds[1:], ) def get_callable_from_context(self) -> ProperType: """Returns callable type from the context.""" return get_proper_type(safe_translate_to_function( self._function_ctx.arg_types[0][0], self._function_ctx, )) def build_from_context(self) -> tuple[bool, list[FuncArg]]: """ Builds handy arguments structures from the context. Some usages might be invalid, because we cannot really infer some arguments. .. code:: python partial(some, *args) partial(other, **kwargs) Here ``*args`` and ``**kwargs`` can be literally anything! In these cases we fallback to the default return type. """ applied_args = [] for names, types, kinds in self._parts: for arg in self._generate_applied_args(zip(names, types, kinds)): if arg.kind in {ARG_STAR, ARG_STAR2}: # We cannot really work with `*args`, `**kwargs`. return False, [] applied_args.append(arg) return True, applied_args def _generate_applied_args(self, arg_parts) -> Iterator[FuncArg]: yield from ( FuncArg(name, typ, kind) for name, typ, kind in arg_parts ) returns-0.24.0/returns/contrib/mypy/_features/pipe.py000066400000000000000000000107421472312074000227270ustar00rootroot00000000000000""" Typing ``pipe`` functions requires several phases. It is pretty obvious from its usage: 1. When we pass a sequence of functions we have to reduce the final callable type, it is require to match the ``callable`` protocol. And at this point we also kinda try to check that all pipeline functions do match, but this is impossible to do 100% correctly at this point, because generic functions don't have a type argument to infer the final result 2. When we call the function, we need to check for two things. First, we check that passed argument fits our instance requirement. Second, we check that pipeline functions match. Now we have all arguments to do the real inference. 3. We also need to fix generic in method signature. It might be broken, because we add new generic arguments and return type. So, it is safe to reattach generic back to the function. Here's when it works: .. 
code:: python >>> from returns.pipeline import pipe >>> def first(arg: int) -> bool: ... return arg > 0 >>> def second(arg: bool) -> str: ... return 'bigger' if arg else 'not bigger' >>> pipeline = pipe(first, second) # `analyzed` is called >>> assert pipeline(1) == 'bigger' # `signature and `infer` are called >>> assert pipeline(0) == 'not bigger' # `signature and `infer` again """ from collections.abc import Callable from mypy.nodes import ARG_POS from mypy.plugin import FunctionContext, MethodContext, MethodSigContext from mypy.types import AnyType, CallableType, FunctionLike, Instance, ProperType from mypy.types import Type as MypyType from mypy.types import TypeOfAny, UnionType, get_proper_type, get_proper_types from returns.contrib.mypy._typeops.analtype import translate_to_function from returns.contrib.mypy._typeops.inference import PipelineInference from returns.contrib.mypy._typeops.transform_callable import detach_callable def analyze(ctx: FunctionContext) -> MypyType: """This hook helps when we create the pipeline from sequence of funcs.""" default_return = get_proper_type(ctx.default_return_type) if not isinstance(default_return, Instance): return default_return if not ctx.arg_types[0]: # We do require to pass `*functions` arg. ctx.api.fail('Too few arguments for "pipe"', ctx.context) return default_return arg_types = [arg_type[0] for arg_type in ctx.arg_types if arg_type] first_step, last_step = _get_pipeline_def(arg_types, ctx) if not isinstance(first_step, FunctionLike): return default_return if not isinstance(last_step, FunctionLike): return default_return return default_return.copy_modified( args=[ # First type argument represents first function arguments type: _unify_type(first_step, _get_first_arg_type), # Second argument represents pipeline final return type: _unify_type(last_step, lambda case: case.ret_type), # Other types are just functions inside the pipeline: *arg_types, ], ) def infer(ctx: MethodContext) -> MypyType: """This hook helps when we finally call the created pipeline.""" if not isinstance(ctx.type, Instance): return ctx.default_return_type pipeline_functions = get_proper_types(ctx.type.args[2:]) return PipelineInference( get_proper_type(ctx.arg_types[0][0]), ).from_callable_sequence( pipeline_functions, list((ARG_POS,) * len(pipeline_functions)), ctx, ) def signature(ctx: MethodSigContext) -> CallableType: """Helps to fix generics in method signature.""" return detach_callable(ctx.default_signature) def _get_first_arg_type(case: CallableType) -> MypyType: """Function might not have args at all.""" if case.arg_types: return case.arg_types[0] return AnyType(TypeOfAny.implementation_artifact) def _unify_type( function: FunctionLike, fetch_type: Callable[[CallableType], MypyType], ) -> MypyType: return UnionType.make_union([ fetch_type(case) for case in function.items ]) def _get_pipeline_def( arg_types: list[MypyType], ctx: FunctionContext, ) -> tuple[ProperType, ProperType]: first_step = get_proper_type(arg_types[0]) last_step = get_proper_type(arg_types[-1]) if not isinstance(first_step, FunctionLike): first_step = translate_to_function(first_step, ctx) if not isinstance(last_step, FunctionLike): last_step = translate_to_function(last_step, ctx) return first_step, last_step 
returns-0.24.0/returns/contrib/mypy/_structures/000077500000000000000000000000001472312074000220215ustar00rootroot00000000000000returns-0.24.0/returns/contrib/mypy/_structures/__init__.py000066400000000000000000000000001472312074000241200ustar00rootroot00000000000000returns-0.24.0/returns/contrib/mypy/_structures/args.py000066400000000000000000000020061472312074000233250ustar00rootroot00000000000000from collections import namedtuple from typing import final from mypy.nodes import ArgKind, Context, TempNode from mypy.types import CallableType from mypy.types import Type as MypyType #: Basic struct to represent function arguments. _FuncArgStruct = namedtuple('_FuncArgStruct', ('name', 'type', 'kind')) @final class FuncArg(_FuncArgStruct): """Representation of function arg with all required fields and methods.""" name: str | None type: MypyType # noqa: WPS125 kind: ArgKind def expression(self, context: Context) -> TempNode: """Hack to pass unexisting `Expression` to typechecker.""" return TempNode(self.type, context=context) @classmethod def from_callable(cls, function_def: CallableType) -> list['FuncArg']: """Public constructor to create FuncArg lists from callables.""" parts = zip( function_def.arg_names, function_def.arg_types, function_def.arg_kinds, ) return [cls(*part) for part in parts] returns-0.24.0/returns/contrib/mypy/_structures/types.py000066400000000000000000000003261472312074000235400ustar00rootroot00000000000000from typing import Union from mypy.plugin import FunctionContext, MethodContext #: We treat them equally when working with functions or methods. CallableContext = Union[ FunctionContext, MethodContext, ] returns-0.24.0/returns/contrib/mypy/_typeops/000077500000000000000000000000001472312074000213015ustar00rootroot00000000000000returns-0.24.0/returns/contrib/mypy/_typeops/__init__.py000066400000000000000000000000001472312074000234000ustar00rootroot00000000000000returns-0.24.0/returns/contrib/mypy/_typeops/analtype.py000066400000000000000000000072161472312074000234760ustar00rootroot00000000000000from types import MappingProxyType from typing import Final, Literal, overload from mypy.checkmember import analyze_member_access from mypy.nodes import ARG_NAMED, ARG_OPT from mypy.types import CallableType, FunctionLike, ProperType from mypy.types import Type as MypyType from mypy.types import get_proper_type from returns.contrib.mypy._structures.args import FuncArg from returns.contrib.mypy._structures.types import CallableContext #: Mapping for better `call || function` argument compatibility. _KIND_MAPPING: Final = MappingProxyType({ # We have to replace `ARG_OPT` to `ARG_NAMED`, # because `ARG_OPT` is only used in function defs, not calls. # And `ARG_NAMED` is the same thing for calls. ARG_OPT: ARG_NAMED, }) @overload def analyze_call( function: FunctionLike, args: list[FuncArg], ctx: CallableContext, *, show_errors: Literal[True], ) -> CallableType: """Case when errors are reported and we cannot get ``None``.""" @overload def analyze_call( function: FunctionLike, args: list[FuncArg], ctx: CallableContext, *, show_errors: bool, ) -> CallableType | None: """Errors are not reported, we can get ``None`` when errors happen.""" def analyze_call(function, args, ctx, *, show_errors): """ Analyzes function call based on passed arguments. Internally uses ``check_call`` from ``mypy``. It does a lot of magic. We also allow to return ``None`` instead of showing errors. This might be helpful for cases when we run intermediate analysis. 
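    A small sketch of the two modes (mirroring the overloads above):

    .. code:: python

        # Intermediate analysis: failures are silenced, ``None`` is returned:
        checked = analyze_call(function, args, ctx, show_errors=False)
        if checked is None:
            ...  # this signature does not fit the applied args

        # Final analysis: errors are reported to the user:
        analyze_call(function, args, ctx, show_errors=True)
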
""" checker = ctx.api.expr_checker with checker.msg.filter_errors(save_filtered_errors=True) as local_errors: return_type, checked_function = checker.check_call( function, [arg.expression(ctx.context) for arg in args], [_KIND_MAPPING.get(arg.kind, arg.kind) for arg in args], ctx.context, [arg.name for arg in args], ) if not show_errors and local_errors.has_new_errors(): # noqa: WPS441 return None checker.msg.add_errors(local_errors.filtered_errors()) # noqa: WPS441 return checked_function def safe_translate_to_function( function_def: MypyType, ctx: CallableContext, ) -> MypyType: """ Transforms many other types to something close to callable type. There's why we need it: - We can use this on real functions - We can use this on ``@overload`` functions - We can use this on instances with ``__call__`` - We can use this on ``Type`` types It can probably work with other types as well. This function allows us to unify this process. We also need to disable errors, because we explicitly pass empty args. This function also resolves all type arguments. """ checker = ctx.api.expr_checker # type: ignore with checker.msg.filter_errors(): _return_type, function_def = checker.check_call( function_def, [], [], ctx.context, [], ) return function_def def translate_to_function( function_def: ProperType, ctx: CallableContext, ) -> ProperType: """ Tries to translate a type into callable by accessing ``__call__`` attr. This might fail with ``mypy`` errors and that's how it must work. This also preserves all type arguments as-is. """ checker = ctx.api.expr_checker # type: ignore return get_proper_type(analyze_member_access( '__call__', function_def, ctx.context, is_lvalue=False, is_super=False, is_operator=True, msg=checker.msg, original_type=function_def, chk=checker.chk, in_literal_context=checker.is_literal_context(), )) returns-0.24.0/returns/contrib/mypy/_typeops/fallback.py000066400000000000000000000016121472312074000234120ustar00rootroot00000000000000from collections.abc import Callable from functools import wraps from typing import TypeVar from mypy.types import AnyType, TypeOfAny _CallableType = TypeVar('_CallableType', bound=Callable) def asserts_fallback_to_any(function: _CallableType) -> _CallableType: """ Falls back to ``Any`` when some ``assert ...`` fails in our plugin code. We often use ``assert isinstance(variable, Instance)`` as a way to ensure correctness in this plugin. But, we need a generic way to handle all possible exceptions in a single manner: just return ``Any`` and hope that someday someone reports it. 
""" @wraps(function) def decorator(*args, **kwargs): try: return function(*args, **kwargs) except AssertionError: # TODO: log it somehow return AnyType(TypeOfAny.implementation_artifact) return decorator # type: ignore returns-0.24.0/returns/contrib/mypy/_typeops/inference.py000066400000000000000000000111131472312074000236060ustar00rootroot00000000000000from collections.abc import Iterable, Mapping from typing import TypeAlias, cast, final from mypy.argmap import map_actuals_to_formals from mypy.constraints import infer_constraints_for_callable from mypy.expandtype import expand_type from mypy.nodes import ARG_POS, ArgKind from mypy.plugin import FunctionContext from mypy.types import CallableType, FunctionLike, ProperType from mypy.types import Type as MypyType from mypy.types import TypeVarId, get_proper_type from returns.contrib.mypy._structures.args import FuncArg from returns.contrib.mypy._structures.types import CallableContext from returns.contrib.mypy._typeops.analtype import analyze_call #: Mapping of `typevar` to real type. _Constraints: TypeAlias = Mapping[TypeVarId, MypyType] @final class CallableInference: """ Used to infer function arguments and return type. There are multiple ways to do it. For example, one can infer argument types from its usage. """ def __init__( self, case_function: CallableType, ctx: FunctionContext, *, fallback: CallableType | None = None, ) -> None: """ Create the callable inference. Sometimes we need two functions. When construction one function from another there might be some lost information during the process. That's why we optionally need ``fallback``. If it is not provided, we treat ``case_function`` as a full one. Args: case_function: function with solved constraints. fallback: Function with unsolved constraints. ctx: Function context with checker and expr_checker objects. """ self._case_function = case_function self._fallback = fallback if fallback else self._case_function self._ctx = ctx def from_usage( self, applied_args: list[FuncArg], ) -> CallableType: """Infers function constrains from its usage: passed arguments.""" constraints = self._infer_constraints(applied_args) return expand_type(self._case_function, constraints) def _infer_constraints( self, applied_args: list[FuncArg], ) -> _Constraints: """Creates mapping of ``typevar`` to real type that we already know.""" checker = self._ctx.api.expr_checker # type: ignore kinds = [arg.kind for arg in applied_args] exprs = [ arg.expression(self._ctx.context) for arg in applied_args ] formal_to_actual = map_actuals_to_formals( kinds, [arg.name for arg in applied_args], self._fallback.arg_kinds, self._fallback.arg_names, lambda index: checker.accept(exprs[index]), ) constraints = infer_constraints_for_callable( self._fallback, arg_types=[arg.type for arg in applied_args], arg_kinds=kinds, arg_names=[arg.name for arg in applied_args], formal_to_actual=formal_to_actual, context=checker.argument_infer_context(), ) return { constraint.type_var: constraint.target for constraint in constraints } @final class PipelineInference: """ Very helpful tool to work with functions like ``flow`` and ``pipe``. It iterates all over the given list of pipeline steps, passes the first argument, and then infers types step by step. 
""" def __init__(self, instance: ProperType) -> None: """We do need the first argument to start the inference.""" self._instance = instance def from_callable_sequence( self, pipeline_types: Iterable[ProperType], pipeline_kinds: Iterable[ArgKind], ctx: CallableContext, ) -> ProperType: """Pass pipeline functions to infer them one by one.""" parameter = FuncArg(None, self._instance, ARG_POS) ret_type = get_proper_type(ctx.default_return_type) for pipeline, kind in zip(pipeline_types, pipeline_kinds): ret_type = self._proper_type( analyze_call( cast(FunctionLike, pipeline), [parameter], ctx, show_errors=True, ), ) parameter = FuncArg(None, ret_type, kind) return ret_type def _proper_type(self, typ: MypyType) -> ProperType: res_typ = get_proper_type(typ) if isinstance(res_typ, CallableType): return get_proper_type(res_typ.ret_type) return res_typ # It might be `Instance` or `AnyType` or `Nothing` returns-0.24.0/returns/contrib/mypy/_typeops/transform_callable.py000066400000000000000000000152451472312074000255140ustar00rootroot00000000000000from typing import ClassVar, final from mypy.nodes import ARG_OPT, ARG_POS, ARG_STAR, ARG_STAR2, ArgKind from mypy.typeops import get_type_vars from mypy.types import AnyType, CallableType, FunctionLike, Overloaded from mypy.types import Type as MypyType from mypy.types import TypeOfAny, TypeVarType from returns.contrib.mypy._structures.args import FuncArg def proper_type( case_functions: list[CallableType], ) -> FunctionLike: """Returns a ``CallableType`` or ``Overloaded`` based on case functions.""" if len(case_functions) == 1: return case_functions[0] return Overloaded(case_functions) @final class Intermediate: """ Allows to build a new callable from old one and different options. For example, helps to tell which callee arguments was already provided in caller. """ #: Positional arguments can be of this kind. _positional_kinds: ClassVar[frozenset[ArgKind]] = frozenset(( ARG_POS, ARG_OPT, ARG_STAR, )) def __init__(self, case_function: CallableType) -> None: """We only need a callable to work on.""" self._case_function = case_function def with_applied_args(self, applied_args: list[FuncArg]) -> CallableType: """ By calling this method we construct a new callable from its usage. This allows use to create an intermediate callable with just used args. """ new_pos_args = self._applied_positional_args(applied_args) new_named_args = self._applied_named_args(applied_args) return self.with_signature(new_pos_args + new_named_args) def with_signature(self, new_args: list[FuncArg]) -> CallableType: """Smartly creates a new callable from a given arguments.""" return detach_callable(self._case_function.copy_modified( arg_names=[arg.name for arg in new_args], arg_types=[arg.type for arg in new_args], arg_kinds=[arg.kind for arg in new_args], )) def with_ret_type(self, ret_type: MypyType) -> CallableType: """Smartly creates a new callable from a given return type.""" return self._case_function.copy_modified(ret_type=ret_type) def _applied_positional_args( self, applied_args: list[FuncArg], ) -> list[FuncArg]: callee_args = list(filter( lambda name: name.name is None, # TODO: maybe use `kind` instead? 
applied_args, )) new_function_args = [] for ind, arg in enumerate(FuncArg.from_callable(self._case_function)): if arg.kind in self._positional_kinds and ind < len(callee_args): new_function_args.append(arg) return new_function_args def _applied_named_args( self, applied_args: list[FuncArg], ) -> list[FuncArg]: callee_args = list(filter( lambda name: name.name is not None, applied_args, )) new_function_args = [] for arg in FuncArg.from_callable(self._case_function): has_named_arg_def = any( # Argument can either be used as a named argument # or passed to `**kwargs` if it exists. arg.name == rdc.name or arg.kind == ARG_STAR2 for rdc in callee_args ) if callee_args and has_named_arg_def: new_function_args.append(arg) return new_function_args @final class Functions: """ Allows to create new callables based on two existing ones. For example, one can need a diff of two callables. """ def __init__( self, original: CallableType, intermediate: CallableType, ) -> None: """We need two callable to work with.""" self._original = original self._intermediate = intermediate def diff(self) -> CallableType: """Finds a diff between two functions' arguments.""" intermediate_names = [ arg.name for arg in FuncArg.from_callable(self._intermediate) ] new_function_args = [] for index, arg in enumerate(FuncArg.from_callable(self._original)): should_be_copied = ( arg.kind in {ARG_STAR, ARG_STAR2} or arg.name not in intermediate_names or # We need to treat unnamed args differently, because python3.8 # has pos_only_args, all their names are `None`. # This is also true for `lambda` functions where `.name` # might be missing for some reason. ( not arg.name and not ( index < len(intermediate_names) and # If this is also unnamed arg, then ignoring it. not intermediate_names[index] ) ) ) if should_be_copied: new_function_args.append(arg) return Intermediate(self._original).with_signature( new_function_args, ) # TODO: Remove this function once `mypy` order the TypeVars # by their appearance sequence def detach_callable(typ: CallableType) -> CallableType: # noqa: C901, WPS210 """ THIS IS A COPY OF `mypy.checker.detach_callable` FUNCTION. THE ONLY PURPOSE WE'VE COPIED IS TO GUARANTEE A DETERMINISTIC FOR OUR TYPE VARIABLES! AS YOU CAN SEE, WE ORDER THE TYPE VARS BY THEIR APPEARANCE SEQUENCE. 
""" type_list = typ.arg_types + [typ.ret_type] appear_map: dict[str, list[int]] = {} for idx, inner_type in enumerate(type_list): typevars_available = get_type_vars(inner_type) for var in typevars_available: # noqa: WPS110 if var.fullname not in appear_map: appear_map[var.fullname] = [] appear_map[var.fullname].append(idx) used_type_var_names = set() for var_name, _ in appear_map.items(): used_type_var_names.add(var_name) all_type_vars = get_type_vars(typ) new_variables = [] for var in set(all_type_vars): # noqa: WPS110 if var.fullname not in used_type_var_names: continue new_variables.append( TypeVarType( name=var.name, fullname=var.fullname, id=var.id, values=var.values, upper_bound=var.upper_bound, variance=var.variance, default=AnyType(TypeOfAny.from_omitted_generics), ), ) new_variables = sorted( new_variables, key=lambda item: appear_map[item.fullname][0], # noqa: WPS110 ) return typ.copy_modified( variables=new_variables, arg_types=type_list[:-1], ret_type=type_list[-1], ) returns-0.24.0/returns/contrib/mypy/_typeops/visitor.py000066400000000000000000000105761472312074000233630ustar00rootroot00000000000000from collections.abc import Iterable from mypy.typeops import erase_to_bound from mypy.types import ( AnyType, CallableType, DeletedType, ErasedType, Instance, LiteralType, NoneType, Overloaded, PartialType, ProperType, TupleType, ) from mypy.types import Type as MypyType from mypy.types import ( TypedDictType, TypeOfAny, TypeType, TypeVarType, UnboundType, UninhabitedType, UnionType, get_proper_type, ) from returns.contrib.mypy._consts import TYPED_KINDN # TODO: replace with real `TypeTranslator` in the next mypy release. _LEAF_TYPES = ( UnboundType, AnyType, NoneType, UninhabitedType, ErasedType, DeletedType, TypeVarType, PartialType, ) def translate_kind_instance(typ: MypyType) -> ProperType: # noqa: WPS, C901 """ We use this ugly hack to translate ``KindN[x, y]`` into ``x[y]``. This is required due to the fact that ``KindN`` can be nested in other types, like: ``List[KindN[...]]``. We will refactor this code after ``TypeTranslator`` is released in ``mypy@0.800`` version. 
""" typ = get_proper_type(typ) if isinstance(typ, _LEAF_TYPES): # noqa: WPS223 return typ elif isinstance(typ, Instance): last_known_value: LiteralType | None = None if typ.last_known_value is not None: raw_last_known_value = translate_kind_instance(typ.last_known_value) assert isinstance(raw_last_known_value, LiteralType) last_known_value = raw_last_known_value instance = Instance( typ=typ.type, args=_translate_types(typ.args), line=typ.line, column=typ.column, last_known_value=last_known_value, ) if typ.type.fullname == TYPED_KINDN: # That's where we do the change return _process_kinded_type(instance) return instance elif isinstance(typ, CallableType): return typ.copy_modified( arg_types=_translate_types(typ.arg_types), ret_type=translate_kind_instance(typ.ret_type), ) elif isinstance(typ, TupleType): return TupleType( _translate_types(typ.items), translate_kind_instance(typ.partial_fallback), # type: ignore typ.line, typ.column, ) elif isinstance(typ, TypedDictType): dict_items: dict[str, MypyType] = { item_name: translate_kind_instance(item_type) for item_name, item_type in typ.items.items() } return TypedDictType( dict_items, required_keys=typ.required_keys, readonly_keys=typ.readonly_keys, fallback=translate_kind_instance(typ.fallback), # type: ignore line=typ.line, column=typ.column, ) elif isinstance(typ, LiteralType): fallback = translate_kind_instance(typ.fallback) assert isinstance(fallback, Instance) return LiteralType( value=typ.value, fallback=fallback, line=typ.line, column=typ.column, ) elif isinstance(typ, UnionType): return UnionType(_translate_types(typ.items), typ.line, typ.column) elif isinstance(typ, Overloaded): functions: list[CallableType] = [] for func in typ.items: new = translate_kind_instance(func) assert isinstance(new, CallableType) functions.append(new) return Overloaded(items=functions) elif isinstance(typ, TypeType): return TypeType.make_normalized( translate_kind_instance(typ.item), line=typ.line, column=typ.column, ) return typ def _translate_types(types: Iterable[MypyType]) -> list[MypyType]: return [translate_kind_instance(typ) for typ in types] def _process_kinded_type(kind: Instance) -> ProperType: """Recursively process all type arguments in a kind.""" if not kind.args: return kind real_type = get_proper_type(kind.args[0]) if isinstance(real_type, TypeVarType): return get_proper_type(erase_to_bound(real_type)) elif isinstance(real_type, Instance): return real_type.copy_modified( args=kind.args[1:len(real_type.args) + 1], ) # This should never happen, probably can be an exception: return AnyType(TypeOfAny.implementation_artifact) returns-0.24.0/returns/contrib/mypy/returns_plugin.py000066400000000000000000000072511472312074000230760ustar00rootroot00000000000000""" Custom mypy plugin to solve the temporary problem with python typing. Important: we don't do anything ugly here. We only solve problems of the current typing implementation. 
``mypy`` API docs are here: https://mypy.readthedocs.io/en/latest/extending_mypy.html We use ``pytest-mypy-plugins`` to test that it works correctly, see: https://github.com/mkurnikov/pytest-mypy-plugins """ from collections.abc import Callable, Mapping from typing import ClassVar, Optional, final from mypy.nodes import SymbolTableNode from mypy.plugin import ( AttributeContext, FunctionContext, MethodContext, MethodSigContext, Plugin, ) from mypy.types import CallableType from mypy.types import Type as MypyType from returns.contrib.mypy import _consts from returns.contrib.mypy._features import ( curry, do_notation, flow, kind, partial, pipe, ) # Type aliases # ============ #: Type for a function hook. _FunctionCallback = Callable[[FunctionContext], MypyType] #: Type for a function hook that need a definition node. _FunctionDefCallback = Callable[ [Optional[SymbolTableNode]], Callable[[FunctionContext], MypyType], ] #: Type for attribute hook. _AttributeCallback = Callable[[AttributeContext], MypyType] #: Type for a method hook. _MethodCallback = Callable[[MethodContext], MypyType] #: Type for a method signature hook. _MethodSigCallback = Callable[[MethodSigContext], CallableType] # Interface # ========= @final class _ReturnsPlugin(Plugin): """Our main plugin to dispatch different callbacks to specific features.""" _function_hook_plugins: ClassVar[Mapping[str, _FunctionCallback]] = { _consts.TYPED_PARTIAL_FUNCTION: partial.analyze, _consts.TYPED_CURRY_FUNCTION: curry.analyze, _consts.TYPED_FLOW_FUNCTION: flow.analyze, _consts.TYPED_PIPE_FUNCTION: pipe.analyze, _consts.TYPED_KIND_DEKIND: kind.dekind, } _method_sig_hook_plugins: ClassVar[Mapping[str, _MethodSigCallback]] = { _consts.TYPED_PIPE_METHOD: pipe.signature, _consts.TYPED_KIND_KINDED_CALL: kind.kinded_signature, } _method_hook_plugins: ClassVar[Mapping[str, _MethodCallback]] = { _consts.TYPED_PIPE_METHOD: pipe.infer, _consts.TYPED_KIND_KINDED_CALL: kind.kinded_call, _consts.TYPED_KIND_KINDED_GET: kind.kinded_get_descriptor, **dict.fromkeys(_consts.DO_NOTATION_METHODS, do_notation.analyze), } def get_function_hook( self, fullname: str, ) -> _FunctionCallback | None: """ Called for function return types from ``mypy``. Runs on each function call in the source code. We are only interested in a particular subset of all functions. So, we return a function handler for them. Otherwise, we return ``None``. 
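        Roughly, this is how ``mypy`` consumes the hook (a sketch,
        not actual ``mypy`` internals):

        .. code:: python

            handler = plugin.get_function_hook('returns.curry.partial')
            if handler is not None:
                new_return_type = handler(function_ctx)
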
""" return self._function_hook_plugins.get(fullname) def get_attribute_hook( self, fullname: str, ) -> _AttributeCallback | None: """Called for any exiting or ``__getattr__`` aatribute access.""" if fullname.startswith(_consts.TYPED_KINDN_ACCESS): return kind.attribute_access return None def get_method_signature_hook( self, fullname: str, ) -> _MethodSigCallback | None: """Called for method signature from ``mypy``.""" return self._method_sig_hook_plugins.get(fullname) def get_method_hook( self, fullname: str, ) -> _MethodCallback | None: """Called for method return types from ``mypy``.""" return self._method_hook_plugins.get(fullname) def plugin(version: str) -> type[Plugin]: """Plugin's public API and entrypoint.""" return _ReturnsPlugin returns-0.24.0/returns/contrib/pytest/000077500000000000000000000000001472312074000177715ustar00rootroot00000000000000returns-0.24.0/returns/contrib/pytest/__init__.py000066400000000000000000000001131472312074000220750ustar00rootroot00000000000000from returns.contrib.pytest.plugin import ReturnsAsserts as ReturnsAsserts returns-0.24.0/returns/contrib/pytest/plugin.py000066400000000000000000000156061472312074000216510ustar00rootroot00000000000000import inspect import sys from collections.abc import Callable, Iterator from contextlib import ExitStack, contextmanager from functools import partial, wraps from types import FrameType, MappingProxyType from typing import TYPE_CHECKING, Any, Final, TypeVar, Union, final from unittest import mock import pytest if TYPE_CHECKING: from returns.interfaces.specific.result import ResultLikeN # We keep track of errors handled by keeping a mapping of : object. # If an error is handled, it is in the mapping. # If it isn't in the mapping, the error is not handled. # # Note only storing object IDs would not work, as objects may be GC'ed # and their object id assigned to another object. # Also, the object itself cannot be (in) the key because # (1) we cannot always assume hashability and # (2) we need to track the object identity, not its value _ErrorsHandled = dict[int, Any] _FunctionType = TypeVar('_FunctionType', bound=Callable) _ReturnsResultType = TypeVar( '_ReturnsResultType', bound=Union['ResultLikeN', Callable[..., 'ResultLikeN']], ) @final class ReturnsAsserts: """Class with helpers assertions to check containers.""" __slots__ = ('_errors_handled', ) def __init__(self, errors_handled: _ErrorsHandled) -> None: """Constructor for this type.""" self._errors_handled = errors_handled @staticmethod # noqa: WPS602 def assert_equal( # noqa: WPS602 first, second, *, deps=None, backend: str = 'asyncio', ) -> None: """Can compare two containers even with extra calling and awaiting.""" from returns.primitives.asserts import assert_equal assert_equal(first, second, deps=deps, backend=backend) def is_error_handled(self, container) -> bool: """Ensures that container has its error handled in the end.""" return id(container) in self._errors_handled @staticmethod # noqa: WPS602 @contextmanager def assert_trace( # noqa: WPS602 trace_type: _ReturnsResultType, function_to_search: _FunctionType, ) -> Iterator[None]: """ Ensures that a given function was called during execution. Use it to determine where the failure happened. 
""" old_tracer = sys.gettrace() sys.settrace(partial(_trace_function, trace_type, function_to_search)) try: yield except _DesiredFunctionFound: pass # noqa: WPS420 else: pytest.fail( 'No container {0} was created'.format( trace_type.__class__.__name__, ), ) finally: sys.settrace(old_tracer) def _trace_function( trace_type: _ReturnsResultType, function_to_search: _FunctionType, frame: FrameType, event: str, arg: Any, ) -> None: is_desired_type_call = ( event == 'call' and ( # Some containers is created through functions and others # is created directly using class constructors! # The first line covers when it's created through a function # The second line covers when it's created through a # class constructor frame.f_code is getattr(trace_type, '__code__', None) or frame.f_code is getattr(trace_type.__init__, '__code__', None) # type: ignore[misc] # noqa: E501 ) ) if is_desired_type_call: current_call_stack = inspect.stack() function_to_search_code = getattr(function_to_search, '__code__', None) for frame_info in current_call_stack: if function_to_search_code is frame_info.frame.f_code: raise _DesiredFunctionFound() class _DesiredFunctionFound(BaseException): # noqa: WPS418 """Exception to raise when expected function is found.""" def pytest_configure(config) -> None: """ Hook to be executed on import. We use it define custom markers. """ config.addinivalue_line( 'markers', ( 'returns_lawful: all tests under `check_all_laws` ' + 'is marked this way, ' + 'use `-m "not returns_lawful"` to skip them.' ), ) @pytest.fixture def returns() -> Iterator[ReturnsAsserts]: """Returns class with helpers assertions to check containers.""" with _spy_error_handling() as errors_handled: yield ReturnsAsserts(errors_handled) @contextmanager def _spy_error_handling() -> Iterator[_ErrorsHandled]: """Track error handling of containers.""" errs: _ErrorsHandled = {} with ExitStack() as cleanup: for container in _containers_to_patch(): for method, patch in _ERROR_HANDLING_PATCHERS.items(): cleanup.enter_context(mock.patch.object( container, method, patch(getattr(container, method), errs=errs), )) yield errs # delayed imports are needed to prevent messing up coverage def _containers_to_patch() -> list: from returns.context import ( RequiresContextFutureResult, RequiresContextIOResult, RequiresContextResult, ) from returns.future import FutureResult from returns.io import IOFailure, IOSuccess from returns.result import Failure, Success return [ Success, Failure, IOSuccess, IOFailure, RequiresContextResult, RequiresContextIOResult, RequiresContextFutureResult, FutureResult, ] def _patched_error_handler( original: _FunctionType, errs: _ErrorsHandled, ) -> _FunctionType: if inspect.iscoroutinefunction(original): async def wrapper(self, *args, **kwargs): original_result = await original(self, *args, **kwargs) errs[id(original_result)] = original_result return original_result else: def wrapper(self, *args, **kwargs): original_result = original(self, *args, **kwargs) errs[id(original_result)] = original_result return original_result return wraps(original)(wrapper) # type: ignore def _patched_error_copier( original: _FunctionType, errs: _ErrorsHandled, ) -> _FunctionType: if inspect.iscoroutinefunction(original): async def wrapper(self, *args, **kwargs): original_result = await original(self, *args, **kwargs) if id(self) in errs: errs[id(original_result)] = original_result return original_result else: def wrapper(self, *args, **kwargs): original_result = original(self, *args, **kwargs) if id(self) in errs: 
errs[id(original_result)] = original_result return original_result return wraps(original)(wrapper) # type: ignore _ERROR_HANDLING_PATCHERS: Final = MappingProxyType({ 'lash': _patched_error_handler, 'map': _patched_error_copier, 'alt': _patched_error_copier, }) returns-0.24.0/returns/converters.py000066400000000000000000000061031472312074000175450ustar00rootroot00000000000000from typing import TypeVar, overload from returns.functions import identity from returns.interfaces.bindable import BindableN from returns.maybe import Maybe, Nothing, Some from returns.pipeline import is_successful from returns.primitives.hkt import KindN, kinded from returns.result import Failure, Result, Success _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _BindableKind = TypeVar('_BindableKind', bound=BindableN) @kinded def flatten( container: KindN[ _BindableKind, KindN[_BindableKind, _FirstType, _SecondType, _ThirdType], _SecondType, _ThirdType, ], ) -> KindN[_BindableKind, _FirstType, _SecondType, _ThirdType]: """ Joins two nested containers together. Please, note that it will not join two ``Failure`` for ``Result`` case or two ``Nothing`` for ``Maybe`` case (or basically any two error types) together. .. code:: python >>> from returns.converters import flatten >>> from returns.io import IO >>> from returns.result import Failure, Success >>> assert flatten(IO(IO(1))) == IO(1) >>> assert flatten(Success(Success(1))) == Success(1) >>> assert flatten(Failure(Failure(1))) == Failure(Failure(1)) See also: - https://bit.ly/2sIviUr """ return container.bind(identity) def result_to_maybe( result_container: Result[_FirstType, _SecondType], ) -> Maybe[_FirstType]: """ Converts ``Result`` container to ``Maybe`` container. .. code:: python >>> from returns.maybe import Some, Nothing >>> from returns.result import Failure, Success >>> assert result_to_maybe(Success(1)) == Some(1) >>> assert result_to_maybe(Success(None)) == Some(None) >>> assert result_to_maybe(Failure(1)) == Nothing >>> assert result_to_maybe(Failure(None)) == Nothing """ if is_successful(result_container): return Some(result_container.unwrap()) return Nothing @overload def maybe_to_result( maybe_container: Maybe[_FirstType], ) -> Result[_FirstType, None]: """No default case.""" @overload def maybe_to_result( maybe_container: Maybe[_FirstType], default_error: _SecondType, ) -> Result[_FirstType, _SecondType]: """Default value case.""" def maybe_to_result( maybe_container: Maybe[_FirstType], default_error: _SecondType | None = None, ) -> Result[_FirstType, _SecondType | None]: """ Converts ``Maybe`` container to ``Result`` container. With optional ``default_error`` to be used for ``Failure``'s error value. .. 
code:: python >>> from returns.maybe import Some, Nothing >>> from returns.result import Failure, Success >>> assert maybe_to_result(Some(1)) == Success(1) >>> assert maybe_to_result(Some(None)) == Success(None) >>> assert maybe_to_result(Nothing) == Failure(None) >>> assert maybe_to_result(Nothing, 'error') == Failure('error') """ if is_successful(maybe_container): return Success(maybe_container.unwrap()) return Failure(default_error) returns-0.24.0/returns/curry.py000066400000000000000000000145621472312074000165270ustar00rootroot00000000000000from collections.abc import Callable from functools import partial as _partial from functools import wraps from inspect import BoundArguments, Signature from typing import Any, TypeVar, Union _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ReturnType = TypeVar('_ReturnType') def partial( func: Callable[..., _ReturnType], *args: Any, **kwargs: Any, ) -> Callable[..., _ReturnType]: """ Typed partial application. It is just a ``functools.partial`` wrapper with better typing support. We use a custom ``mypy`` plugin to make sure types are correct. Otherwise, it is currently impossible to properly type this function. .. code:: python >>> from returns.curry import partial >>> def sum_two_numbers(first: int, second: int) -> int: ... return first + second >>> sum_with_ten = partial(sum_two_numbers, 10) >>> assert sum_with_ten(2) == 12 >>> assert sum_with_ten(-5) == 5 See also: - https://docs.python.org/3/library/functools.html#functools.partial """ return _partial(func, *args, **kwargs) def curry(function: Callable[..., _ReturnType]) -> Callable[..., _ReturnType]: """ Typed currying decorator. Currying is a conception from functional languages that does partial applying. That means that if we pass one argument in a function that gets 2 or more arguments, we'll get a new function that remembers all previously passed arguments. Then we can pass remaining arguments, and the function will be executed. :func:`~partial` function does a similar thing, but it does partial application exactly once. ``curry`` is a bit smarter and will do partial application until enough arguments passed. If wrong arguments are passed, ``TypeError`` will be raised immediately. We use a custom ``mypy`` plugin to make sure types are correct. Otherwise, it is currently impossible to properly type this function. .. code:: pycon >>> from returns.curry import curry >>> @curry ... def divide(number: int, by: int) -> float: ... return number / by >>> divide(1) # doesn't call the func and remembers arguments >>> assert divide(1)(by=10) == 0.1 # calls the func when possible >>> assert divide(1)(10) == 0.1 # calls the func when possible >>> assert divide(1, by=10) == 0.1 # or call the func like always Here are several examples with wrong arguments: .. code:: pycon >>> divide(1, 2, 3) Traceback (most recent call last): ... TypeError: too many positional arguments >>> divide(a=1) Traceback (most recent call last): ... TypeError: got an unexpected keyword argument 'a' Limitations: - It is kinda slow. Like 100 times slower than a regular function call. 
- It does not work with several builtins like ``str``, ``int``, and possibly other ``C`` defined callables - ``*args`` and ``**kwargs`` are not supported and we use ``Any`` as a fallback - Support of arguments with default values is very limited, because we cannot be totally sure which case we are using: with the default value or without it, be careful - We use a custom ``mypy`` plugin to make types correct, otherwise, it is currently impossible - It might not work as expected with curried ``Klass().method``, it might generate invalid method signature (looks like a bug in ``mypy``) - It is probably a bad idea to ``curry`` a function with lots of arguments, because you will end up with lots of overload functions, that you won't be able to understand. It might also be slow during the typecheck - Currying of ``__init__`` does not work because of the bug in ``mypy``: https://github.com/python/mypy/issues/8801 We expect people to use this tool responsibly when they know that they are doing. See also: - https://en.wikipedia.org/wiki/Currying - https://stackoverflow.com/questions/218025/ """ argspec = Signature.from_callable(function).bind_partial() def decorator(*args, **kwargs): return _eager_curry(function, argspec, args, kwargs) return wraps(function)(decorator) def _eager_curry( function: Callable[..., _ReturnType], argspec, args: tuple, kwargs: dict, ) -> _ReturnType | Callable[..., _ReturnType]: """ Internal ``curry`` implementation. The interesting part about it is that it return the result or a new callable that will return a result at some point. """ intermediate, full_args = _intermediate_argspec(argspec, args, kwargs) if full_args is not None: return function(*full_args[0], **full_args[1]) # We use closures to avoid names conflict between # the function args and args of the curry implementation. def decorator(*inner_args, **inner_kwargs): return _eager_curry(function, intermediate, inner_args, inner_kwargs) return wraps(function)(decorator) _ArgSpec = Union[ # Case when all arguments are bound and function can be called: tuple[None, tuple[tuple, dict]], # Case when there are still unbound arguments: tuple[BoundArguments, None], ] def _intermediate_argspec( argspec: BoundArguments, args: tuple, kwargs: dict, ) -> _ArgSpec: """ That's where ``curry`` magic happens. We use ``Signature`` objects from ``inspect`` to bind existing arguments. If there's a ``TypeError`` while we ``bind`` the arguments we try again. The second time we try to ``bind_partial`` arguments. It can fail too! It fails when there are invalid arguments or more arguments than we can fit in a function. This function is slow. Any optimization ideas are welcome! """ full_args = argspec.args + args full_kwargs = {**argspec.kwargs, **kwargs} try: argspec.signature.bind(*full_args, **full_kwargs) except TypeError: # Another option is to copy-paste and patch `getcallargs` func # but in this case we get responsibility to maintain it over # python releases. # This place is also responsible for raising ``TypeError`` for cases: # 1. When incorrect argument is provided # 2. 
When too many arguments are provided return argspec.signature.bind_partial(*full_args, **full_kwargs), None return None, (full_args, full_kwargs) returns-0.24.0/returns/functions.py000066400000000000000000000076431472312074000173750ustar00rootroot00000000000000from collections.abc import Callable from functools import wraps from typing import Any, TypeVar from typing_extensions import Never, ParamSpec _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _FuncParams = ParamSpec('_FuncParams') def identity(instance: _FirstType) -> _FirstType: """ Function that returns its argument. .. code:: python >>> assert identity(1) == 1 >>> assert identity([1, 2, 3]) == [1, 2, 3] This function is really helpful for some composition. It is also useful for "do nothing" use-case. See also: - https://en.wikipedia.org/wiki/Identity_function - https://stackoverflow.com/a/21506571/4842742 """ return instance def compose( first: Callable[[_FirstType], _SecondType], second: Callable[[_SecondType], _ThirdType], ) -> Callable[[_FirstType], _ThirdType]: """ Allows function composition. Works as: ``second . first`` or ``first() |> second()``. You can read it as "second after first". .. code:: python >>> assert compose(float, int)('123.5') == 123 We can only compose functions with one argument and one return. Type checked. """ return lambda argument: second(first(argument)) def tap( function: Callable[[_FirstType], Any], ) -> Callable[[_FirstType], _FirstType]: """ Allows to apply some function and return an argument, instead of a result. Is useful for composing functions with side-effects like ``print()``, ``logger.log()``, etc. .. code:: python >>> assert tap(print)(1) == 1 1 >>> assert tap(lambda _: 1)(2) == 2 See also: - https://github.com/dry-python/returns/issues/145 """ def decorator(argument_to_return: _FirstType) -> _FirstType: function(argument_to_return) return argument_to_return return decorator def untap( function: Callable[[_FirstType], Any], ) -> Callable[[_FirstType], None]: """ Allows to apply some function and always return ``None`` as a result. Is useful for composing functions that do some side effects and return some nosense. Is the kind of a reverse of the ``tap`` function. .. code:: python >>> def strange_log(arg: int) -> int: ... print(arg) ... return arg >>> assert untap(strange_log)(2) is None 2 >>> assert untap(tap(lambda _: 1))(2) is None See also: - https://github.com/dry-python/returns/issues/145 """ def decorator(argument_to_return: _FirstType) -> None: function(argument_to_return) return decorator def raise_exception(exception: Exception) -> Never: """ Helper function to raise exceptions as a function. It might be required as a compatibility tool for existing APIs. That's how it can be used: .. code:: pycon >>> from returns.result import Failure, Result >>> # Some operation result: >>> user: Result[int, ValueError] = Failure(ValueError('boom')) >>> # Here we unwrap internal exception and raise it: >>> user.alt(raise_exception) Traceback (most recent call last): ... ValueError: boom See also: - https://github.com/dry-python/returns/issues/56 """ raise exception def not_(function: Callable[_FuncParams, bool]) -> Callable[_FuncParams, bool]: """ Denies the function returns. .. code:: python >>> from returns.result import Result, Success, Failure >>> def is_successful(result_container: Result[float, int]) -> bool: ... 
return isinstance(result_container, Success) >>> assert not_(is_successful)(Success(1.0)) is False >>> assert not_(is_successful)(Failure(1)) is True """ @wraps(function) def decorator( *args: _FuncParams.args, **kwargs: _FuncParams.kwargs, ) -> bool: return not function(*args, **kwargs) return decorator returns-0.24.0/returns/future.py000066400000000000000000001355561472312074000167040ustar00rootroot00000000000000from collections.abc import ( AsyncGenerator, AsyncIterator, Awaitable, Callable, Coroutine, Generator, ) from functools import wraps from typing import Any, TypeAlias, TypeVar, final, overload from typing_extensions import ParamSpec from returns._internal.futures import _future, _future_result from returns.interfaces.specific.future import FutureBased1 from returns.interfaces.specific.future_result import FutureResultBased2 from returns.io import IO, IOResult from returns.primitives.container import BaseContainer from returns.primitives.exceptions import UnwrapFailedError from returns.primitives.hkt import ( Kind1, Kind2, SupportsKind1, SupportsKind2, dekind, ) from returns.primitives.reawaitable import ReAwaitable from returns.result import Failure, Result, Success # Definitions: _ValueType = TypeVar('_ValueType', covariant=True) _NewValueType = TypeVar('_NewValueType') _ErrorType = TypeVar('_ErrorType', covariant=True) _NewErrorType = TypeVar('_NewErrorType') _FuncParams = ParamSpec('_FuncParams') # Aliases: _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') # Public composition helpers: async def async_identity(instance: _FirstType) -> _FirstType: """ Async function that returns its argument. .. code:: python >>> import anyio >>> from returns.future import async_identity >>> assert anyio.run(async_identity, 1) == 1 See :func:`returns.functions.identity` for sync version of this function and more docs and examples. """ return instance # Future # ====== @final class Future( # type: ignore[type-var] BaseContainer, SupportsKind1['Future', _ValueType], FutureBased1[_ValueType], ): """ Container to easily compose ``async`` functions. Represents a better abstraction over a simple coroutine. Is framework, event-loop, and IO-library agnostics. Works with ``asyncio``, ``curio``, ``trio``, or any other tool. Internally we use ``anyio`` to test that it works as expected for any io stack. Note that ``Future[a]`` represents a computation that never fails and returns ``IO[a]`` type. Use ``FutureResult[a, b]`` for operations that might fail. Like DB access or network operations. Is not related to ``asyncio.Future`` in any kind. .. rubric:: Tradeoffs Due to possible performance issues we move all coroutines definitions to a separate module. See also: - https://gcanti.github.io/fp-ts/modules/Task.ts.html - https://zio.dev/docs/overview/overview_basic_concurrency """ __slots__ = () _inner_value: Awaitable[_ValueType] def __init__(self, inner_value: Awaitable[_ValueType]) -> None: """ Public constructor for this type. Also required for typing. .. code:: python >>> import anyio >>> from returns.future import Future >>> from returns.io import IO >>> async def coro(arg: int) -> int: ... return arg + 1 >>> container = Future(coro(1)) >>> assert anyio.run(container.awaitable) == IO(2) """ super().__init__(ReAwaitable(inner_value)) def __await__(self) -> Generator[None, None, IO[_ValueType]]: """ By defining this magic method we make ``Future`` awaitable. This means you can use ``await`` keyword to evaluate this container: .. 
code:: python >>> import anyio >>> from returns.future import Future >>> from returns.io import IO >>> async def main() -> IO[int]: ... return await Future.from_value(1) >>> assert anyio.run(main) == IO(1) When awaited we returned the value wrapped in :class:`returns.io.IO` container to indicate that the computation was impure. See also: - https://docs.python.org/3/library/asyncio-task.html#awaitables - https://bit.ly/2SfayNc """ return self.awaitable().__await__() # noqa: WPS609 async def awaitable(self) -> IO[_ValueType]: """ Transforms ``Future[a]`` to ``Awaitable[IO[a]]``. Use this method when you need a real coroutine. Like for ``asyncio.run`` calls. Note, that returned value will be wrapped in :class:`returns.io.IO` container. .. code:: python >>> import anyio >>> from returns.future import Future >>> from returns.io import IO >>> assert anyio.run(Future.from_value(1).awaitable) == IO(1) """ return IO(await self._inner_value) def map( self, function: Callable[[_ValueType], _NewValueType], ) -> 'Future[_NewValueType]': """ Applies function to the inner value. Applies 'function' to the contents of the IO instance and returns a new ``Future`` object containing the result. 'function' should accept a single "normal" (non-container) argument and return a non-container result. .. code:: python >>> import anyio >>> from returns.future import Future >>> from returns.io import IO >>> def mappable(x: int) -> int: ... return x + 1 >>> assert anyio.run( ... Future.from_value(1).map(mappable).awaitable, ... ) == IO(2) """ return Future(_future.async_map(function, self._inner_value)) def apply( self, container: Kind1['Future', Callable[[_ValueType], _NewValueType]], ) -> 'Future[_NewValueType]': """ Calls a wrapped function in a container on this container. .. code:: python >>> import anyio >>> from returns.future import Future >>> def transform(arg: int) -> str: ... return str(arg) + 'b' >>> assert anyio.run( ... Future.from_value(1).apply( ... Future.from_value(transform), ... ).awaitable, ... ) == IO('1b') """ return Future(_future.async_apply(dekind(container), self._inner_value)) def bind( self, function: Callable[[_ValueType], Kind1['Future', _NewValueType]], ) -> 'Future[_NewValueType]': """ Applies 'function' to the result of a previous calculation. 'function' should accept a single "normal" (non-container) argument and return ``Future`` type object. .. code:: python >>> import anyio >>> from returns.future import Future >>> from returns.io import IO >>> def bindable(x: int) -> Future[int]: ... return Future.from_value(x + 1) >>> assert anyio.run( ... Future.from_value(1).bind(bindable).awaitable, ... ) == IO(2) """ return Future(_future.async_bind(function, self._inner_value)) #: Alias for `bind` method. Part of the `FutureBasedN` interface. bind_future = bind def bind_async( self, function: Callable[ [_ValueType], Awaitable[Kind1['Future', _NewValueType]], ], ) -> 'Future[_NewValueType]': """ Compose a container and ``async`` function returning a container. This function should return a container value. See :meth:`~Future.bind_awaitable` to bind ``async`` function that returns a plain value. .. code:: python >>> import anyio >>> from returns.future import Future >>> from returns.io import IO >>> async def coroutine(x: int) -> Future[str]: ... return Future.from_value(str(x + 1)) >>> assert anyio.run( ... Future.from_value(1).bind_async(coroutine).awaitable, ... ) == IO('2') """ return Future(_future.async_bind_async(function, self._inner_value)) #: Alias for `bind_async` method. 
Part of the `FutureBasedN` interface. bind_async_future = bind_async def bind_awaitable( self, function: Callable[[_ValueType], 'Awaitable[_NewValueType]'], ) -> 'Future[_NewValueType]': """ Allows to compose a container and a regular ``async`` function. This function should return plain, non-container value. See :meth:`~Future.bind_async` to bind ``async`` function that returns a container. .. code:: python >>> import anyio >>> from returns.future import Future >>> from returns.io import IO >>> async def coroutine(x: int) -> int: ... return x + 1 >>> assert anyio.run( ... Future.from_value(1).bind_awaitable(coroutine).awaitable, ... ) == IO(2) """ return Future(_future.async_bind_awaitable( function, self._inner_value, )) def bind_io( self, function: Callable[[_ValueType], IO[_NewValueType]], ) -> 'Future[_NewValueType]': """ Applies 'function' to the result of a previous calculation. 'function' should accept a single "normal" (non-container) argument and return ``IO`` type object. .. code:: python >>> import anyio >>> from returns.future import Future >>> from returns.io import IO >>> def bindable(x: int) -> IO[int]: ... return IO(x + 1) >>> assert anyio.run( ... Future.from_value(1).bind_io(bindable).awaitable, ... ) == IO(2) """ return Future(_future.async_bind_io(function, self._inner_value)) def __aiter__(self) -> AsyncIterator[_ValueType]: # noqa: WPS611 """API for :ref:`do-notation`.""" async def factory() -> AsyncGenerator[_ValueType, None]: yield await self._inner_value return factory() @classmethod def do( cls, expr: AsyncGenerator[_NewValueType, None], ) -> 'Future[_NewValueType]': """ Allows working with unwrapped values of containers in a safe way. .. code:: python >>> import anyio >>> from returns.future import Future >>> from returns.io import IO >>> async def main() -> bool: ... return await Future.do( ... first + second ... async for first in Future.from_value(2) ... async for second in Future.from_value(3) ... ) == IO(5) >>> assert anyio.run(main) is True See :ref:`do-notation` to learn more. """ async def factory() -> _NewValueType: return await expr.__anext__() # noqa: WPS609 return Future(factory()) @classmethod def from_value(cls, inner_value: _NewValueType) -> 'Future[_NewValueType]': """ Allows to create a ``Future`` from a plain value. The resulting ``Future`` will just return the given value wrapped in :class:`returns.io.IO` container when awaited. .. code:: python >>> import anyio >>> from returns.future import Future >>> from returns.io import IO >>> async def main() -> bool: ... return (await Future.from_value(1)) == IO(1) >>> assert anyio.run(main) is True """ return Future(async_identity(inner_value)) @classmethod def from_future( cls, inner_value: 'Future[_NewValueType]', ) -> 'Future[_NewValueType]': """ Creates a new ``Future`` from the existing one. .. code:: python >>> import anyio >>> from returns.future import Future >>> from returns.io import IO >>> future = Future.from_value(1) >>> assert anyio.run(Future.from_future(future).awaitable) == IO(1) Part of the ``FutureBasedN`` interface. """ return inner_value @classmethod def from_io(cls, inner_value: IO[_NewValueType]) -> 'Future[_NewValueType]': """ Allows to create a ``Future`` from ``IO`` container. .. code:: python >>> import anyio >>> from returns.future import Future >>> from returns.io import IO >>> async def main() -> bool: ... 
return (await Future.from_io(IO(1))) == IO(1) >>> assert anyio.run(main) is True """ return Future(async_identity(inner_value._inner_value)) @classmethod def from_future_result( cls, inner_value: 'FutureResult[_NewValueType, _NewErrorType]', ) -> 'Future[Result[_NewValueType, _NewErrorType]]': """ Creates ``Future[Result[a, b]]`` instance from ``FutureResult[a, b]``. This method is the inverse of :meth:`~FutureResult.from_typecast`. .. code:: python >>> import anyio >>> from returns.future import Future, FutureResult >>> from returns.io import IO >>> from returns.result import Success >>> container = Future.from_future_result(FutureResult.from_value(1)) >>> assert anyio.run(container.awaitable) == IO(Success(1)) """ return Future(inner_value._inner_value) # Decorators: def future( function: Callable[ _FuncParams, Coroutine[_FirstType, _SecondType, _ValueType], ], ) -> Callable[_FuncParams, Future[_ValueType]]: """ Decorator to turn a coroutine definition into ``Future`` container. .. code:: python >>> import anyio >>> from returns.io import IO >>> from returns.future import future >>> @future ... async def test(x: int) -> int: ... return x + 1 >>> assert anyio.run(test(1).awaitable) == IO(2) """ @wraps(function) def decorator( *args: _FuncParams.args, **kwargs: _FuncParams.kwargs, ) -> Future[_ValueType]: return Future(function(*args, **kwargs)) return decorator def asyncify( function: Callable[_FuncParams, _ValueType], ) -> Callable[_FuncParams, Coroutine[Any, Any, _ValueType]]: """ Decorator to turn a common function into an asynchronous function. This decorator is useful for composition with ``Future`` and ``FutureResult`` containers. .. warning:: This function will not your sync function **run** like async one. It will still be a blocking function that looks like async one. We recommend to only use this decorator with functions that do not access network or filesystem. It is only a composition helper, not a transformer. Usage example: .. code:: python >>> import anyio >>> from returns.future import asyncify >>> @asyncify ... def test(x: int) -> int: ... return x + 1 >>> assert anyio.run(test, 1) == 2 Read more about async and sync functions: https://journal.stuffwithstuff.com/2015/02/01/what-color-is-your-function/ """ @wraps(function) async def decorator( *args: _FuncParams.args, **kwargs: _FuncParams.kwargs, ) -> _ValueType: return function(*args, **kwargs) return decorator # FutureResult # ============ @final class FutureResult( # type: ignore[type-var] BaseContainer, SupportsKind2['FutureResult', _ValueType, _ErrorType], FutureResultBased2[_ValueType, _ErrorType], ): """ Container to easily compose ``async`` functions. Represents a better abstraction over a simple coroutine. Is framework, event-loop, and IO-library agnostics. Works with ``asyncio``, ``curio``, ``trio``, or any other tool. Internally we use ``anyio`` to test that it works as expected for any io stack. Note that ``FutureResult[a, b]`` represents a computation that can fail and returns ``IOResult[a, b]`` type. Use ``Future[a]`` for operations that cannot fail. This is a ``Future`` that returns ``Result`` type. By providing this utility type we make developers' lives easier. ``FutureResult`` has a lot of composition helpers to turn complex nested operations into a one function calls. .. rubric:: Tradeoffs Due to possible performance issues we move all coroutines definitions to a separate module. 
See also: - https://gcanti.github.io/fp-ts/modules/TaskEither.ts.html - https://zio.dev/docs/overview/overview_basic_concurrency """ __slots__ = () _inner_value: Awaitable[Result[_ValueType, _ErrorType]] def __init__( self, inner_value: Awaitable[Result[_ValueType, _ErrorType]], ) -> None: """ Public constructor for this type. Also required for typing. .. code:: python >>> import anyio >>> from returns.future import FutureResult >>> from returns.io import IOSuccess >>> from returns.result import Success, Result >>> async def coro(arg: int) -> Result[int, str]: ... return Success(arg + 1) >>> container = FutureResult(coro(1)) >>> assert anyio.run(container.awaitable) == IOSuccess(2) """ super().__init__(ReAwaitable(inner_value)) def __await__(self) -> Generator[ None, None, IOResult[_ValueType, _ErrorType], ]: """ By defining this magic method we make ``FutureResult`` awaitable. This means you can use ``await`` keyword to evaluate this container: .. code:: python >>> import anyio >>> from returns.future import FutureResult >>> from returns.io import IOSuccess, IOResult >>> async def main() -> IOResult[int, str]: ... return await FutureResult.from_value(1) >>> assert anyio.run(main) == IOSuccess(1) When awaited we returned the value wrapped in :class:`returns.io.IOResult` container to indicate that the computation was impure and can fail. See also: - https://docs.python.org/3/library/asyncio-task.html#awaitables - https://bit.ly/2SfayNc """ return self.awaitable().__await__() # noqa: WPS609 async def awaitable(self) -> IOResult[_ValueType, _ErrorType]: """ Transforms ``FutureResult[a, b]`` to ``Awaitable[IOResult[a, b]]``. Use this method when you need a real coroutine. Like for ``asyncio.run`` calls. Note, that returned value will be wrapped in :class:`returns.io.IOResult` container. .. code:: python >>> import anyio >>> from returns.future import FutureResult >>> from returns.io import IOSuccess >>> assert anyio.run( ... FutureResult.from_value(1).awaitable, ... ) == IOSuccess(1) """ return IOResult.from_result(await self._inner_value) def swap(self) -> 'FutureResult[_ErrorType, _ValueType]': """ Swaps value and error types. So, values become errors and errors become values. It is useful when you have to work with errors a lot. And since we have a lot of ``.bind_`` related methods and only a single ``.lash``. It is easier to work with values than with errors. .. code:: python >>> import anyio >>> from returns.future import FutureSuccess, FutureFailure >>> from returns.io import IOSuccess, IOFailure >>> assert anyio.run(FutureSuccess(1).swap) == IOFailure(1) >>> assert anyio.run(FutureFailure(1).swap) == IOSuccess(1) """ return FutureResult(_future_result.async_swap(self._inner_value)) def map( self, function: Callable[[_ValueType], _NewValueType], ) -> 'FutureResult[_NewValueType, _ErrorType]': """ Applies function to the inner value. Applies 'function' to the contents of the IO instance and returns a new ``FutureResult`` object containing the result. 'function' should accept a single "normal" (non-container) argument and return a non-container result. .. code:: python >>> import anyio >>> from returns.future import FutureResult >>> from returns.io import IOSuccess, IOFailure >>> def mappable(x: int) -> int: ... return x + 1 >>> assert anyio.run( ... FutureResult.from_value(1).map(mappable).awaitable, ... ) == IOSuccess(2) >>> assert anyio.run( ... FutureResult.from_failure(1).map(mappable).awaitable, ... 
) == IOFailure(1) """ return FutureResult(_future_result.async_map( function, self._inner_value, )) def apply( self, container: Kind2[ 'FutureResult', Callable[[_ValueType], _NewValueType], _ErrorType, ], ) -> 'FutureResult[_NewValueType, _ErrorType]': """ Calls a wrapped function in a container on this container. .. code:: python >>> import anyio >>> from returns.future import FutureResult >>> from returns.io import IOSuccess, IOFailure >>> def appliable(x: int) -> int: ... return x + 1 >>> assert anyio.run( ... FutureResult.from_value(1).apply( ... FutureResult.from_value(appliable), ... ).awaitable, ... ) == IOSuccess(2) >>> assert anyio.run( ... FutureResult.from_failure(1).apply( ... FutureResult.from_value(appliable), ... ).awaitable, ... ) == IOFailure(1) >>> assert anyio.run( ... FutureResult.from_value(1).apply( ... FutureResult.from_failure(2), ... ).awaitable, ... ) == IOFailure(2) >>> assert anyio.run( ... FutureResult.from_failure(1).apply( ... FutureResult.from_failure(2), ... ).awaitable, ... ) == IOFailure(1) """ return FutureResult(_future_result.async_apply( dekind(container), self._inner_value, )) def bind( self, function: Callable[ [_ValueType], Kind2['FutureResult', _NewValueType, _ErrorType], ], ) -> 'FutureResult[_NewValueType, _ErrorType]': """ Applies 'function' to the result of a previous calculation. 'function' should accept a single "normal" (non-container) argument and return ``Future`` type object. .. code:: python >>> import anyio >>> from returns.future import FutureResult >>> from returns.io import IOSuccess, IOFailure >>> def bindable(x: int) -> FutureResult[int, str]: ... return FutureResult.from_value(x + 1) >>> assert anyio.run( ... FutureResult.from_value(1).bind(bindable).awaitable, ... ) == IOSuccess(2) >>> assert anyio.run( ... FutureResult.from_failure(1).bind(bindable).awaitable, ... ) == IOFailure(1) """ return FutureResult(_future_result.async_bind( function, self._inner_value, )) #: Alias for `bind` method. #: Part of the `FutureResultBasedN` interface. bind_future_result = bind def bind_async( self, function: Callable[ [_ValueType], Awaitable[Kind2['FutureResult', _NewValueType, _ErrorType]], ], ) -> 'FutureResult[_NewValueType, _ErrorType]': """ Composes a container and ``async`` function returning container. This function should return a container value. See :meth:`~FutureResult.bind_awaitable` to bind ``async`` function that returns a plain value. .. code:: python >>> import anyio >>> from returns.future import FutureResult >>> from returns.io import IOSuccess, IOFailure >>> async def coroutine(x: int) -> FutureResult[str, int]: ... return FutureResult.from_value(str(x + 1)) >>> assert anyio.run( ... FutureResult.from_value(1).bind_async(coroutine).awaitable, ... ) == IOSuccess('2') >>> assert anyio.run( ... FutureResult.from_failure(1).bind_async(coroutine).awaitable, ... ) == IOFailure(1) """ return FutureResult(_future_result.async_bind_async( function, self._inner_value, )) #: Alias for `bind_async` method. #: Part of the `FutureResultBasedN` interface. bind_async_future_result = bind_async def bind_awaitable( self, function: Callable[[_ValueType], Awaitable[_NewValueType]], ) -> 'FutureResult[_NewValueType, _ErrorType]': """ Allows to compose a container and a regular ``async`` function. This function should return plain, non-container value. See :meth:`~FutureResult.bind_async` to bind ``async`` function that returns a container. .. 
code:: python >>> import anyio >>> from returns.future import FutureResult >>> from returns.io import IOSuccess, IOFailure >>> async def coro(x: int) -> int: ... return x + 1 >>> assert anyio.run( ... FutureResult.from_value(1).bind_awaitable(coro).awaitable, ... ) == IOSuccess(2) >>> assert anyio.run( ... FutureResult.from_failure(1).bind_awaitable(coro).awaitable, ... ) == IOFailure(1) """ return FutureResult(_future_result.async_bind_awaitable( function, self._inner_value, )) def bind_result( self, function: Callable[[_ValueType], Result[_NewValueType, _ErrorType]], ) -> 'FutureResult[_NewValueType, _ErrorType]': """ Binds a function returning ``Result[a, b]`` container. .. code:: python >>> import anyio >>> from returns.io import IOSuccess, IOFailure >>> from returns.result import Result, Success >>> from returns.future import FutureResult >>> def bind(inner_value: int) -> Result[int, str]: ... return Success(inner_value + 1) >>> assert anyio.run( ... FutureResult.from_value(1).bind_result(bind).awaitable, ... ) == IOSuccess(2) >>> assert anyio.run( ... FutureResult.from_failure('a').bind_result(bind).awaitable, ... ) == IOFailure('a') """ return FutureResult(_future_result.async_bind_result( function, self._inner_value, )) def bind_ioresult( self, function: Callable[[_ValueType], IOResult[_NewValueType, _ErrorType]], ) -> 'FutureResult[_NewValueType, _ErrorType]': """ Binds a function returning ``IOResult[a, b]`` container. .. code:: python >>> import anyio >>> from returns.io import IOResult, IOSuccess, IOFailure >>> from returns.future import FutureResult >>> def bind(inner_value: int) -> IOResult[int, str]: ... return IOSuccess(inner_value + 1) >>> assert anyio.run( ... FutureResult.from_value(1).bind_ioresult(bind).awaitable, ... ) == IOSuccess(2) >>> assert anyio.run( ... FutureResult.from_failure('a').bind_ioresult(bind).awaitable, ... ) == IOFailure('a') """ return FutureResult(_future_result.async_bind_ioresult( function, self._inner_value, )) def bind_io( self, function: Callable[[_ValueType], IO[_NewValueType]], ) -> 'FutureResult[_NewValueType, _ErrorType]': """ Binds a function returning ``IO[a]`` container. .. code:: python >>> import anyio >>> from returns.io import IO, IOSuccess, IOFailure >>> from returns.future import FutureResult >>> def bind(inner_value: int) -> IO[float]: ... return IO(inner_value + 0.5) >>> assert anyio.run( ... FutureResult.from_value(1).bind_io(bind).awaitable, ... ) == IOSuccess(1.5) >>> assert anyio.run( ... FutureResult.from_failure(1).bind_io(bind).awaitable, ... ) == IOFailure(1) """ return FutureResult(_future_result.async_bind_io( function, self._inner_value, )) def bind_future( self, function: Callable[[_ValueType], Future[_NewValueType]], ) -> 'FutureResult[_NewValueType, _ErrorType]': """ Binds a function returning ``Future[a]`` container. .. code:: python >>> import anyio >>> from returns.io import IOSuccess, IOFailure >>> from returns.future import Future, FutureResult >>> def bind(inner_value: int) -> Future[float]: ... return Future.from_value(inner_value + 0.5) >>> assert anyio.run( ... FutureResult.from_value(1).bind_future(bind).awaitable, ... ) == IOSuccess(1.5) >>> assert anyio.run( ... FutureResult.from_failure(1).bind_future(bind).awaitable, ... 
) == IOFailure(1) """ return FutureResult(_future_result.async_bind_future( function, self._inner_value, )) def bind_async_future( self, function: Callable[[_ValueType], Awaitable['Future[_NewValueType]']], ) -> 'FutureResult[_NewValueType, _ErrorType]': """ Composes a container and ``async`` function returning ``Future``. Similar to :meth:`~FutureResult.bind_future` but works with async functions. .. code:: python >>> import anyio >>> from returns.future import Future, FutureResult >>> from returns.io import IOSuccess, IOFailure >>> async def coroutine(x: int) -> Future[str]: ... return Future.from_value(str(x + 1)) >>> assert anyio.run( ... FutureResult.from_value(1).bind_async_future, ... coroutine, ... ) == IOSuccess('2') >>> assert anyio.run( ... FutureResult.from_failure(1).bind_async, ... coroutine, ... ) == IOFailure(1) """ return FutureResult(_future_result.async_bind_async_future( function, self._inner_value, )) def alt( self, function: Callable[[_ErrorType], _NewErrorType], ) -> 'FutureResult[_ValueType, _NewErrorType]': """ Composes failed container with a pure function to modify failure. .. code:: python >>> import anyio >>> from returns.future import FutureResult >>> from returns.io import IOSuccess, IOFailure >>> def altable(arg: int) -> int: ... return arg + 1 >>> assert anyio.run( ... FutureResult.from_value(1).alt(altable).awaitable, ... ) == IOSuccess(1) >>> assert anyio.run( ... FutureResult.from_failure(1).alt(altable).awaitable, ... ) == IOFailure(2) """ return FutureResult(_future_result.async_alt( function, self._inner_value, )) def lash( self, function: Callable[ [_ErrorType], Kind2['FutureResult', _ValueType, _NewErrorType], ], ) -> 'FutureResult[_ValueType, _NewErrorType]': """ Composes failed container with a function that returns a container. .. code:: python >>> import anyio >>> from returns.future import FutureResult >>> from returns.io import IOSuccess >>> def lashable(x: int) -> FutureResult[int, str]: ... return FutureResult.from_value(x + 1) >>> assert anyio.run( ... FutureResult.from_value(1).lash(lashable).awaitable, ... ) == IOSuccess(1) >>> assert anyio.run( ... FutureResult.from_failure(1).lash(lashable).awaitable, ... ) == IOSuccess(2) """ return FutureResult(_future_result.async_lash( function, self._inner_value, )) def compose_result( self, function: Callable[ [Result[_ValueType, _ErrorType]], Kind2['FutureResult', _NewValueType, _ErrorType], ], ) -> 'FutureResult[_NewValueType, _ErrorType]': """ Composes inner ``Result`` with ``FutureResult`` returning function. Can be useful when you need an access to both states of the result. .. code:: python >>> import anyio >>> from returns.future import FutureResult >>> from returns.io import IOSuccess, IOFailure >>> from returns.result import Result >>> def count(container: Result[int, int]) -> FutureResult[int, int]: ... return FutureResult.from_result( ... container.map(lambda x: x + 1).alt(abs), ... ) >>> assert anyio.run( ... FutureResult.from_value(1).compose_result, count, ... ) == IOSuccess(2) >>> assert anyio.run( ... FutureResult.from_failure(-1).compose_result, count, ... 
) == IOFailure(1) """ return FutureResult(_future_result.async_compose_result( function, self._inner_value, )) def __aiter__(self) -> AsyncIterator[_ValueType]: # noqa: WPS611 """API for :ref:`do-notation`.""" async def factory() -> AsyncGenerator[_ValueType, None]: for inner_value in (await self._inner_value): yield inner_value # will only yield once return factory() @classmethod def do( cls, expr: AsyncGenerator[_NewValueType, None], ) -> 'FutureResult[_NewValueType, _NewErrorType]': """ Allows working with unwrapped values of containers in a safe way. .. code:: python >>> import anyio >>> from returns.future import FutureResult >>> from returns.io import IOSuccess, IOFailure >>> async def success() -> bool: ... return await FutureResult.do( ... first + second ... async for first in FutureResult.from_value(2) ... async for second in FutureResult.from_value(3) ... ) == IOSuccess(5) >>> assert anyio.run(success) is True >>> async def failure() -> bool: ... return await FutureResult.do( ... first + second ... async for first in FutureResult.from_value(2) ... async for second in FutureResult.from_failure(3) ... ) == IOFailure(3) >>> assert anyio.run(failure) is True See :ref:`do-notation` to learn more. """ async def factory() -> Result[_NewValueType, _NewErrorType]: try: return Success(await expr.__anext__()) # noqa: WPS609 except UnwrapFailedError as exc: return exc.halted_container # type: ignore return FutureResult(factory()) @classmethod def from_typecast( cls, inner_value: Future[Result[_NewValueType, _NewErrorType]], ) -> 'FutureResult[_NewValueType, _NewErrorType]': """ Creates ``FutureResult[a, b]`` from ``Future[Result[a, b]]``. .. code:: python >>> import anyio >>> from returns.io import IOSuccess, IOFailure >>> from returns.result import Success, Failure >>> from returns.future import Future, FutureResult >>> async def main(): ... assert await FutureResult.from_typecast( ... Future.from_value(Success(1)), ... ) == IOSuccess(1) ... assert await FutureResult.from_typecast( ... Future.from_value(Failure(1)), ... ) == IOFailure(1) >>> anyio.run(main) """ return FutureResult(inner_value._inner_value) @classmethod def from_future( cls, inner_value: Future[_NewValueType], ) -> 'FutureResult[_NewValueType, Any]': """ Creates ``FutureResult`` from successful ``Future`` value. .. code:: python >>> import anyio >>> from returns.io import IOSuccess >>> from returns.future import Future, FutureResult >>> async def main(): ... assert await FutureResult.from_future( ... Future.from_value(1), ... ) == IOSuccess(1) >>> anyio.run(main) """ return FutureResult(_future_result.async_from_success(inner_value)) @classmethod def from_failed_future( cls, inner_value: Future[_NewErrorType], ) -> 'FutureResult[Any, _NewErrorType]': """ Creates ``FutureResult`` from failed ``Future`` value. .. code:: python >>> import anyio >>> from returns.io import IOFailure >>> from returns.future import Future, FutureResult >>> async def main(): ... assert await FutureResult.from_failed_future( ... Future.from_value(1), ... ) == IOFailure(1) >>> anyio.run(main) """ return FutureResult(_future_result.async_from_failure(inner_value)) @classmethod def from_future_result( cls, inner_value: 'FutureResult[_NewValueType, _NewErrorType]', ) -> 'FutureResult[_NewValueType, _NewErrorType]': """ Creates new ``FutureResult`` from existing one. .. code:: python >>> import anyio >>> from returns.io import IOSuccess >>> from returns.future import FutureResult >>> async def main(): ... assert await FutureResult.from_future_result( ... 
FutureResult.from_value(1), ... ) == IOSuccess(1) >>> anyio.run(main) Part of the ``FutureResultLikeN`` interface. """ return inner_value @classmethod def from_io( cls, inner_value: IO[_NewValueType], ) -> 'FutureResult[_NewValueType, Any]': """ Creates ``FutureResult`` from successful ``IO`` value. .. code:: python >>> import anyio >>> from returns.io import IO, IOSuccess >>> from returns.future import FutureResult >>> async def main(): ... assert await FutureResult.from_io( ... IO(1), ... ) == IOSuccess(1) >>> anyio.run(main) """ return FutureResult.from_value(inner_value._inner_value) @classmethod def from_failed_io( cls, inner_value: IO[_NewErrorType], ) -> 'FutureResult[Any, _NewErrorType]': """ Creates ``FutureResult`` from failed ``IO`` value. .. code:: python >>> import anyio >>> from returns.io import IO, IOFailure >>> from returns.future import FutureResult >>> async def main(): ... assert await FutureResult.from_failed_io( ... IO(1), ... ) == IOFailure(1) >>> anyio.run(main) """ return FutureResult.from_failure(inner_value._inner_value) @classmethod def from_ioresult( cls, inner_value: IOResult[_NewValueType, _NewErrorType], ) -> 'FutureResult[_NewValueType, _NewErrorType]': """ Creates ``FutureResult`` from ``IOResult`` value. .. code:: python >>> import anyio >>> from returns.io import IOSuccess, IOFailure >>> from returns.future import FutureResult >>> async def main(): ... assert await FutureResult.from_ioresult( ... IOSuccess(1), ... ) == IOSuccess(1) ... assert await FutureResult.from_ioresult( ... IOFailure(1), ... ) == IOFailure(1) >>> anyio.run(main) """ return FutureResult(async_identity(inner_value._inner_value)) @classmethod def from_result( cls, inner_value: Result[_NewValueType, _NewErrorType], ) -> 'FutureResult[_NewValueType, _NewErrorType]': """ Creates ``FutureResult`` from ``Result`` value. .. code:: python >>> import anyio >>> from returns.io import IOSuccess, IOFailure >>> from returns.result import Success, Failure >>> from returns.future import FutureResult >>> async def main(): ... assert await FutureResult.from_result( ... Success(1), ... ) == IOSuccess(1) ... assert await FutureResult.from_result( ... Failure(1), ... ) == IOFailure(1) >>> anyio.run(main) """ return FutureResult(async_identity(inner_value)) @classmethod def from_value( cls, inner_value: _NewValueType, ) -> 'FutureResult[_NewValueType, Any]': """ Creates ``FutureResult`` from successful value. .. code:: python >>> import anyio >>> from returns.io import IOSuccess >>> from returns.future import FutureResult >>> async def main(): ... assert await FutureResult.from_value( ... 1, ... ) == IOSuccess(1) >>> anyio.run(main) """ return FutureResult(async_identity(Success(inner_value))) @classmethod def from_failure( cls, inner_value: _NewErrorType, ) -> 'FutureResult[Any, _NewErrorType]': """ Creates ``FutureResult`` from failed value. .. code:: python >>> import anyio >>> from returns.io import IOFailure >>> from returns.future import FutureResult >>> async def main(): ... assert await FutureResult.from_failure( ... 1, ... ) == IOFailure(1) >>> anyio.run(main) """ return FutureResult(async_identity(Failure(inner_value))) def FutureSuccess( # noqa: N802 inner_value: _NewValueType, ) -> FutureResult[_NewValueType, Any]: """ Public unit function to create successful ``FutureResult`` objects. Is the same as :meth:`~FutureResult.from_value`. .. code:: python >>> import anyio >>> from returns.future import FutureResult, FutureSuccess >>> assert anyio.run(FutureSuccess(1).awaitable) == anyio.run( ... 
FutureResult.from_value(1).awaitable, ... ) """ return FutureResult.from_value(inner_value) def FutureFailure( # noqa: N802 inner_value: _NewErrorType, ) -> FutureResult[Any, _NewErrorType]: """ Public unit function to create failed ``FutureResult`` objects. Is the same as :meth:`~FutureResult.from_failure`. .. code:: python >>> import anyio >>> from returns.future import FutureResult, FutureFailure >>> assert anyio.run(FutureFailure(1).awaitable) == anyio.run( ... FutureResult.from_failure(1).awaitable, ... ) """ return FutureResult.from_failure(inner_value) #: Alias for ``FutureResult[_ValueType, Exception]``. FutureResultE: TypeAlias = FutureResult[_ValueType, Exception] _ExceptionType = TypeVar('_ExceptionType', bound=Exception) # Decorators: @overload def future_safe( exceptions: Callable[ _FuncParams, Coroutine[_FirstType, _SecondType, _ValueType], ], /, ) -> Callable[_FuncParams, FutureResultE[_ValueType]]: """Decorator to convert exception-throwing for any kind of Exception.""" @overload def future_safe( exceptions: tuple[type[_ExceptionType], ...], ) -> Callable[ [ Callable[ _FuncParams, Coroutine[_FirstType, _SecondType, _ValueType], ], ], Callable[_FuncParams, FutureResult[_ValueType, _ExceptionType]], ]: """Decorator to convert exception-throwing just for a set of Exceptions.""" def future_safe( # noqa: C901, WPS212, WPS234, exceptions: ( Callable[ _FuncParams, Coroutine[_FirstType, _SecondType, _ValueType], ] | tuple[type[_ExceptionType], ...] ), ) -> ( Callable[_FuncParams, FutureResultE[_ValueType]] | Callable[ [ Callable[ _FuncParams, Coroutine[_FirstType, _SecondType, _ValueType], ], ], Callable[_FuncParams, FutureResult[_ValueType, _ExceptionType]], ] ): """ Decorator to convert exception-throwing coroutine to ``FutureResult``. Should be used with care, since it only catches ``Exception`` subclasses. It does not catch ``BaseException`` subclasses. If you need to mark sync function as ``safe``, use :func:`returns.future.future_safe` instead. This decorator only works with ``async`` functions. Example: .. code:: python >>> import anyio >>> from returns.future import future_safe >>> from returns.io import IOFailure, IOSuccess >>> @future_safe ... async def might_raise(arg: int) -> float: ... return 1 / arg ... >>> assert anyio.run(might_raise(2).awaitable) == IOSuccess(0.5) >>> assert isinstance( ... anyio.run(might_raise(0).awaitable), ... IOFailure, ... ) You can also use it with explicit exception types as the first argument: .. code:: python >>> from returns.future import future_safe >>> from returns.io import IOFailure, IOSuccess >>> @future_safe(exceptions=(ZeroDivisionError,)) ... async def might_raise(arg: int) -> float: ... return 1 / arg >>> assert anyio.run(might_raise(2).awaitable) == IOSuccess(0.5) >>> assert isinstance( ... anyio.run(might_raise(0).awaitable), ... IOFailure, ... ) In this case, only exceptions that are explicitly listed are going to be caught. Similar to :func:`returns.io.impure_safe` and :func:`returns.result.safe` decorators, but works with ``async`` functions. 
""" def _future_safe_factory( # noqa: WPS430 function: Callable[ _FuncParams, Coroutine[_FirstType, _SecondType, _ValueType], ], inner_exceptions: tuple[type[_ExceptionType], ...], ) -> Callable[_FuncParams, FutureResult[_ValueType, _ExceptionType]]: async def factory( *args: _FuncParams.args, **kwargs: _FuncParams.kwargs, ) -> Result[_ValueType, _ExceptionType]: try: return Success(await function(*args, **kwargs)) except inner_exceptions as exc: return Failure(exc) @wraps(function) def decorator( *args: _FuncParams.args, **kwargs: _FuncParams.kwargs, ) -> FutureResult[_ValueType, _ExceptionType]: return FutureResult(factory(*args, **kwargs)) return decorator if isinstance(exceptions, tuple): return lambda function: _future_safe_factory(function, exceptions) return _future_safe_factory( exceptions, (Exception,), # type: ignore[arg-type] ) returns-0.24.0/returns/interfaces/000077500000000000000000000000001472312074000171245ustar00rootroot00000000000000returns-0.24.0/returns/interfaces/__init__.py000066400000000000000000000000001472312074000212230ustar00rootroot00000000000000returns-0.24.0/returns/interfaces/altable.py000066400000000000000000000045111472312074000211030ustar00rootroot00000000000000from abc import abstractmethod from collections.abc import Callable, Sequence from typing import ClassVar, Generic, TypeVar, final from typing_extensions import Never from returns.functions import compose, identity from returns.primitives.asserts import assert_equal from returns.primitives.hkt import KindN from returns.primitives.laws import ( Law, Law1, Law3, Lawful, LawSpecDef, law_definition, ) _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _AltableType = TypeVar('_AltableType', bound='AltableN') # Used in laws: _NewType1 = TypeVar('_NewType1') _NewType2 = TypeVar('_NewType2') @final class _LawSpec(LawSpecDef): """ Mappable or functor laws. https://en.wikibooks.org/wiki/Haskell/The_Functor_class#The_functor_laws """ __slots__ = () @law_definition def identity_law( altable: 'AltableN[_FirstType, _SecondType, _ThirdType]', ) -> None: """Mapping identity over a value must return the value unchanged.""" assert_equal(altable.alt(identity), altable) @law_definition def associative_law( altable: 'AltableN[_FirstType, _SecondType, _ThirdType]', first: Callable[[_SecondType], _NewType1], second: Callable[[_NewType1], _NewType2], ) -> None: """Mapping twice or mapping a composition is the same thing.""" assert_equal( altable.alt(first).alt(second), altable.alt(compose(first, second)), ) class AltableN( Generic[_FirstType, _SecondType, _ThirdType], Lawful['AltableN[_FirstType, _SecondType, _ThirdType]'], ): """Modifies the second type argument with a pure function.""" __slots__ = () _laws: ClassVar[Sequence[Law]] = ( Law1(_LawSpec.identity_law), Law3(_LawSpec.associative_law), ) @abstractmethod def alt( self: _AltableType, function: Callable[[_SecondType], _UpdatedType], ) -> KindN[_AltableType, _FirstType, _UpdatedType, _ThirdType]: """Allows to run a pure function over a container.""" #: Type alias for kinds with two type arguments. Altable2 = AltableN[_FirstType, _SecondType, Never] #: Type alias for kinds with three type arguments. 
Altable3 = AltableN[_FirstType, _SecondType, _ThirdType] returns-0.24.0/returns/interfaces/applicative.py000066400000000000000000000116301472312074000220000ustar00rootroot00000000000000from abc import abstractmethod from collections.abc import Callable, Sequence from typing import ClassVar, TypeVar, final from typing_extensions import Never from returns.functions import compose, identity from returns.interfaces import mappable from returns.primitives.asserts import assert_equal from returns.primitives.hkt import KindN from returns.primitives.laws import ( Law, Law1, Law3, Lawful, LawSpecDef, law_definition, ) _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _ApplicativeType = TypeVar('_ApplicativeType', bound='ApplicativeN') # Only used in laws: _NewType1 = TypeVar('_NewType1') _NewType2 = TypeVar('_NewType2') @final class _LawSpec(LawSpecDef): """ Applicative mappable laws. Definition: https://bit.ly/3hC8F8E Discussion: https://bit.ly/3jffz3L """ __slots__ = () @law_definition def identity_law( container: 'ApplicativeN[_FirstType, _SecondType, _ThirdType]', ) -> None: """ Identity law. If we apply wrapped ``identity`` function to a container, nothing happens. """ assert_equal( container, container.apply(container.from_value(identity)), ) @law_definition def interchange_law( raw_value: _FirstType, container: 'ApplicativeN[_FirstType, _SecondType, _ThirdType]', function: Callable[[_FirstType], _NewType1], ) -> None: """ Interchange law. Basically we check that we can start our composition with both ``raw_value`` and ``function``. Great explanation: https://stackoverflow.com/q/27285918/4842742 """ assert_equal( container.from_value(raw_value).apply( container.from_value(function), ), container.from_value(function).apply( container.from_value(lambda inner: inner(raw_value)), ), ) @law_definition def homomorphism_law( raw_value: _FirstType, container: 'ApplicativeN[_FirstType, _SecondType, _ThirdType]', function: Callable[[_FirstType], _NewType1], ) -> None: """ Homomorphism law. The homomorphism law says that applying a wrapped function to a wrapped value is the same as applying the function to the value in the normal way and then using ``.from_value`` on the result. """ assert_equal( container.from_value(function(raw_value)), container.from_value(raw_value).apply( container.from_value(function), ), ) @law_definition def composition_law( container: 'ApplicativeN[_FirstType, _SecondType, _ThirdType]', first: Callable[[_FirstType], _NewType1], second: Callable[[_NewType1], _NewType2], ) -> None: """ Composition law. Applying two functions twice is the same as applying their composition once. """ assert_equal( container.apply(container.from_value(compose(first, second))), container.apply( container.from_value(first), ).apply( container.from_value(second), ), ) class ApplicativeN( mappable.MappableN[_FirstType, _SecondType, _ThirdType], Lawful['ApplicativeN[_FirstType, _SecondType, _ThirdType]'], ): """ Allows to create unit containers from raw values and to apply wrapped funcs. 
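    A minimal sketch of how implementations behave, using the concrete
    ``IO`` container purely as an illustration (``IO`` is just one of the
    types that provides this interface):

    .. code:: python

      >>> from returns.io import IO
      >>> assert IO.from_value(1).apply(
      ...     IO.from_value(lambda number: number + 1),
      ... ) == IO(2)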
See also: - https://en.wikipedia.org/wiki/Applicative_functor - http://learnyouahaskell.com/functors-applicative-functors-and-monoids """ __slots__ = () _laws: ClassVar[Sequence[Law]] = ( Law1(_LawSpec.identity_law), Law3(_LawSpec.interchange_law), Law3(_LawSpec.homomorphism_law), Law3(_LawSpec.composition_law), ) @abstractmethod def apply( self: _ApplicativeType, container: KindN[ _ApplicativeType, Callable[[_FirstType], _UpdatedType], _SecondType, _ThirdType, ], ) -> KindN[_ApplicativeType, _UpdatedType, _SecondType, _ThirdType]: """Allows to apply a wrapped function over a container.""" @classmethod @abstractmethod def from_value( cls: type[_ApplicativeType], # noqa: N805 inner_value: _UpdatedType, ) -> KindN[_ApplicativeType, _UpdatedType, _SecondType, _ThirdType]: """Unit method to create new containers from any raw value.""" #: Type alias for kinds with one type argument. Applicative1 = ApplicativeN[_FirstType, Never, Never] #: Type alias for kinds with two type arguments. Applicative2 = ApplicativeN[_FirstType, _SecondType, Never] #: Type alias for kinds with three type arguments. Applicative3 = ApplicativeN[_FirstType, _SecondType, _ThirdType] returns-0.24.0/returns/interfaces/bimappable.py000066400000000000000000000015171472312074000215760ustar00rootroot00000000000000from typing import TypeVar from typing_extensions import Never from returns.interfaces import altable, mappable _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') class BiMappableN( mappable.MappableN[_FirstType, _SecondType, _ThirdType], altable.AltableN[_FirstType, _SecondType, _ThirdType], ): """ Allows to change both types of a container at the same time. Uses ``.map`` to change first type and ``.alt`` to change second type. See also: - https://typelevel.org/cats/typeclasses/bifunctor.html """ __slots__ = () #: Type alias for kinds with two type arguments. BiMappable2 = BiMappableN[_FirstType, _SecondType, Never] #: Type alias for kinds with three type arguments. BiMappable3 = BiMappableN[_FirstType, _SecondType, _ThirdType] returns-0.24.0/returns/interfaces/bindable.py000066400000000000000000000027561472312074000212500ustar00rootroot00000000000000from abc import abstractmethod from collections.abc import Callable from typing import Generic, TypeVar from typing_extensions import Never from returns.primitives.hkt import KindN _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _BindableType = TypeVar('_BindableType', bound='BindableN') class BindableN(Generic[_FirstType, _SecondType, _ThirdType]): """ Represents a "context" in which calculations can be executed. ``Bindable`` allows you to bind together a series of calculations while maintaining the context of that specific container. In contrast to :class:`returns.interfaces.lashable.LashableN`, works with the first type argument. """ __slots__ = () @abstractmethod def bind( self: _BindableType, function: Callable[ [_FirstType], KindN[_BindableType, _UpdatedType, _SecondType, _ThirdType], ], ) -> KindN[_BindableType, _UpdatedType, _SecondType, _ThirdType]: """ Applies 'function' to the result of a previous calculation. And returns a new container. """ #: Type alias for kinds with one type argument. Bindable1 = BindableN[_FirstType, Never, Never] #: Type alias for kinds with two type arguments. Bindable2 = BindableN[_FirstType, _SecondType, Never] #: Type alias for kinds with three type arguments. 
Bindable3 = BindableN[_FirstType, _SecondType, _ThirdType] returns-0.24.0/returns/interfaces/container.py000066400000000000000000000071531472312074000214660ustar00rootroot00000000000000from collections.abc import Callable, Sequence from typing import ClassVar, TypeVar, final from typing_extensions import Never from returns.interfaces import applicative, bindable from returns.primitives.asserts import assert_equal from returns.primitives.hkt import KindN from returns.primitives.laws import ( Law, Law1, Law3, Lawful, LawSpecDef, law_definition, ) _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') # Only used in laws: _NewType1 = TypeVar('_NewType1') _NewType2 = TypeVar('_NewType2') @final class _LawSpec(LawSpecDef): """ Container laws. Definition: https://wiki.haskell.org/Monad_laws Good explanation: https://bit.ly/2Qsi5re """ __slots__ = () @law_definition def left_identity_law( raw_value: _FirstType, container: 'ContainerN[_FirstType, _SecondType, _ThirdType]', function: Callable[ [_FirstType], KindN['ContainerN', _NewType1, _SecondType, _ThirdType], ], ) -> None: """ Left identity. The first law states that if we take a value, put it in a default context with return and then feed it to a function by using ``bind``, it's the same as just taking the value and applying the function to it. """ assert_equal( container.from_value(raw_value).bind(function), function(raw_value), ) @law_definition def right_identity_law( container: 'ContainerN[_FirstType, _SecondType, _ThirdType]', ) -> None: """ Right identity. The second law states that if we have a container value and we use ``bind`` to feed it to ``.from_value``, the result is our original container value. """ assert_equal( container, container.bind( lambda inner: container.from_value(inner), ), ) @law_definition def associative_law( container: 'ContainerN[_FirstType, _SecondType, _ThirdType]', first: Callable[ [_FirstType], KindN['ContainerN', _NewType1, _SecondType, _ThirdType], ], second: Callable[ [_NewType1], KindN['ContainerN', _NewType2, _SecondType, _ThirdType], ], ) -> None: """ Associativity law. The final monad law says that when we have a chain of container functions applications with ``bind``, it shouldn’t matter how they’re nested. """ assert_equal( container.bind(first).bind(second), container.bind(lambda inner: first(inner).bind(second)), ) class ContainerN( applicative.ApplicativeN[_FirstType, _SecondType, _ThirdType], bindable.BindableN[_FirstType, _SecondType, _ThirdType], Lawful['ContainerN[_FirstType, _SecondType, _ThirdType]'], ): """ Handy alias for types with ``.bind``, ``.map``, and ``.apply`` methods. Should be a base class for almost any containers you write. See also: - https://bit.ly/2CTEVov """ __slots__ = () _laws: ClassVar[Sequence[Law]] = ( Law3(_LawSpec.left_identity_law), Law1(_LawSpec.right_identity_law), Law3(_LawSpec.associative_law), ) #: Type alias for kinds with one type argument. Container1 = ContainerN[_FirstType, Never, Never] #: Type alias for kinds with two type arguments. Container2 = ContainerN[_FirstType, _SecondType, Never] #: Type alias for kinds with three type arguments. 
Container3 = ContainerN[_FirstType, _SecondType, _ThirdType] returns-0.24.0/returns/interfaces/equable.py000066400000000000000000000035221472312074000211160ustar00rootroot00000000000000from abc import abstractmethod from collections.abc import Sequence from typing import ClassVar, TypeVar, final from returns.primitives.laws import ( Law, Law1, Law2, Law3, Lawful, LawSpecDef, law_definition, ) _EqualType = TypeVar('_EqualType', bound='Equable') @final class _LawSpec(LawSpecDef): """ Equality laws. Description: https://bit.ly/34D40iT """ __slots__ = () @law_definition def reflexive_law( first: _EqualType, ) -> None: """Value should be equal to itself.""" assert first.equals(first) @law_definition def symmetry_law( first: _EqualType, second: _EqualType, ) -> None: """If ``A == B`` then ``B == A``.""" assert first.equals(second) == second.equals(first) @law_definition def transitivity_law( first: _EqualType, second: _EqualType, third: _EqualType, ) -> None: """If ``A == B`` and ``B == C`` then ``A == C``.""" # We use this notation, because `first` might be equal to `third`, # but not to `second`. Example: Some(1), Some(2), Some(1) if first.equals(second) and second.equals(third): assert first.equals(third) class Equable(Lawful['Equable']): """ Interface for types that can be compared with real values. Not all types can, because some don't have the value at a time: - ``Future`` has to be awaited to get the value - ``Reader`` has to be called to get the value """ __slots__ = () _laws: ClassVar[Sequence[Law]] = ( Law1(_LawSpec.reflexive_law), Law2(_LawSpec.symmetry_law), Law3(_LawSpec.transitivity_law), ) @abstractmethod def equals(self: _EqualType, other: _EqualType) -> bool: """Type-safe equality check for values of the same type.""" returns-0.24.0/returns/interfaces/failable.py000066400000000000000000000174371472312074000212510ustar00rootroot00000000000000from abc import abstractmethod from collections.abc import Callable, Sequence from typing import ClassVar, TypeVar, final from typing_extensions import Never from returns.interfaces import container as _container from returns.interfaces import lashable, swappable from returns.primitives.asserts import assert_equal from returns.primitives.hkt import KindN from returns.primitives.laws import ( Law, Law2, Law3, Lawful, LawSpecDef, law_definition, ) _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _SingleFailableType = TypeVar('_SingleFailableType', bound='SingleFailableN') _DiverseFailableType = TypeVar('_DiverseFailableType', bound='DiverseFailableN') # Used in laws: _NewFirstType = TypeVar('_NewFirstType') @final class _FailableLawSpec(LawSpecDef): """ Failable laws. We need to be sure that ``.lash`` won't lash success types. """ __slots__ = () @law_definition def lash_short_circuit_law( raw_value: _FirstType, container: 'FailableN[_FirstType, _SecondType, _ThirdType]', function: Callable[ [_SecondType], KindN['FailableN', _FirstType, _NewFirstType, _ThirdType], ], ) -> None: """Ensures that you cannot lash a success.""" assert_equal( container.from_value(raw_value), container.from_value(raw_value).lash(function), ) class FailableN( _container.ContainerN[_FirstType, _SecondType, _ThirdType], lashable.LashableN[_FirstType, _SecondType, _ThirdType], Lawful['FailableN[_FirstType, _SecondType, _ThirdType]'], ): """ Base type for types that can fail. It is a raw type and should not be used directly. 
Use ``SingleFailableN`` and ``DiverseFailableN`` instead. """ __slots__ = () _laws: ClassVar[Sequence[Law]] = ( Law3(_FailableLawSpec.lash_short_circuit_law), ) #: Type alias for kinds with two type arguments. Failable2 = FailableN[_FirstType, _SecondType, Never] #: Type alias for kinds with three type arguments. Failable3 = FailableN[_FirstType, _SecondType, _ThirdType] @final class _SingleFailableLawSpec(LawSpecDef): """ Single Failable laws. We need to be sure that ``.map`` and ``.bind`` works correctly for ``empty`` property. """ __slots__ = () @law_definition def map_short_circuit_law( container: 'SingleFailableN[_FirstType, _SecondType, _ThirdType]', function: Callable[[_FirstType], _NewFirstType], ) -> None: """Ensures that you cannot map from the `empty` property.""" assert_equal( container.empty, container.empty.map(function), ) @law_definition def bind_short_circuit_law( container: 'SingleFailableN[_FirstType, _SecondType, _ThirdType]', function: Callable[ [_FirstType], KindN['SingleFailableN', _NewFirstType, _SecondType, _ThirdType], ], ) -> None: """Ensures that you cannot bind from the `empty` property.""" assert_equal( container.empty, container.empty.bind(function), ) @law_definition def apply_short_circuit_law( container: 'SingleFailableN[_FirstType, _SecondType, _ThirdType]', function: Callable[[_FirstType], _NewFirstType], ) -> None: """Ensures that you cannot apply from the `empty` property.""" wrapped_function = container.from_value(function) assert_equal( container.empty, container.empty.apply(wrapped_function), ) class SingleFailableN( FailableN[_FirstType, _SecondType, _ThirdType], ): """ Base type for types that have just only one failed value. Like ``Maybe`` types where the only failed value is ``Nothing``. """ __slots__ = () _laws: ClassVar[Sequence[Law]] = ( Law2(_SingleFailableLawSpec.map_short_circuit_law), Law2(_SingleFailableLawSpec.bind_short_circuit_law), Law2(_SingleFailableLawSpec.apply_short_circuit_law), ) @property @abstractmethod def empty( self: _SingleFailableType, ) -> 'SingleFailableN[_FirstType, _SecondType, _ThirdType]': """This property represents the failed value.""" #: Type alias for kinds with two types arguments. SingleFailable2 = SingleFailableN[_FirstType, _SecondType, Never] #: Type alias for kinds with three type arguments. SingleFailable3 = SingleFailableN[_FirstType, _SecondType, _ThirdType] @final class _DiverseFailableLawSpec(LawSpecDef): """ Diverse Failable laws. We need to be sure that ``.map``, ``.bind``, ``.apply`` and ``.alt`` works correctly for both success and failure types. """ __slots__ = () @law_definition def map_short_circuit_law( raw_value: _SecondType, container: 'DiverseFailableN[_FirstType, _SecondType, _ThirdType]', function: Callable[[_FirstType], _NewFirstType], ) -> None: """Ensures that you cannot map a failure.""" assert_equal( container.from_failure(raw_value), container.from_failure(raw_value).map(function), ) @law_definition def bind_short_circuit_law( raw_value: _SecondType, container: 'DiverseFailableN[_FirstType, _SecondType, _ThirdType]', function: Callable[ [_FirstType], KindN['DiverseFailableN', _NewFirstType, _SecondType, _ThirdType], ], ) -> None: """ Ensures that you cannot bind a failure. 
See: https://wiki.haskell.org/Typeclassopedia#MonadFail """ assert_equal( container.from_failure(raw_value), container.from_failure(raw_value).bind(function), ) @law_definition def apply_short_circuit_law( raw_value: _SecondType, container: 'DiverseFailableN[_FirstType, _SecondType, _ThirdType]', function: Callable[[_FirstType], _NewFirstType], ) -> None: """Ensures that you cannot apply a failure.""" wrapped_function = container.from_value(function) assert_equal( container.from_failure(raw_value), container.from_failure(raw_value).apply(wrapped_function), ) @law_definition def alt_short_circuit_law( raw_value: _SecondType, container: 'DiverseFailableN[_FirstType, _SecondType, _ThirdType]', function: Callable[[_SecondType], _NewFirstType], ) -> None: """Ensures that you cannot alt a success.""" assert_equal( container.from_value(raw_value), container.from_value(raw_value).alt(function), ) class DiverseFailableN( FailableN[_FirstType, _SecondType, _ThirdType], swappable.SwappableN[_FirstType, _SecondType, _ThirdType], Lawful['DiverseFailableN[_FirstType, _SecondType, _ThirdType]'], ): """ Base type for types that have any failed value. Like ``Result`` types. """ __slots__ = () _laws: ClassVar[Sequence[Law]] = ( Law3(_DiverseFailableLawSpec.map_short_circuit_law), Law3(_DiverseFailableLawSpec.bind_short_circuit_law), Law3(_DiverseFailableLawSpec.apply_short_circuit_law), Law3(_DiverseFailableLawSpec.alt_short_circuit_law), ) @classmethod @abstractmethod def from_failure( cls: type[_DiverseFailableType], inner_value: _UpdatedType, ) -> KindN[_DiverseFailableType, _FirstType, _UpdatedType, _ThirdType]: """Unit method to create new containers from any raw value.""" #: Type alias for kinds with two type arguments. DiverseFailable2 = DiverseFailableN[_FirstType, _SecondType, Never] #: Type alias for kinds with three type arguments. DiverseFailable3 = DiverseFailableN[_FirstType, _SecondType, _ThirdType] returns-0.24.0/returns/interfaces/lashable.py000066400000000000000000000026141472312074000212540ustar00rootroot00000000000000from abc import abstractmethod from collections.abc import Callable from typing import Generic, TypeVar from typing_extensions import Never from returns.primitives.hkt import KindN _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _LashableType = TypeVar('_LashableType', bound='LashableN') class LashableN(Generic[_FirstType, _SecondType, _ThirdType]): """ Represents a "context" in which calculations can be executed. ``Lashable`` allows you to bind together a series of calculations while maintaining the context of that specific container. In contrast to :class:`returns.interfaces.bindable.BindableN`, works with the second type value. """ __slots__ = () @abstractmethod def lash( self: _LashableType, function: Callable[ [_SecondType], KindN[_LashableType, _FirstType, _UpdatedType, _ThirdType], ], ) -> KindN[_LashableType, _FirstType, _UpdatedType, _ThirdType]: """ Applies 'function' to the result of a previous calculation. And returns a new container. """ #: Type alias for kinds with two type arguments. Lashable2 = LashableN[_FirstType, _SecondType, Never] #: Type alias for kinds with three type arguments.
Lashable3 = LashableN[_FirstType, _SecondType, _ThirdType] returns-0.24.0/returns/interfaces/mappable.py000066400000000000000000000051001472312074000212530ustar00rootroot00000000000000from abc import abstractmethod from collections.abc import Callable, Sequence from typing import ClassVar, Generic, TypeVar, final from typing_extensions import Never from returns.functions import compose, identity from returns.primitives.asserts import assert_equal from returns.primitives.hkt import KindN from returns.primitives.laws import ( Law, Law1, Law3, Lawful, LawSpecDef, law_definition, ) _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _MappableType = TypeVar('_MappableType', bound='MappableN') # Used in laws: _NewType1 = TypeVar('_NewType1') _NewType2 = TypeVar('_NewType2') @final class _LawSpec(LawSpecDef): """ Mappable or functor laws. https://en.wikibooks.org/wiki/Haskell/The_Functor_class#The_functor_laws """ __slots__ = () @law_definition def identity_law( mappable: 'MappableN[_FirstType, _SecondType, _ThirdType]', ) -> None: """Mapping identity over a value must return the value unchanged.""" assert_equal(mappable.map(identity), mappable) @law_definition def associative_law( mappable: 'MappableN[_FirstType, _SecondType, _ThirdType]', first: Callable[[_FirstType], _NewType1], second: Callable[[_NewType1], _NewType2], ) -> None: """Mapping twice or mapping a composition is the same thing.""" assert_equal( mappable.map(first).map(second), mappable.map(compose(first, second)), ) class MappableN( Generic[_FirstType, _SecondType, _ThirdType], Lawful['MappableN[_FirstType, _SecondType, _ThirdType]'], ): """ Allows to chain wrapped values in containers with regular functions. Behaves like a functor. See also: - https://en.wikipedia.org/wiki/Functor """ __slots__ = () _laws: ClassVar[Sequence[Law]] = ( Law1(_LawSpec.identity_law), Law3(_LawSpec.associative_law), ) @abstractmethod # noqa: WPS125 def map( self: _MappableType, function: Callable[[_FirstType], _UpdatedType], ) -> KindN[_MappableType, _UpdatedType, _SecondType, _ThirdType]: """Allows to run a pure function over a container.""" #: Type alias for kinds with one type argument. Mappable1 = MappableN[_FirstType, Never, Never] #: Type alias for kinds with two type arguments. Mappable2 = MappableN[_FirstType, _SecondType, Never] #: Type alias for kinds with three type arguments. Mappable3 = MappableN[_FirstType, _SecondType, _ThirdType] returns-0.24.0/returns/interfaces/specific/000077500000000000000000000000001472312074000207115ustar00rootroot00000000000000returns-0.24.0/returns/interfaces/specific/__init__.py000066400000000000000000000000001472312074000230100ustar00rootroot00000000000000returns-0.24.0/returns/interfaces/specific/future.py000066400000000000000000000111131472312074000225720ustar00rootroot00000000000000""" Represents the base interfaces for types that do fearless async operations. This type means that ``Future`` cannot fail. Don't use this type for async that can. Instead, use :class:`returns.interfaces.specific.future_result.FutureResultBasedN` type. 
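An illustrative sketch only, using the concrete ``Future`` type from
``returns.future`` (the ``multiply`` coroutine is just an example helper,
and ``anyio`` is only used to run the code):

.. code:: python

  >>> import anyio
  >>> from returns.future import Future
  >>> from returns.io import IO

  >>> async def multiply(number: int) -> int:
  ...     return number * 2

  >>> assert anyio.run(
  ...     Future.from_value(1).bind_awaitable(multiply).awaitable,
  ... ) == IO(2)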
""" from __future__ import annotations from abc import abstractmethod from collections.abc import Awaitable, Callable, Generator from typing import TYPE_CHECKING, Any, Generic, TypeVar from typing_extensions import Never from returns.interfaces.specific import io from returns.primitives.hkt import KindN if TYPE_CHECKING: from returns.future import Future # noqa: WPS433 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _FutureLikeType = TypeVar('_FutureLikeType', bound='FutureLikeN') _AsyncFutureType = TypeVar('_AsyncFutureType', bound='AwaitableFutureN') _FutureBasedType = TypeVar('_FutureBasedType', bound='FutureBasedN') class FutureLikeN(io.IOLikeN[_FirstType, _SecondType, _ThirdType]): """ Base type for ones that does look like ``Future``. But at the time this is not a real ``Future`` and cannot be awaited. """ __slots__ = () @abstractmethod def bind_future( self: _FutureLikeType, function: Callable[[_FirstType], Future[_UpdatedType]], ) -> KindN[_FutureLikeType, _UpdatedType, _SecondType, _ThirdType]: """Allows to bind ``Future`` returning function over a container.""" @abstractmethod def bind_async_future( self: _FutureLikeType, function: Callable[[_FirstType], Awaitable[Future[_UpdatedType]]], ) -> KindN[_FutureLikeType, _UpdatedType, _SecondType, _ThirdType]: """Allows to bind async ``Future`` returning function over container.""" @abstractmethod def bind_async( self: _FutureLikeType, function: Callable[ [_FirstType], Awaitable[ KindN[_FutureLikeType, _UpdatedType, _SecondType, _ThirdType], ], ], ) -> KindN[_FutureLikeType, _UpdatedType, _SecondType, _ThirdType]: """Binds async function returning the same type of container.""" @abstractmethod def bind_awaitable( self: _FutureLikeType, function: Callable[[_FirstType], Awaitable[_UpdatedType]], ) -> KindN[_FutureLikeType, _UpdatedType, _SecondType, _ThirdType]: """Allows to bind async function over container.""" @classmethod @abstractmethod def from_future( cls: type[_FutureLikeType], # noqa: N805 inner_value: Future[_UpdatedType], ) -> KindN[_FutureLikeType, _UpdatedType, _SecondType, _ThirdType]: """Unit method to create new containers from successful ``Future``.""" #: Type alias for kinds with one type argument. FutureLike1 = FutureLikeN[_FirstType, Never, Never] #: Type alias for kinds with two type arguments. FutureLike2 = FutureLikeN[_FirstType, _SecondType, Never] #: Type alias for kinds with three type arguments. FutureLike3 = FutureLikeN[_FirstType, _SecondType, _ThirdType] class AwaitableFutureN(Generic[_FirstType, _SecondType, _ThirdType]): """ Type that provides the required API for ``Future`` to be async. Should not be used directly. Use ``FutureBasedN`` instead. """ __slots__ = () @abstractmethod def __await__(self: _AsyncFutureType) -> Generator[ Any, Any, io.IOLikeN[_FirstType, _SecondType, _ThirdType], ]: """Magic method to allow ``await`` expression.""" @abstractmethod async def awaitable( self: _AsyncFutureType, ) -> io.IOLikeN[_FirstType, _SecondType, _ThirdType]: """Underling logic under ``await`` expression.""" #: Type alias for kinds with one type argument. AsyncFuture1 = AwaitableFutureN[_FirstType, Never, Never] #: Type alias for kinds with two type arguments. AsyncFuture2 = AwaitableFutureN[_FirstType, _SecondType, Never] #: Type alias for kinds with three type arguments. 
AsyncFuture3 = AwaitableFutureN[_FirstType, _SecondType, _ThirdType] class FutureBasedN( FutureLikeN[_FirstType, _SecondType, _ThirdType], AwaitableFutureN[_FirstType, _SecondType, _ThirdType], ): """ Base type for real ``Future`` objects. They can be awaited. """ __slots__ = () #: Type alias for kinds with one type argument. FutureBased1 = FutureBasedN[_FirstType, Never, Never] #: Type alias for kinds with two type arguments. FutureBased2 = FutureBasedN[_FirstType, _SecondType, Never] #: Type alias for kinds with three type arguments. FutureBased3 = FutureBasedN[_FirstType, _SecondType, _ThirdType] returns-0.24.0/returns/interfaces/specific/future_result.py000066400000000000000000000065721472312074000242040ustar00rootroot00000000000000""" Represents the base interfaces for types that do fear-some async operations. This type means that ``FutureResult`` can (and will!) fail with exceptions. Use this type to mark that this specific async operation can fail. """ from __future__ import annotations from abc import abstractmethod from collections.abc import Awaitable, Callable from typing import TYPE_CHECKING, TypeVar from typing_extensions import Never from returns.interfaces.specific import future, ioresult from returns.primitives.hkt import KindN if TYPE_CHECKING: from returns.future import Future, FutureResult # noqa: WPS433 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _ValueType = TypeVar('_ValueType') _ErrorType = TypeVar('_ErrorType') _FutureResultLikeType = TypeVar( '_FutureResultLikeType', bound='FutureResultLikeN', ) class FutureResultLikeN( future.FutureLikeN[_FirstType, _SecondType, _ThirdType], ioresult.IOResultLikeN[_FirstType, _SecondType, _ThirdType], ): """ Base type for ones that do look like ``FutureResult``. But at the time this is not a real ``Future`` and cannot be awaited. It also cannot be unwrapped, because it is not a real ``IOResult``. """ __slots__ = () @abstractmethod def bind_future_result( self: _FutureResultLikeType, function: Callable[ [_FirstType], FutureResult[_UpdatedType, _SecondType], ], ) -> KindN[_FutureResultLikeType, _UpdatedType, _SecondType, _ThirdType]: """Allows to bind ``FutureResult`` functions over a container.""" @abstractmethod def bind_async_future_result( self: _FutureResultLikeType, function: Callable[ [_FirstType], Awaitable[FutureResult[_UpdatedType, _SecondType]], ], ) -> KindN[_FutureResultLikeType, _UpdatedType, _SecondType, _ThirdType]: """Allows to bind async ``FutureResult`` functions over container.""" @classmethod @abstractmethod def from_failed_future( cls: type[_FutureResultLikeType], # noqa: N805 inner_value: Future[_ErrorType], ) -> KindN[_FutureResultLikeType, _FirstType, _ErrorType, _ThirdType]: """Creates new container from a failed ``Future``.""" @classmethod def from_future_result( cls: type[_FutureResultLikeType], # noqa: N805 inner_value: FutureResult[_ValueType, _ErrorType], ) -> KindN[_FutureResultLikeType, _ValueType, _ErrorType, _ThirdType]: """Creates container from ``FutureResult`` instance.""" #: Type alias for kinds with two type arguments. FutureResultLike2 = FutureResultLikeN[_FirstType, _SecondType, Never] #: Type alias for kinds with three type arguments.
FutureResultLike3 = FutureResultLikeN[_FirstType, _SecondType, _ThirdType] class FutureResultBasedN( future.FutureBasedN[_FirstType, _SecondType, _ThirdType], FutureResultLikeN[_FirstType, _SecondType, _ThirdType], ): """ Base type for real ``FutureResult`` objects. They can be awaited. Still cannot be unwrapped. """ __slots__ = () #: Type alias for kinds with two type arguments. FutureResultBased2 = FutureResultBasedN[_FirstType, _SecondType, Never] #: Type alias for kinds with three type arguments. FutureResultBased3 = FutureResultBasedN[_FirstType, _SecondType, _ThirdType] returns-0.24.0/returns/interfaces/specific/io.py000066400000000000000000000051041472312074000216720ustar00rootroot00000000000000from __future__ import annotations from abc import abstractmethod from collections.abc import Callable from typing import TYPE_CHECKING, TypeVar from typing_extensions import Never from returns.interfaces import container, equable from returns.primitives.hkt import KindN if TYPE_CHECKING: from returns.io import IO # noqa: WPS433 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _IOLikeType = TypeVar('_IOLikeType', bound='IOLikeN') class IOLikeN(container.ContainerN[_FirstType, _SecondType, _ThirdType]): """ Represents interface for types that looks like fearless ``IO``. This type means that ``IO`` cannot fail. Like random numbers, date, etc. Don't use this type for ``IO`` that can. Instead, use :class:`returns.interfaces.specific.ioresult.IOResultBasedN` type. """ __slots__ = () @abstractmethod def bind_io( self: _IOLikeType, function: Callable[[_FirstType], IO[_UpdatedType]], ) -> KindN[_IOLikeType, _UpdatedType, _SecondType, _ThirdType]: """Allows to apply a wrapped function over a container.""" @classmethod @abstractmethod def from_io( cls: type[_IOLikeType], # noqa: N805 inner_value: IO[_UpdatedType], ) -> KindN[_IOLikeType, _UpdatedType, _SecondType, _ThirdType]: """Unit method to create new containers from successful ``IO``.""" #: Type alias for kinds with one type argument. IOLike1 = IOLikeN[_FirstType, Never, Never] #: Type alias for kinds with two type arguments. IOLike2 = IOLikeN[_FirstType, _SecondType, Never] #: Type alias for kinds with three type arguments. IOLike3 = IOLikeN[_FirstType, _SecondType, _ThirdType] class IOBasedN( IOLikeN[_FirstType, _SecondType, _ThirdType], equable.Equable, ): """ Represents the base interface for types that do fearless ``IO``. This type means that ``IO`` cannot fail. Like random numbers, date, etc. Don't use this type for ``IO`` that can. Instead, use :class:`returns.interfaces.specific.ioresult.IOResultBasedN` type. This interface also supports direct comparison of two values. While ``IOLikeN`` is different. It can be lazy and cannot be compared. """ __slots__ = () #: Type alias for kinds with one type argument. IOBased1 = IOBasedN[_FirstType, Never, Never] #: Type alias for kinds with two type arguments. IOBased2 = IOBasedN[_FirstType, _SecondType, Never] #: Type alias for kinds with three type arguments. IOBased3 = IOBasedN[_FirstType, _SecondType, _ThirdType] returns-0.24.0/returns/interfaces/specific/ioresult.py000066400000000000000000000063511472312074000231360ustar00rootroot00000000000000""" An interface for types that do ``IO`` and can fail. It is a base interface for both sync and async ``IO`` stacks. 
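A short sketch of the sync part of this stack, shown with the concrete
``IOResult`` containers (just one possible implementation of the
interfaces below):

.. code:: python

  >>> from returns.io import IOSuccess, IOFailure
  >>> assert IOSuccess(1).map(lambda number: number + 1) == IOSuccess(2)
  >>> assert IOFailure('error').alt(str.upper) == IOFailure('ERROR')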
""" from __future__ import annotations from abc import abstractmethod from collections.abc import Callable from typing import TYPE_CHECKING, TypeVar from typing_extensions import Never from returns.interfaces.specific import io, result from returns.primitives.hkt import KindN if TYPE_CHECKING: from returns.io import IO, IOResult # noqa: WPS433 from returns.result import Result # noqa: WPS433 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _ValueType = TypeVar('_ValueType') _ErrorType = TypeVar('_ErrorType') _IOResultLikeType = TypeVar('_IOResultLikeType', bound='IOResultLikeN') class IOResultLikeN( io.IOLikeN[_FirstType, _SecondType, _ThirdType], result.ResultLikeN[_FirstType, _SecondType, _ThirdType], ): """ Base type for types that look like ``IOResult`` but cannot be unwrapped. Like ``FutureResult`` or ``RequiresContextIOResult``. """ __slots__ = () @abstractmethod def bind_ioresult( self: _IOResultLikeType, function: Callable[[_FirstType], IOResult[_UpdatedType, _SecondType]], ) -> KindN[_IOResultLikeType, _UpdatedType, _SecondType, _ThirdType]: """Runs ``IOResult`` returning function over a container.""" @abstractmethod def compose_result( self: _IOResultLikeType, function: Callable[ [Result[_FirstType, _SecondType]], KindN[_IOResultLikeType, _UpdatedType, _SecondType, _ThirdType], ], ) -> KindN[_IOResultLikeType, _UpdatedType, _SecondType, _ThirdType]: """Allows to compose the underlying ``Result`` with a function.""" @classmethod @abstractmethod def from_ioresult( cls: type[_IOResultLikeType], # noqa: N805 inner_value: IOResult[_ValueType, _ErrorType], ) -> KindN[_IOResultLikeType, _ValueType, _ErrorType, _ThirdType]: """Unit method to create new containers from ``IOResult`` type.""" @classmethod @abstractmethod def from_failed_io( cls: type[_IOResultLikeType], # noqa: N805 inner_value: IO[_ErrorType], ) -> KindN[_IOResultLikeType, _FirstType, _ErrorType, _ThirdType]: """Unit method to create new containers from failed ``IO``.""" #: Type alias for kinds with two type arguments. IOResultLike2 = IOResultLikeN[_FirstType, _SecondType, Never] #: Type alias for kinds with three type arguments. IOResultLike3 = IOResultLikeN[_FirstType, _SecondType, _ThirdType] class IOResultBasedN( IOResultLikeN[_FirstType, _SecondType, _ThirdType], io.IOBasedN[_FirstType, _SecondType, _ThirdType], result.UnwrappableResult[ _FirstType, _SecondType, _ThirdType, # Unwraps: 'IO[_FirstType]', 'IO[_SecondType]', ], ): """ Base type for real ``IOResult`` types. Can be unwrapped. """ __slots__ = () #: Type alias for kinds with two type arguments. IOResultBased2 = IOResultBasedN[_FirstType, _SecondType, Never] #: Type alias for kinds with three type arguments. 
IOResultBased3 = IOResultBasedN[_FirstType, _SecondType, _ThirdType] returns-0.24.0/returns/interfaces/specific/maybe.py000066400000000000000000000121121472312074000223550ustar00rootroot00000000000000from abc import abstractmethod from collections.abc import Callable, Sequence from typing import ClassVar, TypeVar, final from typing_extensions import Never from returns.interfaces import equable, failable, unwrappable from returns.primitives.asserts import assert_equal from returns.primitives.hkt import KindN from returns.primitives.laws import ( Law, Law2, Law3, Lawful, LawSpecDef, law_definition, ) _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _MaybeLikeType = TypeVar('_MaybeLikeType', bound='MaybeLikeN') # New values: _ValueType = TypeVar('_ValueType') # Only used in laws: _NewType1 = TypeVar('_NewType1') @final class _LawSpec(LawSpecDef): """ Maybe laws. We need to be sure that ``.map``, ``.bind``, ``.bind_optional``, and ``.lash`` works correctly for both successful and failed types. """ __slots__ = () @law_definition def map_short_circuit_law( container: 'MaybeLikeN[_FirstType, _SecondType, _ThirdType]', function: Callable[[_FirstType], _NewType1], ) -> None: """Ensures that you cannot map from failures.""" assert_equal( container.from_optional(None).map(function), container.from_optional(None), ) @law_definition def bind_short_circuit_law( container: 'MaybeLikeN[_FirstType, _SecondType, _ThirdType]', function: Callable[ [_FirstType], KindN['MaybeLikeN', _NewType1, _SecondType, _ThirdType], ], ) -> None: """Ensures that you cannot bind from failures.""" assert_equal( container.from_optional(None).bind(function), container.from_optional(None), ) @law_definition def bind_optional_short_circuit_law( container: 'MaybeLikeN[_FirstType, _SecondType, _ThirdType]', function: Callable[[_FirstType], _NewType1 | None], ) -> None: """Ensures that you cannot bind from failures.""" assert_equal( container.from_optional(None).bind_optional(function), container.from_optional(None), ) @law_definition def lash_short_circuit_law( raw_value: _FirstType, container: 'MaybeLikeN[_FirstType, _SecondType, _ThirdType]', function: Callable[ [_SecondType], KindN['MaybeLikeN', _FirstType, _NewType1, _ThirdType], ], ) -> None: """Ensures that you cannot lash a success.""" assert_equal( container.from_value(raw_value).lash(function), container.from_value(raw_value), ) @law_definition def unit_structure_law( container: 'MaybeLikeN[_FirstType, _SecondType, _ThirdType]', function: Callable[[_FirstType], None], ) -> None: """Ensures ``None`` is treated specially.""" assert_equal( container.bind_optional(function), container.from_optional(None), ) class MaybeLikeN( failable.SingleFailableN[_FirstType, _SecondType, _ThirdType], Lawful['MaybeLikeN[_FirstType, _SecondType, _ThirdType]'], ): """ Type for values that do look like a ``Maybe``. For example, ``RequiresContextMaybe`` should be created from this interface. Cannot be unwrapped or compared. 
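    A small sketch with the concrete ``Maybe`` type, which implements this
    interface (and, unlike the general case, can also be compared directly):

    .. code:: python

      >>> from returns.maybe import Maybe, Some, Nothing
      >>> assert Maybe.from_optional(1).bind_optional(
      ...     lambda number: number + 1,
      ... ) == Some(2)
      >>> assert Maybe.from_optional(None).bind_optional(
      ...     lambda number: number + 1,
      ... ) == Nothing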
""" __slots__ = () _laws: ClassVar[Sequence[Law]] = ( Law2(_LawSpec.map_short_circuit_law), Law2(_LawSpec.bind_short_circuit_law), Law2(_LawSpec.bind_optional_short_circuit_law), Law3(_LawSpec.lash_short_circuit_law), Law2(_LawSpec.unit_structure_law), ) @abstractmethod def bind_optional( self: _MaybeLikeType, function: Callable[[_FirstType], _UpdatedType | None], ) -> KindN[_MaybeLikeType, _UpdatedType, _SecondType, _ThirdType]: """Binds a function that returns ``Optional`` values.""" @classmethod @abstractmethod def from_optional( cls: type[_MaybeLikeType], # noqa: N805 inner_value: _ValueType | None, ) -> KindN[_MaybeLikeType, _ValueType, _SecondType, _ThirdType]: """Unit method to create containers from ``Optional`` value.""" #: Type alias for kinds with two type arguments. MaybeLike2 = MaybeLikeN[_FirstType, _SecondType, Never] #: Type alias for kinds with three type arguments. MaybeLike3 = MaybeLikeN[_FirstType, _SecondType, _ThirdType] class MaybeBasedN( MaybeLikeN[_FirstType, _SecondType, _ThirdType], unwrappable.Unwrappable[_FirstType, None], equable.Equable, ): """ Concrete interface for ``Maybe`` type. Can be unwrapped and compared. """ __slots__ = () @abstractmethod def or_else_call( self, function: Callable[[], _ValueType], ) -> _FirstType | _ValueType: """Calls a function in case there nothing to unwrap.""" #: Type alias for kinds with two type arguments. MaybeBased2 = MaybeBasedN[_FirstType, _SecondType, Never] #: Type alias for kinds with three type arguments. MaybeBased3 = MaybeBasedN[_FirstType, _SecondType, _ThirdType] returns-0.24.0/returns/interfaces/specific/reader.py000066400000000000000000000170641472312074000225350ustar00rootroot00000000000000""" This module is special. ``Reader`` does not produce ``ReaderLikeN`` interface as other containers. Because ``Reader`` can be used with two or three type arguments: - ``RequiresContext[value, env]`` - ``RequiresContextResult[value, error, env]`` Because the second type argument changes its meaning based on the used ``KindN`` instance, we need to have two separate interfaces for two separate use-cases: - ``ReaderLike2`` is used for types where the second type argument is ``env`` - ``ReaderLike3`` is used for types where the third type argument is ``env`` We also have two methods and two poinfree helpers for ``bind_context`` composition: one for each interface. Furthermore, ``Reader`` cannot have ``ReaderLike1`` type, because we need both ``value`` and ``env`` types at all cases. See also: - https://github.com/dry-python/returns/issues/485 """ from __future__ import annotations from abc import abstractmethod from collections.abc import Callable, Sequence from typing import TYPE_CHECKING, ClassVar, Generic, TypeVar, final from returns.interfaces.container import Container2, Container3 from returns.primitives.hkt import Kind2, Kind3 from returns.primitives.laws import ( Law, Law2, Lawful, LawSpecDef, law_definition, ) if TYPE_CHECKING: from returns.context import NoDeps, RequiresContext # noqa: WPS433 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _ValueType = TypeVar('_ValueType') _ErrorType = TypeVar('_ErrorType') _EnvType = TypeVar('_EnvType') _ReaderLike2Type = TypeVar('_ReaderLike2Type', bound='ReaderLike2') _ReaderLike3Type = TypeVar('_ReaderLike3Type', bound='ReaderLike3') class Contextable(Generic[_ValueType, _EnvType]): """ Special type we use as a base one for all callble ``Reader`` instances. 
It only has a single method. And is a base type for every single one of them. But, each ``Reader`` defines the return type differently. For example: - ``Reader`` has just ``_ReturnType`` - ``ReaderResult`` has ``Result[_FirstType, _SecondType]`` - ``ReaderIOResult`` has ``IOResult[_FirstType, _SecondType]`` And so on. """ __slots__ = () @abstractmethod def __call__(self, deps: _EnvType) -> _ValueType: """Receives one parameter, returns a value. As simple as that.""" class ReaderLike2(Container2[_FirstType, _SecondType]): """ Reader interface for ``Kind2`` based types. It has two type arguments and treats the second type argument as env type. """ __slots__ = () @property @abstractmethod def no_args(self: _ReaderLike2Type) -> NoDeps: """Is required to call ``Reader`` with no explicit arguments.""" @abstractmethod def bind_context( self: _ReaderLike2Type, function: Callable[ [_FirstType], RequiresContext[_UpdatedType, _SecondType], ], ) -> Kind2[_ReaderLike2Type, _UpdatedType, _SecondType]: """Allows to apply a wrapped function over a ``Reader`` container.""" @abstractmethod def modify_env( self: _ReaderLike2Type, function: Callable[[_UpdatedType], _SecondType], ) -> Kind2[_ReaderLike2Type, _FirstType, _UpdatedType]: """Transforms the environment before calling the container.""" @classmethod @abstractmethod def ask( cls: type[_ReaderLike2Type], ) -> Kind2[_ReaderLike2Type, _SecondType, _SecondType]: """Returns the dependencies inside the container.""" @classmethod @abstractmethod def from_context( cls: type[_ReaderLike2Type], # noqa: N805 inner_value: RequiresContext[_ValueType, _EnvType], ) -> Kind2[_ReaderLike2Type, _ValueType, _EnvType]: """Unit method to create new containers from successful ``Reader``.""" class CallableReader2( ReaderLike2[_FirstType, _SecondType], Contextable[_ValueType, _EnvType], ): """ Intermediate interface for ``ReaderLike2`` + ``__call__`` method. Has 4 type variables to type ``Reader`` and ``__call__`` independently. Since, we don't have any other fancy ways of doing it. Should not be used directly other than defining your own ``Reader`` interfaces. """ __slots__ = () class ReaderLike3(Container3[_FirstType, _SecondType, _ThirdType]): """ Reader interface for ``Kind3`` based types. It has three type arguments and treats the third type argument as env type. The second type argument is not used here. 
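    For example, here's a sketch with the concrete ``RequiresContextResult``
    type, which implements this interface (the env is the last type argument):

    .. code:: python

      >>> from returns.context import RequiresContextResult
      >>> from returns.result import Success
      >>> assert RequiresContextResult.ask().map(str)(1) == Success('1')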
""" __slots__ = () @property @abstractmethod def no_args(self: _ReaderLike3Type) -> NoDeps: """Is required to call ``Reader`` with no explicit arguments.""" @abstractmethod def bind_context( self: _ReaderLike3Type, function: Callable[ [_FirstType], RequiresContext[_UpdatedType, _ThirdType], ], ) -> Kind3[_ReaderLike3Type, _UpdatedType, _SecondType, _ThirdType]: """Allows to apply a wrapped function over a ``Reader`` container.""" @abstractmethod def modify_env( self: _ReaderLike3Type, function: Callable[[_UpdatedType], _ThirdType], ) -> Kind3[_ReaderLike3Type, _FirstType, _SecondType, _UpdatedType]: """Transforms the environment before calling the container.""" @classmethod @abstractmethod def ask( cls: type[_ReaderLike3Type], ) -> Kind3[_ReaderLike3Type, _ThirdType, _SecondType, _ThirdType]: """Returns the dependencies inside the container.""" @classmethod @abstractmethod def from_context( cls: type[_ReaderLike3Type], # noqa: N805 inner_value: RequiresContext[_ValueType, _EnvType], ) -> Kind3[_ReaderLike3Type, _ValueType, _SecondType, _EnvType]: """Unit method to create new containers from successful ``Reader``.""" class CallableReader3( ReaderLike3[_FirstType, _SecondType, _ThirdType], Contextable[_ValueType, _EnvType], ): """ Intermediate interface for ``ReaderLike3`` + ``__call__`` method. Has 5 type variables to type ``Reader`` and ``__call__`` independently. Since, we don't have any other fancy ways of doing it. Should not be used directly other than defining your own ``Reader`` interfaces. """ __slots__ = () @final class _LawSpec(LawSpecDef): """ Concrete laws for ``ReaderBased2``. See: https://github.com/haskell/mtl/pull/61/files """ __slots__ = () @law_definition def purity_law( container: ReaderBased2[_FirstType, _SecondType], env: _SecondType, ) -> None: """Calling a ``Reader`` twice has the same result with the same env.""" assert container(env) == container(env) @law_definition def asking_law( container: ReaderBased2[_FirstType, _SecondType], env: _SecondType, ) -> None: """Asking for an env, always returns the env.""" assert container.ask().__call__( # noqa: WPS609 env, ) == container.from_value(env).__call__(env) # noqa: WPS609 class ReaderBased2( CallableReader2[ _FirstType, _SecondType, # Used for call typing: _FirstType, _SecondType, ], Lawful['ReaderBased2[_FirstType, _SecondType]'], ): """ This interface is very specific to our ``Reader`` type. The only thing that differs from ``ReaderLike2`` is that we know the specific types for its ``__call__`` method. 
""" __slots__ = () _laws: ClassVar[Sequence[Law]] = ( Law2(_LawSpec.purity_law), Law2(_LawSpec.asking_law), ) returns-0.24.0/returns/interfaces/specific/reader_future_result.py000066400000000000000000000101541472312074000255160ustar00rootroot00000000000000from __future__ import annotations from abc import abstractmethod from collections.abc import Awaitable, Callable, Sequence from typing import TYPE_CHECKING, ClassVar, TypeVar, final from returns.interfaces.specific import future_result, reader, reader_ioresult from returns.primitives.asserts import assert_equal from returns.primitives.hkt import KindN from returns.primitives.laws import ( Law, Law2, Lawful, LawSpecDef, law_definition, ) if TYPE_CHECKING: from returns.context import ReaderFutureResult # noqa: WPS433 from returns.future import FutureResult # noqa: F401, WPS433 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _ValueType = TypeVar('_ValueType') _ErrorType = TypeVar('_ErrorType') _EnvType = TypeVar('_EnvType') _ReaderFutureResultLikeType = TypeVar( '_ReaderFutureResultLikeType', bound='ReaderFutureResultLikeN', ) class ReaderFutureResultLikeN( reader_ioresult.ReaderIOResultLikeN[_FirstType, _SecondType, _ThirdType], future_result.FutureResultLikeN[_FirstType, _SecondType, _ThirdType], ): """ Interface for all types that do look like ``ReaderFutureResult`` instance. Cannot be called. """ __slots__ = () @abstractmethod def bind_context_future_result( self: _ReaderFutureResultLikeType, function: Callable[ [_FirstType], ReaderFutureResult[_UpdatedType, _SecondType, _ThirdType], ], ) -> KindN[ _ReaderFutureResultLikeType, _UpdatedType, _SecondType, _ThirdType, ]: """Bind a ``ReaderFutureResult`` returning function over a container.""" @abstractmethod def bind_async_context_future_result( self: _ReaderFutureResultLikeType, function: Callable[ [_FirstType], Awaitable[ ReaderFutureResult[_UpdatedType, _SecondType, _ThirdType], ], ], ) -> KindN[ _ReaderFutureResultLikeType, _UpdatedType, _SecondType, _ThirdType, ]: """Bind async ``ReaderFutureResult`` function.""" @classmethod @abstractmethod def from_future_result_context( cls: type[_ReaderFutureResultLikeType], # noqa: N805 inner_value: ReaderFutureResult[_ValueType, _ErrorType, _EnvType], ) -> KindN[_ReaderFutureResultLikeType, _ValueType, _ErrorType, _EnvType]: """Unit method to create new containers from ``ReaderFutureResult``.""" #: Type alias for kinds with three type arguments. ReaderFutureResultLike3 = ReaderFutureResultLikeN[ _FirstType, _SecondType, _ThirdType, ] @final class _LawSpec(LawSpecDef): """ Concrete laws for ``ReaderFutureResultBasedN``. See: https://github.com/haskell/mtl/pull/61/files """ __slots__ = () @law_definition def asking_law( container: ReaderFutureResultBasedN[_FirstType, _SecondType, _ThirdType], env: _ThirdType, ) -> None: """Asking for an env, always returns the env.""" assert_equal( container.ask().__call__(env), # noqa: WPS609 container.from_value(env).__call__(env), # noqa: WPS609 ) class ReaderFutureResultBasedN( ReaderFutureResultLikeN[_FirstType, _SecondType, _ThirdType], reader.CallableReader3[ _FirstType, _SecondType, _ThirdType, # Calls: 'FutureResult[_FirstType, _SecondType]', _ThirdType, ], Lawful['ReaderFutureResultBasedN[_FirstType, _SecondType, _ThirdType]'], ): """ This interface is very specific to our ``ReaderFutureResult`` type. 
The only thing that differs from ``ReaderFutureResultLikeN`` is that we know the specific types for its ``__call__`` method. In this case the return type of ``__call__`` is ``FutureResult``. """ __slots__ = () _laws: ClassVar[Sequence[Law]] = ( Law2(_LawSpec.asking_law), ) #: Type alias for kinds with three type arguments. ReaderFutureResultBased3 = ReaderFutureResultBasedN[ _FirstType, _SecondType, _ThirdType, ] returns-0.24.0/returns/interfaces/specific/reader_ioresult.py000066400000000000000000000066001472312074000244550ustar00rootroot00000000000000from __future__ import annotations from abc import abstractmethod from collections.abc import Callable, Sequence from typing import TYPE_CHECKING, ClassVar, TypeVar, final from returns.interfaces.specific import ioresult, reader, reader_result from returns.primitives.hkt import KindN from returns.primitives.laws import ( Law, Law2, Lawful, LawSpecDef, law_definition, ) if TYPE_CHECKING: from returns.context import ReaderIOResult # noqa: WPS433 from returns.io import IOResult # noqa: F401, WPS433 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _ValueType = TypeVar('_ValueType') _ErrorType = TypeVar('_ErrorType') _EnvType = TypeVar('_EnvType') _ReaderIOResultLikeType = TypeVar( '_ReaderIOResultLikeType', bound='ReaderIOResultLikeN', ) class ReaderIOResultLikeN( reader_result.ReaderResultLikeN[_FirstType, _SecondType, _ThirdType], ioresult.IOResultLikeN[_FirstType, _SecondType, _ThirdType], ): """ Base interface for all types that do look like ``ReaderIOResult`` instance. Cannot be called. """ __slots__ = () @abstractmethod def bind_context_ioresult( self: _ReaderIOResultLikeType, function: Callable[ [_FirstType], ReaderIOResult[_UpdatedType, _SecondType, _ThirdType], ], ) -> KindN[_ReaderIOResultLikeType, _UpdatedType, _SecondType, _ThirdType]: """Binds a ``ReaderIOResult`` returning function over a container.""" @classmethod @abstractmethod def from_ioresult_context( cls: type[_ReaderIOResultLikeType], # noqa: N805 inner_value: ReaderIOResult[_ValueType, _ErrorType, _EnvType], ) -> KindN[_ReaderIOResultLikeType, _ValueType, _ErrorType, _EnvType]: """Unit method to create new containers from ``ReaderIOResult``.""" #: Type alias for kinds with three type arguments. ReaderIOResultLike3 = ReaderIOResultLikeN[_FirstType, _SecondType, _ThirdType] @final class _LawSpec(LawSpecDef): """ Concrete laws for ``ReaderIOResultBasedN``. See: https://github.com/haskell/mtl/pull/61/files """ __slots__ = () @law_definition def asking_law( container: ReaderIOResultBasedN[_FirstType, _SecondType, _ThirdType], env: _ThirdType, ) -> None: """Asking for an env, always returns the env.""" assert container.ask().__call__( # noqa: WPS609 env, ) == container.from_value(env).__call__(env) # noqa: WPS609 class ReaderIOResultBasedN( ReaderIOResultLikeN[_FirstType, _SecondType, _ThirdType], reader.CallableReader3[ _FirstType, _SecondType, _ThirdType, # Calls: 'IOResult[_FirstType, _SecondType]', _ThirdType, ], Lawful['ReaderIOResultBasedN[_FirstType, _SecondType, _ThirdType]'], ): """ This interface is very specific to our ``ReaderIOResult`` type. The only thing that differs from ``ReaderIOResultLikeN`` is that we know the specific types for its ``__call__`` method. In this case the return type of ``__call__`` is ``IOResult``. """ __slots__ = () _laws: ClassVar[Sequence[Law]] = ( Law2(_LawSpec.asking_law), ) #: Type alias for kinds with three type arguments. 
ReaderIOResultBased3 = ReaderIOResultBasedN[_FirstType, _SecondType, _ThirdType] returns-0.24.0/returns/interfaces/specific/reader_result.py000066400000000000000000000076571472312074000241420ustar00rootroot00000000000000from __future__ import annotations from abc import abstractmethod from collections.abc import Callable, Sequence from typing import TYPE_CHECKING, ClassVar, TypeVar, final from returns.interfaces.specific import reader, result from returns.primitives.hkt import KindN from returns.primitives.laws import ( Law, Law2, Lawful, LawSpecDef, law_definition, ) if TYPE_CHECKING: from returns.context import Reader, ReaderResult # noqa: WPS433 from returns.result import Result # noqa: F401, WPS433 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _ValueType = TypeVar('_ValueType') _ErrorType = TypeVar('_ErrorType') _EnvType = TypeVar('_EnvType') _ReaderResultLikeType = TypeVar( '_ReaderResultLikeType', bound='ReaderResultLikeN', ) class ReaderResultLikeN( reader.ReaderLike3[_FirstType, _SecondType, _ThirdType], result.ResultLikeN[_FirstType, _SecondType, _ThirdType], ): """ Base interface for all types that do look like ``ReaderResult`` instance. Cannot be called. """ __slots__ = () @abstractmethod def bind_context_result( self: _ReaderResultLikeType, function: Callable[ [_FirstType], ReaderResult[_UpdatedType, _SecondType, _ThirdType], ], ) -> KindN[_ReaderResultLikeType, _UpdatedType, _SecondType, _ThirdType]: """Binds a ``ReaderResult`` returning function over a container.""" @classmethod @abstractmethod def from_failed_context( cls: type[_ReaderResultLikeType], # noqa: N805 inner_value: Reader[_ErrorType, _EnvType], ) -> KindN[_ReaderResultLikeType, _FirstType, _ErrorType, _EnvType]: """Unit method to create new containers from failed ``Reader``.""" @classmethod @abstractmethod def from_result_context( cls: type[_ReaderResultLikeType], # noqa: N805 inner_value: ReaderResult[_ValueType, _ErrorType, _EnvType], ) -> KindN[_ReaderResultLikeType, _ValueType, _ErrorType, _EnvType]: """Unit method to create new containers from ``ReaderResult``.""" #: Type alias for kinds with three type arguments. ReaderResultLike3 = ReaderResultLikeN[_FirstType, _SecondType, _ThirdType] @final class _LawSpec(LawSpecDef): """ Concrete laws for ``ReaderResulBasedN``. See: https://github.com/haskell/mtl/pull/61/files """ __slots__ = () @law_definition def purity_law( container: ReaderResultBasedN[_FirstType, _SecondType, _ThirdType], env: _ThirdType, ) -> None: """Calling a ``Reader`` twice has the same result with the same env.""" assert container(env) == container(env) @law_definition def asking_law( container: ReaderResultBasedN[_FirstType, _SecondType, _ThirdType], env: _ThirdType, ) -> None: """Asking for an env, always returns the env.""" assert container.ask().__call__( # noqa: WPS609 env, ) == container.from_value(env).__call__(env) # noqa: WPS609 class ReaderResultBasedN( ReaderResultLikeN[_FirstType, _SecondType, _ThirdType], reader.CallableReader3[ _FirstType, _SecondType, _ThirdType, # Calls: 'Result[_FirstType, _SecondType]', _ThirdType, ], Lawful['ReaderResultBasedN[_FirstType, _SecondType, _ThirdType]'], ): """ This interface is very specific to our ``ReaderResult`` type. The only thing that differs from ``ReaderResultLikeN`` is that we know the specific types for its ``__call__`` method. In this case the return type of ``__call__`` is ``Result``. 
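    For example, here's an illustrative sketch with the concrete
    ``ReaderResult`` alias:

    .. code:: python

      >>> from returns.context import ReaderResult
      >>> from returns.result import Success
      >>> assert ReaderResult.from_value(1)('env') == Success(1)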
""" __slots__ = () _laws: ClassVar[Sequence[Law]] = ( Law2(_LawSpec.purity_law), Law2(_LawSpec.asking_law), ) #: Type alias for kinds with three type arguments. ReaderResultBased3 = ReaderResultBasedN[_FirstType, _SecondType, _ThirdType] returns-0.24.0/returns/interfaces/specific/result.py000066400000000000000000000055671472312074000226160ustar00rootroot00000000000000""" An interface that represents a pure computation result. For impure result see :class:`returns.interfaces.specific.ioresult.IOResultLikeN` type. """ from __future__ import annotations from abc import abstractmethod from collections.abc import Callable from typing import TYPE_CHECKING, TypeVar from typing_extensions import Never from returns.interfaces import equable, failable, unwrappable from returns.primitives.hkt import KindN if TYPE_CHECKING: from returns.result import Result # noqa: WPS433 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _ResultLikeType = TypeVar('_ResultLikeType', bound='ResultLikeN') # New values: _ValueType = TypeVar('_ValueType') _ErrorType = TypeVar('_ErrorType') # Unwrappable: _FirstUnwrappableType = TypeVar('_FirstUnwrappableType') _SecondUnwrappableType = TypeVar('_SecondUnwrappableType') class ResultLikeN( failable.DiverseFailableN[_FirstType, _SecondType, _ThirdType], ): """ Base types for types that looks like ``Result`` but cannot be unwrapped. Like ``RequiresContextResult`` or ``FutureResult``. """ __slots__ = () @abstractmethod def bind_result( self: _ResultLikeType, function: Callable[[_FirstType], Result[_UpdatedType, _SecondType]], ) -> KindN[_ResultLikeType, _UpdatedType, _SecondType, _ThirdType]: """Runs ``Result`` returning function over a container.""" @classmethod @abstractmethod def from_result( cls: type[_ResultLikeType], # noqa: N805 inner_value: Result[_ValueType, _ErrorType], ) -> KindN[_ResultLikeType, _ValueType, _ErrorType, _ThirdType]: """Unit method to create new containers from any raw value.""" #: Type alias for kinds with two type arguments. ResultLike2 = ResultLikeN[_FirstType, _SecondType, Never] #: Type alias for kinds with three type arguments. ResultLike3 = ResultLikeN[_FirstType, _SecondType, _ThirdType] class UnwrappableResult( ResultLikeN[_FirstType, _SecondType, _ThirdType], unwrappable.Unwrappable[_FirstUnwrappableType, _SecondUnwrappableType], equable.Equable, ): """ Intermediate type with 5 type arguments that represents unwrappable result. It is a raw type and should not be used directly. Use ``ResultBasedN`` and ``IOResultBasedN`` instead. """ __slots__ = () class ResultBasedN( UnwrappableResult[ _FirstType, _SecondType, _ThirdType, # Unwraps: _FirstType, _SecondType, ], ): """ Base type for real ``Result`` types. Can be unwrapped. """ __slots__ = () #: Type alias for kinds with two type arguments. ResultBased2 = ResultBasedN[_FirstType, _SecondType, Never] #: Type alias for kinds with three type arguments. 
ResultBased3 = ResultBasedN[_FirstType, _SecondType, _ThirdType] returns-0.24.0/returns/interfaces/swappable.py000066400000000000000000000034101472312074000214520ustar00rootroot00000000000000from abc import abstractmethod from collections.abc import Sequence from typing import ClassVar, TypeVar, final from typing_extensions import Never from returns.interfaces import bimappable from returns.primitives.asserts import assert_equal from returns.primitives.hkt import KindN from returns.primitives.laws import ( Law, Law1, Lawful, LawSpecDef, law_definition, ) _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _SwappableType = TypeVar('_SwappableType', bound='SwappableN') @final class _LawSpec(LawSpecDef): """Laws for :class:`~SwappableN` type.""" __slots__ = () @law_definition def double_swap_law( container: 'SwappableN[_FirstType, _SecondType, _ThirdType]', ) -> None: """ Swapping container twice. It ensure that we get the initial value back. In other words, swapping twice does nothing. """ assert_equal( container, container.swap().swap(), ) class SwappableN( bimappable.BiMappableN[_FirstType, _SecondType, _ThirdType], Lawful['SwappableN[_FirstType, _SecondType, _ThirdType]'], ): """Interface that allows swapping first and second type values.""" __slots__ = () _laws: ClassVar[Sequence[Law]] = ( Law1(_LawSpec.double_swap_law), ) @abstractmethod def swap( self: _SwappableType, ) -> KindN[_SwappableType, _SecondType, _FirstType, _ThirdType]: """Swaps first and second types in ``SwappableN``.""" #: Type alias for kinds with two type arguments. Swappable2 = SwappableN[_FirstType, _SecondType, Never] #: Type alias for kinds with three type arguments. Swappable3 = SwappableN[_FirstType, _SecondType, _ThirdType] returns-0.24.0/returns/interfaces/unwrappable.py000066400000000000000000000031021472312074000220120ustar00rootroot00000000000000from abc import abstractmethod from typing import Generic, TypeVar _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _UnwrappableType = TypeVar('_UnwrappableType', bound='Unwrappable') class Unwrappable(Generic[_FirstType, _SecondType]): """ Represents containers that can unwrap and return its wrapped value. There are no aliases or ``UnwrappableN`` for ``Unwrappable`` interface. Because it always uses two and just two types. Not all types can be ``Unwrappable`` because we do require to raise ``UnwrapFailedError`` if unwrap is not possible. """ __slots__ = () @abstractmethod def unwrap(self: _UnwrappableType) -> _FirstType: """ Custom magic method to unwrap inner value from container. Should be redefined for ones that actually have values. And for ones that raise an exception for no values. .. note:: As a part of the contract, failed ``unwrap`` calls must raise :class:`returns.primitives.exceptions.UnwrapFailedError` exception. This method is the opposite of :meth:`~Unwrapable.failure`. """ @abstractmethod def failure(self: _UnwrappableType) -> _SecondType: """ Custom magic method to unwrap inner value from the failed container. .. note:: As a part of the contract, failed ``failure`` calls must raise :class:`returns.primitives.exceptions.UnwrapFailedError` exception. This method is the opposite of :meth:`~Unwrapable.unwrap`. 
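        A minimal illustration of the ``unwrap`` / ``failure`` contract,
        using the concrete ``Result`` container:

        .. code:: python

            >>> from returns.result import Failure, Success

            >>> assert Success(1).unwrap() == 1
            >>> assert Failure('error').failure() == 'error'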
""" returns-0.24.0/returns/io.py000066400000000000000000000721731472312074000157740ustar00rootroot00000000000000from abc import ABCMeta from collections.abc import Callable, Generator, Iterator from functools import wraps from inspect import FrameInfo from typing import TYPE_CHECKING, Any, TypeAlias, TypeVar, final, overload from typing_extensions import ParamSpec from returns.interfaces.specific import io, ioresult from returns.primitives.container import BaseContainer, container_equality from returns.primitives.exceptions import UnwrapFailedError from returns.primitives.hkt import ( Kind1, Kind2, SupportsKind1, SupportsKind2, dekind, ) from returns.result import Failure, Result, Success _ValueType = TypeVar('_ValueType', covariant=True) _NewValueType = TypeVar('_NewValueType') _FuncParams = ParamSpec('_FuncParams') # Result related: _ErrorType = TypeVar('_ErrorType', covariant=True) _NewErrorType = TypeVar('_NewErrorType') class IO( # type: ignore[type-var] BaseContainer, SupportsKind1['IO', _ValueType], io.IOLike1[_ValueType], ): """ Explicit container for impure function results. We also sometimes call it "marker" since once it is marked, it cannot be ever unmarked. There's no way to directly get its internal value. Note that ``IO`` represents a computation that never fails. Examples of such computations are: - read / write to localStorage - get the current time - write to the console - get a random number Use ``IOResult[...]`` for operations that might fail. Like DB access or network operations. See also: - https://dev.to/gcanti/getting-started-with-fp-ts-io-36p6 - https://gist.github.com/chris-taylor/4745921 """ __slots__ = () _inner_value: _ValueType #: Typesafe equality comparison with other `Result` objects. equals = container_equality def __init__(self, inner_value: _ValueType) -> None: """ Public constructor for this type. Also required for typing. .. code:: python >>> from returns.io import IO >>> assert str(IO(1)) == '' """ super().__init__(inner_value) def map( self, function: Callable[[_ValueType], _NewValueType], ) -> 'IO[_NewValueType]': """ Applies function to the inner value. Applies 'function' to the contents of the IO instance and returns a new IO object containing the result. 'function' should accept a single "normal" (non-container) argument and return a non-container result. .. code:: python >>> def mappable(string: str) -> str: ... return string + 'b' >>> assert IO('a').map(mappable) == IO('ab') """ return IO(function(self._inner_value)) def apply( self, container: Kind1['IO', Callable[[_ValueType], _NewValueType]], ) -> 'IO[_NewValueType]': """ Calls a wrapped function in a container on this container. .. code:: python >>> from returns.io import IO >>> assert IO('a').apply(IO(lambda inner: inner + 'b')) == IO('ab') Or more complex example that shows how we can work with regular functions and multiple ``IO`` arguments: .. code:: python >>> from returns.curry import curry >>> @curry ... def appliable(first: str, second: str) -> str: ... return first + second >>> assert IO('b').apply(IO('a').apply(IO(appliable))) == IO('ab') """ return self.map(dekind(container)._inner_value) # noqa: WPS437 def bind( self, function: Callable[[_ValueType], Kind1['IO', _NewValueType]], ) -> 'IO[_NewValueType]': """ Applies 'function' to the result of a previous calculation. 'function' should accept a single "normal" (non-container) argument and return ``IO`` type object. .. code:: python >>> def bindable(string: str) -> IO[str]: ... 
return IO(string + 'b') >>> assert IO('a').bind(bindable) == IO('ab') """ return dekind(function(self._inner_value)) #: Alias for `bind` method. Part of the `IOLikeN` interface. bind_io = bind def __iter__(self) -> Iterator[_ValueType]: """API for :ref:`do-notation`.""" yield self._inner_value @classmethod def do( cls, expr: Generator[_NewValueType, None, None], ) -> 'IO[_NewValueType]': """ Allows working with unwrapped values of containers in a safe way. .. code:: python >>> from returns.io import IO >>> assert IO.do( ... first + second ... for first in IO(2) ... for second in IO(3) ... ) == IO(5) See :ref:`do-notation` to learn more. """ return IO(next(expr)) @classmethod def from_value(cls, inner_value: _NewValueType) -> 'IO[_NewValueType]': """ Unit function to construct new ``IO`` values. Is the same as regular constructor: .. code:: python >>> from returns.io import IO >>> assert IO(1) == IO.from_value(1) Part of the :class:`returns.interfaces.applicative.ApplicativeN` interface. """ return IO(inner_value) @classmethod def from_io(cls, inner_value: 'IO[_NewValueType]') -> 'IO[_NewValueType]': """ Unit function to construct new ``IO`` values from existing ``IO``. .. code:: python >>> from returns.io import IO >>> assert IO(1) == IO.from_io(IO(1)) Part of the :class:`returns.interfaces.specific.IO.IOLikeN` interface. """ return inner_value @classmethod def from_ioresult( cls, inner_value: 'IOResult[_NewValueType, _NewErrorType]', ) -> 'IO[Result[_NewValueType, _NewErrorType]]': """ Converts ``IOResult[a, b]`` back to ``IO[Result[a, b]]``. Can be really helpful for composition. .. code:: python >>> from returns.io import IO, IOSuccess >>> from returns.result import Success >>> assert IO.from_ioresult(IOSuccess(1)) == IO(Success(1)) Is the reverse of :meth:`returns.io.IOResult.from_typecast`. """ return IO(inner_value._inner_value) # noqa: WPS437 # Helper functions: def impure( function: Callable[_FuncParams, _NewValueType], ) -> Callable[_FuncParams, IO[_NewValueType]]: """ Decorator to mark function that it returns :class:`~IO` container. If you need to mark ``async`` function as impure, use :func:`returns.future.future` instead. This decorator only works with sync functions. Example: .. code:: python >>> from returns.io import IO, impure >>> @impure ... def function(arg: int) -> int: ... return arg + 1 # this action is pure, just an example ... >>> assert function(1) == IO(2) """ @wraps(function) def decorator( *args: _FuncParams.args, **kwargs: _FuncParams.kwargs, ) -> IO[_NewValueType]: return IO(function(*args, **kwargs)) return decorator # IO and Result: class IOResult( # type: ignore[type-var] BaseContainer, SupportsKind2['IOResult', _ValueType, _ErrorType], ioresult.IOResultBased2[_ValueType, _ErrorType], metaclass=ABCMeta, ): """ Explicit container for impure function results that might fail. .. rubric:: Definition This type is similar to :class:`returns.result.Result`. This basically a more useful version of ``IO[Result[a, b]]``. Use this type for ``IO`` computations that might fail. Examples of ``IO`` computations that might fail are: - access database - access network - access filesystem Use :class:`~IO` for operations that do ``IO`` but do not fail. Note, that even methods like :meth:`~IOResult.unwrap`` and :meth:`~IOResult.value_or` return values wrapped in ``IO``. 
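    For example:

    .. code:: python

        >>> from returns.io import IO, IOSuccess

        >>> assert IOSuccess(1).unwrap() == IO(1)
        >>> assert IOSuccess(1).value_or(None) == IO(1)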
``IOResult`` is a complex compound value that consists of: - raw value - ``Result`` - ``IO`` This is why it has so many helper and factory methods: - You can construct ``IOResult`` from raw values with :func:`~IOSuccess` and :func:`~IOFailure` public type constructors - You can construct ``IOResult`` from ``IO`` values with :meth:`~IOResult.from_failed_io` and :meth:`IOResult.from_io` - You can construct ``IOResult`` from ``Result`` values with :meth:`~IOResult.from_result` We also have a lot of utility methods for better function composition like: - :meth:`~IOResult.bind_result` to work with functions which return ``Result`` - :meth:`~IOResult.from_typecast` to work with ``IO[Result[...]]`` values See also: https://github.com/gcanti/fp-ts/blob/master/docs/modules/IOEither.ts.md .. rubric:: Implementation This class contains all the methods that can be delegated to ``Result``. But, some methods are not implemented which means that we have to use special :class:`~_IOSuccess` and :class:`~_IOFailure` implementation details to correctly handle these callbacks. Do not rely on them! Use public functions and types instead. """ __slots__ = () _inner_value: Result[_ValueType, _ErrorType] __match_args__ = ('_inner_value',) #: Typesafe equality comparison with other `IOResult` objects. equals = container_equality def __init__(self, inner_value: Result[_ValueType, _ErrorType]) -> None: """ Private type constructor. Use :func:`~IOSuccess` and :func:`~IOFailure` instead. Or :meth:`~IOResult.from_result` factory. """ super().__init__(inner_value) def __repr__(self) -> str: """ Custom ``str`` representation for better readability. .. code:: python >>> from returns.io import IOSuccess, IOFailure >>> assert str(IOSuccess(1)) == '>' >>> assert repr(IOSuccess(1)) == '>' >>> str(IOFailure(ValueError('wrong!'))) '>' """ return ''.format(self._inner_value) @property def trace(self) -> list[FrameInfo] | None: """Returns a stack trace when :func:`~IOFailure` was called.""" return self._inner_value.trace def swap(self) -> 'IOResult[_ErrorType, _ValueType]': """ Swaps value and error types. So, values become errors and errors become values. It is useful when you have to work with errors a lot. And since we have a lot of ``.bind_`` related methods and only a single ``.lash`` - it is easier to work with values. .. code:: python >>> from returns.io import IOSuccess, IOFailure >>> assert IOSuccess(1).swap() == IOFailure(1) >>> assert IOFailure(1).swap() == IOSuccess(1) """ return self.from_result(self._inner_value.swap()) def map( self, function: Callable[[_ValueType], _NewValueType], ) -> 'IOResult[_NewValueType, _ErrorType]': """ Composes successful container with a pure function. .. code:: python >>> from returns.io import IOSuccess >>> assert IOSuccess(1).map(lambda num: num + 1) == IOSuccess(2) """ return self.from_result(self._inner_value.map(function)) def apply( self, container: Kind2[ 'IOResult', Callable[[_ValueType], _NewValueType], _ErrorType, ], ) -> 'IOResult[_NewValueType, _ErrorType]': """ Calls a wrapped function in a container on this container. .. code:: python >>> from returns.io import IOSuccess, IOFailure >>> def appliable(first: str) -> str: ... return first + 'b' >>> assert IOSuccess('a').apply( ... IOSuccess(appliable), ... ) == IOSuccess('ab') >>> assert IOFailure('a').apply( ... IOSuccess(appliable), ... 
) == IOFailure('a') >>> assert IOSuccess('a').apply(IOFailure(1)) == IOFailure(1) >>> assert IOFailure('a').apply(IOFailure('b')) == IOFailure('a') """ if isinstance(self, IOFailure): return self if isinstance(container, IOSuccess): return self.from_result( self._inner_value.map( container.unwrap()._inner_value, # noqa: WPS437 ), ) return container # type: ignore def bind( self, function: Callable[ [_ValueType], Kind2['IOResult', _NewValueType, _ErrorType], ], ) -> 'IOResult[_NewValueType, _ErrorType]': """ Composes successful container with a function that returns a container. .. code:: python >>> from returns.io import IOResult, IOFailure, IOSuccess >>> def bindable(string: str) -> IOResult[str, str]: ... if len(string) > 1: ... return IOSuccess(string + 'b') ... return IOFailure(string + 'c') >>> assert IOSuccess('aa').bind(bindable) == IOSuccess('aab') >>> assert IOSuccess('a').bind(bindable) == IOFailure('ac') >>> assert IOFailure('a').bind(bindable) == IOFailure('a') """ #: Alias for `bind_ioresult` method. Part of the `IOResultBasedN` interface. bind_ioresult = bind def bind_result( self, function: Callable[ [_ValueType], Result[_NewValueType, _ErrorType], ], ) -> 'IOResult[_NewValueType, _ErrorType]': """ Composes successful container with a function that returns a container. Similar to :meth:`~IOResult.bind`, but works with containers that return :class:`returns.result.Result` instead of :class:`~IOResult`. .. code:: python >>> from returns.io import IOFailure, IOSuccess >>> from returns.result import Result, Success >>> def bindable(string: str) -> Result[str, str]: ... if len(string) > 1: ... return Success(string + 'b') ... return Failure(string + 'c') >>> assert IOSuccess('aa').bind_result(bindable) == IOSuccess('aab') >>> assert IOSuccess('a').bind_result(bindable) == IOFailure('ac') >>> assert IOFailure('a').bind_result(bindable) == IOFailure('a') """ def bind_io( self, function: Callable[[_ValueType], IO[_NewValueType]], ) -> 'IOResult[_NewValueType, _ErrorType]': """ Composes successful container with a function that returns a container. Similar to :meth:`~IOResult.bind`, but works with containers that return :class:`returns.io.IO` instead of :class:`~IOResult`. .. code:: python >>> from returns.io import IO, IOFailure, IOSuccess >>> def bindable(string: str) -> IO[str]: ... return IO(string + 'z') >>> assert IOSuccess('a').bind_io(bindable) == IOSuccess('az') >>> assert IOFailure('a').bind_io(bindable) == IOFailure('a') """ def alt( self, function: Callable[[_ErrorType], _NewErrorType], ) -> 'IOResult[_ValueType, _NewErrorType]': """ Composes failed container with a pure function to modify failure. .. code:: python >>> from returns.io import IOFailure >>> assert IOFailure(1).alt(float) == IOFailure(1.0) """ return self.from_result(self._inner_value.alt(function)) def lash( self, function: Callable[ [_ErrorType], Kind2['IOResult', _ValueType, _NewErrorType], ], ) -> 'IOResult[_ValueType, _NewErrorType]': """ Composes failed container with a function that returns a container. .. code:: python >>> from returns.io import IOFailure, IOSuccess, IOResult >>> def lashable(state: str) -> IOResult[int, str]: ... if len(state) > 1: ... return IOSuccess(len(state)) ... 
return IOFailure('oops') >>> assert IOFailure('a').lash(lashable) == IOFailure('oops') >>> assert IOFailure('abc').lash(lashable) == IOSuccess(3) >>> assert IOSuccess('a').lash(lashable) == IOSuccess('a') """ def value_or( self, default_value: _NewValueType, ) -> IO[_ValueType | _NewValueType]: """ Get value from successful container or default value from failed one. .. code:: python >>> from returns.io import IO, IOFailure, IOSuccess >>> assert IOSuccess(1).value_or(None) == IO(1) >>> assert IOFailure(1).value_or(None) == IO(None) """ return IO(self._inner_value.value_or(default_value)) def unwrap(self) -> IO[_ValueType]: """ Get value from successful container or raise exception for failed one. .. code:: pycon :force: >>> from returns.io import IO, IOFailure, IOSuccess >>> assert IOSuccess(1).unwrap() == IO(1) >>> IOFailure(1).unwrap() Traceback (most recent call last): ... returns.primitives.exceptions.UnwrapFailedError """ # noqa: RST307 return IO(self._inner_value.unwrap()) def failure(self) -> IO[_ErrorType]: """ Get failed value from failed container or raise exception from success. .. code:: pycon :force: >>> from returns.io import IO, IOFailure, IOSuccess >>> assert IOFailure(1).failure() == IO(1) >>> IOSuccess(1).failure() Traceback (most recent call last): ... returns.primitives.exceptions.UnwrapFailedError """ # noqa: RST307 return IO(self._inner_value.failure()) def compose_result( self, function: Callable[ [Result[_ValueType, _ErrorType]], Kind2['IOResult', _NewValueType, _ErrorType], ], ) -> 'IOResult[_NewValueType, _ErrorType]': """ Composes inner ``Result`` with ``IOResult`` returning function. Can be useful when you need an access to both states of the result. .. code:: python >>> from returns.io import IOResult, IOSuccess, IOFailure >>> from returns.result import Result >>> def count(container: Result[int, int]) -> IOResult[int, int]: ... return IOResult.from_result( ... container.map(lambda x: x + 1).alt(abs), ... ) >>> assert IOSuccess(1).compose_result(count) == IOSuccess(2) >>> assert IOFailure(-1).compose_result(count) == IOFailure(1) """ return dekind(function(self._inner_value)) def __iter__(self) -> Iterator[_ValueType]: """API for :ref:`do-notation`.""" # We also unwrap `IO` here. yield self.unwrap()._inner_value # noqa: WPS437 @classmethod def do( cls, expr: Generator[_NewValueType, None, None], ) -> 'IOResult[_NewValueType, _NewErrorType]': """ Allows working with unwrapped values of containers in a safe way. .. code:: python >>> from returns.io import IOResult, IOFailure, IOSuccess >>> assert IOResult.do( ... first + second ... for first in IOSuccess(2) ... for second in IOSuccess(3) ... ) == IOSuccess(5) >>> assert IOResult.do( ... first + second ... for first in IOFailure('a') ... for second in IOSuccess(3) ... ) == IOFailure('a') See :ref:`do-notation` to learn more. This feature requires our :ref:`mypy plugin `. """ try: return IOResult.from_value(next(expr)) except UnwrapFailedError as exc: return IOResult.from_result(exc.halted_container) # type: ignore @classmethod def from_typecast( cls, inner_value: IO[Result[_NewValueType, _NewErrorType]], ) -> 'IOResult[_NewValueType, _NewErrorType]': """ Converts ``IO[Result[_ValueType, _ErrorType]]`` to ``IOResult``. Also prevails the type of ``Result`` to ``IOResult``, so: ``IO[Result[_ValueType, _ErrorType]]`` would become ``IOResult[_ValueType, _ErrorType]``. .. 
code:: python >>> from returns.result import Success >>> from returns.io import IO, IOResult, IOSuccess >>> container = IO(Success(1)) >>> assert IOResult.from_typecast(container) == IOSuccess(1) Can be reverted via :meth:`returns.io.IO.from_ioresult` method. """ return cls.from_result(inner_value._inner_value) # noqa: WPS437 @classmethod def from_failed_io( cls, inner_value: IO[_NewErrorType], ) -> 'IOResult[Any, _NewErrorType]': """ Creates new ``IOResult`` from "failed" ``IO`` container. .. code:: python >>> from returns.io import IO, IOResult, IOFailure >>> container = IO(1) >>> assert IOResult.from_failed_io(container) == IOFailure(1) """ return IOFailure(inner_value._inner_value) # noqa: WPS437 @classmethod def from_io( cls, inner_value: IO[_NewValueType], ) -> 'IOResult[_NewValueType, Any]': """ Creates new ``IOResult`` from "successful" ``IO`` container. .. code:: python >>> from returns.io import IO, IOResult, IOSuccess >>> container = IO(1) >>> assert IOResult.from_io(container) == IOSuccess(1) """ return IOSuccess(inner_value._inner_value) # noqa: WPS437 @classmethod def from_result( cls, inner_value: Result[_NewValueType, _NewErrorType], ) -> 'IOResult[_NewValueType, _NewErrorType]': """ Creates ``IOResult`` from ``Result`` value. .. code:: python >>> from returns.io import IOResult, IOSuccess, IOFailure >>> from returns.result import Success, Failure >>> assert IOResult.from_result(Success(1)) == IOSuccess(1) >>> assert IOResult.from_result(Failure(2)) == IOFailure(2) """ if isinstance(inner_value, Success): return IOSuccess(inner_value._inner_value) # noqa: WPS437 return IOFailure(inner_value._inner_value) # type: ignore[arg-type] # noqa: WPS437, E501 @classmethod def from_ioresult( cls, inner_value: 'IOResult[_NewValueType, _NewErrorType]', ) -> 'IOResult[_NewValueType, _NewErrorType]': """ Creates ``IOResult`` from existing ``IOResult`` value. .. code:: python >>> from returns.io import IOResult, IOSuccess, IOFailure >>> assert IOResult.from_ioresult(IOSuccess(1)) == IOSuccess(1) >>> assert IOResult.from_ioresult(IOFailure(2)) == IOFailure(2) """ return inner_value @classmethod def from_value( cls, inner_value: _NewValueType, ) -> 'IOResult[_NewValueType, Any]': """ One more value to create success unit values. It is useful as a united way to create a new value from any container. .. code:: python >>> from returns.io import IOResult, IOSuccess >>> assert IOResult.from_value(1) == IOSuccess(1) You can use this method or :func:`~IOSuccess`, choose the most convenient for you. """ return IOSuccess(inner_value) @classmethod def from_failure( cls, inner_value: _NewErrorType, ) -> 'IOResult[Any, _NewErrorType]': """ One more value to create failure unit values. It is useful as a united way to create a new value from any container. .. code:: python >>> from returns.io import IOResult, IOFailure >>> assert IOResult.from_failure(1) == IOFailure(1) You can use this method or :func:`~IOFailure`, choose the most convenient for you. """ return IOFailure(inner_value) @final class IOFailure(IOResult[Any, _ErrorType]): """``IOFailure`` representation.""" __slots__ = () _inner_value: Result[Any, _ErrorType] def __init__(self, inner_value: _ErrorType) -> None: """IOFailure constructor.""" # noqa: D403 super().__init__(Failure(inner_value)) if not TYPE_CHECKING: # noqa: WPS604 # pragma: no branch def bind(self, function): """Does nothing for ``IOFailure``.""" return self #: Alias for `bind_ioresult` method. Part of the `IOResultBasedN` interface. 
# noqa: E501 bind_ioresult = bind def bind_result(self, function): """Does nothing for ``IOFailure``.""" return self def bind_io(self, function): """Does nothing for ``IOFailure``.""" return self def lash(self, function): """Composes this container with a function returning ``IOResult``.""" # noqa: E501 return function(self._inner_value.failure()) @final class IOSuccess(IOResult[_ValueType, Any]): """``IOSuccess`` representation.""" __slots__ = () _inner_value: Result[_ValueType, Any] def __init__(self, inner_value: _ValueType) -> None: """IOSuccess constructor.""" # noqa: D403 super().__init__(Success(inner_value)) if not TYPE_CHECKING: # noqa: WPS604 # pragma: no branch def bind(self, function): """Composes this container with a function returning ``IOResult``.""" # noqa: E501 return function(self._inner_value.unwrap()) #: Alias for `bind_ioresult` method. Part of the `IOResultBasedN` interface. # noqa: E501 bind_ioresult = bind def bind_result(self, function): """Binds ``Result`` returning function to current container.""" return self.from_result(function(self._inner_value.unwrap())) def bind_io(self, function): """Binds ``IO`` returning function to current container.""" return self.from_io(function(self._inner_value.unwrap())) def lash(self, function): """Does nothing for ``IOSuccess``.""" return self # Aliases: #: Alias for ``IOResult[_ValueType, Exception]``. IOResultE: TypeAlias = IOResult[_ValueType, Exception] # impure_safe decorator: _ExceptionType = TypeVar('_ExceptionType', bound=Exception) @overload def impure_safe( function: Callable[_FuncParams, _NewValueType], /, ) -> Callable[_FuncParams, IOResultE[_NewValueType]]: """Decorator to convert exception-throwing for any kind of Exception.""" @overload def impure_safe( exceptions: tuple[type[_ExceptionType], ...], ) -> Callable[ [Callable[_FuncParams, _NewValueType]], Callable[_FuncParams, IOResult[_NewValueType, _ExceptionType]], ]: """Decorator to convert exception-throwing just for a set of Exceptions.""" def impure_safe( # noqa: WPS234, C901 exceptions: ( Callable[_FuncParams, _NewValueType] | tuple[type[_ExceptionType], ...] ), ) -> ( Callable[_FuncParams, IOResultE[_NewValueType]] | Callable[ [Callable[_FuncParams, _NewValueType]], Callable[_FuncParams, IOResult[_NewValueType, _ExceptionType]], ] ): """ Decorator to mark function that it returns :class:`~IOResult` container. Should be used with care, since it only catches ``Exception`` subclasses. It does not catch ``BaseException`` subclasses. If you need to mark ``async`` function as impure, use :func:`returns.future.future_safe` instead. This decorator only works with sync functions. Example: .. code:: python >>> from returns.io import IOSuccess, impure_safe >>> @impure_safe ... def function(arg: int) -> float: ... return 1 / arg ... >>> assert function(1) == IOSuccess(1.0) >>> assert function(0).failure() You can also use it with explicit exception types as the first argument: .. code:: python >>> from returns.io import IOSuccess, IOFailure, impure_safe >>> @impure_safe(exceptions=(ZeroDivisionError,)) ... def might_raise(arg: int) -> float: ... return 1 / arg >>> assert might_raise(1) == IOSuccess(1.0) >>> assert isinstance(might_raise(0), IOFailure) In this case, only exceptions that are explicitly listed are going to be caught. Similar to :func:`returns.future.future_safe` and :func:`returns.result.safe` decorators. 
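        The resulting container composes like any other ``IOResult``,
        for example (a small sketch):

        .. code:: python

            >>> from returns.io import IOSuccess, impure_safe
            >>> @impure_safe
            ... def divide(arg: int) -> float:
            ...     return 1 / arg

            >>> assert divide(2).map(lambda number: number * 10) == IOSuccess(5.0)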
""" def factory( inner_function: Callable[_FuncParams, _NewValueType], inner_exceptions: tuple[type[_ExceptionType], ...], ) -> Callable[_FuncParams, IOResult[_NewValueType, _ExceptionType]]: @wraps(inner_function) def decorator( *args: _FuncParams.args, **kwargs: _FuncParams.kwargs, ) -> IOResult[_NewValueType, _ExceptionType]: try: return IOSuccess(inner_function(*args, **kwargs)) except inner_exceptions as exc: return IOFailure(exc) return decorator if isinstance(exceptions, tuple): return lambda function: factory(function, exceptions) return factory( exceptions, (Exception,), # type: ignore[arg-type] ) returns-0.24.0/returns/iterables.py000066400000000000000000000305461472312074000173350ustar00rootroot00000000000000from abc import abstractmethod from collections.abc import Callable, Iterable from typing import Tuple, TypeVar, final from returns.interfaces.applicative import ApplicativeN from returns.interfaces.failable import FailableN from returns.primitives.hkt import KindN, kinded _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _ApplicativeKind = TypeVar('_ApplicativeKind', bound=ApplicativeN) _FailableKind = TypeVar('_FailableKind', bound=FailableN) class AbstractFold: """ A collection of different helpers to write declarative ``Iterable`` actions. Allows to work with iterables. .. rubric:: Implementation ``AbstractFold`` and ``Fold`` types are special. They have double definition for each method: public and protected ones. Why? Because you cannot override ``@kinded`` method due to a ``mypy`` bug. So, there are two opportunities for us here: 1. Declare all method as ``@final`` and do not allow to change anything 2. Use delegation to protected unkinded methods We have chosen the second way! Here's how it works: 1. Public methods are ``@kinded`` for better typing and cannot be overridden 2. Protected methods are unkinded and can be overridden in subtyping Now, if you need to make a change into our implementation, then you can subclass ``Fold`` or ``AbstractFold`` and then change an implementation of any unkinded protected method. """ __slots__ = () @final @kinded @classmethod def loop( cls, iterable: Iterable[ KindN[_ApplicativeKind, _FirstType, _SecondType, _ThirdType], ], acc: KindN[_ApplicativeKind, _UpdatedType, _SecondType, _ThirdType], function: Callable[ [_FirstType], Callable[[_UpdatedType], _UpdatedType], ], ) -> KindN[_ApplicativeKind, _UpdatedType, _SecondType, _ThirdType]: """ Allows to make declarative loops for any ``ApplicativeN`` subtypes. Quick example: .. code:: python >>> from typing import Callable >>> from returns.maybe import Some >>> from returns.iterables import Fold >>> def sum_two(first: int) -> Callable[[int], int]: ... return lambda second: first + second >>> assert Fold.loop( ... [Some(1), Some(2), Some(3)], ... Some(10), ... sum_two, ... ) == Some(16) Looks like ``foldl`` in some other languages with some more specifics. See: https://philipschwarz.dev/fpilluminated/?page_id=348#bwg3/137 .. image:: https://i.imgur.com/Tza1isS.jpg Is also quite similar to ``reduce``. Public interface for ``_loop`` method. Cannot be modified directly. 
""" return cls._loop(iterable, acc, function, _concat_applicative) @final @kinded @classmethod def collect( cls, iterable: Iterable[ KindN[_ApplicativeKind, _FirstType, _SecondType, _ThirdType], ], acc: KindN[ _ApplicativeKind, 'Tuple[_FirstType, ...]', _SecondType, _ThirdType, ], ) -> KindN[ _ApplicativeKind, 'Tuple[_FirstType, ...]', _SecondType, _ThirdType, ]: """ Transforms an iterable of containers into a single container. Quick example for regular containers: .. code:: python >>> from returns.io import IO >>> from returns.iterables import Fold >>> items = [IO(1), IO(2)] >>> assert Fold.collect(items, IO(())) == IO((1, 2)) If container can have failed values, then this strategy fails on any existing failed like type. It is enough to have even a single failed value in iterable for this type to convert the whole operation result to be a failure. Let's see how it works: .. code:: python >>> from returns.result import Success, Failure >>> from returns.iterables import Fold >>> empty = [] >>> all_success = [Success(1), Success(2), Success(3)] >>> has_failure = [Success(1), Failure('a'), Success(3)] >>> all_failures = [Failure('a'), Failure('b')] >>> acc = Success(()) # empty tuple >>> assert Fold.collect(empty, acc) == Success(()) >>> assert Fold.collect(all_success, acc) == Success((1, 2, 3)) >>> assert Fold.collect(has_failure, acc) == Failure('a') >>> assert Fold.collect(all_failures, acc) == Failure('a') If that's now what you need, check out :meth:`~AbstractFold.collect_all` to force collect all non-failed values. Public interface for ``_collect`` method. Cannot be modified directly. """ return cls._collect(iterable, acc) @final @kinded @classmethod def collect_all( cls, iterable: Iterable[ KindN[_FailableKind, _FirstType, _SecondType, _ThirdType], ], acc: KindN[ _FailableKind, 'Tuple[_FirstType, ...]', _SecondType, _ThirdType, ], ) -> KindN[ _FailableKind, 'Tuple[_FirstType, ...]', _SecondType, _ThirdType, ]: """ Transforms an iterable of containers into a single container. This method only works with ``FailableN`` subtypes, not just any ``ApplicativeN`` like :meth:`~AbstractFold.collect`. Strategy to extract all successful values even if there are failed values. If there's at least one successful value and any amount of failed values, we will still return all collected successful values. We can return failed value for this strategy only in a single case: when default element is a failed value. Let's see how it works: .. code:: python >>> from returns.result import Success, Failure >>> from returns.iterables import Fold >>> empty = [] >>> all_success = [Success(1), Success(2), Success(3)] >>> has_failure = [Success(1), Failure('a'), Success(3)] >>> all_failures = [Failure('a'), Failure('b')] >>> acc = Success(()) # empty tuple >>> assert Fold.collect_all(empty, acc) == Success(()) >>> assert Fold.collect_all(all_success, acc) == Success((1, 2, 3)) >>> assert Fold.collect_all(has_failure, acc) == Success((1, 3)) >>> assert Fold.collect_all(all_failures, acc) == Success(()) >>> assert Fold.collect_all(empty, Failure('c')) == Failure('c') If that's now what you need, check out :meth:`~AbstractFold.collect` to collect only successful values and fail on any failed ones. Public interface for ``_collect_all`` method. Cannot be modified directly. 
""" return cls._collect_all(iterable, acc) # Protected part # ============== @classmethod @abstractmethod def _loop( cls, iterable: Iterable[ KindN[_ApplicativeKind, _FirstType, _SecondType, _ThirdType], ], acc: KindN[_ApplicativeKind, _UpdatedType, _SecondType, _ThirdType], function: Callable[ [_FirstType], Callable[[_UpdatedType], _UpdatedType], ], concat: Callable[ [ KindN[_ApplicativeKind, _FirstType, _SecondType, _ThirdType], KindN[_ApplicativeKind, _UpdatedType, _SecondType, _ThirdType], KindN[ _ApplicativeKind, Callable[ [_FirstType], Callable[[_UpdatedType], _UpdatedType], ], _SecondType, _ThirdType, ], ], KindN[_ApplicativeKind, _UpdatedType, _SecondType, _ThirdType], ], ) -> KindN[_ApplicativeKind, _UpdatedType, _SecondType, _ThirdType]: """ Protected part of ``loop`` method. Can be replaced in subclasses for better performance, etc. """ @classmethod def _collect( cls, iterable: Iterable[ KindN[_ApplicativeKind, _FirstType, _SecondType, _ThirdType], ], acc: KindN[ _ApplicativeKind, 'Tuple[_FirstType, ...]', _SecondType, _ThirdType, ], ) -> KindN[ _ApplicativeKind, 'Tuple[_FirstType, ...]', _SecondType, _ThirdType, ]: return cls._loop( iterable, acc, _concat_sequence, _concat_applicative, ) @classmethod def _collect_all( cls, iterable: Iterable[ KindN[_FailableKind, _FirstType, _SecondType, _ThirdType], ], acc: KindN[ _FailableKind, 'Tuple[_FirstType, ...]', _SecondType, _ThirdType, ], ) -> KindN[ _FailableKind, 'Tuple[_FirstType, ...]', _SecondType, _ThirdType, ]: return cls._loop( iterable, acc, _concat_sequence, _concat_failable_safely, ) class Fold(AbstractFold): """ Concrete implementation of ``AbstractFold`` of end users. Use it by default. """ __slots__ = () @classmethod def _loop( cls, iterable: Iterable[ KindN[_ApplicativeKind, _FirstType, _SecondType, _ThirdType], ], acc: KindN[_ApplicativeKind, _UpdatedType, _SecondType, _ThirdType], function: Callable[ [_FirstType], Callable[[_UpdatedType], _UpdatedType], ], concat: Callable[ [ KindN[_ApplicativeKind, _FirstType, _SecondType, _ThirdType], KindN[_ApplicativeKind, _UpdatedType, _SecondType, _ThirdType], KindN[ _ApplicativeKind, Callable[ [_FirstType], Callable[[_UpdatedType], _UpdatedType], ], _SecondType, _ThirdType, ], ], KindN[_ApplicativeKind, _UpdatedType, _SecondType, _ThirdType], ], ) -> KindN[_ApplicativeKind, _UpdatedType, _SecondType, _ThirdType]: """ Protected part of ``loop`` method. Can be replaced in subclasses for better performance, etc. """ wrapped = acc.from_value(function) for current in iterable: acc = concat(current, acc, wrapped) return acc # Helper functions # ================ def _concat_sequence( first: _FirstType, ) -> Callable[ ['Tuple[_FirstType, ...]'], 'Tuple[_FirstType, ...]', ]: """ Concats a given item to an existing sequence. We use explicit curring with ``lambda`` function because, ``@curry`` decorator is way slower. And we don't need its features here. But, your functions can use ``@curry`` if you need it. 
""" return lambda second: second + (first,) def _concat_applicative( current: KindN[ _ApplicativeKind, _FirstType, _SecondType, _ThirdType, ], acc: KindN[ _ApplicativeKind, _UpdatedType, _SecondType, _ThirdType, ], function: KindN[ _ApplicativeKind, Callable[[_FirstType], Callable[[_UpdatedType], _UpdatedType]], _SecondType, _ThirdType, ], ) -> KindN[_ApplicativeKind, _UpdatedType, _SecondType, _ThirdType]: """Concats two applicatives using a curried-like function.""" return acc.apply(current.apply(function)) def _concat_failable_safely( current: KindN[ _FailableKind, _FirstType, _SecondType, _ThirdType, ], acc: KindN[ _FailableKind, _UpdatedType, _SecondType, _ThirdType, ], function: KindN[ _FailableKind, Callable[[_FirstType], Callable[[_UpdatedType], _UpdatedType]], _SecondType, _ThirdType, ], ) -> KindN[_FailableKind, _UpdatedType, _SecondType, _ThirdType]: """ Concats two ``FailableN`` using a curried-like function and a fallback. We need both ``.apply`` and ``.lash`` methods here. """ return _concat_applicative(current, acc, function).lash(lambda _: acc) returns-0.24.0/returns/maybe.py000066400000000000000000000320571472312074000164570ustar00rootroot00000000000000from abc import ABCMeta from collections.abc import Callable, Generator, Iterator from functools import wraps from typing import TYPE_CHECKING, Any, ClassVar, Optional, TypeVar, final from typing_extensions import Never, ParamSpec from returns.interfaces.specific.maybe import MaybeBased2 from returns.primitives.container import BaseContainer, container_equality from returns.primitives.exceptions import UnwrapFailedError from returns.primitives.hkt import Kind1, SupportsKind1 # Definitions: _ValueType = TypeVar('_ValueType', covariant=True) _NewValueType = TypeVar('_NewValueType') _FuncParams = ParamSpec('_FuncParams') class Maybe( # type: ignore[type-var] BaseContainer, SupportsKind1['Maybe', _ValueType], MaybeBased2[_ValueType, None], metaclass=ABCMeta, ): """ Represents a result of a series of computations that can return ``None``. An alternative to using exceptions or constant ``is None`` checks. ``Maybe`` is an abstract type and should not be instantiated directly. Instead use ``Some`` and ``Nothing``. See also: - https://github.com/gcanti/fp-ts/blob/master/docs/modules/Option.ts.md """ __slots__ = () _inner_value: _ValueType | None __match_args__ = ('_inner_value',) #: Alias for `Nothing` empty: ClassVar['Maybe[Any]'] #: Typesafe equality comparison with other `Result` objects. equals = container_equality def map( self, function: Callable[[_ValueType], _NewValueType], ) -> 'Maybe[_NewValueType]': """ Composes successful container with a pure function. .. code:: python >>> from returns.maybe import Some, Nothing >>> def mappable(string: str) -> str: ... return string + 'b' >>> assert Some('a').map(mappable) == Some('ab') >>> assert Nothing.map(mappable) == Nothing """ def apply( self, function: Kind1['Maybe', Callable[[_ValueType], _NewValueType]], ) -> 'Maybe[_NewValueType]': """ Calls a wrapped function in a container on this container. .. code:: python >>> from returns.maybe import Some, Nothing >>> def appliable(string: str) -> str: ... 
return string + 'b' >>> assert Some('a').apply(Some(appliable)) == Some('ab') >>> assert Some('a').apply(Nothing) == Nothing >>> assert Nothing.apply(Some(appliable)) == Nothing >>> assert Nothing.apply(Nothing) == Nothing """ def bind( self, function: Callable[[_ValueType], Kind1['Maybe', _NewValueType]], ) -> 'Maybe[_NewValueType]': """ Composes successful container with a function that returns a container. .. code:: python >>> from returns.maybe import Nothing, Maybe, Some >>> def bindable(string: str) -> Maybe[str]: ... return Some(string + 'b') >>> assert Some('a').bind(bindable) == Some('ab') >>> assert Nothing.bind(bindable) == Nothing """ def bind_optional( self, function: Callable[[_ValueType], _NewValueType | None], ) -> 'Maybe[_NewValueType]': """ Binds a function returning an optional value over a container. .. code:: python >>> from returns.maybe import Some, Nothing >>> from typing import Optional >>> def bindable(arg: str) -> Optional[int]: ... return len(arg) if arg else None >>> assert Some('a').bind_optional(bindable) == Some(1) >>> assert Some('').bind_optional(bindable) == Nothing """ def lash( self, function: Callable[[Any], Kind1['Maybe', _ValueType]], ) -> 'Maybe[_ValueType]': """ Composes failed container with a function that returns a container. .. code:: python >>> from returns.maybe import Maybe, Some, Nothing >>> def lashable(arg=None) -> Maybe[str]: ... return Some('b') >>> assert Some('a').lash(lashable) == Some('a') >>> assert Nothing.lash(lashable) == Some('b') We need this feature to make ``Maybe`` compatible with different ``Result`` like operations. """ def __iter__(self) -> Iterator[_ValueType]: """API for :ref:`do-notation`.""" yield self.unwrap() @classmethod def do( cls, expr: Generator[_NewValueType, None, None], ) -> 'Maybe[_NewValueType]': """ Allows working with unwrapped values of containers in a safe way. .. code:: python >>> from returns.maybe import Maybe, Some, Nothing >>> assert Maybe.do( ... first + second ... for first in Some(2) ... for second in Some(3) ... ) == Some(5) >>> assert Maybe.do( ... first + second ... for first in Some(2) ... for second in Nothing ... ) == Nothing See :ref:`do-notation` to learn more. """ try: return Maybe.from_value(next(expr)) except UnwrapFailedError as exc: return exc.halted_container # type: ignore def value_or( self, default_value: _NewValueType, ) -> _ValueType | _NewValueType: """ Get value from successful container or default value from failed one. .. code:: python >>> from returns.maybe import Nothing, Some >>> assert Some(0).value_or(1) == 0 >>> assert Nothing.value_or(1) == 1 """ def or_else_call( self, function: Callable[[], _NewValueType], ) -> _ValueType | _NewValueType: """ Get value from successful container or default value from failed one. Really close to :meth:`~Maybe.value_or` but works with lazy values. This method is unique to ``Maybe`` container, because other containers do have ``.alt`` method. But, ``Maybe`` does not have this method. There's nothing to ``alt`` in ``Nothing``. Instead, it has this method to execute some function if called on a failed container: .. code:: pycon >>> from returns.maybe import Some, Nothing >>> assert Some(1).or_else_call(lambda: 2) == 1 >>> assert Nothing.or_else_call(lambda: 2) == 2 It might be useful to work with exceptions as well: .. code:: pycon >>> def fallback() -> Never: ... raise ValueError('Nothing!') >>> Nothing.or_else_call(fallback) Traceback (most recent call last): ... ValueError: Nothing! 
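        Because the callback is lazy, it is never executed
        for successful containers:

        .. code:: pycon

            >>> assert Some(1).or_else_call(lambda: 1 / 0) == 1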
""" def unwrap(self) -> _ValueType: """ Get value from successful container or raise exception for failed one. .. code:: pycon :force: >>> from returns.maybe import Nothing, Some >>> assert Some(1).unwrap() == 1 >>> Nothing.unwrap() Traceback (most recent call last): ... returns.primitives.exceptions.UnwrapFailedError """ # noqa: RST307 def failure(self) -> None: """ Get failed value from failed container or raise exception from success. .. code:: pycon :force: >>> from returns.maybe import Nothing, Some >>> assert Nothing.failure() is None >>> Some(1).failure() Traceback (most recent call last): ... returns.primitives.exceptions.UnwrapFailedError """ # noqa: RST307 @classmethod def from_value( cls, inner_value: _NewValueType, ) -> 'Maybe[_NewValueType]': """ Creates new instance of ``Maybe`` container based on a value. .. code:: python >>> from returns.maybe import Maybe, Some >>> assert Maybe.from_value(1) == Some(1) >>> assert Maybe.from_value(None) == Some(None) """ return Some(inner_value) @classmethod def from_optional( cls, inner_value: _NewValueType | None, ) -> 'Maybe[_NewValueType]': """ Creates new instance of ``Maybe`` container based on an optional value. .. code:: python >>> from returns.maybe import Maybe, Some, Nothing >>> assert Maybe.from_optional(1) == Some(1) >>> assert Maybe.from_optional(None) == Nothing """ if inner_value is None: return _Nothing(inner_value) return Some(inner_value) @final class _Nothing(Maybe[Any]): """Represents an empty state.""" __slots__ = () _inner_value: None _instance: Optional['_Nothing'] = None def __new__(cls, *args: Any, **kwargs: Any) -> '_Nothing': if cls._instance is None: cls._instance = object.__new__(cls) # noqa: WPS609 return cls._instance def __init__(self, inner_value: None = None) -> None: # noqa: WPS632 """ Private constructor for ``_Nothing`` type. Use :attr:`~Nothing` instead. Wraps the given value in the ``_Nothing`` container. ``inner_value`` can only be ``None``. """ super().__init__(None) def __repr__(self): """ Custom ``str`` definition without the state inside. .. code:: python >>> from returns.maybe import Nothing >>> assert str(Nothing) == '' >>> assert repr(Nothing) == '' """ return '' def map(self, function): """Does nothing for ``Nothing``.""" return self def apply(self, container): """Does nothing for ``Nothing``.""" return self def bind(self, function): """Does nothing for ``Nothing``.""" return self def bind_optional(self, function): """Does nothing.""" return self def lash(self, function): """Composes this container with a function returning container.""" return function(None) def value_or(self, default_value): """Returns default value.""" return default_value def or_else_call(self, function): """Returns the result of a passed function.""" return function() def unwrap(self): """Raises an exception, since it does not have a value inside.""" raise UnwrapFailedError(self) def failure(self) -> None: """Returns failed value.""" return self._inner_value @final class Some(Maybe[_ValueType]): """ Represents a calculation which has succeeded and contains the value. Quite similar to ``Success`` type. 
""" __slots__ = () _inner_value: _ValueType def __init__(self, inner_value: _ValueType) -> None: """Some constructor.""" super().__init__(inner_value) if not TYPE_CHECKING: # noqa: WPS604 # pragma: no branch def bind(self, function): """Binds current container to a function that returns container.""" return function(self._inner_value) def bind_optional(self, function): """Binds a function returning an optional value over a container.""" return Maybe.from_optional(function(self._inner_value)) def unwrap(self): """Returns inner value for successful container.""" return self._inner_value def map(self, function): """Composes current container with a pure function.""" return Some(function(self._inner_value)) def apply(self, container): """Calls a wrapped function in a container on this container.""" if isinstance(container, Some): return self.map(container.unwrap()) # type: ignore return container def lash(self, function): """Does nothing for ``Some``.""" return self def value_or(self, default_value): """Returns inner value for successful container.""" return self._inner_value def or_else_call(self, function): """Returns inner value for successful container.""" return self._inner_value def failure(self): """Raises exception for successful container.""" raise UnwrapFailedError(self) #: Public unit value of protected :class:`~_Nothing` type. Nothing: Maybe[Never] = _Nothing() Maybe.empty = Nothing def maybe( function: Callable[_FuncParams, _ValueType | None], ) -> Callable[_FuncParams, Maybe[_ValueType]]: """ Decorator to convert ``None``-returning function to ``Maybe`` container. This decorator works with sync functions only. Example: .. code:: python >>> from typing import Optional >>> from returns.maybe import Nothing, Some, maybe >>> @maybe ... def might_be_none(arg: int) -> Optional[int]: ... if arg == 0: ... return None ... 
return 1 / arg >>> assert might_be_none(0) == Nothing >>> assert might_be_none(1) == Some(1.0) """ @wraps(function) def decorator( *args: _FuncParams.args, **kwargs: _FuncParams.kwargs, ) -> Maybe[_ValueType]: return Maybe.from_optional(function(*args, **kwargs)) return decorator returns-0.24.0/returns/methods/000077500000000000000000000000001472312074000164445ustar00rootroot00000000000000returns-0.24.0/returns/methods/__init__.py000066400000000000000000000003111472312074000205500ustar00rootroot00000000000000from returns.methods.cond import cond as cond from returns.methods.partition import partition as partition from returns.methods.unwrap_or_failure import ( unwrap_or_failure as unwrap_or_failure, ) returns-0.24.0/returns/methods/cond.py000066400000000000000000000050711472312074000177440ustar00rootroot00000000000000from typing import TypeVar, overload from returns.context import NoDeps from returns.interfaces.failable import DiverseFailableN, SingleFailableN from returns.primitives.hkt import KindN, kinded _ValueType = TypeVar('_ValueType') _ErrorType = TypeVar('_ErrorType') _SingleFailableKind = TypeVar('_SingleFailableKind', bound=SingleFailableN) _DiverseFailableKind = TypeVar('_DiverseFailableKind', bound=DiverseFailableN) @overload def internal_cond( container_type: type[_SingleFailableKind], is_success: bool, success_value: _ValueType, ) -> KindN[_SingleFailableKind, _ValueType, _ErrorType, NoDeps]: """Reduce the boilerplate when choosing paths with ``SingleFailableN``.""" @overload def internal_cond( container_type: type[_DiverseFailableKind], is_success: bool, success_value: _ValueType, error_value: _ErrorType, ) -> KindN[_DiverseFailableKind, _ValueType, _ErrorType, NoDeps]: """Reduce the boilerplate when choosing paths with ``DiverseFailableN``.""" def internal_cond( container_type: ( type[_SingleFailableKind] | type[_DiverseFailableKind] ), is_success: bool, success_value: _ValueType, error_value: _ErrorType | None = None, ): """ Reduce the boilerplate when choosing paths. Works with ``SingleFailableN`` (e.g. ``Maybe``) and ``DiverseFailableN`` (e.g. ``Result``). Example using ``cond`` with the ``Result`` container: .. code:: python >>> from returns.methods import cond >>> from returns.result import Failure, Result, Success >>> def is_numeric(string: str) -> Result[str, str]: ... return cond( ... Result, ... string.isnumeric(), ... 'It is a number', ... 'It is not a number', ... ) >>> assert is_numeric('42') == Success('It is a number') >>> assert is_numeric('non numeric') == Failure('It is not a number') Example using ``cond`` with the ``Maybe`` container: .. code:: python >>> from returns.maybe import Maybe, Some, Nothing >>> def is_positive(number: int) -> Maybe[int]: ... return cond(Maybe, number > 0, number) >>> assert is_positive(10) == Some(10) >>> assert is_positive(-10) == Nothing """ if is_success: return container_type.from_value(success_value) if issubclass(container_type, DiverseFailableN): return container_type.from_failure(error_value) return container_type.empty #: Kinded version of :func:`~internal_cond`, use it to infer real return type. 
cond = kinded(internal_cond) returns-0.24.0/returns/methods/partition.py000066400000000000000000000020641472312074000210310ustar00rootroot00000000000000from collections.abc import Iterable from typing import TypeVar from returns.interfaces.unwrappable import Unwrappable from returns.primitives.exceptions import UnwrapFailedError _ValueType = TypeVar('_ValueType', covariant=True) _ErrorType = TypeVar('_ErrorType', covariant=True) def partition( containers: Iterable[ Unwrappable[_ValueType, _ErrorType], ], ) -> tuple[list[_ValueType], list[_ErrorType]]: """ Partition a list of unwrappables into successful and failed values. Preserves order. .. code:: python >>> from returns.result import Failure, Success >>> from returns.methods import partition >>> results = [Success(1), Failure(2), Success(3), Failure(4)] >>> partition(results) ([1, 3], [2, 4]) """ successes: list[_ValueType] = [] failures: list[_ErrorType] = [] for container in containers: try: successes.append(container.unwrap()) except UnwrapFailedError: failures.append(container.failure()) return successes, failures returns-0.24.0/returns/methods/unwrap_or_failure.py000066400000000000000000000013301472312074000225360ustar00rootroot00000000000000from typing import TypeVar from returns.interfaces.unwrappable import Unwrappable from returns.pipeline import is_successful _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') def unwrap_or_failure( container: Unwrappable[_FirstType, _SecondType], ) -> _FirstType | _SecondType: """ Unwraps either successful or failed value. .. code:: python >>> from returns.io import IO, IOSuccess, IOFailure >>> from returns.methods import unwrap_or_failure >>> assert unwrap_or_failure(IOSuccess(1)) == IO(1) >>> assert unwrap_or_failure(IOFailure('a')) == IO('a') """ if is_successful(container): return container.unwrap() return container.failure() returns-0.24.0/returns/pipeline.py000066400000000000000000000022671472312074000171670ustar00rootroot00000000000000from typing import Any from returns._internal.pipeline.flow import flow as flow from returns._internal.pipeline.managed import managed as managed from returns._internal.pipeline.pipe import pipe as pipe from returns.interfaces.unwrappable import Unwrappable from returns.primitives.exceptions import UnwrapFailedError # TODO: add overloads for specific types, so it can narrow them with `TypeIs` def is_successful(container: Unwrappable[Any, Any]) -> bool: """ Determines if a container was successful or not. .. code:: python >>> from returns.maybe import Some, Nothing >>> from returns.result import Failure, Success >>> from returns.io import IOSuccess, IOFailure >>> assert is_successful(Some(1)) >>> assert not is_successful(Nothing) >>> assert is_successful(Success(1)) >>> assert not is_successful(Failure(1)) >>> assert is_successful(IOSuccess(1)) >>> assert not is_successful(IOFailure(1)) This function can work with containers that are instance of :class:`returns.interfaces.unwrappable.Unwrappable`. 
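    A typical usage sketch is guarding ``.unwrap()`` calls:

    .. code:: python

        >>> from returns.result import Failure, Result, Success

        >>> def double_or_zero(container: Result[int, str]) -> int:
        ...     if is_successful(container):
        ...         return container.unwrap() * 2
        ...     return 0

        >>> assert double_or_zero(Success(2)) == 4
        >>> assert double_or_zero(Failure('a')) == 0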
""" try: container.unwrap() except UnwrapFailedError: return False return True returns-0.24.0/returns/pointfree/000077500000000000000000000000001472312074000167745ustar00rootroot00000000000000returns-0.24.0/returns/pointfree/__init__.py000066400000000000000000000041641472312074000211120ustar00rootroot00000000000000from returns.pointfree.alt import alt as alt from returns.pointfree.apply import apply as apply from returns.pointfree.bimap import bimap as bimap from returns.pointfree.bind import bind as bind from returns.pointfree.bind_async import bind_async as bind_async from returns.pointfree.bind_async_context_future_result import ( bind_async_context_future_result as bind_async_context_future_result, ) from returns.pointfree.bind_async_future import ( bind_async_future as bind_async_future, ) from returns.pointfree.bind_async_future_result import ( bind_async_future_result as bind_async_future_result, ) from returns.pointfree.bind_awaitable import bind_awaitable as bind_awaitable from returns.pointfree.bind_context import bind_context as bind_context from returns.pointfree.bind_context import bind_context2 as bind_context2 from returns.pointfree.bind_context import bind_context3 as bind_context3 from returns.pointfree.bind_context_future_result import ( bind_context_future_result as bind_context_future_result, ) from returns.pointfree.bind_context_ioresult import ( bind_context_ioresult as bind_context_ioresult, ) from returns.pointfree.bind_context_result import ( bind_context_result as bind_context_result, ) from returns.pointfree.bind_future import bind_future as bind_future from returns.pointfree.bind_future_result import ( bind_future_result as bind_future_result, ) from returns.pointfree.bind_io import bind_io as bind_io from returns.pointfree.bind_ioresult import bind_ioresult as bind_ioresult from returns.pointfree.bind_optional import bind_optional as bind_optional from returns.pointfree.bind_result import bind_result as bind_result from returns.pointfree.compose_result import compose_result as compose_result from returns.pointfree.cond import cond as cond from returns.pointfree.lash import lash as lash from returns.pointfree.map import map_ as map_ from returns.pointfree.modify_env import modify_env as modify_env from returns.pointfree.modify_env import modify_env2 as modify_env2 from returns.pointfree.modify_env import modify_env3 as modify_env3 from returns.pointfree.unify import unify as unify returns-0.24.0/returns/pointfree/alt.py000066400000000000000000000030411472312074000201240ustar00rootroot00000000000000from collections.abc import Callable from typing import TypeVar from returns.interfaces.altable import AltableN from returns.primitives.hkt import Kinded, KindN, kinded _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _AltableKind = TypeVar('_AltableKind', bound=AltableN) def alt( function: Callable[[_SecondType], _UpdatedType], ) -> Kinded[Callable[ [KindN[_AltableKind, _FirstType, _SecondType, _ThirdType]], KindN[_AltableKind, _FirstType, _UpdatedType, _ThirdType], ]]: """ Lifts function to be wrapped in a container for better composition. In other words, it modifies the function's signature from: ``a -> b`` to: ``Container[a] -> Container[b]`` This is how it should be used: .. code:: python >>> from returns.io import IOFailure, IOSuccess >>> from returns.pointfree import alt >>> def example(argument: int) -> float: ... 
return argument / 2 >>> assert alt(example)(IOSuccess(1)) == IOSuccess(1) >>> assert alt(example)(IOFailure(4)) == IOFailure(2.0) Note, that this function works for all containers with ``.alt`` method. See :class:`returns.primitives.interfaces.altable.AltableN` for more info. """ @kinded def factory( container: KindN[_AltableKind, _FirstType, _SecondType, _ThirdType], ) -> KindN[_AltableKind, _FirstType, _UpdatedType, _ThirdType]: return container.alt(function) return factory returns-0.24.0/returns/pointfree/apply.py000066400000000000000000000033521472312074000204760ustar00rootroot00000000000000from collections.abc import Callable from typing import TypeVar from returns.interfaces.applicative import ApplicativeN from returns.primitives.hkt import Kinded, KindN, kinded _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _ApplicativeKind = TypeVar('_ApplicativeKind', bound=ApplicativeN) def apply( container: KindN[ _ApplicativeKind, Callable[[_FirstType], _UpdatedType], _SecondType, _ThirdType, ], ) -> Kinded[Callable[ [KindN[_ApplicativeKind, _FirstType, _SecondType, _ThirdType]], KindN[_ApplicativeKind, _UpdatedType, _SecondType, _ThirdType], ]]: """ Turns container containing a function into a callable. In other words, it modifies the function signature from: ``Container[a -> b]`` to: ``Container[a] -> Container[b]`` This is how it should be used: .. code:: python >>> from returns.pointfree import apply >>> from returns.maybe import Some, Nothing >>> def example(argument: int) -> int: ... return argument + 1 >>> assert apply(Some(example))(Some(1)) == Some(2) >>> assert apply(Some(example))(Nothing) == Nothing >>> assert apply(Nothing)(Some(1)) == Nothing >>> assert apply(Nothing)(Nothing) == Nothing Note, that this function works for all containers with ``.apply`` method. See :class:`returns.interfaces.applicative.ApplicativeN` for more info. """ @kinded def factory( other: KindN[_ApplicativeKind, _FirstType, _SecondType, _ThirdType], ) -> KindN[_ApplicativeKind, _UpdatedType, _SecondType, _ThirdType]: return other.apply(container) return factory returns-0.24.0/returns/pointfree/bimap.py000066400000000000000000000033621472312074000204420ustar00rootroot00000000000000from collections.abc import Callable from typing import TypeVar from returns.interfaces.bimappable import BiMappableN from returns.primitives.hkt import Kinded, KindN, kinded _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType1 = TypeVar('_UpdatedType1') _UpdatedType2 = TypeVar('_UpdatedType2') _BiMappableKind = TypeVar('_BiMappableKind', bound=BiMappableN) def bimap( on_first: Callable[[_FirstType], _UpdatedType1], on_second: Callable[[_SecondType], _UpdatedType2], ) -> Kinded[Callable[ [KindN[_BiMappableKind, _FirstType, _SecondType, _ThirdType]], KindN[_BiMappableKind, _UpdatedType1, _UpdatedType2, _ThirdType], ]]: """ Maps container on both: first and second arguments. Can be used to synchronize state on both success and failure. This is how it should be used: .. code:: python >>> from returns.io import IOSuccess, IOFailure >>> from returns.pointfree import bimap >>> def first(argument: int) -> float: ... return argument / 2 >>> def second(argument: str) -> bool: ... 
return bool(argument) >>> assert bimap(first, second)(IOSuccess(1)) == IOSuccess(0.5) >>> assert bimap(first, second)(IOFailure('')) == IOFailure(False) Note, that this function works for all containers with ``.map`` and ``.alt`` methods. See :class:`returns.primitives.interfaces.bimappable.BiMappableN` for more info. """ @kinded def factory( container: KindN[_BiMappableKind, _FirstType, _SecondType, _ThirdType], ) -> KindN[_BiMappableKind, _UpdatedType1, _UpdatedType2, _ThirdType]: return container.map(on_first).alt(on_second) return factory returns-0.24.0/returns/pointfree/bind.py000066400000000000000000000033101472312074000202570ustar00rootroot00000000000000from collections.abc import Callable from typing import TypeVar from returns.interfaces.bindable import BindableN from returns.primitives.hkt import Kinded, KindN, kinded _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _BindableKind = TypeVar('_BindableKind', bound=BindableN) def bind( function: Callable[ [_FirstType], KindN[_BindableKind, _UpdatedType, _SecondType, _ThirdType], ], ) -> Kinded[Callable[ [KindN[_BindableKind, _FirstType, _SecondType, _ThirdType]], KindN[_BindableKind, _UpdatedType, _SecondType, _ThirdType], ]]: """ Turns function's input parameter from a regular value to a container. In other words, it modifies the function signature from: ``a -> Container[b]`` to: ``Container[a] -> Container[b]`` Similar to :func:`returns.pointfree.lash`, but works for successful containers. This is how it should be used: .. code:: python >>> from returns.pointfree import bind >>> from returns.maybe import Maybe, Some, Nothing >>> def example(argument: int) -> Maybe[int]: ... return Some(argument + 1) >>> assert bind(example)(Some(1)) == Some(2) >>> assert bind(example)(Nothing) == Nothing Note, that this function works for all containers with ``.bind`` method. See :class:`returns.primitives.interfaces.bindable.BindableN` for more info. """ @kinded def factory( container: KindN[_BindableKind, _FirstType, _SecondType, _ThirdType], ) -> KindN[_BindableKind, _UpdatedType, _SecondType, _ThirdType]: return container.bind(function) return factory returns-0.24.0/returns/pointfree/bind_async.py000066400000000000000000000034161472312074000214630ustar00rootroot00000000000000from collections.abc import Awaitable, Callable from typing import TypeVar from returns.interfaces.specific.future import FutureLikeN from returns.primitives.hkt import Kinded, KindN, kinded _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _FutureKind = TypeVar('_FutureKind', bound=FutureLikeN) def bind_async( function: Callable[ [_FirstType], Awaitable[KindN[_FutureKind, _UpdatedType, _SecondType, _ThirdType]], ], ) -> Kinded[Callable[ [KindN[_FutureKind, _FirstType, _SecondType, _ThirdType]], KindN[_FutureKind, _UpdatedType, _SecondType, _ThirdType], ]]: """ Compose a container and ``async`` function returning a container. In other words, it modifies the function's signature from: ``a -> Awaitable[Container[b]]`` to: ``Container[a] -> Container[b]`` This is how it should be used: .. code:: python >>> import anyio >>> from returns.future import Future >>> from returns.io import IO >>> from returns.pointfree import bind_async >>> async def coroutine(x: int) -> Future[str]: ... 
return Future.from_value(str(x + 1)) >>> bound = bind_async(coroutine)(Future.from_value(1)) >>> assert anyio.run(bound.awaitable) == IO('2') Note, that this function works for all containers with ``.bind_async`` method. See :class:`returns.primitives.interfaces.specific.future.FutureLikeN` for more info. """ @kinded def factory( container: KindN[_FutureKind, _FirstType, _SecondType, _ThirdType], ) -> KindN[_FutureKind, _UpdatedType, _SecondType, _ThirdType]: return container.bind_async(function) return factory returns-0.24.0/returns/pointfree/bind_async_context_future_result.py000066400000000000000000000046131472312074000262170ustar00rootroot00000000000000from collections.abc import Awaitable, Callable from typing import TYPE_CHECKING, TypeVar from returns.interfaces.specific.reader_future_result import ( ReaderFutureResultLikeN, ) from returns.primitives.hkt import Kinded, KindN, kinded if TYPE_CHECKING: from returns.context import ReaderFutureResult # noqa: WPS433 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _ReaderFutureResultLikeKind = TypeVar( '_ReaderFutureResultLikeKind', bound=ReaderFutureResultLikeN, ) def bind_async_context_future_result( function: Callable[ [_FirstType], Awaitable['ReaderFutureResult[_UpdatedType, _SecondType, _ThirdType]'], ], ) -> Kinded[Callable[ [KindN[_ReaderFutureResultLikeKind, _FirstType, _SecondType, _ThirdType]], KindN[_ReaderFutureResultLikeKind, _UpdatedType, _SecondType, _ThirdType], ]]: """ Lifts function from ``RequiresContextFutureResult`` for better composition. In other words, it modifies the function's signature from: ``async a -> RequiresContextFutureResult[env, b, c]`` to: ``Container[env, a, c]`` -> ``Container[env, b, c]`` .. code:: python >>> import anyio >>> from returns.context import ReaderFutureResult >>> from returns.io import IOSuccess, IOFailure >>> from returns.future import FutureResult >>> from returns.pointfree import bind_async_context_future_result >>> async def function(arg: int) -> ReaderFutureResult[str, int, str]: ... return ReaderFutureResult( ... lambda deps: FutureResult.from_value(len(deps) + arg), ... ) >>> assert anyio.run(bind_async_context_future_result(function)( ... ReaderFutureResult.from_value(2), ... )('abc').awaitable) == IOSuccess(5) >>> assert anyio.run(bind_async_context_future_result(function)( ... ReaderFutureResult.from_failure(0), ... 
)('abc').awaitable) == IOFailure(0) """ @kinded def factory( container: KindN[ _ReaderFutureResultLikeKind, _FirstType, _SecondType, _ThirdType, ], ) -> KindN[ _ReaderFutureResultLikeKind, _UpdatedType, _SecondType, _ThirdType, ]: return container.bind_async_context_future_result(function) return factory returns-0.24.0/returns/pointfree/bind_async_future.py000066400000000000000000000034331472312074000230540ustar00rootroot00000000000000from collections.abc import Awaitable, Callable from typing import TypeVar from returns.future import Future from returns.interfaces.specific.future import FutureLikeN from returns.primitives.hkt import Kinded, KindN, kinded _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _FutureKind = TypeVar('_FutureKind', bound=FutureLikeN) def bind_async_future( function: Callable[ [_FirstType], Awaitable[Future[_UpdatedType]], ], ) -> Kinded[Callable[ [KindN[_FutureKind, _FirstType, _SecondType, _ThirdType]], KindN[_FutureKind, _UpdatedType, _SecondType, _ThirdType], ]]: """ Compose a container and async function returning ``Future``. In other words, it modifies the function signature from: ``a -> Awaitable[Future[b]]`` to: ``Container[a] -> Container[b]`` This is how it should be used: .. code:: python >>> import anyio >>> from returns.pointfree import bind_async_future >>> from returns.future import Future >>> from returns.io import IO >>> async def example(argument: int) -> Future[int]: ... return Future.from_value(argument + 1) >>> assert anyio.run( ... bind_async_future(example)(Future.from_value(1)).awaitable, ... ) == IO(2) Note, that this function works for all containers with ``.bind_async_future`` method. See :class:`returns.primitives.interfaces.specific.future.FutureLikeN` for more info. """ @kinded def factory( container: KindN[_FutureKind, _FirstType, _SecondType, _ThirdType], ) -> KindN[_FutureKind, _UpdatedType, _SecondType, _ThirdType]: return container.bind_async_future(function) return factory returns-0.24.0/returns/pointfree/bind_async_future_result.py000066400000000000000000000043461472312074000244560ustar00rootroot00000000000000from collections.abc import Awaitable, Callable from typing import TypeVar from returns.future import FutureResult from returns.interfaces.specific.future_result import FutureResultLikeN from returns.primitives.hkt import Kinded, KindN, kinded _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _FutureResultKind = TypeVar('_FutureResultKind', bound=FutureResultLikeN) def bind_async_future_result( function: Callable[ [_FirstType], Awaitable[FutureResult[_UpdatedType, _SecondType]], ], ) -> Kinded[Callable[ [KindN[_FutureResultKind, _FirstType, _SecondType, _ThirdType]], KindN[_FutureResultKind, _UpdatedType, _SecondType, _ThirdType], ]]: """ Compose a container and async function returning ``FutureResult``. In other words, it modifies the function signature from: ``a -> Awaitable[FutureResult[b, c]]`` to: ``Container[a, c] -> Container[b, c]`` This is how it should be used: .. code:: python >>> import anyio >>> from returns.pointfree import bind_async_future_result >>> from returns.future import FutureResult >>> from returns.io import IOSuccess, IOFailure >>> async def example(argument: int) -> FutureResult[int, str]: ... return FutureResult.from_value(argument + 1) >>> assert anyio.run( ... bind_async_future_result(example)( ... 
FutureResult.from_value(1), ... ).awaitable, ... ) == IOSuccess(2) >>> assert anyio.run( ... bind_async_future_result(example)( ... FutureResult.from_failure('a'), ... ).awaitable, ... ) == IOFailure('a') .. currentmodule: returns.primitives.interfaces.specific.future_result Note, that this function works for all containers with ``.bind_async_future`` method. See :class:`~FutureResultLikeN` for more info. """ @kinded def factory( container: KindN[ _FutureResultKind, _FirstType, _SecondType, _ThirdType, ], ) -> KindN[_FutureResultKind, _UpdatedType, _SecondType, _ThirdType]: return container.bind_async_future_result(function) return factory returns-0.24.0/returns/pointfree/bind_awaitable.py000066400000000000000000000033441472312074000222770ustar00rootroot00000000000000from collections.abc import Awaitable, Callable from typing import TypeVar from returns.interfaces.specific.future import FutureLikeN from returns.primitives.hkt import Kinded, KindN, kinded _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _FutureKind = TypeVar('_FutureKind', bound=FutureLikeN) def bind_awaitable( function: Callable[[_FirstType], Awaitable[_UpdatedType]], ) -> Kinded[Callable[ [KindN[_FutureKind, _FirstType, _SecondType, _ThirdType]], KindN[_FutureKind, _UpdatedType, _SecondType, _ThirdType], ]]: """ Composes a container a regular ``async`` function. This function should return plain, non-container value. In other words, it modifies the function's signature from: ``a -> Awaitable[b]`` to: ``Container[a] -> Container[b]`` This is how it should be used: .. code:: python >>> import anyio >>> from returns.future import Future >>> from returns.io import IO >>> from returns.pointfree import bind_awaitable >>> async def coroutine(x: int) -> int: ... return x + 1 >>> assert anyio.run( ... bind_awaitable(coroutine)(Future.from_value(1)).awaitable, ... ) == IO(2) Note, that this function works for all containers with ``.bind_awaitable`` method. See :class:`returns.primitives.interfaces.specific.future.FutureLikeN` for more info. """ @kinded def factory( container: KindN[_FutureKind, _FirstType, _SecondType, _ThirdType], ) -> KindN[_FutureKind, _UpdatedType, _SecondType, _ThirdType]: return container.bind_awaitable(function) return factory returns-0.24.0/returns/pointfree/bind_context.py000066400000000000000000000066141472312074000220350ustar00rootroot00000000000000from __future__ import annotations from collections.abc import Callable from typing import TYPE_CHECKING, TypeVar from returns.interfaces.specific.reader import ReaderLike2, ReaderLike3 from returns.primitives.hkt import Kind2, Kind3, Kinded, kinded if TYPE_CHECKING: from returns.context import RequiresContext # noqa: WPS433 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _Reader2Kind = TypeVar('_Reader2Kind', bound=ReaderLike2) _Reader3Kind = TypeVar('_Reader3Kind', bound=ReaderLike3) def bind_context2( function: Callable[ [_FirstType], RequiresContext[_UpdatedType, _SecondType], ], ) -> Kinded[Callable[ [Kind2[_Reader2Kind, _FirstType, _SecondType]], Kind2[_Reader2Kind, _UpdatedType, _SecondType], ]]: """ Composes successful container with a function that returns a container. In other words, it modifies the function's signature from: ``a -> RequresContext[b, c]`` to: ``Container[a, c] -> Container[b, c]`` .. 
code:: python >>> from returns.pointfree import bind_context2 >>> from returns.context import Reader >>> def example(argument: int) -> Reader[int, int]: ... return Reader(lambda deps: argument + deps) >>> assert bind_context2(example)(Reader.from_value(2))(3) == 5 Note, that this function works with only ``Kind2`` containers with ``.bind_context`` method. See :class:`returns.primitives.interfaces.specific.reader.ReaderLike2` for more info. """ @kinded def factory( container: Kind2[_Reader2Kind, _FirstType, _SecondType], ) -> Kind2[_Reader2Kind, _UpdatedType, _SecondType]: return container.bind_context(function) return factory def bind_context3( function: Callable[ [_FirstType], RequiresContext[_UpdatedType, _ThirdType], ], ) -> Kinded[Callable[ [Kind3[_Reader3Kind, _FirstType, _SecondType, _ThirdType]], Kind3[_Reader3Kind, _UpdatedType, _SecondType, _ThirdType], ]]: """ Composes successful container with a function that returns a container. In other words, it modifies the function's signature from: ``a -> RequresContext[b, c]`` to: ``Container[a, c] -> Container[b, c]`` .. code:: python >>> from returns.context import RequiresContext, RequiresContextResult >>> from returns.result import Success, Failure >>> from returns.pointfree import bind_context >>> def function(arg: int) -> RequiresContext[str, int]: ... return RequiresContext(lambda deps: len(deps) + arg) >>> assert bind_context(function)( ... RequiresContextResult.from_value(2), ... )('abc') == Success(5) >>> assert bind_context(function)( ... RequiresContextResult.from_failure(0), ... )('abc') == Failure(0) Note, that this function works with only ``Kind3`` containers with ``.bind_context`` method. See :class:`returns.primitives.interfaces.specific.reader.ReaderLike3` for more info. """ @kinded def factory( container: Kind3[_Reader3Kind, _FirstType, _SecondType, _ThirdType], ) -> Kind3[_Reader3Kind, _UpdatedType, _SecondType, _ThirdType]: return container.bind_context(function) return factory #: Useful alias for :func:`~bind_context3`. bind_context = bind_context3 returns-0.24.0/returns/pointfree/bind_context_future_result.py000066400000000000000000000045131472312074000250210ustar00rootroot00000000000000from collections.abc import Callable from typing import TYPE_CHECKING, TypeVar from returns.interfaces.specific.reader_future_result import ( ReaderFutureResultLikeN, ) from returns.primitives.hkt import Kinded, KindN, kinded if TYPE_CHECKING: from returns.context import ReaderFutureResult # noqa: WPS433 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _ReaderFutureResultLikeKind = TypeVar( '_ReaderFutureResultLikeKind', bound=ReaderFutureResultLikeN, ) def bind_context_future_result( function: Callable[ [_FirstType], 'ReaderFutureResult[_UpdatedType, _SecondType, _ThirdType]', ], ) -> Kinded[Callable[ [KindN[_ReaderFutureResultLikeKind, _FirstType, _SecondType, _ThirdType]], KindN[_ReaderFutureResultLikeKind, _UpdatedType, _SecondType, _ThirdType], ]]: """ Lifts function from ``RequiresContextFutureResult`` for better composition. In other words, it modifies the function's signature from: ``a -> RequiresContextFutureResult[env, b, c]`` to: ``Container[env, a, c]`` -> ``Container[env, b, c]`` .. 
code:: python >>> import anyio >>> from returns.context import ReaderFutureResult >>> from returns.io import IOSuccess, IOFailure >>> from returns.future import FutureResult >>> from returns.pointfree import bind_context_future_result >>> def function(arg: int) -> ReaderFutureResult[str, int, str]: ... return ReaderFutureResult( ... lambda deps: FutureResult.from_value(len(deps) + arg), ... ) >>> assert anyio.run(bind_context_future_result(function)( ... ReaderFutureResult.from_value(2), ... )('abc').awaitable) == IOSuccess(5) >>> assert anyio.run(bind_context_future_result(function)( ... ReaderFutureResult.from_failure(0), ... )('abc').awaitable) == IOFailure(0) """ @kinded def factory( container: KindN[ _ReaderFutureResultLikeKind, _FirstType, _SecondType, _ThirdType, ], ) -> KindN[ _ReaderFutureResultLikeKind, _UpdatedType, _SecondType, _ThirdType, ]: return container.bind_context_future_result(function) return factory returns-0.24.0/returns/pointfree/bind_context_ioresult.py000066400000000000000000000044301472312074000237550ustar00rootroot00000000000000from __future__ import annotations from collections.abc import Callable from typing import TYPE_CHECKING, TypeVar from returns.interfaces.specific.reader_ioresult import ReaderIOResultLikeN from returns.primitives.hkt import Kinded, KindN, kinded if TYPE_CHECKING: from returns.context import ReaderIOResult # noqa: WPS433 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _ReaderIOResultLikeKind = TypeVar( '_ReaderIOResultLikeKind', bound=ReaderIOResultLikeN, ) def bind_context_ioresult( function: Callable[ [_FirstType], ReaderIOResult[_UpdatedType, _SecondType, _ThirdType], ], ) -> Kinded[Callable[ [KindN[_ReaderIOResultLikeKind, _FirstType, _SecondType, _ThirdType]], KindN[_ReaderIOResultLikeKind, _UpdatedType, _SecondType, _ThirdType], ]]: """ Lifts function from ``RequiresContextIOResult`` for better composition. In other words, it modifies the function's signature from: ``a -> RequiresContextIOResult[env, b, c]`` to: ``Container[env, a, c]`` -> ``Container[env, b, c]`` .. code:: python >>> import anyio >>> from returns.context import ( ... RequiresContextFutureResult, ... RequiresContextIOResult, ... ) >>> from returns.io import IOSuccess, IOFailure >>> from returns.pointfree import bind_context_ioresult >>> def function(arg: int) -> RequiresContextIOResult[str, int, str]: ... return RequiresContextIOResult( ... lambda deps: IOSuccess(len(deps) + arg), ... ) >>> assert anyio.run(bind_context_ioresult(function)( ... RequiresContextFutureResult.from_value(2), ... )('abc').awaitable) == IOSuccess(5) >>> assert anyio.run(bind_context_ioresult(function)( ... RequiresContextFutureResult.from_failure(0), ... 
)('abc').awaitable) == IOFailure(0) """ @kinded def factory( container: KindN[ _ReaderIOResultLikeKind, _FirstType, _SecondType, _ThirdType, ], ) -> KindN[_ReaderIOResultLikeKind, _UpdatedType, _SecondType, _ThirdType]: return container.bind_context_ioresult(function) return factory returns-0.24.0/returns/pointfree/bind_context_result.py000066400000000000000000000037761472312074000234410ustar00rootroot00000000000000from __future__ import annotations from collections.abc import Callable from typing import TYPE_CHECKING, TypeVar from returns.interfaces.specific.reader_result import ReaderResultLikeN from returns.primitives.hkt import Kinded, KindN, kinded if TYPE_CHECKING: from returns.context import ReaderResult # noqa: WPS433 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _ReaderResultLikeKind = TypeVar( '_ReaderResultLikeKind', bound=ReaderResultLikeN, ) def bind_context_result( function: Callable[ [_FirstType], ReaderResult[_UpdatedType, _SecondType, _ThirdType], ], ) -> Kinded[Callable[ [KindN[_ReaderResultLikeKind, _FirstType, _SecondType, _ThirdType]], KindN[_ReaderResultLikeKind, _UpdatedType, _SecondType, _ThirdType], ]]: """ Composes successful container with a function that returns a container. In other words, it modifies the function's signature from: ``a -> ReaderResult[b, c, e]`` to: ``Container[a, c, e] -> Container[b, c, e]`` .. code:: python >>> from returns.pointfree import bind_context_result >>> from returns.context import ReaderIOResult, ReaderResult >>> from returns.io import IOSuccess, IOFailure >>> def example(argument: int) -> ReaderResult[int, str, str]: ... return ReaderResult.from_value(argument + 1) >>> assert bind_context_result(example)( ... ReaderIOResult.from_value(1), ... )(...) == IOSuccess(2) >>> assert bind_context_result(example)( ... ReaderIOResult.from_failure('a'), ... )(...) == IOFailure('a') """ @kinded def factory( container: KindN[ _ReaderResultLikeKind, _FirstType, _SecondType, _ThirdType, ], ) -> KindN[_ReaderResultLikeKind, _UpdatedType, _SecondType, _ThirdType]: return container.bind_context_result(function) return factory returns-0.24.0/returns/pointfree/bind_future.py000066400000000000000000000034261472312074000216610ustar00rootroot00000000000000from collections.abc import Callable from typing import TypeVar from returns.future import Future from returns.interfaces.specific.future import FutureLikeN from returns.primitives.hkt import Kinded, KindN, kinded _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _FutureKind = TypeVar('_FutureKind', bound=FutureLikeN) def bind_future( function: Callable[[_FirstType], Future[_UpdatedType]], ) -> Kinded[Callable[ [KindN[_FutureKind, _FirstType, _SecondType, _ThirdType]], KindN[_FutureKind, _UpdatedType, _SecondType, _ThirdType], ]]: """ Compose a container and sync function returning ``Future``. In other words, it modifies the function signature from: ``a -> Future[b]`` to: ``Container[a] -> Container[b]`` Similar to :func:`returns.pointfree.lash`, but works for successful containers. This is how it should be used: .. code:: python >>> import anyio >>> from returns.pointfree import bind_future >>> from returns.future import Future >>> from returns.io import IO >>> def example(argument: int) -> Future[int]: ... return Future.from_value(argument + 1) >>> assert anyio.run( ... 
bind_future(example)(Future.from_value(1)).awaitable, ... ) == IO(2) Note, that this function works for all containers with ``.bind_future`` method. See :class:`returns.primitives.interfaces.specific.future.FutureLikeN` for more info. """ @kinded def factory( container: KindN[_FutureKind, _FirstType, _SecondType, _ThirdType], ) -> KindN[_FutureKind, _UpdatedType, _SecondType, _ThirdType]: return container.bind_future(function) return factory returns-0.24.0/returns/pointfree/bind_future_result.py000066400000000000000000000042121472312074000232510ustar00rootroot00000000000000from collections.abc import Callable from typing import TypeVar from returns.future import FutureResult from returns.interfaces.specific.future_result import FutureResultLikeN from returns.primitives.hkt import Kinded, KindN, kinded _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _FutureResultKind = TypeVar('_FutureResultKind', bound=FutureResultLikeN) def bind_future_result( function: Callable[[_FirstType], FutureResult[_UpdatedType, _SecondType]], ) -> Kinded[Callable[ [KindN[_FutureResultKind, _FirstType, _SecondType, _ThirdType]], KindN[_FutureResultKind, _UpdatedType, _SecondType, _ThirdType], ]]: """ Compose a container and async function returning ``FutureResult``. In other words, it modifies the function signature from: ``a -> FutureResult[b, c]`` to: ``Container[a, c] -> Container[b, c]`` This is how it should be used: .. code:: python >>> import anyio >>> from returns.pointfree import bind_future_result >>> from returns.future import FutureResult >>> from returns.io import IOSuccess, IOFailure >>> def example(argument: int) -> FutureResult[int, str]: ... return FutureResult.from_value(argument + 1) >>> assert anyio.run( ... bind_future_result(example)( ... FutureResult.from_value(1), ... ).awaitable, ... ) == IOSuccess(2) >>> assert anyio.run( ... bind_future_result(example)( ... FutureResult.from_failure('a'), ... ).awaitable, ... ) == IOFailure('a') .. currentmodule: returns.primitives.interfaces.specific.future_result Note, that this function works for all containers with ``.bind_async_future`` method. See :class:`~FutureResultLikeN` for more info. """ @kinded def factory( container: KindN[ _FutureResultKind, _FirstType, _SecondType, _ThirdType, ], ) -> KindN[_FutureResultKind, _UpdatedType, _SecondType, _ThirdType]: return container.bind_future_result(function) return factory returns-0.24.0/returns/pointfree/bind_io.py000066400000000000000000000031741472312074000207560ustar00rootroot00000000000000from __future__ import annotations from collections.abc import Callable from typing import TYPE_CHECKING, TypeVar from returns.interfaces.specific.io import IOLikeN from returns.primitives.hkt import Kinded, KindN, kinded if TYPE_CHECKING: from returns.io import IO # noqa: WPS433 _FirstType = TypeVar('_FirstType', contravariant=True) _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType', contravariant=True) _UpdatedType = TypeVar('_UpdatedType', covariant=True) _IOLikeKind = TypeVar('_IOLikeKind', bound=IOLikeN) def bind_io( function: Callable[[_FirstType], IO[_UpdatedType]], ) -> Kinded[Callable[ [KindN[_IOLikeKind, _FirstType, _SecondType, _ThirdType]], KindN[_IOLikeKind, _UpdatedType, _SecondType, _ThirdType], ]]: """ Composes successful container with a function that returns a container. 
In other words, it modifies the function's signature from: ``a -> IO[b]`` to: ``Container[a, c] -> Container[b, c]`` .. code:: python >>> from returns.io import IOSuccess, IOFailure >>> from returns.io import IO >>> from returns.pointfree import bind_io >>> def returns_io(arg: int) -> IO[int]: ... return IO(arg + 1) >>> bound = bind_io(returns_io) >>> assert bound(IO(1)) == IO(2) >>> assert bound(IOSuccess(1)) == IOSuccess(2) >>> assert bound(IOFailure(1)) == IOFailure(1) """ @kinded def factory( container: KindN[_IOLikeKind, _FirstType, _SecondType, _ThirdType], ) -> KindN[_IOLikeKind, _UpdatedType, _SecondType, _ThirdType]: return container.bind_io(function) return factory returns-0.24.0/returns/pointfree/bind_ioresult.py000066400000000000000000000034701472312074000222140ustar00rootroot00000000000000from __future__ import annotations from collections.abc import Callable from typing import TYPE_CHECKING, TypeVar from returns.interfaces.specific.ioresult import IOResultLikeN from returns.primitives.hkt import Kinded, KindN, kinded if TYPE_CHECKING: from returns.io import IOResult # noqa: WPS433 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _IOResultLikeKind = TypeVar('_IOResultLikeKind', bound=IOResultLikeN) def bind_ioresult( function: Callable[[_FirstType], IOResult[_UpdatedType, _SecondType]], ) -> Kinded[Callable[ [KindN[_IOResultLikeKind, _FirstType, _SecondType, _ThirdType]], KindN[_IOResultLikeKind, _UpdatedType, _SecondType, _ThirdType], ]]: """ Composes successful container with a function that returns a container. In other words, it modifies the function's signature from: ``a -> IOResult[b, c]`` to: ``Container[a, c] -> Container[b, c]`` .. code:: python >>> from returns.io import IOResult, IOSuccess >>> from returns.context import RequiresContextIOResult >>> from returns.pointfree import bind_ioresult >>> def returns_ioresult(arg: int) -> IOResult[int, str]: ... return IOSuccess(arg + 1) >>> bound = bind_ioresult(returns_ioresult) >>> assert bound(IOSuccess(1)) == IOSuccess(2) >>> assert bound( ... RequiresContextIOResult.from_value(1), ... )(...) == IOSuccess(2) """ @kinded def factory( container: KindN[ _IOResultLikeKind, _FirstType, _SecondType, _ThirdType, ], ) -> KindN[_IOResultLikeKind, _UpdatedType, _SecondType, _ThirdType]: return container.bind_ioresult(function) return factory returns-0.24.0/returns/pointfree/bind_optional.py000066400000000000000000000033151472312074000221710ustar00rootroot00000000000000from collections.abc import Callable from typing import TypeVar from returns.interfaces.specific.maybe import MaybeLikeN from returns.primitives.hkt import Kinded, KindN, kinded _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _MaybeLikeKind = TypeVar('_MaybeLikeKind', bound=MaybeLikeN) def bind_optional( function: Callable[[_FirstType], _UpdatedType | None], ) -> Kinded[Callable[ [KindN[_MaybeLikeKind, _FirstType, _SecondType, _ThirdType]], KindN[_MaybeLikeKind, _UpdatedType, _SecondType, _ThirdType], ]]: """ Binds a function returning optional value over a container. In other words, it modifies the function's signature from: ``a -> Optional[b]`` to: ``Container[a] -> Container[b]`` .. code:: python >>> from typing import Optional >>> from returns.pointfree import bind_optional >>> from returns.maybe import Some, Nothing >>> def example(argument: int) -> Optional[int]: ... 
return argument + 1 if argument > 0 else None >>> assert bind_optional(example)(Some(1)) == Some(2) >>> assert bind_optional(example)(Some(0)) == Nothing >>> assert bind_optional(example)(Nothing) == Nothing Note, that this function works for all containers with ``.bind_optional`` method. See :class:`returns.primitives.interfaces.specific.maybe._MaybeLikeKind` for more info. """ @kinded def factory( container: KindN[_MaybeLikeKind, _FirstType, _SecondType, _ThirdType], ) -> KindN[_MaybeLikeKind, _UpdatedType, _SecondType, _ThirdType]: return container.bind_optional(function) return factory returns-0.24.0/returns/pointfree/bind_result.py000066400000000000000000000033431472312074000216630ustar00rootroot00000000000000from __future__ import annotations from collections.abc import Callable from typing import TYPE_CHECKING, TypeVar from returns.interfaces.specific.result import ResultLikeN from returns.primitives.hkt import Kinded, KindN, kinded if TYPE_CHECKING: from returns.result import Result # noqa: WPS433 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _ResultLikeKind = TypeVar('_ResultLikeKind', bound=ResultLikeN) def bind_result( function: Callable[[_FirstType], Result[_UpdatedType, _SecondType]], ) -> Kinded[Callable[ [KindN[_ResultLikeKind, _FirstType, _SecondType, _ThirdType]], KindN[_ResultLikeKind, _UpdatedType, _SecondType, _ThirdType], ]]: """ Composes successful container with a function that returns a container. In other words, it modifies the function's signature from: ``a -> Result[b, c]`` to: ``Container[a, c] -> Container[b, c]`` .. code:: python >>> from returns.io import IOSuccess >>> from returns.context import RequiresContextResult >>> from returns.result import Result, Success >>> from returns.pointfree import bind_result >>> def returns_result(arg: int) -> Result[int, str]: ... return Success(arg + 1) >>> bound = bind_result(returns_result) >>> assert bound(IOSuccess(1)) == IOSuccess(2) >>> assert bound(RequiresContextResult.from_value(1))(...) == Success(2) """ @kinded def factory( container: KindN[_ResultLikeKind, _FirstType, _SecondType, _ThirdType], ) -> KindN[_ResultLikeKind, _UpdatedType, _SecondType, _ThirdType]: return container.bind_result(function) return factory returns-0.24.0/returns/pointfree/compose_result.py000066400000000000000000000034561472312074000224210ustar00rootroot00000000000000from collections.abc import Callable from typing import TypeVar from returns.interfaces.specific.ioresult import IOResultLikeN from returns.primitives.hkt import Kind3, Kinded, kinded from returns.result import Result _FirstType = TypeVar('_FirstType') _NewFirstType = TypeVar('_NewFirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _IOResultLikeKind = TypeVar('_IOResultLikeKind', bound=IOResultLikeN) def compose_result( function: Callable[ [Result[_FirstType, _SecondType]], Kind3[_IOResultLikeKind, _NewFirstType, _SecondType, _ThirdType], ], ) -> Kinded[Callable[ [Kind3[_IOResultLikeKind, _FirstType, _SecondType, _ThirdType]], Kind3[_IOResultLikeKind, _NewFirstType, _SecondType, _ThirdType], ]]: """ Composes inner ``Result`` with ``IOResultLike`` returning function. Can be useful when you need an access to both states of the result. .. 
code:: python >>> from returns.io import IOResult, IOSuccess, IOFailure >>> from returns.pointfree import compose_result >>> from returns.result import Result >>> def modify_string(container: Result[str, str]) -> IOResult[str, str]: ... return IOResult.from_result( ... container.map(str.upper).alt(str.lower), ... ) >>> assert compose_result(modify_string)( ... IOSuccess('success') ... ) == IOSuccess('SUCCESS') >>> assert compose_result(modify_string)( ... IOFailure('FAILURE') ... ) == IOFailure('failure') """ @kinded def factory( container: Kind3[ _IOResultLikeKind, _FirstType, _SecondType, _ThirdType, ], ) -> Kind3[_IOResultLikeKind, _NewFirstType, _SecondType, _ThirdType]: return container.compose_result(function) return factory returns-0.24.0/returns/pointfree/cond.py000066400000000000000000000042641472312074000202770ustar00rootroot00000000000000from collections.abc import Callable from typing import TypeVar, overload from returns.context import NoDeps from returns.interfaces.failable import DiverseFailableN, SingleFailableN from returns.methods.cond import internal_cond from returns.primitives.hkt import Kinded, KindN _ValueType = TypeVar('_ValueType') _ErrorType = TypeVar('_ErrorType') _DiverseFailableKind = TypeVar('_DiverseFailableKind', bound=DiverseFailableN) _SingleFailableKind = TypeVar('_SingleFailableKind', bound=SingleFailableN) @overload def cond( container_type: type[_SingleFailableKind], success_value: _ValueType, ) -> Kinded[ Callable[ [bool], KindN[_SingleFailableKind, _ValueType, _ErrorType, NoDeps], ] ]: """Reduce the boilerplate when choosing paths with ``SingleFailableN``.""" @overload def cond( container_type: type[_DiverseFailableKind], success_value: _ValueType, error_value: _ErrorType, ) -> Kinded[ Callable[ [bool], KindN[_DiverseFailableKind, _ValueType, _ErrorType, NoDeps], ] ]: """Reduce the boilerplate when choosing paths with ``DiverseFailableN``.""" def cond( container_type: ( type[_SingleFailableKind] | type[_DiverseFailableKind] ), success_value: _ValueType, error_value: _ErrorType | None = None, ): """ Reduce the boilerplate when choosing paths. Works with ``SingleFailableN`` (e.g. ``Maybe``) and ``DiverseFailableN`` (e.g. ``Result``). Example using ``cond`` with the ``Result`` container: .. code:: python >>> from returns.pointfree import cond >>> from returns.result import Failure, Result, Success >>> assert cond(Result, 'success', 'failure')(True) == Success('success') >>> assert cond(Result, 'success', 'failure')(False) == Failure('failure') Example using ``cond`` with the ``Maybe`` container: .. 
code:: python >>> from returns.maybe import Maybe, Some, Nothing >>> assert cond(Maybe, 10.0)(True) == Some(10.0) >>> assert cond(Maybe, 10.0)(False) == Nothing """ def factory(is_success: bool): return internal_cond( container_type, is_success, success_value, error_value, ) return factory returns-0.24.0/returns/pointfree/lash.py000066400000000000000000000033211472312074000202740ustar00rootroot00000000000000from collections.abc import Callable from typing import TypeVar from returns.interfaces.lashable import LashableN from returns.primitives.hkt import Kinded, KindN, kinded _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _LashableKind = TypeVar('_LashableKind', bound=LashableN) def lash( function: Callable[ [_SecondType], KindN[_LashableKind, _FirstType, _UpdatedType, _ThirdType], ], ) -> Kinded[Callable[ [KindN[_LashableKind, _FirstType, _SecondType, _ThirdType]], KindN[_LashableKind, _FirstType, _UpdatedType, _ThirdType], ]]: """ Turns function's input parameter from a regular value to a container. In other words, it modifies the function signature from: ``a -> Container[b]`` to: ``Container[a] -> Container[b]`` Similar to :func:`returns.pointfree.bind`, but works for failed containers. This is how it should be used: .. code:: python >>> from returns.pointfree import lash >>> from returns.result import Success, Failure, Result >>> def example(argument: int) -> Result[str, int]: ... return Success(argument + 1) >>> assert lash(example)(Success('a')) == Success('a') >>> assert lash(example)(Failure(1)) == Success(2) Note, that this function works for all containers with ``.lash`` method. See :class:`returns.interfaces.lashable.Lashable` for more info. """ @kinded def factory( container: KindN[_LashableKind, _FirstType, _SecondType, _ThirdType], ) -> KindN[_LashableKind, _FirstType, _UpdatedType, _ThirdType]: return container.lash(function) return factory returns-0.24.0/returns/pointfree/map.py000066400000000000000000000030101472312074000201150ustar00rootroot00000000000000from collections.abc import Callable from typing import TypeVar from returns.interfaces.mappable import MappableN from returns.primitives.hkt import Kinded, KindN, kinded _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _MappableKind = TypeVar('_MappableKind', bound=MappableN) def map_( function: Callable[[_FirstType], _UpdatedType], ) -> Kinded[Callable[ [KindN[_MappableKind, _FirstType, _SecondType, _ThirdType]], KindN[_MappableKind, _UpdatedType, _SecondType, _ThirdType], ]]: """ Lifts function to be wrapped in a container for better composition. In other words, it modifies the function's signature from: ``a -> b`` to: ``Container[a] -> Container[b]`` This is how it should be used: .. code:: python >>> from returns.io import IO >>> from returns.pointfree import map_ >>> def example(argument: int) -> float: ... return argument / 2 >>> assert map_(example)(IO(1)) == IO(0.5) Note, that this function works for all containers with ``.map`` method. See :class:`returns.primitives.interfaces.mappable.MappableN` for more info. 
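Point-free helpers like this one are meant to be composed in pipelines.
A minimal sketch, reusing the ``example`` function above together with
:func:`returns.pipeline.flow`:

.. code:: python

  >>> from returns.pipeline import flow
  >>> assert flow(IO(1), map_(example)) == IO(0.5)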
See also: - https://wiki.haskell.org/Lifting """ @kinded def factory( container: KindN[_MappableKind, _FirstType, _SecondType, _ThirdType], ) -> KindN[_MappableKind, _UpdatedType, _SecondType, _ThirdType]: return container.map(function) return factory returns-0.24.0/returns/pointfree/modify_env.py000066400000000000000000000056531472312074000215160ustar00rootroot00000000000000from collections.abc import Callable from typing import TypeVar from returns.interfaces.specific.reader import ReaderLike2, ReaderLike3 from returns.primitives.hkt import Kind2, Kind3, Kinded, kinded _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') _Reader2Kind = TypeVar('_Reader2Kind', bound=ReaderLike2) _Reader3Kind = TypeVar('_Reader3Kind', bound=ReaderLike3) def modify_env2( function: Callable[[_UpdatedType], _SecondType], ) -> Kinded[Callable[ [Kind2[_Reader2Kind, _FirstType, _SecondType]], Kind2[_Reader2Kind, _FirstType, _UpdatedType], ]]: """ Modifies the second type argument of a ``ReaderLike2``. In other words, it modifies the function's signature from: ``a -> b`` to: ``Container[x, a] -> Container[x, b]`` .. code:: python >>> from returns.pointfree import modify_env2 >>> from returns.context import RequiresContext >>> def multiply(arg: int) -> RequiresContext[int, int]: ... return RequiresContext(lambda deps: arg * deps) >>> assert modify_env2(int)(multiply(3))('4') == 12 Note, that this function works with only ``Kind2`` containers with ``.modify_env`` method. See :class:`returns.primitives.interfaces.specific.reader.ReaderLike2` for more info. """ @kinded def factory( container: Kind2[_Reader2Kind, _FirstType, _SecondType], ) -> Kind2[_Reader2Kind, _FirstType, _UpdatedType]: return container.modify_env(function) return factory def modify_env3( function: Callable[[_UpdatedType], _ThirdType], ) -> Kinded[Callable[ [Kind3[_Reader3Kind, _FirstType, _SecondType, _ThirdType]], Kind3[_Reader3Kind, _FirstType, _SecondType, _UpdatedType], ]]: """ Modifies the third type argument of a ``ReaderLike3``. In other words, it modifies the function's signature from: ``a -> b`` to: ``Container[x, a] -> Container[x, b]`` .. code:: python >>> from returns.pointfree import modify_env >>> from returns.context import RequiresContextResultE >>> from returns.result import Success, safe >>> def divide(arg: int) -> RequiresContextResultE[float, int]: ... return RequiresContextResultE(safe(lambda deps: arg / deps)) >>> assert modify_env(int)(divide(3))('2') == Success(1.5) >>> assert modify_env(int)(divide(3))('0').failure() Note, that this function works with only ``Kind3`` containers with ``.modify_env`` method. See :class:`returns.primitives.interfaces.specific.reader.ReaderLike3` for more info. """ @kinded def factory( container: Kind3[_Reader3Kind, _FirstType, _SecondType, _ThirdType], ) -> Kind3[_Reader3Kind, _FirstType, _SecondType, _UpdatedType]: return container.modify_env(function) return factory #: Useful alias for :func:`~modify_env3`. 
modify_env = modify_env3 returns-0.24.0/returns/pointfree/unify.py000066400000000000000000000041471472312074000205060ustar00rootroot00000000000000from collections.abc import Callable from typing import TypeVar from returns.interfaces.failable import DiverseFailableN from returns.primitives.hkt import Kinded, KindN, kinded _FirstType = TypeVar('_FirstType') _NewFirstType = TypeVar('_NewFirstType') _SecondType = TypeVar('_SecondType') _NewSecondType = TypeVar('_NewSecondType') _ThirdType = TypeVar('_ThirdType') _NewThirdType = TypeVar('_NewThirdType') _DiverseFailableKind = TypeVar('_DiverseFailableKind', bound=DiverseFailableN) def unify( # noqa: WPS234 function: Callable[ [_FirstType], KindN[ _DiverseFailableKind, _NewFirstType, _NewSecondType, _NewThirdType, ], ], ) -> Kinded[ Callable[ [KindN[_DiverseFailableKind, _FirstType, _SecondType, _ThirdType]], KindN[ _DiverseFailableKind, _NewFirstType, _SecondType | _NewSecondType, _NewThirdType, ], ] ]: """ Composes successful container with a function that returns a container. Similar to :func:`~returns.pointfree.bind` but has different type. It returns ``Result[ValueType, Union[OldErrorType, NewErrorType]]`` instead of ``Result[ValueType, OldErrorType]``. So, it can be more useful in some situations. Probably with specific exceptions. .. code:: python >>> from returns.methods import cond >>> from returns.pointfree import unify >>> from returns.result import Result, Success, Failure >>> def bindable(arg: int) -> Result[int, int]: ... return cond(Result, arg % 2 == 0, arg + 1, arg - 1) >>> assert unify(bindable)(Success(2)) == Success(3) >>> assert unify(bindable)(Success(1)) == Failure(0) >>> assert unify(bindable)(Failure(42)) == Failure(42) """ @kinded def factory( container: KindN[ _DiverseFailableKind, _FirstType, _SecondType, _ThirdType, ], ) -> KindN[ _DiverseFailableKind, _NewFirstType, _SecondType | _NewSecondType, _NewThirdType, ]: return container.bind(function) # type: ignore return factory returns-0.24.0/returns/primitives/000077500000000000000000000000001472312074000171745ustar00rootroot00000000000000returns-0.24.0/returns/primitives/__init__.py000066400000000000000000000000001472312074000212730ustar00rootroot00000000000000returns-0.24.0/returns/primitives/asserts.py000066400000000000000000000025501472312074000212340ustar00rootroot00000000000000def assert_equal( first, second, *, deps=None, backend: str = 'asyncio', ) -> None: """ Custom ``assert`` function to compare two any containers. The important note here is that this ``assert`` should probably used in tests. Not real application code. It will call all ``Reader`` based containers and ``await`` all ``Future`` based ones. It also works recursively. For example, ``ReaderFutureResult`` will be called and then awaited. You can specify different dependencies to call your containers. And different backends to ``await`` then using ``anyio``. By the way, ``anyio`` should be installed separately. 
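A minimal sketch of how it can be used inside a test, assuming ``anyio``
is available for the ``Future``-based case:

.. code:: python

  >>> from returns.io import IO
  >>> from returns.future import Future
  >>> assert_equal(IO(1), IO(1))
  >>> assert_equal(Future.from_value(1), Future.from_value(1))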
""" assert _convert( first, deps=deps, backend=backend, ) == _convert( second, deps=deps, backend=backend, ), '{0} == {1}'.format(first, second) def _convert(container, *, deps, backend: str): from returns.interfaces.specific import future, reader if isinstance(container, future.AwaitableFutureN): import anyio return _convert( anyio.run(container.awaitable, backend=backend), deps=deps, backend=backend, ) elif isinstance(container, reader.Contextable): return _convert( container(deps), deps=deps, backend=backend, ) return container returns-0.24.0/returns/primitives/container.py000066400000000000000000000053021472312074000215300ustar00rootroot00000000000000from abc import ABCMeta from typing import Any, TypeVar from typing_extensions import TypedDict from returns.interfaces.equable import Equable from returns.primitives.hkt import Kind1 from returns.primitives.types import Immutable _EqualType = TypeVar('_EqualType', bound=Equable) class _PickleState(TypedDict): """Dict to represent the `BaseContainer` state to be pickled.""" # TODO: Remove `__slots__` from here when `slotscheck` allow ignore classes # by using comments. We don't need the slots here since this class is just # a representation of a dictionary and should not be instantiated by any # means. # See: https://github.com/ariebovenberg/slotscheck/issues/71 __slots__ = ('container_value',) # type: ignore container_value: Any class BaseContainer(Immutable, metaclass=ABCMeta): """Utility class to provide all needed magic methods to the context.""" __slots__ = ('_inner_value',) _inner_value: Any def __init__(self, inner_value) -> None: """ Wraps the given value in the Container. 'value' is any arbitrary value of any type including functions. """ object.__setattr__(self, '_inner_value', inner_value) # noqa: WPS609 def __repr__(self) -> str: """Used to display details of object.""" return '<{0}: {1}>'.format( self.__class__.__qualname__.strip('_'), str(self._inner_value), ) def __eq__(self, other: Any) -> bool: """Used to compare two 'Container' objects.""" return container_equality(self, other) # type: ignore def __hash__(self) -> int: """Used to use this value as a key.""" return hash(self._inner_value) def __getstate__(self) -> _PickleState: """That's how this object will be pickled.""" return {'container_value': self._inner_value} # type: ignore def __setstate__(self, state: _PickleState | Any) -> None: """Loading state from pickled data.""" if isinstance(state, dict) and 'container_value' in state: object.__setattr__( # noqa: WPS609 self, '_inner_value', state['container_value'], ) else: # backward compatibility with 0.19.0 and earlier object.__setattr__(self, '_inner_value', state) # noqa: WPS609 def container_equality( self: Kind1[_EqualType, Any], other: Kind1[_EqualType, Any], ) -> bool: """ Function to compare similar containers. Compares both their types and their inner values. 
""" if type(self) != type(other): # noqa: WPS516, E721 return False return bool( self._inner_value == other._inner_value, # type: ignore # noqa: WPS437 ) returns-0.24.0/returns/primitives/exceptions.py000066400000000000000000000020671472312074000217340ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING if TYPE_CHECKING: from returns.interfaces.unwrappable import Unwrappable # noqa: WPS433 class UnwrapFailedError(Exception): """Raised when a container can not be unwrapped into a meaningful value.""" __slots__ = ('halted_container',) def __init__(self, container: Unwrappable) -> None: """ Saves halted container in the inner state. So, this container can later be unpacked from this exception and used as a regular value. """ super().__init__() self.halted_container = container class ImmutableStateError(AttributeError): """ Raised when a container is forced to be mutated. It is a sublclass of ``AttributeError`` for two reasons: 1. It seems kinda reasonable to expect ``AttributeError`` on attribute modification 2. It is used inside ``typing.py`` this way, we do have several typing features that requires that behaviour See: https://github.com/dry-python/returns/issues/394 """ returns-0.24.0/returns/primitives/hkt.py000066400000000000000000000204771472312074000203460ustar00rootroot00000000000000from collections.abc import Callable from typing import TYPE_CHECKING, Any, Generic, Protocol, TypeVar from typing_extensions import Never, TypeVarTuple, Unpack _InstanceType = TypeVar('_InstanceType', covariant=True) _TypeArgType1 = TypeVar('_TypeArgType1', covariant=True) _TypeArgType2 = TypeVar('_TypeArgType2', covariant=True) _TypeArgType3 = TypeVar('_TypeArgType3', covariant=True) _FunctionDefType = TypeVar( '_FunctionDefType', bound=Callable, covariant=True, # This is a must! Otherwise it would not work. ) _FunctionType = TypeVar( '_FunctionType', bound=Callable, ) _UpdatedType = TypeVar('_UpdatedType') _FirstKind = TypeVar('_FirstKind') _SecondKind = TypeVar('_SecondKind') _TypeVars = TypeVarTuple('_TypeVars') class KindN(Generic[_InstanceType, Unpack[_TypeVars]]): """ Emulation support for Higher Kinded Types. Consider ``KindN`` to be an alias of ``Generic`` type. But with some extra goodies. ``KindN`` is the top-most type for other ``Kind`` types like ``Kind1``, ``Kind2``, ``Kind3``, etc. The only difference between them is how many type arguments they can hold. ``Kind1`` can hold just two type arguments: ``Kind1[IO, int]`` which is almost equals to ``IO[int]``. ``Kind2`` can hold just two type arguments: ``Kind2[IOResult, int, str]`` which is almost equals to ``IOResult[int, str]``. And so on. The idea behind ``KindN`` is that one cannot write this code: .. code:: python from typing import TypeVar T = TypeVar('T') V = TypeVar('V') def impossible(generic: T, value: V) -> T[V]: return generic(value) But, with ``KindN`` this becomes possible in a form of ``Kind1[T, V]``. .. note:: To make sure it works correctly, your type has to be a subtype of ``KindN``. We use a custom ``mypy`` plugin to make sure types are correct. Otherwise, it is currently impossible to properly type this. We use "emulated Higher Kinded Types" concept. Read the whitepaper: https://bit.ly/2ABACx2 ``KindN`` does not exist in runtime. It is used just for typing. There are (and must be) no instances of this type directly. .. 
rubric:: Implementation details We didn't use ``ABCMeta`` to disallow its creation, because we don't want to have a possible metaclass conflict with other metaclasses. Current API allows you to mix ``KindN`` anywhere. We allow ``_InstanceType`` of ``KindN`` to be ``Instance`` type or ``TypeVarType`` with ``bound=...``. See also: - https://arrow-kt.io/docs/0.10/patterns/glossary/#higher-kinds - https://github.com/gcanti/fp-ts/blob/master/docs/guides/HKT.md - https://bow-swift.io/docs/fp-concepts/higher-kinded-types - https://github.com/pelotom/hkts """ __slots__ = () if TYPE_CHECKING: # noqa: WPS604 # pragma: no cover def __getattr__(self, attrname: str): """ This function is required for ``get_attribute_hook`` in mypy plugin. It is never called in real-life, because ``KindN`` is abstract. It only exists during the type-checking phase. """ #: Type alias for kinds with one type argument. Kind1 = KindN[_InstanceType, _TypeArgType1, Any, Any] #: Type alias for kinds with two type arguments. Kind2 = KindN[_InstanceType, _TypeArgType1, _TypeArgType2, Any] #: Type alias for kinds with three type arguments. Kind3 = KindN[_InstanceType, _TypeArgType1, _TypeArgType2, _TypeArgType3] class SupportsKindN(KindN[_InstanceType, Unpack[_TypeVars]]): """ Base class for your containers. Notice, that we use ``KindN`` / ``Kind1`` to annotate values, but we use ``SupportsKindN`` / ``SupportsKind1`` to inherit from. .. rubric:: Implementation details The only thing this class does is: making sure that the resulting classes won't have ``__getattr__`` available during the typechecking phase. Needless to say, that ``__getattr__`` during runtime - never exists at all. """ __slots__ = () __getattr__: None # type: ignore #: Type alias used for inheritance with one type argument. SupportsKind1 = SupportsKindN[ _InstanceType, _TypeArgType1, Never, Never, ] #: Type alias used for inheritance with two type arguments. SupportsKind2 = SupportsKindN[ _InstanceType, _TypeArgType1, _TypeArgType2, Never, ] #: Type alias used for inheritance with three type arguments. SupportsKind3 = SupportsKindN[ _InstanceType, _TypeArgType1, _TypeArgType2, _TypeArgType3, ] def dekind( kind: KindN[_InstanceType, _TypeArgType1, _TypeArgType2, _TypeArgType3], ) -> _InstanceType: """ Turns ``Kind1[IO, int]`` type into real ``IO[int]`` type. Should be used when you are left with accidental ``KindN`` instance when you really want to have the real type. Works with type arguments of any length. We use a custom ``mypy`` plugin to make sure types are correct. Otherwise, it is currently impossible to properly type this. In runtime it just returns the passed argument, nothing really happens: .. code:: python >>> from returns.io import IO >>> from returns.primitives.hkt import Kind1 >>> container: Kind1[IO, int] = IO(1) >>> assert dekind(container) is container However, please, do not use this function unless you know exactly what you are doing and why do you need it. """ return kind # type: ignore # Utils to define kinded functions # ================================ # TODO: in the future we would be able to write a custom plugin # with `transform_kind(T) -> T'` support. # It would visit all the possible `KindN[]` types in any type and run `dekind` # on them, so this will be how it works: # in: => Callable[[KindN[IO[Any], int]], KindN[IO[Any], str]] # out: => Callable[[IO[int]], IO[str]] # This will allow to have better support for callable protocols and similar. 
# Blocked by: https://github.com/python/mypy/issues/9001 class Kinded(Protocol[_FunctionDefType]): # type: ignore """ Protocol that tracks kinded functions calls. We use a custom ``mypy`` plugin to make sure types are correct. Otherwise, it is currently impossible to properly type this. """ __slots__ = () #: Used to translate `KindN` into real types. __call__: _FunctionDefType def __get__( self, instance: _UpdatedType, type_, ) -> Callable[..., _UpdatedType]: """Used to decorate and properly analyze method calls.""" def kinded(function: _FunctionType) -> Kinded[_FunctionType]: """ Decorator to be used when you want to dekind the function's return type. Does nothing in runtime, just returns its argument. We use a custom ``mypy`` plugin to make sure types are correct. Otherwise, it is currently impossible to properly type this. Here's an example of how it should be used: .. code:: python >>> from typing import TypeVar >>> from returns.primitives.hkt import KindN, kinded >>> from returns.interfaces.bindable import BindableN >>> _Binds = TypeVar('_Binds', bound=BindableN) # just an example >>> _Type1 = TypeVar('_Type1') >>> _Type2 = TypeVar('_Type2') >>> _Type3 = TypeVar('_Type3') >>> @kinded ... def bindable_identity( ... container: KindN[_Binds, _Type1, _Type2, _Type3], ... ) -> KindN[_Binds, _Type1, _Type2, _Type3]: ... return container # just do nothing As you can see, here we annotate our return type as ``-> KindN[_Binds, _Type1, _Type2, _Type3]``, it would be true without ``@kinded`` decorator. But, ``@kinded`` decorator dekinds the return type and infers the real type behind it: .. code:: python >>> from returns.io import IO, IOResult >>> assert bindable_identity(IO(1)) == IO(1) >>> # => Revealed type: 'IO[int]' >>> iores: IOResult[int, str] = IOResult.from_value(1) >>> assert bindable_identity(iores) == iores >>> # => Revealed type: 'IOResult[int, str]' The difference is very clear in ``methods`` modules, like: - Raw :func:`returns.methods.bind.internal_bind` that returns ``KindN`` instance - User-facing :func:`returns.methods.bind.bind` that returns the container type You must use this decorator for your own kinded functions as well. """ return function # type: ignore returns-0.24.0/returns/primitives/laws.py000066400000000000000000000073021472312074000205160ustar00rootroot00000000000000from collections.abc import Callable, Sequence from typing import ClassVar, Generic, TypeVar, final from returns.primitives.types import Immutable _Caps = TypeVar('_Caps') _ReturnType = TypeVar('_ReturnType') _TypeArgType1 = TypeVar('_TypeArgType1') _TypeArgType2 = TypeVar('_TypeArgType2') _TypeArgType3 = TypeVar('_TypeArgType3') #: Special alias to define laws as functions even inside a class law_definition = staticmethod class Law(Immutable): """ Base class for all laws. Does not have an attached signature. Should not be used directly. Use ``Law1``, ``Law2`` or ``Law3`` instead. """ __slots__ = ('definition', ) #: Function used to define this law. definition: Callable def __init__(self, function) -> None: """Saves function to the inner state.""" object.__setattr__(self, 'definition', function) # noqa: WPS609 @final @property def name(self) -> str: """Returns a name of the given law. 
Basically a name of the function.""" return self.definition.__name__ @final class Law1( Law, Generic[_TypeArgType1, _ReturnType], ): """Law definition for functions with a single argument.""" __slots__ = () definition: Callable[['Law1', _TypeArgType1], _ReturnType] def __init__( self, function: Callable[[_TypeArgType1], _ReturnType], ) -> None: """Saves function of one argument to the inner state.""" super().__init__(function) @final class Law2( Law, Generic[_TypeArgType1, _TypeArgType2, _ReturnType], ): """Law definition for functions with two arguments.""" __slots__ = () definition: Callable[['Law2', _TypeArgType1, _TypeArgType2], _ReturnType] def __init__( self, function: Callable[[_TypeArgType1, _TypeArgType2], _ReturnType], ) -> None: """Saves function of two arguments to the inner state.""" super().__init__(function) @final class Law3( Law, Generic[_TypeArgType1, _TypeArgType2, _TypeArgType3, _ReturnType], ): """Law definition for functions with three argument.""" __slots__ = () definition: Callable[ ['Law3', _TypeArgType1, _TypeArgType2, _TypeArgType3], _ReturnType, ] def __init__( self, function: Callable[ [_TypeArgType1, _TypeArgType2, _TypeArgType3], _ReturnType, ], ) -> None: """Saves function of three arguments to the inner state.""" super().__init__(function) class Lawful(Generic[_Caps]): """ Base class for all lawful classes. Allows to smartly collect all defined laws from all parent classes. """ __slots__ = () #: Some classes and interfaces might have laws, some might not have any. _laws: ClassVar[Sequence[Law]] @final # noqa: WPS210 @classmethod def laws(cls) -> dict[type['Lawful'], Sequence[Law]]: # noqa: WPS210 """ Collects all laws from all parent classes. Algorithm: 1. First, we collect all unique parents in ``__mro__`` 2. Then we get the laws definition from each of them 3. Then we structure them in a ``type: its_laws`` way """ seen = { '{0}.{1}'.format( parent.__module__, # noqa: WPS609 parent.__qualname__, ): parent for parent in cls.__mro__ } laws = {} for klass in seen.values(): current_laws = klass.__dict__.get('_laws', ()) # noqa: WPS609 if not current_laws: continue laws[klass] = current_laws return laws class LawSpecDef: """Base class for all collection of laws aka LawSpecs.""" __slots__ = () returns-0.24.0/returns/primitives/reawaitable.py000066400000000000000000000071241472312074000220320ustar00rootroot00000000000000from collections.abc import Awaitable, Callable, Generator from typing import NewType, TypeVar, cast, final _ValueType = TypeVar('_ValueType') _FunctionCoroType = TypeVar('_FunctionCoroType', bound=Callable[..., Awaitable]) _Sentinel = NewType('_Sentinel', object) _sentinel: _Sentinel = cast(_Sentinel, object()) @final class ReAwaitable: """ Allows to write coroutines that can be awaited multiple times. It works by actually caching the ``await`` result and reusing it. So, in reality we still ``await`` once, but pretending to do it multiple times. Why is that required? Because otherwise, ``Future`` containers would be unusable: .. code:: python >>> import anyio >>> from returns.future import Future >>> from returns.io import IO >>> async def example(arg: int) -> int: ... return arg >>> instance = Future(example(1)) >>> two = instance.map(lambda x: x + 1) >>> zero = instance.map(lambda x: x - 1) >>> assert anyio.run(two.awaitable) == IO(2) >>> assert anyio.run(zero.awaitable) == IO(0) In this example we ``await`` our ``Future`` twice. It happens in each ``.map`` call. 
Without this class (that is used inside ``Future``) it would result in ``RuntimeError: cannot reuse already awaited coroutine``. We try to make this type transparent. It should not actually be visible to any of its users. """ __slots__ = ('_coro', '_cache') def __init__(self, coro: Awaitable[_ValueType]) -> None: """We need just an awaitable to work with.""" self._coro = coro self._cache: _ValueType | _Sentinel = _sentinel def __await__(self) -> Generator[None, None, _ValueType]: """ Allows to use ``await`` multiple times. .. code:: python >>> import anyio >>> from returns.primitives.reawaitable import ReAwaitable >>> async def say_hello() -> str: ... return 'Hello' >>> async def main(): ... instance = ReAwaitable(say_hello()) ... print(await instance) ... print(await instance) ... print(await instance) >>> anyio.run(main) Hello Hello Hello """ return self._awaitable().__await__() # noqa: WPS609 def __repr__(self) -> str: """ Formats this type the same way as the coroutine underneath. .. code:: python >>> from returns.primitives.reawaitable import ReAwaitable >>> async def test() -> int: ... return 1 >>> assert repr(test) == repr(ReAwaitable(test)) >>> repr(ReAwaitable(test)) '' """ return repr(self._coro) async def _awaitable(self) -> _ValueType: """Caches the once awaited value forever.""" if self._cache is _sentinel: self._cache = await self._coro return self._cache # type: ignore def reawaitable(coro: _FunctionCoroType) -> _FunctionCoroType: """ Allows to decorate coroutine functions to be awaitable multiple times. .. code:: python >>> import anyio >>> from returns.primitives.reawaitable import reawaitable >>> @reawaitable ... async def return_int() -> int: ... return 1 >>> async def main(): ... instance = return_int() ... return await instance + await instance + await instance >>> assert anyio.run(main) == 3 """ return lambda *args, **kwargs: ReAwaitable( # type: ignore coro(*args, **kwargs), ) returns-0.24.0/returns/primitives/tracing.py000066400000000000000000000061301472312074000211750ustar00rootroot00000000000000import types from collections.abc import Callable, Iterator from contextlib import contextmanager from inspect import FrameInfo, stack from typing import ContextManager, TypeVar, overload from returns.result import Failure _FunctionType = TypeVar('_FunctionType', bound=Callable) @overload def collect_traces() -> ContextManager[None]: """Context Manager to active traces collect to the Failures.""" @overload def collect_traces(function: _FunctionType) -> _FunctionType: """Decorator to active traces collect to the Failures.""" def collect_traces( function: _FunctionType | None = None, ) -> _FunctionType | ContextManager[None]: # noqa: DAR101, DAR201, DAR301 """ Context Manager/Decorator to active traces collect to the Failures. .. code:: python >>> from inspect import FrameInfo >>> from returns.io import IOResult >>> from returns.result import Result >>> from returns.primitives.tracing import collect_traces >>> with collect_traces(): ... traced_failure = Result.from_failure('Traced Failure') >>> non_traced_failure = IOResult.from_failure('Non Traced Failure') >>> assert non_traced_failure.trace is None >>> assert isinstance(traced_failure.trace, list) >>> assert all(isinstance(trace_line, FrameInfo) for trace_line in traced_failure.trace) >>> for trace_line in traced_failure.trace: ... print( # doctest: +SKIP ... '{0}:{1} in `{2}`'.format( ... trace_line.filename, ... trace_line.lineno, ... trace_line.function, ... ), ... ) ... 
/returns/returns/result.py:525 in `Failure` /returns/returns/result.py:322 in `from_failure` /example_folder/example.py:1 in `` # doctest: # noqa: DAR301, E501 """ @contextmanager def factory() -> Iterator[None]: unpatched_get_trace = getattr(Failure, '_get_trace') # noqa: B009 substitute_get_trace = types.MethodType(_get_trace, Failure) setattr(Failure, '_get_trace', substitute_get_trace) # noqa: B010 try: # noqa: WPS501 yield finally: setattr(Failure, '_get_trace', unpatched_get_trace) # noqa: B010 return factory()(function) if function else factory() def _get_trace(_self: Failure) -> list[FrameInfo] | None: """ Function to be used on Monkey Patching. This function is the substitute for '_get_trace' method from ``Failure`` class on Monkey Patching promoted by :func:`returns.primitives.tracing.collect_traces` function. We get all the call stack from the current call and return it from the third position, to avoid two useless calls on the call stack. Those useless calls are a call to this function and a call to `__init__` method from ``Failure`` class. We're just interested in the call stack ending on ``Failure`` function call! See also: - https://github.com/dry-python/returns/issues/409 """ current_stack = stack() return current_stack[2:] returns-0.24.0/returns/primitives/types.py000066400000000000000000000024151472312074000207140ustar00rootroot00000000000000from typing import Any from typing_extensions import Never, Self from returns.primitives.exceptions import ImmutableStateError class Immutable: """ Helper type for objects that should be immutable. When applied, each instance becomes immutable. Nothing can be added or deleted from it. .. code:: pycon :force: >>> from returns.primitives.types import Immutable >>> class MyModel(Immutable): ... ... >>> model = MyModel() >>> model.prop = 1 Traceback (most recent call last): ... returns.primitives.exceptions.ImmutableStateError See :class:`returns.primitives.container.BaseContainer` for examples. 
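Attribute deletion is blocked in the same way; an illustrative continuation of the example above:

    .. code:: pycon
      :force:

      >>> del model.prop
      Traceback (most recent call last):
        ...
      returns.primitives.exceptions.ImmutableStateError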
""" # noqa: RST307 __slots__ = () def __copy__(self) -> Self: """Returns itself.""" return self def __deepcopy__(self, memo: dict[Any, Any]) -> Self: """Returns itself.""" return self def __setattr__(self, attr_name: str, attr_value: Any) -> Never: """Makes inner state of the containers immutable for modification.""" raise ImmutableStateError() def __delattr__(self, attr_name: str) -> Never: # noqa: WPS603 """Makes inner state of the containers immutable for deletion.""" raise ImmutableStateError() returns-0.24.0/returns/py.typed000066400000000000000000000000001472312074000164660ustar00rootroot00000000000000returns-0.24.0/returns/result.py000066400000000000000000000424671472312074000167060ustar00rootroot00000000000000from abc import ABCMeta from collections.abc import Callable, Generator, Iterator from functools import wraps from inspect import FrameInfo from typing import TYPE_CHECKING, Any, TypeAlias, TypeVar, final, overload from typing_extensions import Never, ParamSpec from returns.interfaces.specific import result from returns.primitives.container import BaseContainer, container_equality from returns.primitives.exceptions import UnwrapFailedError from returns.primitives.hkt import Kind2, SupportsKind2 # Definitions: _ValueType = TypeVar('_ValueType', covariant=True) _NewValueType = TypeVar('_NewValueType') _ErrorType = TypeVar('_ErrorType', covariant=True) _NewErrorType = TypeVar('_NewErrorType') _FirstType = TypeVar('_FirstType') _FuncParams = ParamSpec('_FuncParams') class Result( # type: ignore[type-var] BaseContainer, SupportsKind2['Result', _ValueType, _ErrorType], result.ResultBased2[_ValueType, _ErrorType], metaclass=ABCMeta, ): """ Base class for :class:`~Failure` and :class:`~Success`. :class:`~Result` does not have a public constructor. Use :func:`~Success` and :func:`~Failure` to construct the needed values. See also: - https://bit.ly/361qQhi - https://hackernoon.com/the-throw-keyword-was-a-mistake-l9e532di """ __slots__ = ('_trace',) __match_args__ = ('_inner_value',) _inner_value: _ValueType | _ErrorType _trace: list[FrameInfo] | None #: Typesafe equality comparison with other `Result` objects. equals = container_equality @property def trace(self) -> list[FrameInfo] | None: """Returns a list with stack trace when :func:`~Failure` was called.""" return self._trace def swap(self) -> 'Result[_ErrorType, _ValueType]': """ Swaps value and error types. So, values become errors and errors become values. It is useful when you have to work with errors a lot. And since we have a lot of ``.bind_`` related methods and only a single ``.lash`` - it is easier to work with values. .. code:: python >>> from returns.result import Success, Failure >>> assert Success(1).swap() == Failure(1) >>> assert Failure(1).swap() == Success(1) """ def map( self, function: Callable[[_ValueType], _NewValueType], ) -> 'Result[_NewValueType, _ErrorType]': """ Composes successful container with a pure function. .. code:: python >>> from returns.result import Failure, Success >>> def mappable(string: str) -> str: ... return string + 'b' >>> assert Success('a').map(mappable) == Success('ab') >>> assert Failure('a').map(mappable) == Failure('a') """ def apply( self, container: Kind2[ 'Result', Callable[[_ValueType], _NewValueType], _ErrorType, ], ) -> 'Result[_NewValueType, _ErrorType]': """ Calls a wrapped function in a container on this container. .. code:: python >>> from returns.result import Failure, Success >>> def appliable(string: str) -> str: ... 
return string + 'b' >>> assert Success('a').apply(Success(appliable)) == Success('ab') >>> assert Failure('a').apply(Success(appliable)) == Failure('a') >>> assert Success('a').apply(Failure(1)) == Failure(1) >>> assert Failure(1).apply(Failure(2)) == Failure(1) """ def bind( self, function: Callable[ [_ValueType], Kind2['Result', _NewValueType, _ErrorType], ], ) -> 'Result[_NewValueType, _ErrorType]': """ Composes successful container with a function that returns a container. .. code:: python >>> from returns.result import Result, Success, Failure >>> def bindable(arg: str) -> Result[str, str]: ... if len(arg) > 1: ... return Success(arg + 'b') ... return Failure(arg + 'c') >>> assert Success('aa').bind(bindable) == Success('aab') >>> assert Success('a').bind(bindable) == Failure('ac') >>> assert Failure('a').bind(bindable) == Failure('a') """ #: Alias for `bind_result` method, it is the same as `bind` here. bind_result = bind def alt( self, function: Callable[[_ErrorType], _NewErrorType], ) -> 'Result[_ValueType, _NewErrorType]': """ Composes failed container with a pure function to modify failure. .. code:: python >>> from returns.result import Failure, Success >>> def altable(arg: str) -> str: ... return arg + 'b' >>> assert Success('a').alt(altable) == Success('a') >>> assert Failure('a').alt(altable) == Failure('ab') """ def lash( self, function: Callable[ [_ErrorType], Kind2['Result', _ValueType, _NewErrorType], ], ) -> 'Result[_ValueType, _NewErrorType]': """ Composes failed container with a function that returns a container. .. code:: python >>> from returns.result import Result, Success, Failure >>> def lashable(arg: str) -> Result[str, str]: ... if len(arg) > 1: ... return Success(arg + 'b') ... return Failure(arg + 'c') >>> assert Success('a').lash(lashable) == Success('a') >>> assert Failure('a').lash(lashable) == Failure('ac') >>> assert Failure('aa').lash(lashable) == Success('aab') """ def __iter__(self) -> Iterator[_ValueType]: """API for :ref:`do-notation`.""" yield self.unwrap() @classmethod def do( cls, expr: Generator[_NewValueType, None, None], ) -> 'Result[_NewValueType, _NewErrorType]': """ Allows working with unwrapped values of containers in a safe way. .. code:: python >>> from returns.result import Result, Failure, Success >>> assert Result.do( ... first + second ... for first in Success(2) ... for second in Success(3) ... ) == Success(5) >>> assert Result.do( ... first + second ... for first in Failure('a') ... for second in Success(3) ... ) == Failure('a') See :ref:`do-notation` to learn more. This feature requires our :ref:`mypy plugin `. """ try: return Result.from_value(next(expr)) except UnwrapFailedError as exc: return exc.halted_container # type: ignore def value_or( self, default_value: _NewValueType, ) -> _ValueType | _NewValueType: """ Get value or default value. .. code:: python >>> from returns.result import Failure, Success >>> assert Success(1).value_or(2) == 1 >>> assert Failure(1).value_or(2) == 2 """ def unwrap(self) -> _ValueType: """ Get value or raise exception. .. code:: pycon :force: >>> from returns.result import Failure, Success >>> assert Success(1).unwrap() == 1 >>> Failure(1).unwrap() Traceback (most recent call last): ... returns.primitives.exceptions.UnwrapFailedError """ # noqa: RST307 def failure(self) -> _ErrorType: """ Get failed value or raise exception. .. code:: pycon :force: >>> from returns.result import Failure, Success >>> assert Failure(1).failure() == 1 >>> Success(1).failure() Traceback (most recent call last): ... 
returns.primitives.exceptions.UnwrapFailedError """ # noqa: RST307 @classmethod def from_value( cls, inner_value: _NewValueType, ) -> 'Result[_NewValueType, Any]': """ One more value to create success unit values. It is useful as a united way to create a new value from any container. .. code:: python >>> from returns.result import Result, Success >>> assert Result.from_value(1) == Success(1) You can use this method or :func:`~Success`, choose the most convenient for you. """ return Success(inner_value) @classmethod def from_failure( cls, inner_value: _NewErrorType, ) -> 'Result[Any, _NewErrorType]': """ One more value to create failure unit values. It is useful as a united way to create a new value from any container. .. code:: python >>> from returns.result import Result, Failure >>> assert Result.from_failure(1) == Failure(1) You can use this method or :func:`~Failure`, choose the most convenient for you. """ return Failure(inner_value) @classmethod def from_result( cls, inner_value: 'Result[_NewValueType, _NewErrorType]', ) -> 'Result[_NewValueType, _NewErrorType]': """ Creates a new ``Result`` instance from existing ``Result`` instance. .. code:: python >>> from returns.result import Result, Failure, Success >>> assert Result.from_result(Success(1)) == Success(1) >>> assert Result.from_result(Failure(1)) == Failure(1) This is a part of :class:`returns.interfaces.specific.result.ResultBasedN` interface. """ return inner_value @final # noqa: WPS338 class Failure(Result[Any, _ErrorType]): # noqa: WPS338 """ Represents a calculation which has failed. It should contain an error code or message. """ __slots__ = () _inner_value: _ErrorType def __init__(self, inner_value: _ErrorType) -> None: """Failure constructor.""" super().__init__(inner_value) object.__setattr__(self, '_trace', self._get_trace()) # noqa: WPS609 if not TYPE_CHECKING: # noqa: C901, WPS604 # pragma: no branch def alt(self, function): """Composes failed container with a pure function to modify failure.""" # noqa: E501 return Failure(function(self._inner_value)) def map(self, function): """Does nothing for ``Failure``.""" return self def bind(self, function): """Does nothing for ``Failure``.""" return self #: Alias for `bind` method. Part of the `ResultBasedN` interface. bind_result = bind def lash(self, function): """Composes this container with a function returning container.""" return function(self._inner_value) def apply(self, container): """Does nothing for ``Failure``.""" return self def value_or(self, default_value): """Returns default value for failed container.""" return default_value def swap(self): """Failures swap to :class:`Success`.""" return Success(self._inner_value) def unwrap(self) -> Never: """Raises an exception, since it does not have a value inside.""" if isinstance(self._inner_value, Exception): raise UnwrapFailedError(self) from self._inner_value raise UnwrapFailedError(self) def failure(self) -> _ErrorType: """Returns failed value.""" return self._inner_value def _get_trace(self) -> list[FrameInfo] | None: """Method that will be monkey patched when trace is active.""" @final class Success(Result[_ValueType, Any]): """ Represents a calculation which has succeeded and contains the result. Contains the computation value. 
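A minimal usage sketch:

    .. code:: python

      >>> from returns.result import Success

      >>> assert Success(1).unwrap() == 1
      >>> assert Success(1).map(lambda number: number + 1) == Success(2)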
""" __slots__ = () _inner_value: _ValueType def __init__(self, inner_value: _ValueType) -> None: """Success constructor.""" super().__init__(inner_value) if not TYPE_CHECKING: # noqa: C901, WPS604 # pragma: no branch def alt(self, function): """Does nothing for ``Success``.""" return self def map(self, function): """Composes current container with a pure function.""" return Success(function(self._inner_value)) def bind(self, function): """Binds current container to a function that returns container.""" return function(self._inner_value) #: Alias for `bind` method. Part of the `ResultBasedN` interface. bind_result = bind def lash(self, function): """Does nothing for ``Success``.""" return self def apply(self, container): """Calls a wrapped function in a container on this container.""" if isinstance(container, Success): return self.map(container.unwrap()) return container def value_or(self, default_value): """Returns the value for successful container.""" return self._inner_value def swap(self): """Successes swap to :class:`Failure`.""" return Failure(self._inner_value) def unwrap(self) -> _ValueType: """Returns the unwrapped value from successful container.""" return self._inner_value def failure(self) -> Never: """Raises an exception for successful container.""" raise UnwrapFailedError(self) # Aliases: #: Alias for ``Result[_ValueType, Exception]``. ResultE: TypeAlias = Result[_ValueType, Exception] # Decorators: _ExceptionType = TypeVar('_ExceptionType', bound=Exception) @overload def safe( function: Callable[_FuncParams, _ValueType], /, ) -> Callable[_FuncParams, ResultE[_ValueType]]: """Decorator to convert exception-throwing for any kind of Exception.""" @overload def safe( exceptions: tuple[type[_ExceptionType], ...], ) -> Callable[ [Callable[_FuncParams, _ValueType]], Callable[_FuncParams, Result[_ValueType, _ExceptionType]], ]: """Decorator to convert exception-throwing just for a set of Exceptions.""" def safe( # noqa: WPS234, C901 exceptions: ( Callable[_FuncParams, _ValueType] | tuple[type[_ExceptionType], ...] ), ) -> ( Callable[_FuncParams, ResultE[_ValueType]] | Callable[ [Callable[_FuncParams, _ValueType]], Callable[_FuncParams, Result[_ValueType, _ExceptionType]], ] ): """ Decorator to convert exception-throwing function to ``Result`` container. Should be used with care, since it only catches ``Exception`` subclasses. It does not catch ``BaseException`` subclasses. If you need to mark ``async`` function as ``safe``, use :func:`returns.future.future_safe` instead. This decorator only works with sync functions. Example: .. code:: python >>> from returns.result import Failure, Success, safe >>> @safe ... def might_raise(arg: int) -> float: ... return 1 / arg >>> assert might_raise(1) == Success(1.0) >>> assert isinstance(might_raise(0), Failure) You can also use it with explicit exception types as the first argument: .. code:: python >>> from returns.result import Failure, Success, safe >>> @safe(exceptions=(ZeroDivisionError,)) ... def might_raise(arg: int) -> float: ... return 1 / arg >>> assert might_raise(1) == Success(1.0) >>> assert isinstance(might_raise(0), Failure) In this case, only exceptions that are explicitly listed are going to be caught. Similar to :func:`returns.io.impure_safe` and :func:`returns.future.future_safe` decorators. 
""" def factory( inner_function: Callable[_FuncParams, _ValueType], inner_exceptions: tuple[type[_ExceptionType], ...], ) -> Callable[_FuncParams, Result[_ValueType, _ExceptionType]]: @wraps(inner_function) def decorator( *args: _FuncParams.args, **kwargs: _FuncParams.kwargs, ) -> Result[_ValueType, _ExceptionType]: try: return Success(inner_function(*args, **kwargs)) except inner_exceptions as exc: return Failure(exc) return decorator if isinstance(exceptions, tuple): return lambda function: factory(function, exceptions) return factory( exceptions, (Exception,), # type: ignore[arg-type] ) def attempt( func: Callable[[_FirstType], _NewValueType], ) -> Callable[[_FirstType], Result[_NewValueType, _FirstType]]: """ Decorator to convert exception-throwing function to ``Result`` container. It's very similar with :func:`returns.result.safe`, the difference is when an exception is raised it won't wrap that given exception into a Failure, it'll wrap the argument that lead to the exception. .. code:: python >>> import json >>> from typing import Dict, Any >>> from returns.result import Failure, Success, attempt >>> @attempt ... def parse_json(string: str) -> Dict[str, Any]: ... return json.loads(string) >>> assert parse_json('{"key": "value"}') == Success({'key': 'value'}) >>> assert parse_json('incorrect input') == Failure('incorrect input') """ @wraps(func) def decorator(arg: _FirstType) -> Result[_NewValueType, _FirstType]: try: return Success(func(arg)) except Exception: return Failure(arg) return decorator returns-0.24.0/returns/trampolines.py000066400000000000000000000055421472312074000177160ustar00rootroot00000000000000from collections.abc import Callable from functools import wraps from typing import Generic, TypeVar, final from typing_extensions import ParamSpec _ReturnType = TypeVar('_ReturnType') _FuncParams = ParamSpec('_FuncParams') @final class Trampoline(Generic[_ReturnType]): """ Represents a wrapped function call. Primitive to convert recursion into an actual object. """ __slots__ = ('func', 'args', 'kwargs') def __init__( # noqa: WPS451 self, func: Callable[_FuncParams, _ReturnType], /, # We use pos-only here to be able to store `kwargs` correctly. *args: _FuncParams.args, **kwargs: _FuncParams.kwargs, ) -> None: """Save function and given arguments.""" self.func = getattr(func, '_orig_func', func) self.args = args self.kwargs = kwargs def __call__(self) -> _ReturnType: """Call wrapped function with given arguments.""" return self.func(*self.args, **self.kwargs) def trampoline( func: Callable[_FuncParams, _ReturnType | Trampoline[_ReturnType]], ) -> Callable[_FuncParams, _ReturnType]: """ Convert functions using recursion to regular functions. Trampolines allow to unwrap recursion into a regular ``while`` loop, which does not raise any ``RecursionError`` ever. Since python does not have TCO (tail call optimization), we have to provide this helper. This is done by wrapping real function calls into :class:`returns.trampolines.Trampoline` objects: .. code:: python >>> from typing import Union >>> from returns.trampolines import Trampoline, trampoline >>> @trampoline ... def get_factorial( ... for_number: int, ... current_number: int = 0, ... acc: int = 1, ... ) -> Union[int, Trampoline[int]]: ... assert for_number >= 0 ... if for_number <= current_number: ... return acc ... return Trampoline( ... get_factorial, ... for_number, ... current_number=current_number + 1, ... acc=acc * (current_number + 1), ... 
) >>> assert get_factorial(0) == 1 >>> assert get_factorial(3) == 6 >>> assert get_factorial(4) == 24 See also: - eli.thegreenplace.net/2017/on-recursion-continuations-and-trampolines - https://en.wikipedia.org/wiki/Tail_call """ @wraps(func) def decorator( *args: _FuncParams.args, **kwargs: _FuncParams.kwargs, ) -> _ReturnType: trampoline_result = func(*args, **kwargs) while isinstance(trampoline_result, Trampoline): trampoline_result = trampoline_result() return trampoline_result decorator._orig_func = func # type: ignore[attr-defined] # noqa: WPS437 return decorator returns-0.24.0/returns/unsafe.py000066400000000000000000000014751472312074000166430ustar00rootroot00000000000000from typing import TypeVar from returns.io import IO _ValueType = TypeVar('_ValueType') def unsafe_perform_io(wrapped_in_io: IO[_ValueType]) -> _ValueType: """ Compatibility utility and escape mechanism from ``IO`` world. Just unwraps the internal value from :class:`returns.io.IO` container. Should be used with caution! Since it might be overused by lazy and ignorant developers. It is recommended to have only one place (module / file) in your program where you allow unsafe operations. We recommend to use ``import-linter`` to enforce this rule. .. code:: python >>> from returns.io import IO >>> assert unsafe_perform_io(IO(1)) == 1 See also: - https://github.com/seddonym/import-linter """ return wrapped_in_io._inner_value # noqa: WPS437 returns-0.24.0/setup.cfg000066400000000000000000000122341472312074000151220ustar00rootroot00000000000000# All configuration for plugins and other utils is defined here. # Read more about `setup.cfg`: # https://docs.python.org/3/distutils/configfile.html [flake8] format = wemake show-source = true doctests = true statistics = false # darglint configuration: # https://github.com/terrencepreilly/darglint strictness = long docstring-style = numpy # Plugins: max-complexity = 6 max-line-length = 80 staticmethod-decorators = staticmethod law_definition # wemake-python-styleguide max-annotation-complexity = 4 i-control-code = false allowed-domain-names = some, result, do extend-exclude = .venv build # Bad code that I write to test things: ex.py experiments ignore = D100, D104, D401, W504, X100, Y021, WPS113, WPS201, WPS202, WPS214, WPS215, WPS235, WPS320, WPS433, WPS436, WPS440, WPS450, WPS612, RST303, RST304, DAR103, DAR203, per-file-ignores = # We allow reexport: returns/pointfree/__init__.py: F401, WPS201 returns/methods/__init__.py: F401, WPS201 returns/pipeline.py: F401 returns/context/__init__.py: F401, WPS201 # Disable some quality checks for the most heavy parts: returns/io.py: WPS402 returns/iterables.py: WPS234 # Interfaces and asserts can have assert statements: returns/interfaces/*.py: S101 returns/primitives/asserts.py: S101 # Some rules cannot be applied to context: returns/context/*.py: WPS201, WPS204, WPS226, WPS326, WPS430 # We allow `futures` to do attribute access: returns/future.py: WPS437 returns/_internal/futures/*.py: WPS204, WPS433, WPS437 # We allow a lot of durty hacks in our plugins: returns/contrib/mypy/*.py: S101, WPS201 returns/contrib/pytest/__init__.py: F401 returns/contrib/pytest/plugin.py: WPS201, WPS430, WPS437, WPS609 returns/contrib/hypothesis/*.py: WPS437, WPS609 # TODO: remove after mypy@0.800 returns/contrib/mypy/_typeops/visitor.py: S101, WPS232 # Allow class attributes literals for slots and setattr: returns/primitives/container.py: WPS226 # There are multiple assert's in tests: tests/*.py: S101, WPS204, WPS218, WPS226, WPS432, WPS436 # Some examples 
don't have any docs on purpose: tests/test_examples/*: D102 # Pattern matching, flake8 and friends are not ready to deal with it tests/test_examples/test_result/test_result_pattern_matching.py: D103, WPS110, WPS125, WPS421, WPS432 tests/test_examples/test_maybe/test_maybe_pattern_matching.py: D101, D103, F811, WPS306, WPS421 tests/test_examples/test_io/test_ioresult_container/test_ioresult_pattern_matching.py: WPS110, WPS421, WPS432 tests/test_pattern_matching.py: S101, WPS110, WPS218, WPS432 # Annotations: *.pyi: D103, WPS112, WPS211, WPS428 [isort] # isort configuration: # https://pycqa.github.io/isort/docs/configuration/profiles.html profile = wemake line_length = 80 [tool:pytest] # ignores some directories: norecursedirs = *.egg .eggs dist build docs .tox .git __pycache__ # Active the strict mode of xfail xfail_strict = true # Adds these options to each `pytest` run: addopts = --strict-markers --strict-config --doctest-modules --doctest-glob='*.rst' # pytest-cov: --cov=returns --cov-report=term-missing:skip-covered --cov-report=html --cov-report=xml --cov-branch --cov-fail-under=100 # pytest-mypy-plugin: --mypy-ini-file=setup.cfg # Ignores some warnings inside: filterwarnings = ignore:coroutine '\w+' was never awaited:RuntimeWarning [coverage:run] omit = # We test mypy plugins with `pytest-mypy-plugins`, # which does not work with coverage: returns/contrib/mypy/* # pytest cannot measure self coverage: returns/contrib/pytest/*.py # Hypothesis is also excluded: returns/contrib/hypothesis/* [coverage:report] exclude_lines = # a more strict default pragma \# pragma: no cover\b ^if TYPE_CHECKING: [mypy] # mypy configurations: http://bit.ly/2zEl9WI # Custom plugins: plugins = mypy.plugins.proper_plugin, returns.contrib.mypy.returns_plugin, enable_error_code = truthy-bool, truthy-iterable, redundant-expr, # We don't want "Are you missing an await?" errors, # because we can't disable them for tests only. # It is passed as a CLI arg in CI. # unused-awaitable, # ignore-without-code, possibly-undefined, redundant-self, # We run mypy on all python versions, but only 3.10+ supports pattern matching: exclude = .*test_.*pattern_matching disable_error_code = empty-body, no-untyped-def # We cannot work without explicit `Any` types and plain generics: disallow_any_explicit = false disallow_any_generics = false follow_imports = silent ignore_missing_imports = true strict = true warn_unreachable = true # TODO: update our output assertions to match a new syntax force_uppercase_builtins = true force_union_syntax = true # TODO: Enable this later, it's disabled temporarily while we don't discover why # the explicit restriction on `typeshed.stdlib.unittest.mock`, # which is the next section, is not working properly when running # with `pytest`. 
disallow_subclassing_any = False [mypy-typeshed.stdlib.unittest.mock] disallow_subclassing_any = False [codespell] # codespell configuration: https://pypi.org/project/codespell ignore-words-list = appliable,falsy skip = __pycache__,_build,.mypy_cache returns-0.24.0/tests/000077500000000000000000000000001472312074000144415ustar00rootroot00000000000000returns-0.24.0/tests/test_context/000077500000000000000000000000001472312074000171645ustar00rootroot00000000000000returns-0.24.0/tests/test_context/test_requires_context/000077500000000000000000000000001472312074000236265ustar00rootroot00000000000000returns-0.24.0/tests/test_context/test_requires_context/test_context.py000066400000000000000000000014051472312074000267230ustar00rootroot00000000000000from copy import copy, deepcopy import pytest from returns.context import RequiresContext from returns.primitives.exceptions import ImmutableStateError def test_requires_context_immutable() -> None: """Ensures that Context is immutable.""" with pytest.raises(ImmutableStateError): RequiresContext.from_value(1).abc = 1 def test_requires_context_immutable_copy() -> None: """Ensures that Context returns it self when passed to copy function.""" context = RequiresContext.from_value(1) assert context is copy(context) def test_requires_context_immutable_deepcopy() -> None: """Ensures that Context returns it self when passed to deepcopy function.""" context = RequiresContext.from_value(1) assert context is deepcopy(context) returns-0.24.0/tests/test_context/test_requires_context/test_context_equality.py000066400000000000000000000010771472312074000306450ustar00rootroot00000000000000from collections.abc import Callable from returns.context import RequiresContext def _same_function(some_arg: int) -> Callable[[float], float]: return lambda other: other / some_arg def test_equality() -> None: """Ensures that containers can be compared.""" assert RequiresContext(_same_function) == RequiresContext(_same_function) def test_nonequality() -> None: """Ensures that containers can be compared.""" assert RequiresContext(_same_function) != RequiresContext(str) assert RequiresContext.from_value(1) != RequiresContext.from_value(1) returns-0.24.0/tests/test_context/test_requires_context/test_context_utils.py000066400000000000000000000007071472312074000301470ustar00rootroot00000000000000from returns.context import RequiresContext def test_context_ask(): """Ensures that ``ask`` method works correctly.""" assert RequiresContext[str, int].ask()(1) == 1 assert RequiresContext[int, str].ask()('a') == 'a' def test_requires_context_from_value(): """Ensures that ``from_value`` method works correctly.""" assert RequiresContext.from_value(1)(RequiresContext.no_args) == 1 assert RequiresContext.from_value(2)(1) == 2 returns-0.24.0/tests/test_context/test_requires_context_ioresult/000077500000000000000000000000001472312074000255545ustar00rootroot00000000000000returns-0.24.0/tests/test_context/test_requires_context_ioresult/test_context_ioresult.py000066400000000000000000000015501472312074000326000ustar00rootroot00000000000000from copy import copy, deepcopy import pytest from returns.context import RequiresContextIOResult from returns.primitives.exceptions import ImmutableStateError def test_requires_context_result_immutable(): """Ensures that container is immutable.""" with pytest.raises(ImmutableStateError): RequiresContextIOResult.from_value(1).abc = 1 def test_requires_context_result_immutable_copy(): """Ensures that helper returns it self when passed to copy function.""" 
context_ioresult = RequiresContextIOResult.from_value(1) assert context_ioresult is copy(context_ioresult) def test_requires_context_result_immutable_deepcopy(): # noqa: WPS118 """Ensures that helper returns it self when passed to deepcopy function.""" requires_context = RequiresContextIOResult.from_value(1) assert requires_context is deepcopy(requires_context) returns-0.24.0/tests/test_context/test_requires_context_ioresult/test_requires_context_ioresult.py000066400000000000000000000016201472312074000345150ustar00rootroot00000000000000from returns.context import RequiresContextIOResultE from returns.io import IOSuccess def test_regression394(): """ It used to raise ``ImmutableStateError`` for type aliases. Here we use the minimal reproduction sample. .. code:: python Traceback (most recent call last): File "ex.py", line 18, in get_ip_addr("https://google.com") File "ex.py", line 13, in get_ip_addr return RequiresContextIOResultE(lambda _: IOSuccess(1)) File "../3.7.7/lib/python3.7/typing.py", line 677, in __call__ result.__orig_class__ = self File "../returns/returns/primitives/types.py", line 42, in __setattr__ raise ImmutableStateError() returns.primitives.exceptions.ImmutableStateError See: https://github.com/dry-python/returns/issues/394 """ RequiresContextIOResultE(lambda _: IOSuccess(1)) test_requires_context_ioresult_bind.py000066400000000000000000000074111472312074000354360ustar00rootroot00000000000000returns-0.24.0/tests/test_context/test_requires_context_ioresultfrom returns.context import RequiresContext from returns.context import RequiresContextIOResult as RCR # noqa: N814 from returns.context import RequiresContextResult from returns.io import IOFailure, IOResult, IOSuccess from returns.result import Failure, Result, Success def test_bind(): """Ensures that bind works.""" def factory(inner_value: int) -> RCR[float, str, int]: if inner_value > 0: return RCR(lambda deps: IOSuccess(inner_value / deps)) return RCR.from_failure(str(inner_value)) input_value = 5 bound: RCR[int, str, int] = RCR.from_value(input_value) assert bound.bind(factory)(2) == factory(input_value)(2) assert bound.bind(factory)(2) == IOSuccess(2.5) assert RCR.from_value(0).bind( factory, )(2) == factory(0)(2) == IOFailure('0') def test_bind_regular_result(): """Ensures that regular ``Result`` can be bound.""" def factory(inner_value: int) -> Result[int, str]: if inner_value > 0: return Success(inner_value + 1) return Failure('nope') first: RCR[int, str, int] = RCR.from_value(1) third: RCR[int, str, int] = RCR.from_failure('a') assert first.bind_result(factory)(RCR.no_args) == IOSuccess(2) assert RCR.from_value(0).bind_result( factory, )(RCR.no_args) == IOFailure('nope') assert third.bind_result(factory)(RCR.no_args) == IOFailure('a') def test_bind_ioresult(): """Ensures that io ``Result`` can be bound.""" def factory(inner_value: int) -> IOResult[int, str]: if inner_value > 0: return IOSuccess(inner_value + 1) return IOFailure('nope') first: RCR[int, str, int] = RCR.from_value(1) third: RCR[int, str, int] = RCR.from_failure('a') assert first.bind_ioresult(factory)(RCR.no_args) == IOSuccess(2) assert RCR.from_value(0).bind_ioresult( factory, )(RCR.no_args) == IOFailure('nope') assert third.bind_ioresult(factory)(RCR.no_args) == IOFailure('a') def test_bind_regular_context(): """Ensures that regular ``RequiresContext`` can be bound.""" def factory(inner_value: int) -> RequiresContext[float, int]: return RequiresContext(lambda deps: inner_value / deps) first: RCR[int, str, int] = RCR.from_value(1) third: 
RCR[int, str, int] = RCR.from_failure('a') assert first.bind_context(factory)(2) == IOSuccess(0.5) assert RCR.from_value(2).bind_context( factory, )(1) == IOSuccess(2.0) assert third.bind_context(factory)(1) == IOFailure('a') def test_bind_result_context(): """Ensures that ``RequiresContextResult`` can be bound.""" def factory(inner_value: int) -> RequiresContextResult[float, str, int]: return RequiresContextResult(lambda deps: Success(inner_value / deps)) first: RCR[int, str, int] = RCR.from_value(1) third: RCR[int, str, int] = RCR.from_failure('a') assert first.bind_context_result(factory)(2) == IOSuccess(0.5) assert RCR.from_value(2).bind_context_result( factory, )(1) == IOSuccess(2.0) assert third.bind_context_result(factory)(1) == IOFailure('a') def test_lash_success(): """Ensures that lash works for Success container.""" def factory(inner_value) -> RCR[int, str, int]: return RCR.from_value(inner_value * 2) assert RCR.from_value(5).lash( factory, )(0) == RCR.from_value(5)(0) assert RCR.from_failure(5).lash( factory, )(0) == RCR.from_value(10)(0) def test_lash_failure(): """Ensures that lash works for Failure container.""" def factory(inner_value) -> RCR[int, str, int]: return RCR.from_failure(inner_value * 2) assert RCR.from_value(5).lash( factory, )(0) == RCR.from_value(5)(0) assert RCR.from_failure(5).lash( factory, )(0) == RCR.from_failure(10)(0) test_requires_context_ioresult_cast.py000066400000000000000000000022411472312074000354500ustar00rootroot00000000000000returns-0.24.0/tests/test_context/test_requires_context_ioresultfrom returns.context import ( ReaderIOResult, ReaderIOResultE, RequiresContextIOResult, RequiresContextIOResultE, ) def _function(arg: int) -> RequiresContextIOResultE[float, int]: if arg == 0: return RequiresContextIOResult.from_failure( ZeroDivisionError('Divided by 0'), ) return RequiresContextIOResult.from_value(10 / arg) def test_requires_context_ioresulte(): """Ensures that RequiresContextIOResultE correctly typecast.""" container: RequiresContextIOResult[float, Exception, int] = _function(1) assert container(0) == RequiresContextIOResult.from_value(10.0)(0) def test_requires_context_io_aliases(): """Ensures that ReaderIOResult correctly typecast.""" container: ReaderIOResultE[float, int] = _function(1) container2: ReaderIOResult[float, Exception, int] = _function(1) container3: ReaderIOResultE[float, int] = ReaderIOResultE.from_value( 10.0, ) container4: ReaderIOResultE[float, int] = ReaderIOResult.from_value(10.0) assert container(0) == container2(0) == container3(0) == container4(0) assert container(0) == RequiresContextIOResult.from_value(10.0)(0) returns-0.24.0/tests/test_context/test_requires_context_result/000077500000000000000000000000001472312074000252245ustar00rootroot00000000000000returns-0.24.0/tests/test_context/test_requires_context_result/test_context_result.py000066400000000000000000000014241472312074000317200ustar00rootroot00000000000000from copy import copy, deepcopy import pytest from returns.context import RequiresContextResult from returns.primitives.exceptions import ImmutableStateError def test_immutable_copy(): """Ensures that helper returns it self when passed to copy function.""" context_result = RequiresContextResult.from_value(1) assert context_result is copy(context_result) def test_immutable_deepcopy(): """Ensures that helper returns it self when passed to deepcopy function.""" context_result = RequiresContextResult.from_value(1) assert context_result is deepcopy(context_result) def test_requires_context_result_immutable(): 
"""Ensures that container is immutable.""" with pytest.raises(ImmutableStateError): RequiresContextResult.from_value(1).abc = 1 returns-0.24.0/tests/test_context/test_requires_context_result/test_requires_context_result_bind.py000066400000000000000000000047641472312074000346450ustar00rootroot00000000000000from returns.context import RequiresContext from returns.context import RequiresContextResult as RCR # noqa: N817 from returns.result import Failure, Result, Success def test_bind(): """Ensures that bind works.""" def factory(inner_value: int) -> RCR[float, str, int]: if inner_value > 0: return RCR(lambda deps: Success(inner_value / deps)) return RCR.from_failure(str(inner_value)) input_value = 5 bound: RCR[int, str, int] = RCR.from_value(input_value) assert bound.bind(factory)(2) == factory(input_value)(2) assert bound.bind(factory)(2) == Success(2.5) assert RCR.from_value(0).bind( factory, )(2) == factory(0)(2) == Failure('0') def test_bind_regular_result(): """Ensures that regular ``Result`` can be bound.""" def factory(inner_value: int) -> Result[int, str]: if inner_value > 0: return Success(inner_value + 1) return Failure('nope') first: RCR[int, str, int] = RCR.from_value(1) third: RCR[int, str, int] = RCR.from_failure('a') assert first.bind_result(factory)(RCR.no_args) == Success(2) assert RCR.from_value(0).bind_result( factory, )(RCR.no_args) == Failure('nope') assert third.bind_result(factory)(RCR.no_args) == Failure('a') def test_bind_regular_context(): """Ensures that regular ``RequiresContext`` can be bound.""" def factory(inner_value: int) -> RequiresContext[float, int]: return RequiresContext(lambda deps: inner_value / deps) first: RCR[int, str, int] = RCR.from_value(1) third: RCR[int, str, int] = RCR.from_failure('a') assert first.bind_context(factory)(2) == Success(0.5) assert RCR.from_value(2).bind_context( factory, )(1) == Success(2.0) assert third.bind_context(factory)(1) == Failure('a') def test_lash_success(): """Ensures that lash works for Success container.""" def factory(inner_value) -> RCR[int, str, int]: return RCR.from_value(inner_value * 2) assert RCR.from_value(5).lash( factory, )(0) == RCR.from_value(5)(0) assert RCR.from_failure(5).lash( factory, )(0) == RCR.from_value(10)(0) def test_lash_failure(): """Ensures that lash works for Failure container.""" def factory(inner_value) -> RCR[int, str, int]: return RCR.from_failure(inner_value * 2) assert RCR.from_value(5).lash( factory, )(0) == RCR.from_value(5)(0) assert RCR.from_failure(5).lash( factory, )(0) == RCR.from_failure(10)(0) returns-0.24.0/tests/test_context/test_requires_context_result/test_requires_context_result_cast.py000066400000000000000000000021701472312074000346500ustar00rootroot00000000000000from returns.context import ( ReaderResult, ReaderResultE, RequiresContextResult, RequiresContextResultE, ) def _function(arg: int) -> RequiresContextResultE[float, int]: if arg == 0: return RequiresContextResult.from_failure( ZeroDivisionError('Divided by 0'), ) return RequiresContextResult.from_value(10 / arg) def test_requires_context_resulte(): """Ensures that RequiresContextResultE correctly typecast.""" container: RequiresContextResult[float, Exception, int] = _function(1) assert container(0) == RequiresContextResult.from_value(10.0)(0) def test_requires_context_aliases(): """Ensures that ReaderResult correctly typecast.""" container: ReaderResultE[float, int] = _function(1) container2: ReaderResult[float, Exception, int] = _function(1) container3: ReaderResultE[float, int] = ReaderResultE.from_value( 
10.0, ) container4: ReaderResultE[float, int] = ReaderResult.from_value(10.0) assert container(0) == container2(0) == container3(0) == container4(0) assert container(0) == RequiresContextResult.from_value(10.0)(0) returns-0.24.0/tests/test_contrib/000077500000000000000000000000001472312074000171405ustar00rootroot00000000000000returns-0.24.0/tests/test_contrib/test_hypothesis/000077500000000000000000000000001472312074000223765ustar00rootroot00000000000000returns-0.24.0/tests/test_contrib/test_hypothesis/test_laws/000077500000000000000000000000001472312074000244035ustar00rootroot00000000000000returns-0.24.0/tests/test_contrib/test_hypothesis/test_laws/test_custom_type_applicative.py000066400000000000000000000023041472312074000327470ustar00rootroot00000000000000from collections.abc import Callable from typing import TypeVar from returns.contrib.hypothesis.laws import check_all_laws from returns.interfaces import applicative from returns.primitives.container import BaseContainer from returns.primitives.hkt import Kind1, SupportsKind1 _ValueType = TypeVar('_ValueType') _NewValueType = TypeVar('_NewValueType') class _Wrapper( BaseContainer, SupportsKind1['_Wrapper', _ValueType], applicative.Applicative1[_ValueType], ): _inner_value: _ValueType def __init__(self, inner_value: _ValueType) -> None: super().__init__(inner_value) def map( self, function: Callable[[_ValueType], _NewValueType], ) -> '_Wrapper[_NewValueType]': return _Wrapper(function(self._inner_value)) def apply( self, container: Kind1['_Wrapper', Callable[[_ValueType], _NewValueType]], ) -> '_Wrapper[_NewValueType]': function = container._inner_value # noqa: WPS437 return _Wrapper(function(self._inner_value)) @classmethod def from_value( cls, inner_value: _NewValueType, ) -> '_Wrapper[_NewValueType]': return _Wrapper(inner_value) check_all_laws(_Wrapper) returns-0.24.0/tests/test_contrib/test_hypothesis/test_laws/test_custom_type_with_init.py000066400000000000000000000015701472312074000324500ustar00rootroot00000000000000from collections.abc import Callable from typing import TypeVar from returns.contrib.hypothesis.laws import check_all_laws from returns.interfaces import equable, mappable from returns.primitives.container import BaseContainer, container_equality from returns.primitives.hkt import SupportsKind1 _ValueType = TypeVar('_ValueType') _NewValueType = TypeVar('_NewValueType') class _Wrapper( BaseContainer, SupportsKind1['_Wrapper', _ValueType], mappable.Mappable1[_ValueType], equable.Equable, ): _inner_value: _ValueType def __init__(self, inner_value: _ValueType) -> None: super().__init__(inner_value) equals = container_equality def map( self, function: Callable[[_ValueType], _NewValueType], ) -> '_Wrapper[_NewValueType]': return _Wrapper(function(self._inner_value)) check_all_laws(_Wrapper, use_init=True) returns-0.24.0/tests/test_contrib/test_hypothesis/test_laws/test_unsatisfiable_type.py000066400000000000000000000016171472312074000317130ustar00rootroot00000000000000from collections.abc import Callable from typing import TypeVar import pytest from hypothesis.errors import ResolutionFailed from returns.contrib.hypothesis.laws import check_all_laws from returns.interfaces import mappable from returns.primitives.container import BaseContainer from returns.primitives.hkt import SupportsKind1 pytestmark = pytest.mark.xfail(raises=ResolutionFailed) _ValueType = TypeVar('_ValueType') _NewValueType = TypeVar('_NewValueType') class _WithInitNoFlag( BaseContainer, SupportsKind1['_WithInitNoFlag', _ValueType], 
mappable.Mappable1[_ValueType], ): """Does not have any ways to be constructed.""" def map( self, function: Callable[[_ValueType], _NewValueType], ) -> '_WithInitNoFlag[_NewValueType]': """We need `map` to have `laws`, should not be called.""" raise NotImplementedError check_all_laws(_WithInitNoFlag) returns-0.24.0/tests/test_contrib/test_hypothesis/test_laws/test_wrong_custom_type_with_init.py000066400000000000000000000016461472312074000336700ustar00rootroot00000000000000from collections.abc import Callable from typing import TypeVar import pytest from returns.contrib.hypothesis.laws import check_all_laws from returns.interfaces import mappable from returns.primitives.container import BaseContainer from returns.primitives.hkt import SupportsKind1 pytestmark = pytest.mark.xfail(raises=AssertionError) _ValueType = TypeVar('_ValueType') _NewValueType = TypeVar('_NewValueType') class _Wrapper( BaseContainer, SupportsKind1['_Wrapper', _ValueType], mappable.Mappable1[_ValueType], ): _inner_value: _ValueType def __init__(self, inner_value: _ValueType) -> None: super().__init__(inner_value) def map( self, function: Callable[[_ValueType], _NewValueType], ) -> '_Wrapper[_NewValueType]': return _Wrapper( 'wrong-{0}'.format(function(self._inner_value)), # type: ignore ) check_all_laws(_Wrapper, use_init=True) returns-0.24.0/tests/test_contrib/test_hypothesis/test_type_resolution.py000066400000000000000000000054211472312074000272550ustar00rootroot00000000000000from collections.abc import Sequence from typing import Any import pytest from hypothesis import given from hypothesis import strategies as st from returns.context import ( Reader, RequiresContext, RequiresContextFutureResult, RequiresContextFutureResultE, RequiresContextIOResult, RequiresContextIOResultE, RequiresContextResult, RequiresContextResultE, ) from returns.future import Future, FutureResult from returns.io import IO, IOResult, IOResultE from returns.maybe import Maybe from returns.pipeline import is_successful from returns.primitives.laws import Lawful from returns.result import Result, ResultE _all_containers: Sequence[type[Lawful]] = ( Maybe, Result, IO, IOResult, Future, FutureResult, RequiresContext, RequiresContextResult, RequiresContextIOResult, RequiresContextFutureResult, # Aliases: ResultE, IOResultE, Reader, RequiresContextResultE, RequiresContextIOResultE, RequiresContextFutureResultE, ) @pytest.mark.filterwarnings('ignore:.*') @pytest.mark.parametrize('container_type', _all_containers) def test_all_containers_resolves(container_type: type[Lawful]) -> None: """Ensures all containers do resolve.""" assert st.from_type(container_type).example() is not None @given( st.from_type(ResultE).filter( lambda container: not is_successful(container), ), ) def test_result_error_alias_resolves(thing: ResultE[Any]) -> None: """Ensures that type aliases are resolved correctly.""" assert isinstance(thing.failure(), Exception) CustomResult = Result[int, str] @given(st.from_type(CustomResult)) def test_custom_result_error_types_resolve(thing: CustomResult) -> None: """Ensures that type aliases are resolved correctly.""" if is_successful(thing): assert isinstance(thing.unwrap(), int) else: assert isinstance(thing.failure(), str) @given( st.from_type(RequiresContextResultE).filter( lambda container: not is_successful( container(RequiresContextResultE.no_args), ), ), ) def test_reader_result_error_alias_resolves( thing: RequiresContextResultE, ) -> None: """Ensures that type aliases are resolved correctly.""" real_result = 
thing(RequiresContextResultE.no_args) assert isinstance(real_result.failure(), Exception) CustomReaderResult = RequiresContextResult[int, str, bool] @given(st.from_type(CustomReaderResult)) def test_custom_readerresult_types_resolve( thing: CustomReaderResult, ) -> None: """Ensures that type aliases are resolved correctly.""" real_result = thing(RequiresContextResultE.no_args) if is_successful(real_result): assert isinstance(real_result.unwrap(), int) else: assert isinstance(real_result.failure(), str) returns-0.24.0/tests/test_contrib/test_pytest/000077500000000000000000000000001472312074000215275ustar00rootroot00000000000000returns-0.24.0/tests/test_contrib/test_pytest/test_plugin_error_handler.py000066400000000000000000000061551472312074000273530ustar00rootroot00000000000000import pytest from returns.context import ( RequiresContextFutureResult, RequiresContextIOResult, RequiresContextResult, ) from returns.contrib.pytest import ReturnsAsserts from returns.functions import identity from returns.future import FutureResult from returns.io import IOFailure, IOSuccess from returns.result import Failure, Success def _under_test( container, *, should_lash: bool = False, ): if should_lash: return container.lash(lambda inner: container.from_failure(inner)) return container.bind(lambda inner: container.from_value(inner)) @pytest.mark.parametrize('container', [ Success(1), Failure(1), IOSuccess(1), IOFailure(1), RequiresContextIOResult.from_value(1), RequiresContextIOResult.from_failure(1), RequiresContextFutureResult.from_value(1), RequiresContextFutureResult.from_failure(1), RequiresContextResult.from_value(1), RequiresContextResult.from_failure(1), FutureResult.from_value(1), FutureResult.from_failure(1), ]) @pytest.mark.parametrize('kwargs', [ {'should_lash': True}, ]) def test_error_handled(returns: ReturnsAsserts, container, kwargs): """Demo on how to use ``pytest`` helpers to work with error handling.""" assert not returns._errors_handled # noqa: WPS437 error_handled = _under_test(container, **kwargs) assert returns.is_error_handled(error_handled) assert returns.is_error_handled(error_handled.map(identity)) assert returns.is_error_handled(error_handled.alt(identity)) assert returns._errors_handled # noqa: WPS437 @pytest.mark.parametrize('container', [ Success(1), Failure(1), IOSuccess(1), IOFailure(1), RequiresContextIOResult.from_value(1), RequiresContextIOResult.from_failure(1), RequiresContextFutureResult.from_value(1), RequiresContextFutureResult.from_failure(1), RequiresContextResult.from_value(1), RequiresContextResult.from_failure(1), ]) def test_error_not_handled(returns: ReturnsAsserts, container): """Demo on how to use ``pytest`` helpers to work with error handling.""" assert not returns._errors_handled # noqa: WPS437 error_handled = _under_test(container) assert not returns.is_error_handled(container) assert not returns.is_error_handled(error_handled) assert not returns.is_error_handled(error_handled.map(identity)) assert not returns.is_error_handled(error_handled.alt(identity)) assert not returns._errors_handled # noqa: WPS437 @pytest.mark.anyio @pytest.mark.parametrize('container', [ FutureResult.from_value(1), FutureResult.from_failure(1), RequiresContextFutureResult.from_value(1), RequiresContextFutureResult.from_failure(1), ]) async def test_error_not_handled_async(returns: ReturnsAsserts, container): """Demo on how to use ``pytest`` helpers to work with error handling.""" error_handled = _under_test(container) assert not returns.is_error_handled(container) assert not 
returns.is_error_handled(error_handled) assert not returns.is_error_handled(error_handled.map(identity)) assert not returns.is_error_handled(error_handled.alt(identity)) returns-0.24.0/tests/test_contrib/test_pytest/test_plugin_has_trace.py000066400000000000000000000056161472312074000264570ustar00rootroot00000000000000import pytest from returns.contrib.pytest import ReturnsAsserts from returns.io import IOFailure, IOSuccess from returns.result import Failure, Success, safe def _create_container_function(container_type, container_value): return container_type(container_value) def _create_container_function_intermediate(container_type, container_value): return _create_container_function( # type: ignore container_type, container_value, ) @safe def _safe_decorated_function(return_failure: bool = False): if return_failure: raise ValueError('Function failed') @pytest.mark.parametrize('container_type', [ # noqa: WPS118 Success, Failure, IOSuccess, IOFailure, ]) def test_assert_trace1(container_type, returns: ReturnsAsserts): """Test if our plugin will identify the container creation correctly.""" with returns.assert_trace(container_type, _create_container_function): _create_container_function(container_type, 1) # type: ignore @pytest.mark.parametrize('container_type', [ # noqa: WPS118 Success, Failure, IOSuccess, IOFailure, ]) def test_assert_trace2(container_type, returns: ReturnsAsserts): """Test if our plugin will identify the container creation correctly.""" with returns.assert_trace(container_type, _create_container_function): _create_container_function_intermediate( # type: ignore container_type, 1, ) @pytest.mark.parametrize(('desired_type', 'wrong_type'), [ (Success, Failure), (Failure, Success), (IOSuccess, IOFailure), (IOFailure, IOSuccess), ]) def test_failed_assert_trace1( desired_type, wrong_type, returns: ReturnsAsserts, ): """Test if our plugin will identify the container was not created.""" with pytest.raises(pytest.fail.Exception): # noqa: PT012 with returns.assert_trace(desired_type, _create_container_function): _create_container_function(wrong_type, 1) # type: ignore @pytest.mark.parametrize(('desired_type', 'wrong_type'), [ (Success, Failure), (Failure, Success), (IOSuccess, IOFailure), (IOFailure, IOSuccess), ]) def test_failed_assert_trace2( desired_type, wrong_type, returns: ReturnsAsserts, ): """Test if our plugin will identify the container was not created.""" with pytest.raises(pytest.fail.Exception): # noqa: PT012 with returns.assert_trace(desired_type, _create_container_function): _create_container_function_intermediate( # type: ignore wrong_type, 1, ) @pytest.mark.parametrize('container_type', [ # noqa: WPS118 Success, Failure, ]) def test_safe_decorated_assert(container_type, returns: ReturnsAsserts): """Test if our plugin will catch containers from @safe-wrapped functions.""" with returns.assert_trace(container_type, _safe_decorated_function): _safe_decorated_function(return_failure=container_type is Failure) returns-0.24.0/tests/test_converters/000077500000000000000000000000001472312074000176725ustar00rootroot00000000000000returns-0.24.0/tests/test_converters/test_flatten.py000066400000000000000000000077611472312074000227530ustar00rootroot00000000000000import pytest from returns.context import ( RequiresContext, RequiresContextFutureResult, RequiresContextIOResult, RequiresContextResult, ) from returns.converters import flatten from returns.future import Future, FutureResult from returns.io import IO, IOFailure, IOSuccess from returns.maybe import Nothing, Some 
from returns.result import Failure, Success @pytest.mark.parametrize(('container', 'merged'), [ # Flattens: (IO(IO(1)), IO(1)), (Success(Success({})), Success({})), (IOSuccess(IOSuccess(1)), IOSuccess(1)), (Some(Some(None)), Some(None)), (Some(Some([])), Some([])), # Nope: (Nothing, Nothing), (Failure(Failure('a')), Failure(Failure('a'))), (Failure(Success('a')), Failure(Success('a'))), (IOFailure(IOFailure('a')), IOFailure(IOFailure('a'))), (IOFailure(IOSuccess('a')), IOFailure(IOSuccess('a'))), ]) def test_flatten(container, merged): """Ensures that `flatten` is always returning the correct type.""" assert flatten(container) == merged @pytest.mark.parametrize(('container', 'merged'), [ ( RequiresContextResult.from_value( RequiresContextResult.from_value(1), ), RequiresContextResult.from_value(1), ), ( RequiresContextIOResult.from_value( RequiresContextIOResult.from_value(1), ), RequiresContextIOResult.from_value(1), ), ( RequiresContext.from_value(RequiresContext.from_value(1)), RequiresContext.from_value(1), ), ]) def test_flatten_context(container, merged): """Ensures that `flatten` is always returning the correct type.""" assert flatten(container)(...) == merged(...) @pytest.mark.anyio async def test_flatten_future(subtests): """Ensures that `flatten` is always returning the correct type.""" futures = [ # Flattens: (Future.from_value(Future.from_value(1)), Future.from_value(1)), ( FutureResult.from_value(FutureResult.from_value(1)), FutureResult.from_value(1), ), ] for container, merged in futures: with subtests.test(container=container, merged=merged): assert await flatten(container) == await merged # type: ignore @pytest.mark.anyio async def test_flatten_context_future_result(subtests): """Ensures that `flatten` is always returning the correct type.""" futures = [ # Flattens: ( RequiresContextFutureResult.from_value( RequiresContextFutureResult.from_value(1), ), RequiresContextFutureResult.from_value(1), ), ] for container, merged in futures: with subtests.test(container=container, merged=merged): assert await flatten( container, )(...) == await merged(...) @pytest.mark.anyio async def test_non_flatten_future(subtests): """Ensures that `flatten` is always returning the correct type.""" futures = [ # Not flattens: FutureResult.from_failure(FutureResult.from_failure(1)), FutureResult.from_failure(FutureResult.from_value(1)), ] for cont in futures: with subtests.test(container=cont): assert isinstance( (await flatten(cont)).failure()._inner_value, # noqa: WPS437 cont.__class__, ) @pytest.mark.anyio async def test_non_flatten_context_future_result(subtests): """Ensures that `flatten` is always returning the correct type.""" futures = [ # Not flattens: RequiresContextFutureResult.from_failure( RequiresContextFutureResult.from_failure(1), ), RequiresContextFutureResult.from_failure( RequiresContextFutureResult.from_value(1), ), ] for cont in futures: with subtests.test(container=cont): inner = await flatten(cont)(...) 
assert isinstance( inner.failure()._inner_value, # noqa: WPS437 cont.__class__, ) returns-0.24.0/tests/test_curry/000077500000000000000000000000001472312074000166445ustar00rootroot00000000000000returns-0.24.0/tests/test_curry/test_curry.py000066400000000000000000000112051472312074000214200ustar00rootroot00000000000000from inspect import getdoc import pytest from returns.curry import curry def test_docstring(): """Ensures that we preserve docstrings from curried function.""" @curry def factory(arg: int, other: int) -> None: """Some docstring.""" assert getdoc(factory) == 'Some docstring.' def test_immutable(): """Check that arguments from previous calls are immutable.""" @curry def factory(arg: int, other: int) -> tuple[int, int]: return (arg, other) cached = factory(arg=1) assert cached(other=2) == (1, 2) assert cached(other=3) == (1, 3) def test_no_args(): """Ensures that it is possible to curry a function with empty args.""" @curry def factory() -> int: return 1 assert factory() == 1 def test_one_arg(): """Ensures that it is possible to curry a function with one arg.""" @curry def factory(arg: int) -> int: return arg assert factory(1) == 1 assert factory(arg=1) == 1 with pytest.raises(TypeError): factory(other=2) # type: ignore with pytest.raises(TypeError): factory(1, 2) # type: ignore with pytest.raises(TypeError): factory(1)(2) # type: ignore def test_two_args(): """Ensures that it is possible to curry a function with two args.""" @curry def factory(arg: int, other: int) -> tuple[int, int]: return (arg, other) assert factory(1)(2) == (1, 2) assert factory(1, 2) == (1, 2) assert factory(2, other=3) == (2, 3) assert factory(arg=2, other=3) == (2, 3) assert factory(other=3, arg=2) == (2, 3) assert factory(arg=0)(other=5) == (0, 5) assert factory(0)(other=5) == (0, 5) with pytest.raises(TypeError): factory(1, 2, 3) # type: ignore with pytest.raises(TypeError): factory(1, c=2) # type: ignore with pytest.raises(TypeError): factory(1)(c=2) # type: ignore with pytest.raises(TypeError): factory(1)(2)(3) # type: ignore def test_star_args(): """Ensures that it is possible to curry a function with ``*args``.""" @curry def factory(*args: int) -> int: return sum(args) assert factory() == 0 assert factory(1) == 1 assert factory(1, 2) == 3 assert factory(1, 2, 3) == 6 with pytest.raises(TypeError): factory(arg=1) with pytest.raises(TypeError): factory(1, other=2) with pytest.raises(TypeError): factory(1)(2) def test_arg_and_star_args(): """Ensures that it is possible to curry a function with ``*args``.""" @curry def factory(arg: int, *args: int) -> int: return arg + sum(args) assert factory(1) == 1 assert factory(1, 2) == 3 assert factory(1, 2, 3) == 6 with pytest.raises(TypeError): assert factory(1)(2, 3) == 6 def test_star_kwargs(): """Ensures that it is possible to curry a function with ``**kwargs``.""" @curry def factory(**kwargs: int) -> list[tuple[str, int]]: return sorted(kwargs.items()) assert not factory() assert factory(arg=1) == [('arg', 1)] assert factory( arg=1, other=2, ) == [('arg', 1), ('other', 2)] with pytest.raises(TypeError): factory(1) with pytest.raises(TypeError): factory(1, other=2) def test_arg_star_kwargs(): """The decorator should work with ``kwargs``.""" @curry def factory(first: int, **kwargs: int) -> list[tuple[str, int]]: return [('first', first)] + sorted(kwargs.items()) assert factory(1) == [('first', 1)] assert factory(1, arg=2) == [('first', 1), ('arg', 2)] assert factory( first=1, arg=2, ) == [('first', 1), ('arg', 2)] assert factory(1, arg=2, other=3) == [ 
('first', 1), ('arg', 2), ('other', 3), ] with pytest.raises(TypeError): factory(1, 2) with pytest.raises(TypeError): factory(1, first=2) with pytest.raises(TypeError): factory(1, 2, c=2) def test_kwonly(): """The decorator should work with kw-only args.""" @curry def factory(*args: int, by: int) -> tuple[int, ...]: return args + (by, ) assert factory( 1, 2, 3, )(by=10) == (1, 2, 3, 10) assert factory(by=10) == (10, ) def test_raises(): """Exception raised from the function must not be intercepted.""" @curry def factory(arg: int, other: int) -> None: msg = "f() missing 2 required positional arguments: 'a' and 'b'" raise TypeError(msg) with pytest.raises(TypeError): factory(1)(2) with pytest.raises(TypeError): factory(1, 2) with pytest.raises(TypeError): factory(1, 2, 3) # type: ignore returns-0.24.0/tests/test_examples/000077500000000000000000000000001472312074000173165ustar00rootroot00000000000000returns-0.24.0/tests/test_examples/test_context/000077500000000000000000000000001472312074000220415ustar00rootroot00000000000000returns-0.24.0/tests/test_examples/test_context/test_reader_future_result.py000066400000000000000000000045151472312074000277110ustar00rootroot00000000000000from collections.abc import Sequence from typing import Final, cast import anyio # you would need to `pip install anyio` import httpx # you would need to `pip install httpx` from typing_extensions import TypedDict from returns.context import RequiresContextFutureResultE from returns.functions import tap from returns.future import FutureResult, future_safe from returns.iterables import Fold from returns.pipeline import managed from returns.result import ResultE, safe _URL: Final = 'https://jsonplaceholder.typicode.com/posts/{0}' class _Post(TypedDict): id: int user_id: int title: str body: str def _close( client: httpx.AsyncClient, raw_value: ResultE[Sequence[str]], ) -> FutureResult[None, Exception]: return future_safe(client.aclose)() def _fetch_post( post_id: int, ) -> RequiresContextFutureResultE[_Post, httpx.AsyncClient]: context: RequiresContextFutureResultE[ httpx.AsyncClient, httpx.AsyncClient, ] = RequiresContextFutureResultE.ask() return context.bind_future_result( lambda client: future_safe(client.get)(_URL.format(post_id)), ).bind_result( safe(tap(httpx.Response.raise_for_status)), ).map( lambda response: cast(_Post, response.json()), # or validate it ) def _show_titles( number_of_posts: int, ) -> RequiresContextFutureResultE[Sequence[str], httpx.AsyncClient]: def factory(post: _Post) -> str: return post['title'] titles = [ # Notice how easily we compose async and sync functions: _fetch_post(post_id).map(factory) # TODO: try `for post_id in (2, 1, 0):` to see how errors work for post_id in range(1, number_of_posts + 1) ] return Fold.collect(titles, RequiresContextFutureResultE.from_value(())) if __name__ == '__main__': # Let's fetch 3 titles of posts one-by-one, but with async client, # because we want to highlight `managed` in this example: managed_httpx = managed(_show_titles(3), _close) future_result = managed_httpx( FutureResult.from_value(httpx.AsyncClient(timeout=5)), ) print(anyio.run(future_result.awaitable)) # noqa: WPS421 # > returns-0.24.0/tests/test_examples/test_future/000077500000000000000000000000001472312074000216675ustar00rootroot00000000000000returns-0.24.0/tests/test_examples/test_future/test_future_result.py000066400000000000000000000035561472312074000262210ustar00rootroot00000000000000import asyncio # we use `asyncio` only as an example, you can use any io lib from collections.abc 
import Sequence from typing import Final, cast import httpx # you would need to `pip install httpx` from typing_extensions import TypedDict from returns.future import FutureResult, future_safe from returns.io import IOResultE from returns.iterables import Fold _URL: Final = 'https://jsonplaceholder.typicode.com/posts/{0}' class _Post(TypedDict): id: int user_id: int title: str body: str @future_safe async def _fetch_post(post_id: int) -> _Post: # Ideally, we can use `ReaderFutureResult` to provide `client` from deps. async with httpx.AsyncClient(timeout=5) as client: response = await client.get(_URL.format(post_id)) response.raise_for_status() return cast(_Post, response.json()) # or validate the response def _show_titles(number_of_posts: int) -> Sequence[ FutureResult[str, Exception] ]: def factory(post: _Post) -> str: return post['title'] return [ # Notice how easily we compose async and sync functions: _fetch_post(post_id).map(factory) # TODO: try `for post_id in (2, 1, 0):` to see how async errors work for post_id in range(1, number_of_posts + 1) ] async def main() -> IOResultE[Sequence[str]]: """ Main entrypoint for the async world. Let's fetch 3 titles of posts asynchronously. We use `gather` to run requests in "parallel". """ futures: Sequence[IOResultE[str]] = await asyncio.gather(*_show_titles(3)) return Fold.collect(futures, IOResultE.from_value(())) if __name__ == '__main__': print(asyncio.run(main())) # noqa: WPS421 # > returns-0.24.0/tests/test_examples/test_io/000077500000000000000000000000001472312074000207645ustar00rootroot00000000000000returns-0.24.0/tests/test_examples/test_io/test_ioresult_container/000077500000000000000000000000001472312074000257335ustar00rootroot00000000000000returns-0.24.0/tests/test_examples/test_io/test_ioresult_container/test_ioresult_pattern_matching.py000066400000000000000000000013451472312074000346240ustar00rootroot00000000000000from returns.io import IOFailure, IOResult, IOSuccess from returns.result import Success container: IOResult[int, str] = IOSuccess(42) match container: # Matches if the result stored inside `IOSuccess` is `42` # We need to use `Success` until the custom matching protocol # is released. 
For more information, please visit: # https://www.python.org/dev/peps/pep-0622/#custom-matching-protocol case IOSuccess(Success(42)): print('Result is "42"') # Matches any `IOSuccess` instance # and binds its value to the `value` variable case IOSuccess(value): print('Result is "{0}"'.format(value)) # Matches any `IOFailure` instance case IOFailure(_): print('A failure occurred') returns-0.24.0/tests/test_examples/test_maybe/000077500000000000000000000000001472312074000214525ustar00rootroot00000000000000returns-0.24.0/tests/test_examples/test_maybe/test_maybe_pattern_matching.py000066400000000000000000000021461472312074000275720ustar00rootroot00000000000000from dataclasses import dataclass from typing import Final from returns.maybe import Maybe, Nothing, Some @dataclass class _Book: book_id: int name: str _BOOK_LIST: Final = ( _Book(book_id=1, name='Category Theory for Programmers'), _Book(book_id=2, name='Fluent Python'), _Book(book_id=3, name='Learn You Some Erlang for Great Good'), _Book(book_id=4, name='Learn You a Haskell for Great Good'), ) def _find_book(book_id: int) -> Maybe[_Book]: for book in _BOOK_LIST: if book.book_id == book_id: return Some(book) return Nothing if __name__ == '__main__': desired_book = _find_book(2) match desired_book: # Matches any `Some` instance that contains a book named `Fluent Python` case Some(_Book(name='Fluent Python')): print('"Fluent Python" was found') # Matches any `Some` instance and binds its value to the `book` variable case Some(book): print('Book found: {0}'.format(book.name)) # Matches `Nothing` instance case Maybe.empty: print('The desired book was not found!') returns-0.24.0/tests/test_examples/test_result/000077500000000000000000000000001472312074000216735ustar00rootroot00000000000000returns-0.24.0/tests/test_examples/test_result/test_result_pattern_matching.py000066400000000000000000000013031472312074000302260ustar00rootroot00000000000000from returns.result import Failure, Success, safe @safe def div(first_number: int, second_number: int) -> int: return first_number // second_number match div(1, 0): # Matches if the result stored inside `Success` is `10` case Success(10): print('Result is "10"') # Matches any `Success` instance and binds its value to the `value` variable case Success(value): print('Result is "{0}"'.format(value)) # Matches if the result stored inside `Failure` is `ZeroDivisionError` case Failure(ZeroDivisionError()): print('"ZeroDivisionError" was raised') # Matches any `Failure` instance case Failure(_): print('The division was a failure') returns-0.24.0/tests/test_examples/test_your_container/000077500000000000000000000000001472312074000234155ustar00rootroot00000000000000returns-0.24.0/tests/test_examples/test_your_container/test_pair1.py000066400000000000000000000045001472312074000260410ustar00rootroot00000000000000from collections.abc import Callable from typing import TypeVar, final from returns.interfaces import bindable, equable, lashable, swappable from returns.primitives.container import BaseContainer, container_equality from returns.primitives.hkt import Kind2, SupportsKind2, dekind _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _NewFirstType = TypeVar('_NewFirstType') _NewSecondType = TypeVar('_NewSecondType') @final class Pair( BaseContainer, SupportsKind2['Pair', _FirstType, _SecondType], bindable.Bindable2[_FirstType, _SecondType], swappable.Swappable2[_FirstType, _SecondType], lashable.Lashable2[_FirstType, _SecondType], equable.Equable, ): """ A type that represents a pair of
something. Like to coordinates ``(x, y)`` or two best friends. Or a question and an answer. """ def __init__( self, inner_value: tuple[_FirstType, _SecondType], ) -> None: """Saves passed tuple as ``._inner_value`` inside this instance.""" super().__init__(inner_value) # `Equable` part: equals = container_equality # we already have this defined for all types # `Mappable` part via `BiMappable`: def map( self, function: Callable[[_FirstType], _NewFirstType], ) -> 'Pair[_NewFirstType, _SecondType]': return Pair((function(self._inner_value[0]), self._inner_value[1])) # `BindableN` part: def bind( self, function: Callable[ [_FirstType], Kind2['Pair', _NewFirstType, _SecondType], ], ) -> 'Pair[_NewFirstType, _SecondType]': return dekind(function(self._inner_value[0])) # `AltableN` part via `BiMappableN`: def alt( self, function: Callable[[_SecondType], _NewSecondType], ) -> 'Pair[_FirstType, _NewSecondType]': return Pair((self._inner_value[0], function(self._inner_value[1]))) # `LashableN` part: def lash( self, function: Callable[ [_SecondType], Kind2['Pair', _FirstType, _NewSecondType], ], ) -> 'Pair[_FirstType, _NewSecondType]': return dekind(function(self._inner_value[1])) # `SwappableN` part: def swap(self) -> 'Pair[_SecondType, _FirstType]': return Pair((self._inner_value[1], self._inner_value[0])) returns-0.24.0/tests/test_examples/test_your_container/test_pair2.py000066400000000000000000000107431472312074000260500ustar00rootroot00000000000000from abc import abstractmethod from collections.abc import Callable from typing import TypeVar, final from typing_extensions import Never from returns.interfaces import bindable, equable, lashable, swappable from returns.primitives.container import BaseContainer, container_equality from returns.primitives.hkt import Kind2, KindN, SupportsKind2, dekind _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _NewFirstType = TypeVar('_NewFirstType') _NewSecondType = TypeVar('_NewSecondType') _PairLikeKind = TypeVar('_PairLikeKind', bound='PairLikeN') class PairLikeN( bindable.BindableN[_FirstType, _SecondType, _ThirdType], swappable.SwappableN[_FirstType, _SecondType, _ThirdType], lashable.LashableN[_FirstType, _SecondType, _ThirdType], equable.Equable, ): """Special interface for types that look like a ``Pair``.""" @abstractmethod def pair( self: _PairLikeKind, function: Callable[ [_FirstType, _SecondType], KindN[_PairLikeKind, _NewFirstType, _NewSecondType, _ThirdType], ], ) -> KindN[_PairLikeKind, _NewFirstType, _NewSecondType, _ThirdType]: """Allows to work with both arguments at the same time.""" @classmethod @abstractmethod def from_paired( cls: type[_PairLikeKind], first: _NewFirstType, second: _NewSecondType, ) -> KindN[_PairLikeKind, _NewFirstType, _NewSecondType, _ThirdType]: """Allows to create a PairLikeN from just two values.""" @classmethod @abstractmethod def from_unpaired( cls: type[_PairLikeKind], inner_value: _NewFirstType, ) -> KindN[_PairLikeKind, _NewFirstType, _NewFirstType, _ThirdType]: """Allows to create a PairLikeN from just a single object.""" PairLike2 = PairLikeN[_FirstType, _SecondType, Never] PairLike3 = PairLikeN[_FirstType, _SecondType, _ThirdType] @final class Pair( BaseContainer, SupportsKind2['Pair', _FirstType, _SecondType], PairLike2[_FirstType, _SecondType], ): """ A type that represents a pair of something. Like to coordinates ``(x, y)`` or two best friends. Or a question and an answer. 
""" def __init__( self, inner_value: tuple[_FirstType, _SecondType], ) -> None: """Saves passed tuple as ``._inner_value`` inside this instance.""" super().__init__(inner_value) # `Equable` part: equals = container_equality # we already have this defined for all types # `Mappable` part via `BiMappable`: def map( self, function: Callable[[_FirstType], _NewFirstType], ) -> 'Pair[_NewFirstType, _SecondType]': """Changes the first type with a pure function.""" return Pair((function(self._inner_value[0]), self._inner_value[1])) # `BindableN` part: def bind( self, function: Callable[ [_FirstType], Kind2['Pair', _NewFirstType, _SecondType], ], ) -> 'Pair[_NewFirstType, _SecondType]': """Changes the first type with a function returning another Pair.""" return dekind(function(self._inner_value[0])) # `AltableN` part via `BiMappableN`: def alt( self, function: Callable[[_SecondType], _NewSecondType], ) -> 'Pair[_FirstType, _NewSecondType]': return Pair((self._inner_value[0], function(self._inner_value[1]))) # `LashableN` part: def lash( self, function: Callable[ [_SecondType], Kind2['Pair', _FirstType, _NewSecondType], ], ) -> 'Pair[_FirstType, _NewSecondType]': return dekind(function(self._inner_value[1])) # `SwappableN` part: def swap(self) -> 'Pair[_SecondType, _FirstType]': return Pair((self._inner_value[1], self._inner_value[0])) # `PairLikeN` part: def pair( self, function: Callable[ [_FirstType, _SecondType], Kind2['Pair', _NewFirstType, _NewSecondType], ], ) -> 'Pair[_NewFirstType, _NewSecondType]': return dekind(function(self._inner_value[0], self._inner_value[1])) @classmethod def from_paired( cls, first: _NewFirstType, second: _NewSecondType, ) -> 'Pair[_NewFirstType, _NewSecondType]': return Pair((first, second)) @classmethod def from_unpaired( cls, inner_value: _NewFirstType, ) -> 'Pair[_NewFirstType, _NewFirstType]': return Pair((inner_value, inner_value)) returns-0.24.0/tests/test_examples/test_your_container/test_pair3.py000066400000000000000000000135541472312074000260540ustar00rootroot00000000000000from abc import abstractmethod from collections.abc import Callable from typing import TypeVar, final from typing_extensions import Never from returns.interfaces import bindable, equable, lashable, swappable from returns.primitives.container import BaseContainer, container_equality from returns.primitives.hkt import Kind2, KindN, SupportsKind2, dekind _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _NewFirstType = TypeVar('_NewFirstType') _NewSecondType = TypeVar('_NewSecondType') _PairLikeKind = TypeVar('_PairLikeKind', bound='PairLikeN') class PairLikeN( bindable.BindableN[_FirstType, _SecondType, _ThirdType], swappable.SwappableN[_FirstType, _SecondType, _ThirdType], lashable.LashableN[_FirstType, _SecondType, _ThirdType], equable.Equable, ): """Special interface for types that look like a ``Pair``.""" @abstractmethod def pair( self: _PairLikeKind, function: Callable[ [_FirstType, _SecondType], KindN[_PairLikeKind, _NewFirstType, _NewSecondType, _ThirdType], ], ) -> KindN[_PairLikeKind, _NewFirstType, _NewSecondType, _ThirdType]: """Allows to work with both arguments at the same time.""" @classmethod @abstractmethod def from_paired( cls: type[_PairLikeKind], first: _NewFirstType, second: _NewSecondType, ) -> KindN[_PairLikeKind, _NewFirstType, _NewSecondType, _ThirdType]: """Allows to create a PairLikeN from just two values.""" @classmethod @abstractmethod def from_unpaired( cls: type[_PairLikeKind], inner_value: _NewFirstType, 
) -> KindN[_PairLikeKind, _NewFirstType, _NewFirstType, _ThirdType]: """Allows to create a PairLikeN from just a single object.""" PairLike2 = PairLikeN[_FirstType, _SecondType, Never] PairLike3 = PairLikeN[_FirstType, _SecondType, _ThirdType] @final class Pair( BaseContainer, SupportsKind2['Pair', _FirstType, _SecondType], PairLike2[_FirstType, _SecondType], ): """ A type that represents a pair of something. Like to coordinates ``(x, y)`` or two best friends. Or a question and an answer. """ def __init__( self, inner_value: tuple[_FirstType, _SecondType], ) -> None: """Saves passed tuple as ``._inner_value`` inside this instance.""" super().__init__(inner_value) # `Equable` part: equals = container_equality # we already have this defined for all types # `Mappable` part via `BiMappable`: def map( self, function: Callable[[_FirstType], _NewFirstType], ) -> 'Pair[_NewFirstType, _SecondType]': """ Changes the first type with a pure function. >>> assert Pair((1, 2)).map(str) == Pair(('1', 2)) """ return Pair((function(self._inner_value[0]), self._inner_value[1])) # `BindableN` part: def bind( self, function: Callable[ [_FirstType], Kind2['Pair', _NewFirstType, _SecondType], ], ) -> 'Pair[_NewFirstType, _SecondType]': """ Changes the first type with a function returning another ``Pair``. >>> def bindable(first: int) -> Pair[str, str]: ... return Pair((str(first), '')) >>> assert Pair((1, 'b')).bind(bindable) == Pair(('1', '')) """ return dekind(function(self._inner_value[0])) # `AltableN` part via `BiMappableN`: def alt( self, function: Callable[[_SecondType], _NewSecondType], ) -> 'Pair[_FirstType, _NewSecondType]': """ Changes the second type with a pure function. >>> assert Pair((1, 2)).alt(str) == Pair((1, '2')) """ return Pair((self._inner_value[0], function(self._inner_value[1]))) # `LashableN` part: def lash( self, function: Callable[ [_SecondType], Kind2['Pair', _FirstType, _NewSecondType], ], ) -> 'Pair[_FirstType, _NewSecondType]': """ Changes the second type with a function returning ``Pair``. >>> def lashable(second: int) -> Pair[str, str]: ... return Pair(('', str(second))) >>> assert Pair(('a', 2)).lash(lashable) == Pair(('', '2')) """ return dekind(function(self._inner_value[1])) # `SwappableN` part: def swap(self) -> 'Pair[_SecondType, _FirstType]': """ Swaps ``Pair`` elements. >>> assert Pair((1, 2)).swap() == Pair((2, 1)) """ return Pair((self._inner_value[1], self._inner_value[0])) # `PairLikeN` part: def pair( self, function: Callable[ [_FirstType, _SecondType], Kind2['Pair', _NewFirstType, _NewSecondType], ], ) -> 'Pair[_NewFirstType, _NewSecondType]': """ Creates a new ``Pair`` from an existing one via a passed function. >>> def min_max(first: int, second: int) -> Pair[int, int]: ... return Pair((min(first, second), max(first, second))) >>> assert Pair((2, 1)).pair(min_max) == Pair((1, 2)) >>> assert Pair((1, 2)).pair(min_max) == Pair((1, 2)) """ return dekind(function(self._inner_value[0], self._inner_value[1])) @classmethod def from_paired( cls, first: _NewFirstType, second: _NewSecondType, ) -> 'Pair[_NewFirstType, _NewSecondType]': """ Creates a new pair from two values. >>> assert Pair.from_paired(1, 2) == Pair((1, 2)) """ return Pair((first, second)) @classmethod def from_unpaired( cls, inner_value: _NewFirstType, ) -> 'Pair[_NewFirstType, _NewFirstType]': """ Creates a new pair from a single value. 
>>> assert Pair.from_unpaired(1) == Pair((1, 1)) """ return Pair((inner_value, inner_value)) returns-0.24.0/tests/test_examples/test_your_container/test_pair4.py000066400000000000000000000162641472312074000260560ustar00rootroot00000000000000from abc import abstractmethod from collections.abc import Callable, Sequence from typing import ClassVar, TypeVar, final from typing_extensions import Never from returns.contrib.hypothesis.laws import check_all_laws from returns.interfaces import bindable, equable, lashable, swappable from returns.primitives.asserts import assert_equal from returns.primitives.container import BaseContainer, container_equality from returns.primitives.hkt import Kind2, KindN, SupportsKind2, dekind from returns.primitives.laws import Law, Law2, Law3, LawSpecDef, law_definition _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _NewFirstType = TypeVar('_NewFirstType') _NewSecondType = TypeVar('_NewSecondType') _PairLikeKind = TypeVar('_PairLikeKind', bound='PairLikeN') class _LawSpec(LawSpecDef): @law_definition def pair_equality_law( raw_value: _FirstType, container: 'PairLikeN[_FirstType, _SecondType, _ThirdType]', ) -> None: """Ensures that unpaired and paired constructors work fine.""" assert_equal( container.from_unpaired(raw_value), container.from_paired(raw_value, raw_value), ) @law_definition def pair_left_identity_law( pair: tuple[_FirstType, _SecondType], container: 'PairLikeN[_FirstType, _SecondType, _ThirdType]', function: Callable[ [_FirstType, _SecondType], KindN['PairLikeN', _NewFirstType, _NewSecondType, _ThirdType], ], ) -> None: """Ensures that unpaired and paired constructors work fine.""" assert_equal( container.from_paired(*pair).pair(function), function(*pair), ) class PairLikeN( bindable.BindableN[_FirstType, _SecondType, _ThirdType], swappable.SwappableN[_FirstType, _SecondType, _ThirdType], lashable.LashableN[_FirstType, _SecondType, _ThirdType], equable.Equable, ): """Special interface for types that look like a ``Pair``.""" _laws: ClassVar[Sequence[Law]] = ( Law2(_LawSpec.pair_equality_law), Law3(_LawSpec.pair_left_identity_law), ) @abstractmethod def pair( self: _PairLikeKind, function: Callable[ [_FirstType, _SecondType], KindN[_PairLikeKind, _NewFirstType, _NewSecondType, _ThirdType], ], ) -> KindN[_PairLikeKind, _NewFirstType, _NewSecondType, _ThirdType]: """Allows to work with both arguments at the same time.""" @classmethod @abstractmethod def from_paired( cls: type[_PairLikeKind], first: _NewFirstType, second: _NewSecondType, ) -> KindN[_PairLikeKind, _NewFirstType, _NewSecondType, _ThirdType]: """Allows to create a PairLikeN from just two values.""" @classmethod @abstractmethod def from_unpaired( cls: type[_PairLikeKind], inner_value: _NewFirstType, ) -> KindN[_PairLikeKind, _NewFirstType, _NewFirstType, _ThirdType]: """Allows to create a PairLikeN from just a single object.""" PairLike2 = PairLikeN[_FirstType, _SecondType, Never] PairLike3 = PairLikeN[_FirstType, _SecondType, _ThirdType] @final class Pair( BaseContainer, SupportsKind2['Pair', _FirstType, _SecondType], PairLike2[_FirstType, _SecondType], ): """ A type that represents a pair of something. Like to coordinates ``(x, y)`` or two best friends. Or a question and an answer. 
""" def __init__( self, inner_value: tuple[_FirstType, _SecondType], ) -> None: """Saves passed tuple as ``._inner_value`` inside this instance.""" super().__init__(inner_value) # `Equable` part: equals = container_equality # we already have this defined for all types # `Mappable` part via `BiMappable`: def map( self, function: Callable[[_FirstType], _NewFirstType], ) -> 'Pair[_NewFirstType, _SecondType]': """ Changes the first type with a pure function. >>> assert Pair((1, 2)).map(str) == Pair(('1', 2)) """ return Pair((function(self._inner_value[0]), self._inner_value[1])) # `BindableN` part: def bind( self, function: Callable[ [_FirstType], Kind2['Pair', _NewFirstType, _SecondType], ], ) -> 'Pair[_NewFirstType, _SecondType]': """ Changes the first type with a function returning another ``Pair``. >>> def bindable(first: int) -> Pair[str, str]: ... return Pair((str(first), '')) >>> assert Pair((1, 'b')).bind(bindable) == Pair(('1', '')) """ return dekind(function(self._inner_value[0])) # `AltableN` part via `BiMappableN`: def alt( self, function: Callable[[_SecondType], _NewSecondType], ) -> 'Pair[_FirstType, _NewSecondType]': """ Changes the second type with a pure function. >>> assert Pair((1, 2)).alt(str) == Pair((1, '2')) """ return Pair((self._inner_value[0], function(self._inner_value[1]))) # `LashableN` part: def lash( self, function: Callable[ [_SecondType], Kind2['Pair', _FirstType, _NewSecondType], ], ) -> 'Pair[_FirstType, _NewSecondType]': """ Changes the second type with a function returning ``Pair``. >>> def lashable(second: int) -> Pair[str, str]: ... return Pair(('', str(second))) >>> assert Pair(('a', 2)).lash(lashable) == Pair(('', '2')) """ return dekind(function(self._inner_value[1])) # `SwappableN` part: def swap(self) -> 'Pair[_SecondType, _FirstType]': """ Swaps ``Pair`` elements. >>> assert Pair((1, 2)).swap() == Pair((2, 1)) """ return Pair((self._inner_value[1], self._inner_value[0])) # `PairLikeN` part: def pair( self, function: Callable[ [_FirstType, _SecondType], Kind2['Pair', _NewFirstType, _NewSecondType], ], ) -> 'Pair[_NewFirstType, _NewSecondType]': """ Creates a new ``Pair`` from an existing one via a passed function. >>> def min_max(first: int, second: int) -> Pair[int, int]: ... return Pair((min(first, second), max(first, second))) >>> assert Pair((2, 1)).pair(min_max) == Pair((1, 2)) >>> assert Pair((1, 2)).pair(min_max) == Pair((1, 2)) """ return dekind(function(self._inner_value[0], self._inner_value[1])) @classmethod def from_paired( cls, first: _NewFirstType, second: _NewSecondType, ) -> 'Pair[_NewFirstType, _NewSecondType]': """ Creates a new pair from two values. >>> assert Pair.from_paired(1, 2) == Pair((1, 2)) """ return Pair((first, second)) @classmethod def from_unpaired( cls, inner_value: _NewFirstType, ) -> 'Pair[_NewFirstType, _NewFirstType]': """ Creates a new pair from a single value. 
>>> assert Pair.from_unpaired(1) == Pair((1, 1)) """ return Pair((inner_value, inner_value)) # Running hypothesis auto-generated tests: check_all_laws(Pair, use_init=True) returns-0.24.0/tests/test_functions/000077500000000000000000000000001472312074000175105ustar00rootroot00000000000000returns-0.24.0/tests/test_functions/test_compose.py000066400000000000000000000006301472312074000225650ustar00rootroot00000000000000from returns.functions import compose def _first(argument: int) -> str: return str(argument) def _second(argument: str) -> bool: return bool(argument) def test_function_composition(): """Ensures that functions can be composed and return type is correct.""" second_after_first = compose(_first, _second) assert second_after_first(1) is True assert second_after_first(0) is True returns-0.24.0/tests/test_functions/test_raise_exception.py000066400000000000000000000015331472312074000243040ustar00rootroot00000000000000 import pytest from returns.functions import raise_exception from returns.result import Failure, Success class _CustomException(Exception): """Just for the test.""" @pytest.mark.parametrize('exception_type', [ TypeError, ValueError, _CustomException, ]) def test_raise_regular_exception(exception_type: type[Exception]): """Ensures that regular exception can be thrown.""" with pytest.raises(exception_type): raise_exception(exception_type()) def test_failure_can_be_alted(): """Ensures that exceptions can work with Failures.""" failure = Failure(ValueError('Message')) with pytest.raises(ValueError, match='Message'): failure.alt(raise_exception) def test_success_is_not_touched(): """Ensures that exceptions can work with Success.""" assert Success(1).alt(raise_exception) == Success(1) returns-0.24.0/tests/test_future/000077500000000000000000000000001472312074000170125ustar00rootroot00000000000000returns-0.24.0/tests/test_future/test_future_container/000077500000000000000000000000001472312074000234255ustar00rootroot00000000000000returns-0.24.0/tests/test_future/test_future_container/test_asyncize_decorator.py000066400000000000000000000004511472312074000307250ustar00rootroot00000000000000import pytest from returns.future import asyncify @asyncify def _function(arg: int) -> float: return arg / 2 @pytest.mark.anyio async def test_asyncify_decorator(): """Ensure that function marked with ``@asyncify`` is awaitable.""" coro = _function(2) assert await coro == 1 returns-0.24.0/tests/test_future/test_future_container/test_future_decorator.py000066400000000000000000000006211472312074000304110ustar00rootroot00000000000000import pytest from returns.future import Future, future from returns.io import IO @future async def _coro(arg: int) -> float: return arg / 2 @pytest.mark.anyio async def test_safe_decorator(): """Ensure that coroutine marked with ``@future`` returns ``Future``.""" future_instance = _coro(1) assert isinstance(future_instance, Future) assert await future_instance == IO(0.5) returns-0.24.0/tests/test_future/test_future_container/test_future_equality.py000066400000000000000000000006261472312074000302710ustar00rootroot00000000000000import pytest from returns.future import Future def test_nonequality(): """Ensures that containers can be compared.""" assert Future.from_value(1) != Future.from_value(1) assert hash(Future.from_value(1)) @pytest.mark.anyio async def test_equality(): """Ensures that containers are not compared to regular values.""" assert await Future.from_value(2) == await Future.from_value(2) 
returns-0.24.0/tests/test_future/test_future_container/test_future_units.py000066400000000000000000000015231472312074000275730ustar00rootroot00000000000000from collections.abc import Awaitable from typing import Any import pytest from returns.future import Future, FutureResult from returns.io import IO @pytest.mark.anyio async def test_inner_value(subtests): """Ensure that coroutine correct value is preserved for all units.""" containers: list[Awaitable[Any]] = [ # We have to define these values inside the test, because # otherwise `anyio` will `await` reused coroutines. # And they have to be fresh. That's why we use subtests for it. Future.from_value(1), Future.from_io(IO(1)), Future.from_future_result(FutureResult.from_value(1)), Future.from_future_result(FutureResult.from_failure(1)), ] for container in containers: with subtests.test(container=container): assert isinstance(await container, IO) returns-0.24.0/tests/test_future/test_future_result/000077500000000000000000000000001472312074000227615ustar00rootroot00000000000000returns-0.24.0/tests/test_future/test_future_result/test_future_result_decorator.py000066400000000000000000000031651472312074000313510ustar00rootroot00000000000000 import pytest from returns.future import FutureResult, future_safe from returns.io import IOFailure, IOSuccess @future_safe async def _coro(arg: int) -> float: return 1 / arg @future_safe(exceptions=(ZeroDivisionError,)) async def _coro_two(arg: int) -> float: return 1 / arg @future_safe((ZeroDivisionError,)) async def _coro_three(arg: int | str) -> float: assert isinstance(arg, int) return 1 / arg @pytest.mark.anyio async def test_future_safe_decorator(): """Ensure that coroutine marked with ``@future_safe``.""" future_instance = _coro(2) assert isinstance(future_instance, FutureResult) assert await future_instance == IOSuccess(0.5) @pytest.mark.anyio async def test_future_safe_decorator_failure(): """Ensure that coroutine marked with ``@future_safe``.""" future_instance = _coro(0) assert isinstance(future_instance, FutureResult) assert isinstance(await future_instance, IOFailure) @pytest.mark.anyio async def test_future_safe_decorator_w_expected_error(subtests): """Ensure that coroutine marked with ``@future_safe``.""" expected = '>' for future_instance in (_coro_two(0), _coro_three(0)): with subtests.test(future_instance=future_instance): assert isinstance(future_instance, FutureResult) inner_result = await future_instance assert str(inner_result) == expected @pytest.mark.anyio @pytest.mark.xfail(raises=AssertionError) async def test_future_safe_decorator_w_unexpected_error(): """Ensure that coroutine marked with ``@future_safe``.""" await _coro_three('0') returns-0.24.0/tests/test_future/test_future_result/test_future_result_equality.py000066400000000000000000000007111472312074000312160ustar00rootroot00000000000000import pytest from returns.future import FutureResult def test_nonequality(): """Ensures that containers can be compared.""" assert FutureResult.from_value(1) != FutureResult.from_value(1) assert hash(FutureResult.from_value(1)) @pytest.mark.anyio async def test_equality(): """Ensures that containers are not compared to regular values.""" assert await FutureResult.from_value( 2, ) == await FutureResult.from_value(2) returns-0.24.0/tests/test_future/test_future_result/test_future_result_units.py000066400000000000000000000023421472312074000305250ustar00rootroot00000000000000import pytest from returns.future import Future, FutureResult from returns.io import IO, IOFailure, IOSuccess from 
returns.result import Failure, Success @pytest.mark.anyio async def test_inner_value(subtests): """Ensure that coroutine correct value is preserved for all units.""" containers = [ # We have to define these values inside the test, because # otherwise `anyio` will `await` reused coroutines. # And they have to be fresh. That's why we use subtests for it. FutureResult.from_value(1), FutureResult.from_failure(1), FutureResult.from_io(IO(1)), FutureResult.from_failed_io(IO(1)), FutureResult.from_ioresult(IOSuccess(1)), FutureResult.from_ioresult(IOFailure(1)), FutureResult.from_result(Success(1)), FutureResult.from_result(Failure(1)), FutureResult.from_future(Future.from_value(1)), FutureResult.from_failed_future(Future.from_value(1)), FutureResult.from_typecast(Future.from_value(Success(1))), ] for container in containers: with subtests.test(container=container): result_inst = await container assert result_inst._inner_value._inner_value == 1 # noqa: WPS437 returns-0.24.0/tests/test_io/000077500000000000000000000000001472312074000161075ustar00rootroot00000000000000returns-0.24.0/tests/test_io/test_io_container/000077500000000000000000000000001472312074000216175ustar00rootroot00000000000000returns-0.24.0/tests/test_io/test_io_container/test_io.py000066400000000000000000000015451472312074000236440ustar00rootroot00000000000000import pytest from returns.io import IO, IOFailure, IOResult, IOSuccess def test_io_map(): """Ensures that IO container supports ``.map()`` method.""" io: IO[float] = IO(1).map( lambda number: number / 2, ) assert io == IO(0.5) def test_io_bind(): """Ensures that IO container supports ``.bind()`` method.""" io: IO[int] = IO('1').bind( lambda number: IO(int(number)), ) assert io == IO(1) def test_io_str(): """Ensures that IO container supports str cast.""" assert str(IO([])) == '' @pytest.mark.parametrize('container', [ IOSuccess(1), IOFailure(1), ]) def test_io_typecast_reverse(container): """Ensures that IO can be casted to IOResult and back.""" assert IO.from_ioresult(container) == IO.from_ioresult( IOResult.from_typecast(IO.from_ioresult(container)), ) returns-0.24.0/tests/test_io/test_io_container/test_io_equality.py000066400000000000000000000012041472312074000255510ustar00rootroot00000000000000from returns.io import IO def test_equals(): """Ensures that ``.equals`` method works correctly.""" assert IO(1).equals(IO(1)) assert IO(1).equals(IO.from_value(1)) def test_not_equals(): """Ensures that ``.equals`` method works correctly.""" assert not IO(1).equals(IO('a')) def test_equality(): """Ensures that containers can be compared.""" assert IO(1) == IO(1) assert str(IO(2)) == '' assert hash(IO((1, 2, 3))) def test_nonequality(): """Ensures that containers are not compared to regular values.""" assert IO(1) != 1 assert IO(2) is not IO(2) assert IO('a') != IO('b') returns-0.24.0/tests/test_io/test_io_container/test_io_functions/000077500000000000000000000000001472312074000253555ustar00rootroot00000000000000returns-0.24.0/tests/test_io/test_io_container/test_io_functions/test_impure.py000066400000000000000000000004661472312074000302750ustar00rootroot00000000000000from returns.io import IO, impure def _fake_impure_function(some_param: int) -> int: return some_param def test_impure(): """Ensures that impure returns IO container.""" impure_result = impure(_fake_impure_function)(1) assert isinstance(impure_result, IO) assert impure_result == IO(1) 
returns-0.24.0/tests/test_io/test_io_container/test_io_pickle.py000066400000000000000000000006021472312074000251640ustar00rootroot00000000000000from returns.io import IO def test_io_pickle(): """Tests how pickle protocol works for containers.""" assert IO(1).__getstate__() == {'container_value': 1} # noqa: WPS609 def test_io_pickle_restore(): """Ensures that object can be restored.""" container = IO(2) container.__setstate__({'container_value': 1}) # noqa: WPS609, E501 assert container == IO(1) returns-0.24.0/tests/test_io/test_ioresult_container/000077500000000000000000000000001472312074000230565ustar00rootroot00000000000000returns-0.24.0/tests/test_io/test_ioresult_container/test_ioresult_bind.py000066400000000000000000000047331472312074000273400ustar00rootroot00000000000000from returns.io import IOFailure, IOResult, IOSuccess from returns.result import Failure, Result, Success def test_bind(): """Ensures that bind works.""" def factory(inner_value: int) -> IOResult[int, str]: if inner_value > 0: return IOSuccess(inner_value * 2) return IOFailure(str(inner_value)) input_value = 5 bound: IOResult[int, str] = IOSuccess(input_value) assert bound.bind(factory) == factory(input_value) assert str(bound.bind(factory)) == '>' input_value = 0 bound2: IOResult[int, str] = IOSuccess(input_value) assert bound2.bind(factory) == factory(input_value) assert str(bound2.bind(factory)) == '>' def test_left_identity_success(): """Ensures that left identity works for IOSuccess container.""" def factory(inner_value: int) -> IOResult[int, str]: return IOSuccess(inner_value * 2) input_value = 5 bound: IOResult[int, str] = IOSuccess(input_value) assert bound.bind(factory) == factory(input_value) def test_left_identity_failure(): """Ensures that left identity works for IOFailure container.""" def factory(inner_value: int) -> IOResult[int, int]: return IOFailure(6) input_value = 5 bound: IOResult[int, int] = IOFailure(input_value) assert bound.bind(factory) == IOFailure(input_value) def test_bind_regular_result(): """Ensures that regular ``Result`` can be bound to ``IOResult``.""" def factory(inner_value: int) -> Result[int, str]: if inner_value > 0: return Success(inner_value + 1) return Failure('nope') first: IOResult[int, str] = IOSuccess(1) second: IOResult[int, str] = IOSuccess(0) third: IOResult[int, str] = IOFailure('a') assert first.bind_result(factory) == IOSuccess(2) assert second.bind_result(factory) == IOFailure('nope') assert third.bind_result(factory) == IOFailure('a') def test_lash_success(): """Ensures that lash works for IOSuccess container.""" def factory(inner_value) -> IOResult[int, str]: return IOSuccess(inner_value * 2) bound = IOSuccess(5).lash(factory) assert bound == IOSuccess(5) def test_lash_failure(): """Ensures that lash works for IOFailure container.""" def factory(inner_value: int) -> IOResult[str, int]: return IOFailure(inner_value + 1) expected = 6 bound: IOResult[str, int] = IOFailure(5) assert bound.lash(factory) == IOFailure(expected) returns-0.24.0/tests/test_io/test_ioresult_container/test_ioresult_equals.py000066400000000000000000000011741472312074000277120ustar00rootroot00000000000000from returns.io import IOFailure, IOSuccess def test_equals(): """Ensures that ``.equals`` method works correctly.""" inner_value = 1 assert IOSuccess(inner_value).equals(IOSuccess(inner_value)) assert IOFailure(inner_value).equals(IOFailure(inner_value)) def test_not_equals(): """Ensures that ``.equals`` method works correctly.""" inner_value = 1 assert not 
IOSuccess(inner_value).equals(IOFailure(inner_value)) assert not IOSuccess(inner_value).equals(IOSuccess(0)) assert not IOFailure(inner_value).equals(IOSuccess(inner_value)) assert not IOFailure(inner_value).equals(IOFailure(0)) returns-0.24.0/tests/test_io/test_ioresult_container/test_ioresult_functions/000077500000000000000000000000001472312074000300535ustar00rootroot00000000000000returns-0.24.0/tests/test_io/test_ioresult_container/test_ioresult_functions/test_impure_safe.py000066400000000000000000000026261472312074000337710ustar00rootroot00000000000000 import pytest from returns.io import IOSuccess, impure_safe @impure_safe def _function(number: int) -> float: return number / number @impure_safe(exceptions=(ZeroDivisionError,)) def _function_two(number: int | str) -> float: assert isinstance(number, int) return number / number @impure_safe((ZeroDivisionError,)) # no name def _function_three(number: int | str) -> float: assert isinstance(number, int) return number / number def test_safe_iosuccess(): """Ensures that safe decorator works correctly for IOSuccess case.""" assert _function(1) == IOSuccess(1.0) def test_safe_iofailure(): """Ensures that safe decorator works correctly for IOFailure case.""" failed = _function(0) assert isinstance( failed.failure()._inner_value, ZeroDivisionError, # noqa: WPS437 ) def test_safe_failure_with_expected_error(): """Ensures that safe decorator works correctly for Failure case.""" failed = _function_two(0) assert isinstance( failed.failure()._inner_value, # noqa: WPS437 ZeroDivisionError, ) failed2 = _function_three(0) assert isinstance( failed2.failure()._inner_value, # noqa: WPS437 ZeroDivisionError, ) def test_safe_failure_with_non_expected_error(): """Ensures that safe decorator works correctly for Failure case.""" with pytest.raises(AssertionError): _function_two('0') returns-0.24.0/tests/test_io/test_ioresult_container/test_ioresult_map.py000066400000000000000000000007221472312074000271730ustar00rootroot00000000000000from returns.io import IOFailure, IOSuccess def test_map_iosuccess(): """Ensures that IOSuccess is mappable.""" assert IOSuccess(5).map(str) == IOSuccess('5') def test_alt_iofailure(): """Ensures that IOFailure is mappable.""" assert IOFailure(5).map(str) == IOFailure(5) assert IOFailure(5).alt(str) == IOFailure('5') def test_alt_iosuccess(): """Ensures that IOSuccess.alt is NoOp.""" assert IOSuccess(5).alt(str) == IOSuccess(5) returns-0.24.0/tests/test_io/test_ioresult_container/test_ioresult_values.py000066400000000000000000000023411472312074000277140ustar00rootroot00000000000000import pytest from returns.io import IO, IOFailure, IOSuccess from returns.primitives.exceptions import UnwrapFailedError def test_ioresult_value_or(): """Ensures that ``value_or`` works correctly.""" assert IOSuccess(1).value_or(0) == IO(1) assert IOFailure(1).value_or(0) == IO(0) def test_unwrap_iosuccess(): """Ensures that unwrap works for IOSuccess container.""" assert IOSuccess(5).unwrap() == IO(5) def test_unwrap_iofailure(): """Ensures that unwrap works for IOFailure container.""" with pytest.raises(UnwrapFailedError): IOFailure(5).unwrap() def test_unwrap_iofailure_with_exception(): """Ensures that unwrap raises from the original exception.""" expected_exception = ValueError('error') with pytest.raises(UnwrapFailedError) as excinfo: IOFailure(expected_exception).unwrap() assert 'ValueError: error' in str( excinfo.getrepr(), # noqa: WPS441 ) def test_failure_iosuccess(): """Ensures that failure works for IOSuccess container.""" with 
pytest.raises(UnwrapFailedError): IOSuccess(5).failure() def test_failure_iofailure(): """Ensures that failure works for IOFailure container.""" assert IOFailure(5).failure() == IO(5) returns-0.24.0/tests/test_io/test_ioresult_container/test_ioresulte_cast.py000066400000000000000000000010001472312074000275030ustar00rootroot00000000000000from returns.io import IOFailure, IOResult, IOResultE, IOSuccess from returns.pipeline import is_successful def _function(arg: int) -> IOResultE[float]: if arg == 0: return IOFailure(ZeroDivisionError('Divided by 0')) return IOSuccess(10 / arg) def test_ioresulte(): """Ensures that IOResultE correctly typecast.""" container: IOResult[float, Exception] = _function(1) assert container == IOSuccess(10.0) container = _function(0) assert is_successful(container) is False returns-0.24.0/tests/test_iterables/000077500000000000000000000000001472312074000174525ustar00rootroot00000000000000returns-0.24.0/tests/test_iterables/test_fold/000077500000000000000000000000001472312074000214355ustar00rootroot00000000000000returns-0.24.0/tests/test_iterables/test_fold/test_collect.py000066400000000000000000000144671472312074000245070ustar00rootroot00000000000000import sys from collections.abc import Iterable, Sequence import pytest from returns.context import ( NoDeps, Reader, ReaderFutureResult, ReaderIOResult, ReaderResult, ) from returns.future import Future, FutureFailure, FutureResult, FutureSuccess from returns.io import IO, IOFailure, IOSuccess from returns.iterables import Fold from returns.maybe import Nothing, Some from returns.result import Failure, Success @pytest.mark.parametrize(('iterable', 'sequence'), [ # Regular types: ([], IO(())), ([IO(1)], IO((1,))), ([IO(1), IO(2)], IO((1, 2))), # Can fail: ([], Success(())), ([Success(1)], Success((1,))), ([Success(1), Success(2)], Success((1, 2))), ( [Failure('a'), Success(1), Success(2)], Failure('a'), ), ([Success(1), Failure('a')], Failure('a')), ([Failure('a'), Failure('b')], Failure('a')), ([], Some(())), ([Some(1)], Some((1,))), ([Some(1), Some(2)], Some((1, 2))), ([Nothing, Some(1), Some(2)], Nothing), ([Some(1), Nothing, Some(2)], Nothing), ([Some(1), Some(2), Nothing], Nothing), ([Nothing], Nothing), ([], IOSuccess(())), ([IOSuccess(1)], IOSuccess((1,))), ([IOSuccess(1), IOSuccess(2)], IOSuccess((1, 2))), ( [IOFailure('a'), IOSuccess(1), IOSuccess(2)], IOFailure('a'), ), ([IOSuccess(1), IOFailure('a')], IOFailure('a')), ([IOFailure('a'), IOFailure('b')], IOFailure('a')), ]) def test_fold_collect(iterable, sequence): """Iterable for regular types and ``Fold``.""" assert Fold.collect(iterable, sequence.from_value(())) == sequence @pytest.mark.parametrize(('iterable', 'sequence'), [ # Regular types: ([], Reader.from_value(())), ([Reader.from_value(1)], Reader.from_value((1,))), ( [Reader.from_value(1), Reader.from_value(2)], Reader.from_value((1, 2)), ), # Can fail: ([], ReaderResult.from_value(())), ([ReaderResult.from_value(1)], ReaderResult.from_value((1,))), ( [ReaderResult.from_value(1), ReaderResult.from_value(2)], ReaderResult.from_value((1, 2)), ), ( [ ReaderResult.from_failure('a'), ReaderResult.from_value(1), ReaderResult.from_value(2), ], ReaderResult.from_failure('a'), ), ( [ReaderResult.from_failure('a'), ReaderResult.from_failure('b')], ReaderResult.from_failure('a'), ), ([], ReaderIOResult.from_value(())), ([ReaderIOResult.from_value(1)], ReaderIOResult.from_value((1,))), ( [ReaderIOResult.from_value(1), ReaderIOResult.from_value(2)], ReaderIOResult.from_value((1, 2)), ), ( [ 
ReaderIOResult.from_failure('a'), ReaderIOResult.from_value(1), ReaderIOResult.from_value(2), ], ReaderIOResult.from_failure('a'), ), ( [ReaderIOResult.from_failure('a'), ReaderIOResult.from_failure('b')], ReaderIOResult.from_failure('a'), ), ]) def test_fold_collect_reader(iterable, sequence): """Ensures that ``.collect`` works for readers.""" assert Fold.collect( iterable, sequence.from_value(()), )(...) == sequence(...) @pytest.mark.anyio async def test_fold_collect_reader_future_result(subtests): """Iterable for ``ReaderFutureResult`` and ``Fold``.""" containers: list[tuple[ # noqa: WPS234 Iterable[ReaderFutureResult[int, str, NoDeps]], ReaderFutureResult[Sequence[int], str, NoDeps], ]] = [ ([], ReaderFutureResult.from_value(())), ( [ReaderFutureResult.from_value(1)], ReaderFutureResult.from_value((1,)), ), ( [ ReaderFutureResult.from_value(1), ReaderFutureResult.from_value(2), ], ReaderFutureResult.from_value((1, 2)), ), ( [ ReaderFutureResult.from_failure('a'), ReaderFutureResult.from_value(1), ReaderFutureResult.from_value(2), ], ReaderFutureResult.from_failure('a'), ), ( [ ReaderFutureResult.from_failure('a'), ReaderFutureResult.from_failure('b'), ], ReaderFutureResult.from_failure('a'), ), ] for iterable, sequence in containers: with subtests.test(iterable=iterable, sequence=sequence): assert await Fold.collect( iterable, sequence.from_value(()), )(...) == await sequence(...) @pytest.mark.anyio async def test_fold_collect_future(subtests): """Iterable for ``Future`` and ``Fold``.""" containers: list[tuple[ # noqa: WPS234 Iterable[Future[int]], Future[Sequence[int]], ]] = [ ([], Future.from_value(())), ([Future.from_value(1)], Future.from_value((1,))), ( [Future.from_value(1), Future.from_value(2)], Future.from_value((1, 2)), ), ] for iterable, sequence in containers: with subtests.test(iterable=iterable, sequence=sequence): assert await Fold.collect( iterable, sequence.from_value(()), ) == await sequence @pytest.mark.anyio async def test_fold_collect_future_result(subtests): """Iterable for ``FutureResult`` and ``Fold``.""" containers: list[tuple[ # noqa: WPS234 Iterable[FutureResult[int, str]], FutureResult[Sequence[int], str], ]] = [ ([], FutureSuccess(())), ([FutureSuccess(1)], FutureSuccess((1,))), ([FutureSuccess(1), FutureSuccess(2)], FutureSuccess((1, 2))), ( [FutureFailure('a'), FutureSuccess(1), FutureSuccess(2)], FutureFailure('a'), ), ([FutureFailure('a'), FutureFailure('b')], FutureFailure('a')), ] for iterable, sequence in containers: with subtests.test(iterable=iterable, sequence=sequence): assert await Fold.collect( iterable, sequence.from_value(()), ) == await sequence def test_fold_collect_recursion_limit(): """Ensures that ``.collect`` method is recursion safe.""" limit = sys.getrecursionlimit() + 1 iterable = (IO(1) for _ in range(limit)) expected = IO((1,) * limit) assert Fold.collect(iterable, IO(())) == expected returns-0.24.0/tests/test_iterables/test_fold/test_collect_all.py000066400000000000000000000125211472312074000253240ustar00rootroot00000000000000import sys from collections.abc import Iterable, Sequence import pytest from returns.context import ( NoDeps, ReaderFutureResult, ReaderIOResult, ReaderResult, ) from returns.future import FutureFailure, FutureResult, FutureSuccess from returns.io import IOFailure, IOSuccess from returns.iterables import Fold from returns.maybe import Nothing, Some from returns.result import Failure, Success @pytest.mark.parametrize(('iterable', 'sequence'), [ ([], Some(())), ([Some(1)], Some((1,))), ([Some(1), Some(2)], 
Some((1, 2))), ([Nothing, Some(1), Some(2)], Some((1, 2))), ([Some(1), Nothing, Some(2)], Some((1, 2))), ([Some(1), Some(2), Nothing], Some((1, 2))), ([Nothing], Some(())), ([], Success(())), ([Success(1)], Success((1,))), ([Success(1), Success(2)], Success((1, 2))), ( [Failure('a'), Success(1), Success(2)], Success((1, 2)), ), ([Success(1), Failure('b')], Success((1,))), ([Failure('a'), Failure('b')], Success(())), ([], IOSuccess(())), ([IOSuccess(1)], IOSuccess((1,))), ([IOSuccess(1), IOSuccess(2)], IOSuccess((1, 2))), ( [IOFailure('a'), IOSuccess(1), IOSuccess(2)], IOSuccess((1, 2)), ), ([IOSuccess(1), IOFailure('b')], IOSuccess((1,))), ([IOFailure('a'), IOFailure('b')], IOSuccess(())), ]) def test_collect_all_result(iterable, sequence): """Iterable for ``Result`` and ``Fold``.""" assert Fold.collect_all(iterable, sequence.from_value(())) == sequence @pytest.mark.parametrize(('iterable', 'sequence'), [ ([], ReaderResult.from_value(())), ([ReaderResult.from_value(1)], ReaderResult.from_value((1,))), ( [ReaderResult.from_value(1), ReaderResult.from_value(2)], ReaderResult.from_value((1, 2)), ), ( [ ReaderResult.from_failure('a'), ReaderResult.from_value(1), ReaderResult.from_value(2), ], ReaderResult.from_value((1, 2)), ), ( [ReaderResult.from_failure('a'), ReaderResult.from_failure('b')], ReaderResult.from_value(()), ), ([], ReaderIOResult.from_value(())), ([ReaderIOResult.from_value(1)], ReaderIOResult.from_value((1,))), ( [ReaderIOResult.from_value(1), ReaderIOResult.from_value(2)], ReaderIOResult.from_value((1, 2)), ), ( [ ReaderIOResult.from_failure('a'), ReaderIOResult.from_value(1), ReaderIOResult.from_value(2), ], ReaderIOResult.from_value((1, 2)), ), ( [ReaderIOResult.from_failure('a'), ReaderIOResult.from_failure('b')], ReaderIOResult.from_value(()), ), ]) def test_collect_all_reader_result(iterable, sequence): """Iterable for ``ReaderResult`` and ``Fold``.""" assert Fold.collect_all( iterable, sequence.from_value(()), )(...) == sequence(...) @pytest.mark.anyio async def test_collect_all_reader_future_result(subtests): """Iterable for ``ReaderFutureResult`` and ``Fold``.""" containers: list[tuple[ # noqa: WPS234 Iterable[ReaderFutureResult[int, str, NoDeps]], ReaderFutureResult[Sequence[int], str, NoDeps], ]] = [ ([], ReaderFutureResult.from_value(())), ( [ReaderFutureResult.from_value(1)], ReaderFutureResult.from_value((1,)), ), ( [ ReaderFutureResult.from_value(1), ReaderFutureResult.from_value(2), ], ReaderFutureResult.from_value((1, 2)), ), ( [ ReaderFutureResult.from_failure('a'), ReaderFutureResult.from_value(1), ReaderFutureResult.from_value(2), ], ReaderFutureResult.from_value((1, 2)), ), ( [ ReaderFutureResult.from_failure('a'), ReaderFutureResult.from_failure('b'), ], ReaderFutureResult.from_value(()), ), ] for iterable, sequence in containers: with subtests.test(iterable=iterable, sequence=sequence): assert await Fold.collect_all( iterable, sequence.from_value(()), )(...) == await sequence(...) 
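# --- Illustrative note (not part of the original test suite) ---------------
# The parametrized cases above, together with ``test_collect.py``, show the
# contrast between the two folding strategies: ``Fold.collect`` fails fast and
# returns the first failed container it meets, while ``Fold.collect_all``
# skips failed containers and keeps only the successful inner values.
# A minimal sketch of that difference, using only APIs already exercised by
# these tests:
#
#     from returns.iterables import Fold
#     from returns.result import Failure, Success
#
#     items = [Success(1), Failure('boom'), Success(2)]
#     Fold.collect(items, Success(()))      # -> Failure('boom')
#     Fold.collect_all(items, Success(()))  # -> Success((1, 2))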
@pytest.mark.anyio async def test_collect_all_future_result(subtests): """Iterable for ``FutureResult`` and ``Fold``.""" containers: list[tuple[ # noqa: WPS234 Iterable[FutureResult[int, str]], FutureResult[Sequence[int], str], ]] = [ ([], FutureSuccess(())), ([FutureSuccess(1)], FutureSuccess((1,))), ([FutureSuccess(1), FutureSuccess(2)], FutureSuccess((1, 2))), ( [FutureFailure('a'), FutureSuccess(1), FutureSuccess(2)], FutureSuccess((1, 2)), ), ([FutureFailure('a'), FutureFailure('b')], FutureSuccess(())), ] for iterable, sequence in containers: with subtests.test(iterable=iterable, sequence=sequence): assert await Fold.collect_all( iterable, sequence.from_value(()), ) == await sequence def test_fold_collect_recursion_limit(): """Ensures that ``.collect_all`` method is recursion safe.""" limit = sys.getrecursionlimit() + 1 iterable = (Success(1) for _ in range(limit)) expected = Success((1,) * limit) assert Fold.collect_all(iterable, Success(())) == expected returns-0.24.0/tests/test_iterables/test_fold/test_loop.py000066400000000000000000000151611472312074000240230ustar00rootroot00000000000000import sys from collections.abc import Iterable import pytest from returns.context import ( NoDeps, Reader, ReaderFutureResult, ReaderIOResult, ReaderResult, ) from returns.future import Future, FutureFailure, FutureResult, FutureSuccess from returns.io import IO, IOFailure, IOSuccess from returns.iterables import Fold from returns.maybe import Nothing, Some from returns.result import Failure, Success def _sum_two(first): return lambda second: first + second @pytest.mark.parametrize(('iterable', 'sequence'), [ # Regular types: ([], IO(10)), ([IO(1)], IO(11)), ([IO(1), IO(2)], IO(13)), # Can fail: ([], Success(10)), ([Success(1)], Success(11)), ([Success(1), Success(2)], Success(13)), ( [Failure('a'), Success(1), Success(2)], Failure('a'), ), ([Success(1), Failure('a')], Failure('a')), ([Failure('a'), Failure('b')], Failure('a')), ([], Some(10)), ([Some(1)], Some(11)), ([Some(1), Some(2)], Some(13)), ([Nothing, Some(1), Some(2)], Nothing), ([Some(1), Nothing, Some(2)], Nothing), ([Some(1), Some(2), Nothing], Nothing), ([Nothing], Nothing), ([], IOSuccess(10)), ([IOSuccess(1)], IOSuccess(11)), ([IOSuccess(1), IOSuccess(2)], IOSuccess(13)), ( [IOFailure('a'), IOSuccess(1), IOSuccess(2)], IOFailure('a'), ), ([IOFailure('a'), IOFailure('b')], IOFailure('a')), ]) def test_fold_loop(iterable, sequence): """Iterable for ``Result`` and ``FailFast``.""" assert Fold.loop(iterable, sequence.from_value(10), _sum_two) == sequence @pytest.mark.parametrize(('iterable', 'sequence'), [ # Regular types: ([], Reader.from_value(10)), ([Reader.from_value(1)], Reader.from_value(11)), ( [Reader.from_value(1), Reader.from_value(2)], Reader.from_value(13), ), # Can fail: ([], ReaderResult.from_value(10)), ([ReaderResult.from_value(1)], ReaderResult.from_value(11)), ( [ReaderResult.from_value(1), ReaderResult.from_value(2)], ReaderResult.from_value(13), ), ( [ ReaderResult.from_failure('a'), ReaderResult.from_value(1), ReaderResult.from_value(2), ], ReaderResult.from_failure('a'), ), ( [ReaderResult.from_failure('a'), ReaderResult.from_failure('b')], ReaderResult.from_failure('a'), ), ([], ReaderIOResult.from_value(10)), ([ReaderIOResult.from_value(1)], ReaderIOResult.from_value(11)), ( [ReaderIOResult.from_value(1), ReaderIOResult.from_value(2)], ReaderIOResult.from_value(13), ), ( [ ReaderIOResult.from_failure('a'), ReaderIOResult.from_value(1), ReaderIOResult.from_value(2), ], ReaderIOResult.from_failure('a'), ), ( 
[ReaderIOResult.from_failure('a'), ReaderIOResult.from_failure('b')], ReaderIOResult.from_failure('a'), ), ( [ReaderIOResult.from_value(1), ReaderIOResult.from_failure('a')], ReaderIOResult.from_failure('a'), ), ]) def test_fold_loop_reader(iterable, sequence): """Ensures that ``.loop`` works for readers.""" assert Fold.loop( iterable, sequence.from_value(10), _sum_two, )(...) == sequence(...) @pytest.mark.anyio async def test_fold_loop_reader_future_result(subtests): """Iterable for ``ReaderFutureResult`` and ``Fold``.""" containers: list[tuple[ # noqa: WPS234 Iterable[ReaderFutureResult[int, str, NoDeps]], ReaderFutureResult[int, str, NoDeps], ]] = [ ([], ReaderFutureResult.from_value(10)), ( [ReaderFutureResult.from_value(1)], ReaderFutureResult.from_value(11), ), ( [ ReaderFutureResult.from_value(1), ReaderFutureResult.from_value(2), ], ReaderFutureResult.from_value(13), ), ( [ ReaderFutureResult.from_failure('a'), ReaderFutureResult.from_value(1), ReaderFutureResult.from_value(2), ], ReaderFutureResult.from_failure('a'), ), ( [ ReaderFutureResult.from_failure('a'), ReaderFutureResult.from_failure('b'), ], ReaderFutureResult.from_failure('a'), ), ( [ ReaderFutureResult.from_value(1), ReaderFutureResult.from_failure('a'), ], ReaderFutureResult.from_failure('a'), ), ] for iterable, sequence in containers: with subtests.test(iterable=iterable, sequence=sequence): assert await Fold.loop( iterable, sequence.from_value(10), _sum_two, )(...) == await sequence(...) @pytest.mark.anyio async def test_fold_collect_future(subtests): """Iterable for ``Future`` and ``Fold``.""" containers: list[tuple[ # noqa: WPS234 Iterable[Future[int]], Future[int], ]] = [ ([], Future.from_value(10)), ([Future.from_value(1)], Future.from_value(11)), ( [Future.from_value(1), Future.from_value(2)], Future.from_value(13), ), ] for iterable, sequence in containers: with subtests.test(iterable=iterable, sequence=sequence): assert await Fold.loop( iterable, sequence.from_value(10), _sum_two, ) == await sequence @pytest.mark.anyio async def test_fold_collect_future_result(subtests): """Iterable for ``FutureResult`` and ``Fold``.""" containers: list[tuple[ # noqa: WPS234 Iterable[FutureResult[int, str]], FutureResult[int, str], ]] = [ ([], FutureSuccess(10)), ([FutureSuccess(1)], FutureSuccess(11)), ([FutureSuccess(1), FutureSuccess(2)], FutureSuccess(13)), ( [FutureFailure('a'), FutureSuccess(1), FutureSuccess(2)], FutureFailure('a'), ), ([FutureFailure('a'), FutureFailure('b')], FutureFailure('a')), ([FutureSuccess(1), FutureFailure('a')], FutureFailure('a')), ] for iterable, sequence in containers: with subtests.test(iterable=iterable, sequence=sequence): assert await Fold.loop( iterable, sequence.from_value(10), _sum_two, ) == await sequence def test_fold_loop_recursion_limit(): """Ensures that ``.loop`` method is recursion safe.""" limit = sys.getrecursionlimit() + 1 iterable = (IO(1) for _ in range(limit)) assert Fold.loop(iterable, IO(0), _sum_two) == IO(limit) returns-0.24.0/tests/test_laws.py000066400000000000000000000011021472312074000170120ustar00rootroot00000000000000from returns.context import ( Reader, ReaderFutureResult, ReaderIOResult, ReaderResult, ) from returns.contrib.hypothesis.laws import check_all_laws from returns.future import Future, FutureResult from returns.io import IO, IOResult from returns.maybe import Maybe from returns.result import Result check_all_laws(Maybe) check_all_laws(Result) check_all_laws(IO) check_all_laws(IOResult) check_all_laws(Future) check_all_laws(FutureResult) 
check_all_laws(Reader) check_all_laws(ReaderResult) check_all_laws(ReaderIOResult) check_all_laws(ReaderFutureResult) returns-0.24.0/tests/test_maybe/000077500000000000000000000000001472312074000165755ustar00rootroot00000000000000returns-0.24.0/tests/test_maybe/test_maybe_bind.py000066400000000000000000000012631472312074000223010ustar00rootroot00000000000000 from returns.maybe import Maybe, Nothing, Some def test_bind_some(): """Ensures that bind works correctly.""" def factory(inner_value: int) -> Maybe[int]: return Some(inner_value * 2) input_value = 5 bound = Some(input_value).bind(factory) assert bound == factory(input_value) assert str(bound) == '' def test_bind_optional(): """Ensures that bind_optional works correctly.""" def factory(inner_value: int) -> int | None: return inner_value if inner_value else None assert Some(1).bind_optional(factory) == Some(1) assert Some(0).bind_optional(factory) == Nothing assert Nothing.bind_optional(factory) == Nothing returns-0.24.0/tests/test_maybe/test_maybe_equality.py000066400000000000000000000056171472312074000232310ustar00rootroot00000000000000from copy import copy, deepcopy import pytest from returns.maybe import Nothing, Some, _Nothing from returns.primitives.exceptions import ImmutableStateError def test_equals(): """Ensures that ``.equals`` method works correctly.""" inner_value = 1 assert Some(inner_value).equals(Some(inner_value)) assert Nothing.equals(Nothing) def test_not_equals(): """Ensures that ``.equals`` method works correctly.""" assert not Some(1).equals(Nothing) assert not Some(1).equals(Some(0)) assert not Nothing.equals(Some(1)) def test_equality(): """Ensures that containers can be compared.""" assert Nothing is Nothing # noqa: WPS312 assert Nothing == _Nothing() == _Nothing(None) assert Some(5) == Some(5) assert hash(Some(1)) assert hash(Nothing) def test_nonequality(): """Ensures that containers are not compared to regular values.""" assert Nothing is not None assert Nothing != None # noqa: E711 assert _Nothing(None) != None # noqa: E711 assert Some(5) != 5 assert Some(3) is not Some(3) def test_is_compare(): """Ensures that `is` operator works correctly.""" some_container = Some(1) assert Nothing.bind(lambda state: state) is Nothing assert some_container is not Some(1) def test_immutability_failure(): """Ensures that Failure container is immutable.""" with pytest.raises(ImmutableStateError): Nothing._inner_state = 1 # noqa: WPS437 with pytest.raises(ImmutableStateError): Nothing.missing = 2 with pytest.raises(ImmutableStateError): del Nothing._inner_state # type: ignore # noqa: WPS420, WPS437 with pytest.raises(AttributeError): Nothing.missing # type: ignore # noqa: WPS428 def test_immutability_success(): """Ensures that Success container is immutable.""" with pytest.raises(ImmutableStateError): Some(0)._inner_state = 1 # noqa: WPS437 with pytest.raises(ImmutableStateError): Some(1).missing = 2 with pytest.raises(ImmutableStateError): del Some(0)._inner_state # type: ignore # noqa: WPS420, WPS437 with pytest.raises(AttributeError): Some(1).missing # type: ignore # noqa: WPS428 def test_success_immutable_copy(): """Ensures that Success returns it self when passed to copy function.""" some = Some(1) # noqa: WPS110 assert some is copy(some) def test_success_immutable_deepcopy(): """Ensures that Success returns it self when passed to deepcopy function.""" some = Some(1) # noqa: WPS110 assert some is deepcopy(some) def test_failure_immutable_copy(): """Ensures that Failure returns it self when passed to copy function.""" 
nothing = _Nothing() assert nothing is copy(nothing) def test_failure_immutable_deepcopy(): """Ensures that Failure returns it self when passed to deepcopy function.""" nothing = _Nothing() assert nothing is deepcopy(nothing) returns-0.24.0/tests/test_maybe/test_maybe_functions/000077500000000000000000000000001472312074000230215ustar00rootroot00000000000000returns-0.24.0/tests/test_maybe/test_maybe_functions/test_maybe_decorator.py000066400000000000000000000007031472312074000275710ustar00rootroot00000000000000 from returns.maybe import Nothing, Some, maybe @maybe def _function(hashmap: dict[str, str], key: str) -> str | None: return hashmap.get(key, None) def test_maybe_some(): """Ensures that maybe decorator works correctly for some case.""" assert _function({'a': 'b'}, 'a') == Some('b') def test_maybe_nothing(): """Ensures that maybe decorator works correctly for nothing case.""" assert _function({'a': 'b'}, 'c') == Nothing returns-0.24.0/tests/test_maybe/test_maybe_unwrap.py000066400000000000000000000006141472312074000227000ustar00rootroot00000000000000import pytest from returns.maybe import Nothing, Some from returns.primitives.exceptions import UnwrapFailedError def test_unwrap_success(): """Ensures that unwrap works for Some container.""" assert Some(5).unwrap() == 5 def test_unwrap_failure(): """Ensures that unwrap works for Nothing container.""" with pytest.raises(UnwrapFailedError): assert Nothing.unwrap() returns-0.24.0/tests/test_maybe/test_nothing_singleton.py000066400000000000000000000002241472312074000237340ustar00rootroot00000000000000from returns.maybe import _Nothing def test_nothing_singleton(): """Ensures `_Nothing` is a singleton.""" assert _Nothing() is _Nothing() returns-0.24.0/tests/test_methods/000077500000000000000000000000001472312074000171435ustar00rootroot00000000000000returns-0.24.0/tests/test_methods/test_partition.py000066400000000000000000000014121472312074000225630ustar00rootroot00000000000000import pytest from returns.io import IO, IOResult from returns.maybe import Nothing, Some from returns.methods import partition from returns.result import Failure, Success @pytest.mark.parametrize(('containers', 'expected'), [ ( (Success(1), Success(2), Failure(None), Success(3)), ([1, 2, 3], [None]), ), ( ( IOResult.from_value(1), IOResult.from_failure(2), IOResult.from_value(3), IOResult.from_failure(4), ), ([IO(1), IO(3)], [IO(2), IO(4)]), ), ( (Some(1), Some(2), Nothing), ([1, 2], [None]), ), ((), ([], [])), ]) def test_partition(containers, expected): """Test partition function.""" assert partition(containers) == expected returns-0.24.0/tests/test_pattern_matching.py000066400000000000000000000047341472312074000214110ustar00rootroot00000000000000import pytest from returns.io import IO, IOFailure, IOResult, IOSuccess from returns.maybe import Maybe, Nothing, Some from returns.result import Failure, Result, Success @pytest.mark.parametrize('container', [ Success(10), Success(42), Failure(RuntimeError()), Failure(Exception()), ]) def test_result_pattern_matching(container: Result[int, Exception]): """Ensures ``Result`` containers work properly with pattern matching.""" match container: case Success(10): assert isinstance(container, Success) assert container.unwrap() == 10 case Success(value): assert isinstance(container, Success) assert value == 42 assert container.unwrap() == value case Failure(RuntimeError()): assert isinstance(container, Failure) assert isinstance(container.failure(), RuntimeError) case Failure(_): assert isinstance(container, Failure) assert 
isinstance(container.failure(), Exception) case _: pytest.fail('Was not matched') @pytest.mark.parametrize('container', [ Some('SOME'), Some('THERE IS SOME VALUE'), Nothing, ]) def test_maybe_pattern_matching(container: Maybe[str]): """Ensures ``Maybe`` containers work properly with pattern matching.""" match container: case Some('SOME'): assert isinstance(container, Some) assert container.unwrap() == 'SOME' case Some(value): assert isinstance(container, Some) assert value == 'THERE IS SOME VALUE' assert container.unwrap() == value case Maybe.empty: assert container is Nothing case _: pytest.fail('Was not matched') @pytest.mark.parametrize('container', [ IOSuccess(42.0), IOSuccess(10.0), IOFailure(50), ]) def test_ioresult_pattern_matching(container: IOResult[float, int]): """Ensures ``IOResult`` containers work properly with pattern matching.""" match container: case IOSuccess(Success(42.0)): assert isinstance(container, IOSuccess) assert container.unwrap() == IO(42.0) case IOSuccess(value): assert isinstance(container, IOSuccess) assert container.unwrap() == IO(value.unwrap()) case IOFailure(_): assert isinstance(container, IOFailure) assert container.failure() == IO(50) case _: pytest.fail('Was not matched') returns-0.24.0/tests/test_pipeline/000077500000000000000000000000001472312074000173055ustar00rootroot00000000000000returns-0.24.0/tests/test_pipeline/test_is_successful.py000066400000000000000000000011221472312074000235640ustar00rootroot00000000000000import pytest from returns.io import IOFailure, IOSuccess from returns.maybe import Nothing, Some from returns.pipeline import is_successful from returns.result import Failure, Success @pytest.mark.parametrize(('container', 'correct_result'), [ (Success('a'), True), (Failure('a'), False), (IOSuccess('a'), True), (IOFailure('a'), False), (Some('a'), True), (Some(None), True), (Nothing, False), ]) def test_is_successful(container, correct_result): """Ensures that successful state works correctly.""" assert is_successful(container) is correct_result returns-0.24.0/tests/test_pipeline/test_managed/000077500000000000000000000000001472312074000217405ustar00rootroot00000000000000returns-0.24.0/tests/test_pipeline/test_managed/test_managed_future_result.py000066400000000000000000000064461472312074000277470ustar00rootroot00000000000000 import pytest from returns.future import FutureResult from returns.io import IOFailure, IOSuccess from returns.pipeline import managed from returns.result import Failure, Result, Success def _acquire_success() -> FutureResult[str, str]: return FutureResult.from_value('acquire success') def _acquire_failure() -> FutureResult[str, str]: return FutureResult.from_failure('acquire failure') def _use_success(inner_value: str) -> FutureResult[str, str]: return FutureResult.from_value('use success') def _use_failure(inner_value: str) -> FutureResult[str, str]: return FutureResult.from_failure('use failure') class _ReleaseSuccess: def __init__(self, logs: list[tuple[str, Result[str, str]]]) -> None: self._logs = logs def __call__( self, inner_value: str, use_result: Result[str, str], ) -> FutureResult[None, str]: self._logs.append((inner_value, use_result)) return FutureResult.from_value(None) class _ReleaseFailure: def __init__(self, logs: list[tuple[str, Result[str, str]]]) -> None: self._logs = logs def __call__( self, inner_value: str, use_result: Result[str, str], ) -> FutureResult[None, str]: return FutureResult.from_failure('release failure') @pytest.mark.anyio @pytest.mark.parametrize(('acquire', 'use', 'release', 
'final_result', 'log'), [ # Acquire success: ( _acquire_success, _use_success, _ReleaseSuccess, IOSuccess('use success'), [('acquire success', Success('use success'))], ), ( _acquire_success, _use_success, _ReleaseFailure, IOFailure('release failure'), [], ), ( _acquire_success, _use_failure, _ReleaseSuccess, IOFailure('use failure'), [('acquire success', Failure('use failure'))], ), ( _acquire_success, _use_failure, _ReleaseFailure, IOFailure('release failure'), [], ), # Acquire failure: ( _acquire_failure, _use_success, _ReleaseSuccess, IOFailure('acquire failure'), [], ), ( _acquire_failure, _use_failure, _ReleaseSuccess, IOFailure('acquire failure'), [], ), ( _acquire_failure, _use_success, _ReleaseFailure, IOFailure('acquire failure'), [], ), ( _acquire_failure, _use_failure, _ReleaseFailure, IOFailure('acquire failure'), [], ), ]) async def test_all_success(acquire, use, release, final_result, log): """Ensures that managed works as intended.""" pipeline_logs: list[tuple[str, Result[str, str]]] = [] pipeline_result = managed( use, release(pipeline_logs), )(acquire()) assert await pipeline_result == final_result assert pipeline_logs == log @pytest.mark.anyio async def test_full_typing(): """This test is here to be a case for typing.""" logs: list[tuple[str, Result[str, str]]] = [] pipeline_result = managed( _use_success, _ReleaseSuccess(logs), )(_acquire_success()) assert await pipeline_result == IOSuccess('use success') assert logs == [('acquire success', Success('use success'))] returns-0.24.0/tests/test_pipeline/test_managed/test_managed_ioresult.py000066400000000000000000000060051472312074000266740ustar00rootroot00000000000000 import pytest from returns.io import IOFailure, IOResult, IOSuccess from returns.pipeline import managed from returns.result import Failure, Result, Success _acquire_success = IOSuccess('acquire success') _acquire_failure = IOFailure('acquire failure') def _use_success(inner_value: str) -> IOResult[str, str]: return IOSuccess('use success') def _use_failure(inner_value: str) -> IOResult[str, str]: return IOFailure('use failure') class _ReleaseSuccess: def __init__(self, logs: list[tuple[str, Result[str, str]]]) -> None: self._logs = logs def __call__( self, inner_value: str, use_result: Result[str, str], ) -> IOResult[None, str]: self._logs.append((inner_value, use_result)) return IOSuccess(None) class _ReleaseFailure: def __init__(self, logs: list[tuple[str, Result[str, str]]]) -> None: self._logs = logs def __call__( self, inner_value: str, use_result: Result[str, str], ) -> IOResult[None, str]: return IOFailure('release failure') @pytest.mark.parametrize(('acquire', 'use', 'release', 'final_result', 'log'), [ # Acquire success: ( _acquire_success, _use_success, _ReleaseSuccess, IOSuccess('use success'), [('acquire success', Success('use success'))], ), ( _acquire_success, _use_success, _ReleaseFailure, IOFailure('release failure'), [], ), ( _acquire_success, _use_failure, _ReleaseSuccess, IOFailure('use failure'), [('acquire success', Failure('use failure'))], ), ( _acquire_success, _use_failure, _ReleaseFailure, IOFailure('release failure'), [], ), # Acquire failure: ( _acquire_failure, _use_success, _ReleaseSuccess, IOFailure('acquire failure'), [], ), ( _acquire_failure, _use_failure, _ReleaseSuccess, IOFailure('acquire failure'), [], ), ( _acquire_failure, _use_success, _ReleaseFailure, IOFailure('acquire failure'), [], ), ( _acquire_failure, _use_failure, _ReleaseFailure, IOFailure('acquire failure'), [], ), ]) def test_all_success(acquire, use, 
release, final_result, log): """Ensures that managed works as intended.""" pipeline_logs: list[tuple[str, Result[str, str]]] = [] pipeline_result = managed( use, release(pipeline_logs), )(acquire) assert pipeline_result == final_result assert pipeline_logs == log def test_full_typing(): """This test is here to be a case for typing.""" logs: list[tuple[str, Result[str, str]]] = [] pipeline_result = managed( _use_success, _ReleaseSuccess(logs), )(_acquire_success) assert pipeline_result == IOSuccess('use success') assert logs == [('acquire success', Success('use success'))] returns-0.24.0/tests/test_pipeline/test_managed/test_managed_reader_future_result.py000066400000000000000000000067671472312074000312770ustar00rootroot00000000000000 import pytest from returns.context import NoDeps, ReaderFutureResult from returns.io import IOFailure, IOSuccess from returns.pipeline import managed from returns.result import Failure, Result, Success def _acquire_success() -> ReaderFutureResult[str, str, NoDeps]: return ReaderFutureResult.from_value('acquire success') def _acquire_failure() -> ReaderFutureResult[str, str, NoDeps]: return ReaderFutureResult.from_failure('acquire failure') def _use_success(inner_value: str) -> ReaderFutureResult[str, str, NoDeps]: return ReaderFutureResult.from_value('use success') def _use_failure(inner_value: str) -> ReaderFutureResult[str, str, NoDeps]: return ReaderFutureResult.from_failure('use failure') class _ReleaseSuccess: def __init__(self, logs: list[tuple[str, Result[str, str]]]) -> None: self._logs = logs def __call__( self, inner_value: str, use_result: Result[str, str], ) -> ReaderFutureResult[None, str, NoDeps]: self._logs.append((inner_value, use_result)) return ReaderFutureResult.from_value(None) class _ReleaseFailure: def __init__(self, logs: list[tuple[str, Result[str, str]]]) -> None: self._logs = logs def __call__( self, inner_value: str, use_result: Result[str, str], ) -> ReaderFutureResult[None, str, NoDeps]: return ReaderFutureResult.from_failure('release failure') @pytest.mark.anyio @pytest.mark.parametrize(('acquire', 'use', 'release', 'final_result', 'log'), [ # Acquire success: ( _acquire_success, _use_success, _ReleaseSuccess, IOSuccess('use success'), [('acquire success', Success('use success'))], ), ( _acquire_success, _use_success, _ReleaseFailure, IOFailure('release failure'), [], ), ( _acquire_success, _use_failure, _ReleaseSuccess, IOFailure('use failure'), [('acquire success', Failure('use failure'))], ), ( _acquire_success, _use_failure, _ReleaseFailure, IOFailure('release failure'), [], ), # Acquire failure: ( _acquire_failure, _use_success, _ReleaseSuccess, IOFailure('acquire failure'), [], ), ( _acquire_failure, _use_failure, _ReleaseSuccess, IOFailure('acquire failure'), [], ), ( _acquire_failure, _use_success, _ReleaseFailure, IOFailure('acquire failure'), [], ), ( _acquire_failure, _use_failure, _ReleaseFailure, IOFailure('acquire failure'), [], ), ]) async def test_all_success(acquire, use, release, final_result, log): """Ensures that managed works as intended.""" pipeline_logs: list[tuple[str, Result[str, str]]] = [] pipeline_result = managed( use, release(pipeline_logs), )(acquire()) assert await pipeline_result(ReaderFutureResult.no_args) == final_result assert pipeline_logs == log @pytest.mark.anyio async def test_full_typing(): """This test is here to be a case for typing.""" logs: list[tuple[str, Result[str, str]]] = [] pipeline_result = managed( _use_success, _ReleaseSuccess(logs), )(_acquire_success()) inner = 
pipeline_result(ReaderFutureResult.no_args) assert await inner == IOSuccess('use success') assert logs == [('acquire success', Success('use success'))] returns-0.24.0/tests/test_pipeline/test_managed/test_managed_reader_ioresult.py000066400000000000000000000063741472312074000302270ustar00rootroot00000000000000 import pytest from returns.context import NoDeps, ReaderIOResult from returns.io import IOFailure, IOSuccess from returns.pipeline import managed from returns.result import Failure, Result, Success _acquire_success = ReaderIOResult.from_value('acquire success') _acquire_failure = ReaderIOResult.from_failure('acquire failure') def _use_success(inner_value: str) -> ReaderIOResult[str, str, NoDeps]: return ReaderIOResult.from_value('use success') def _use_failure(inner_value: str) -> ReaderIOResult[str, str, NoDeps]: return ReaderIOResult.from_failure('use failure') class _ReleaseSuccess: def __init__(self, logs: list[tuple[str, Result[str, str]]]) -> None: self._logs = logs def __call__( self, inner_value: str, use_result: Result[str, str], ) -> ReaderIOResult[None, str, NoDeps]: self._logs.append((inner_value, use_result)) return ReaderIOResult.from_value(None) class _ReleaseFailure: def __init__(self, logs: list[tuple[str, Result[str, str]]]) -> None: self._logs = logs def __call__( self, inner_value: str, use_result: Result[str, str], ) -> ReaderIOResult[None, str, NoDeps]: return ReaderIOResult.from_failure('release failure') @pytest.mark.parametrize(('acquire', 'use', 'release', 'final_result', 'log'), [ # Acquire success: ( _acquire_success, _use_success, _ReleaseSuccess, IOSuccess('use success'), [('acquire success', Success('use success'))], ), ( _acquire_success, _use_success, _ReleaseFailure, IOFailure('release failure'), [], ), ( _acquire_success, _use_failure, _ReleaseSuccess, IOFailure('use failure'), [('acquire success', Failure('use failure'))], ), ( _acquire_success, _use_failure, _ReleaseFailure, IOFailure('release failure'), [], ), # Acquire failure: ( _acquire_failure, _use_success, _ReleaseSuccess, IOFailure('acquire failure'), [], ), ( _acquire_failure, _use_failure, _ReleaseSuccess, IOFailure('acquire failure'), [], ), ( _acquire_failure, _use_success, _ReleaseFailure, IOFailure('acquire failure'), [], ), ( _acquire_failure, _use_failure, _ReleaseFailure, IOFailure('acquire failure'), [], ), ]) def test_all_success(acquire, use, release, final_result, log): """Ensures that managed works as intended.""" pipeline_logs: list[tuple[str, Result[str, str]]] = [] pipeline_result = managed( use, release(pipeline_logs), )(acquire) assert pipeline_result(ReaderIOResult.no_args) == final_result assert pipeline_logs == log def test_full_typing(): """This test is here to be a case for typing.""" logs: list[tuple[str, Result[str, str]]] = [] pipeline_result = managed( _use_success, _ReleaseSuccess(logs), )(_acquire_success) assert pipeline_result(ReaderIOResult.no_args) == IOSuccess('use success') assert logs == [('acquire success', Success('use success'))] returns-0.24.0/tests/test_primitives/000077500000000000000000000000001472312074000176735ustar00rootroot00000000000000returns-0.24.0/tests/test_primitives/test_asserts/000077500000000000000000000000001472312074000224165ustar00rootroot00000000000000returns-0.24.0/tests/test_primitives/test_asserts/test_assert_equal.py000066400000000000000000000043651472312074000265270ustar00rootroot00000000000000from collections.abc import Sequence import pytest from returns.context import ( Reader, ReaderFutureResult, ReaderIOResult, 
ReaderResult, ) from returns.contrib.pytest import ReturnsAsserts from returns.future import Future, FutureResult from returns.io import IO, IOResult from returns.maybe import Maybe from returns.primitives.asserts import assert_equal from returns.primitives.container import BaseContainer from returns.result import Result _containers: Sequence[BaseContainer] = ( Result.from_failure(1), Result.from_value(1), IO(1), IOResult.from_failure(1), IOResult.from_value(1), Maybe.from_value(1), Maybe.from_value(None), Maybe.from_optional(None), Future.from_value(1), FutureResult.from_value(1), FutureResult.from_failure(1), Reader.from_value(1), ReaderResult.from_value(1), ReaderResult.from_failure(1), ReaderIOResult.from_value(1), ReaderIOResult.from_failure(1), ReaderFutureResult.from_value(1), ReaderFutureResult.from_failure(1), ) @pytest.mark.parametrize('container', _containers) def test_assert_equal(container, anyio_backend_name: str): """Ensure that containers can be equal.""" assert_equal(container, container, backend=anyio_backend_name) @pytest.mark.parametrize('container', _containers) def test_assert_equal_plugin( container, anyio_backend_name: str, returns: ReturnsAsserts, ): """Ensure that containers can be equal.""" returns.assert_equal(container, container, backend=anyio_backend_name) @pytest.mark.parametrize('container', _containers) def test_assert_equal_not(container, anyio_backend_name: str): """Ensure that containers can be not equal.""" with pytest.raises(AssertionError): assert_equal( container, container.from_value(2), backend=anyio_backend_name, ) @pytest.mark.parametrize('container', _containers) def test_assert_equal_not_plugin( container, anyio_backend_name: str, returns: ReturnsAsserts, ): """Ensure that containers can be not equal.""" with pytest.raises(AssertionError): returns.assert_equal( container, container.from_value(2), backend=anyio_backend_name, ) returns-0.24.0/tests/test_primitives/test_container/000077500000000000000000000000001472312074000227145ustar00rootroot00000000000000returns-0.24.0/tests/test_primitives/test_container/test_base_container/000077500000000000000000000000001472312074000267275ustar00rootroot00000000000000returns-0.24.0/tests/test_primitives/test_container/test_base_container/test_pickle.py000066400000000000000000000017311472312074000316110ustar00rootroot00000000000000import pickle # noqa: S403 from typing import Any from hypothesis import example, given from hypothesis import strategies as st from returns.primitives.container import BaseContainer class _CustomClass: def __init__(self, inner_value: Any) -> None: self.inner_value = inner_value def __eq__(self, other: Any) -> bool: return ( # noqa: E721 type(other) == type(self) and # noqa: WPS516 self.inner_value == other.inner_value ) @given( st.one_of( st.integers(), st.floats(allow_nan=False), st.text(), st.booleans(), st.lists(st.text()), st.dictionaries(st.text(), st.integers()), st.builds(_CustomClass, st.text()), ), ) @example(None) def test_pickle(container_value: Any): """Ensures custom pickle protocol works as expected.""" container = BaseContainer(container_value) assert pickle.loads(pickle.dumps(container)) == container # noqa: S301 test_pickle_backward_deserialization.py000066400000000000000000000010331472312074000366310ustar00rootroot00000000000000returns-0.24.0/tests/test_primitives/test_container/test_base_containerimport pickle # noqa: S403 from returns.primitives.container import BaseContainer def test_pickle_backward_deserialization(): """Test that BaseContainer can be 
deserialized from 0.19.0 and earlier.""" # BaseContainer(1) serialized as of 0.19.0 serialized_container = ( b'\x80\x04\x958\x00\x00\x00\x00\x00\x00\x00\x8c\x1c' + b'returns.primitives.container\x94\x8c\rBaseContainer' + b'\x94\x93\x94)\x81\x94K\x01b.' ) assert pickle.loads(serialized_container) == BaseContainer(1) # noqa: S301 returns-0.24.0/tests/test_primitives/test_laws/000077500000000000000000000000001472312074000217005ustar00rootroot00000000000000returns-0.24.0/tests/test_primitives/test_laws/test_lawful/000077500000000000000000000000001472312074000242315ustar00rootroot00000000000000returns-0.24.0/tests/test_primitives/test_laws/test_lawful/test_laws_resolution.py000066400000000000000000000014721472312074000310770ustar00rootroot00000000000000 import pytest from returns.context import ( RequiresContext, RequiresContextFutureResult, RequiresContextIOResult, RequiresContextResult, ) from returns.future import Future, FutureResult from returns.io import IO, IOResult from returns.maybe import Maybe from returns.primitives.laws import Law, Lawful from returns.result import Result @pytest.mark.parametrize('container', [ Result, Maybe, Future, FutureResult, IO, IOResult, RequiresContext, RequiresContextFutureResult, RequiresContextIOResult, RequiresContextResult, ]) def test_laws_resolution(container: type[Lawful]): """Ensures all tests are unique.""" all_laws: list[Law] = [] for laws in container.laws().values(): all_laws.extend(laws) assert len(all_laws) == len(set(all_laws)) returns-0.24.0/tests/test_result/000077500000000000000000000000001472312074000170165ustar00rootroot00000000000000returns-0.24.0/tests/test_result/test_result_bind.py000066400000000000000000000042101472312074000227360ustar00rootroot00000000000000from returns.result import Failure, Result, Success def test_bind(): """Ensures that bind works.""" def factory(inner_value: int) -> Result[int, str]: if inner_value > 0: return Success(inner_value * 2) return Failure(str(inner_value)) input_value = 5 bound: Result[int, str] = Success(input_value) assert bound.bind(factory) == factory(input_value) assert Success(input_value).bind(factory) == factory(input_value) assert str(bound.bind(factory)) == '' input_value = 0 bound2: Result[int, str] = Success(input_value) assert bound2.bind(factory) == factory(input_value) assert str(bound2.bind(factory)) == '' def test_left_identity_success(): """Ensures that left identity works for Success container.""" def factory(inner_value: int) -> Result[int, str]: return Success(inner_value * 2) input_value = 5 bound: Result[int, str] = Success(input_value) assert bound.bind(factory) == factory(input_value) def test_left_identity_failure(): """Ensures that left identity works for Failure container.""" def factory(inner_value: int) -> Result[int, int]: return Failure(6) input_value = 5 bound: Result[int, int] = Failure(input_value) assert bound.bind(factory) == Failure(input_value) assert Failure(input_value).bind(factory) == Failure(5) assert str(bound) == '' def test_lash_success(): """Ensures that lash works for Success container.""" def factory(inner_value) -> Result[int, str]: return Success(inner_value * 2) bound = Success(5).lash(factory) assert bound == Success(5) assert Success(5).lash(factory) == Success(5) assert str(bound) == '' def test_lash_failure(): """Ensures that lash works for Failure container.""" def factory(inner_value: int) -> Result[str, int]: return Failure(inner_value + 1) expected = 6 bound: Result[str, int] = Failure(5) assert bound.lash(factory) == Failure(expected) 
assert Failure(5).lash(factory) == Failure(expected) assert str(bound.lash(factory)) == '' returns-0.24.0/tests/test_result/test_result_equality.py000066400000000000000000000056701472312074000236720ustar00rootroot00000000000000from copy import copy, deepcopy import pytest from returns.primitives.exceptions import ImmutableStateError from returns.result import Failure, Success def test_equals(): """Ensures that ``.equals`` method works correctly.""" inner_value = 1 assert Success(inner_value).equals(Success(inner_value)) assert Failure(inner_value).equals(Failure(inner_value)) def test_not_equals(): """Ensures that ``.equals`` method works correctly.""" inner_value = 1 assert not Success(inner_value).equals(Failure(inner_value)) assert not Success(inner_value).equals(Success(0)) assert not Failure(inner_value).equals(Success(inner_value)) assert not Failure(inner_value).equals(Failure(0)) def test_non_equality(): """Ensures that containers are not compared to regular values.""" input_value = 5 assert Failure(input_value) != input_value assert Success(input_value) != input_value assert Failure(input_value) != Success(input_value) assert hash(Failure(1)) assert hash(Success(1)) def test_is_compare(): """Ensures that `is` operator works correctly.""" left = Failure(1) right = Success(1) assert left.bind(lambda state: state) is left assert right.lash(lambda state: state) is right assert right is not Success(1) def test_immutability_failure(): """Ensures that Failure container is immutable.""" with pytest.raises(ImmutableStateError): Failure(0)._inner_state = 1 # noqa: WPS437 with pytest.raises(ImmutableStateError): Failure(1).missing = 2 with pytest.raises(ImmutableStateError): del Failure(0)._inner_state # type: ignore # noqa: WPS420, WPS437 with pytest.raises(AttributeError): Failure(1).missing # type: ignore # noqa: WPS428 def test_immutability_success(): """Ensures that Success container is immutable.""" with pytest.raises(ImmutableStateError): Success(0)._inner_state = 1 # noqa: WPS437 with pytest.raises(ImmutableStateError): Success(1).missing = 2 with pytest.raises(ImmutableStateError): del Success(0)._inner_state # type: ignore # noqa: WPS420, WPS437 with pytest.raises(AttributeError): Success(1).missing # type: ignore # noqa: WPS428 def test_success_immutable_copy(): """Ensures that Success returns it self when passed to copy function.""" success = Success(1) assert success is copy(success) def test_success_immutable_deepcopy(): """Ensures that Success returns it self when passed to deepcopy function.""" success = Success(1) assert success is deepcopy(success) def test_failure_immutable_copy(): """Ensures that Failure returns it self when passed to copy function.""" failure = Failure(0) assert failure is copy(failure) def test_failure_immutable_deepcopy(): """Ensures that Failure returns it self when passed to deepcopy function.""" failure = Failure(0) assert failure is deepcopy(failure) returns-0.24.0/tests/test_result/test_result_error.py000066400000000000000000000006431472312074000231610ustar00rootroot00000000000000from returns.result import Failure, ResultE, Success def test_result_error_success(): """Ensures that ResultE can be typecasted to success.""" container: ResultE[int] = Success(1) assert container.unwrap() == 1 def test_result_error_failure(): """Ensures that ResultE can be typecasted to failure.""" container: ResultE[int] = Failure(ValueError('1')) assert str(container.failure()) == '1' 
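# --- Illustrative note (not part of the original test suite) ---------------
# ``ResultE`` is ``Result`` with the error type fixed to ``Exception``, which
# is what the ``safe`` decorator (exercised in ``test_safe.py``) produces.
# A hedged usage sketch -- ``parse_int`` is a hypothetical helper, not a
# library function:
#
#     from returns.result import ResultE, safe
#
#     @safe
#     def parse_int(raw: str) -> int:
#         return int(raw)
#
#     good: ResultE[int] = parse_int('42')    # Success(42)
#     bad: ResultE[int] = parse_int('nope')   # Failure(ValueError(...))
#
#     assert good.value_or(0) == 42
#     assert bad.value_or(0) == 0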
returns-0.24.0/tests/test_result/test_result_failure.py000066400000000000000000000006331472312074000234560ustar00rootroot00000000000000import pytest from returns.primitives.exceptions import UnwrapFailedError from returns.result import Failure, Success def test_unwrap_success(): """Ensures that unwrap works for Success container.""" with pytest.raises(UnwrapFailedError): assert Success(5).failure() def test_unwrap_failure(): """Ensures that unwrap works for Failure container.""" assert Failure(5).failure() == 5 returns-0.24.0/tests/test_result/test_result_functions/000077500000000000000000000000001472312074000234635ustar00rootroot00000000000000returns-0.24.0/tests/test_result/test_result_functions/test_safe.py000066400000000000000000000023361472312074000260160ustar00rootroot00000000000000 import pytest from returns.result import Success, safe @safe def _function(number: int) -> float: return number / number @safe(exceptions=(ZeroDivisionError,)) def _function_two(number: int | str) -> float: assert isinstance(number, int) return number / number @safe((ZeroDivisionError,)) # no name def _function_three(number: int | str) -> float: assert isinstance(number, int) return number / number def test_safe_success(): """Ensures that safe decorator works correctly for Success case.""" assert _function(1) == Success(1.0) def test_safe_failure(): """Ensures that safe decorator works correctly for Failure case.""" failed = _function(0) assert isinstance(failed.failure(), ZeroDivisionError) def test_safe_failure_with_expected_error(): """Ensures that safe decorator works correctly for Failure case.""" failed = _function_two(0) assert isinstance(failed.failure(), ZeroDivisionError) failed2 = _function_three(0) assert isinstance(failed2.failure(), ZeroDivisionError) def test_safe_failure_with_non_expected_error(): """Ensures that safe decorator works correctly for Failure case.""" with pytest.raises(AssertionError): _function_two('0') returns-0.24.0/tests/test_result/test_result_map.py000066400000000000000000000006661472312074000226120ustar00rootroot00000000000000from returns.result import Failure, Success def test_map_success(): """Ensures that Success is mappable.""" assert Success(5).map(str) == Success('5') def test_alt_failure(): """Ensures that Failure is mappable.""" assert Failure(5).map(str) == Failure(5) assert Failure(5).alt(str) == Failure('5') def test_alt_success(): """Ensures that Success.alt is NoOp.""" assert Success(5).alt(str) == Success(5) returns-0.24.0/tests/test_result/test_result_unwrap.py000066400000000000000000000013601472312074000233410ustar00rootroot00000000000000import pytest from returns.primitives.exceptions import UnwrapFailedError from returns.result import Failure, Success def test_unwrap_success(): """Ensures that unwrap works for Success container.""" assert Success(5).unwrap() == 5 def test_unwrap_failure(): """Ensures that unwrap works for Failure container.""" with pytest.raises(UnwrapFailedError): assert Failure(5).unwrap() def test_unwrap_failure_with_exception(): """Ensures that unwrap raises from the original exception.""" expected_exception = ValueError('error') with pytest.raises(UnwrapFailedError) as excinfo: Failure(expected_exception).unwrap() assert 'ValueError: error' in str( excinfo.getrepr(), # noqa: WPS441 ) returns-0.24.0/tests/test_result/test_result_value_or.py000066400000000000000000000005631472312074000236450ustar00rootroot00000000000000from returns.result import Failure, Success def test_success_value(): """Ensures that value is fetch correctly 
from the Success.""" bound = Success(5).value_or(None) assert bound == 5 def test_failure_value(): """Ensures that value is fetch correctly from the Failure.""" bound = Failure(1).value_or(default_value=None) assert bound is None returns-0.24.0/tests/test_trampolines/000077500000000000000000000000001472312074000200355ustar00rootroot00000000000000returns-0.24.0/tests/test_trampolines/test_trampoline_decorator.py000066400000000000000000000023231472312074000256620ustar00rootroot00000000000000import sys from collections.abc import Callable, Iterator import pytest from returns.trampolines import Trampoline, trampoline @trampoline def _accumulate( numbers: Iterator[int], acc: int = 0, ) -> int | Trampoline[int]: number = next(numbers, None) if number is None: return acc return Trampoline(_accumulate, numbers, acc + number) @trampoline def _with_func_kwarg( numbers: Iterator[int], func: int = 0, # we need this name to match `Trampoline` constructor ) -> int | Trampoline[int]: number = next(numbers, None) if number is None: return func return Trampoline(_with_func_kwarg, numbers, func=func + number) @pytest.mark.parametrize('trampoline_func', [ _accumulate, _with_func_kwarg, ]) @pytest.mark.parametrize('given_range', [ range(0), range(1), range(2), range(5), range(sys.getrecursionlimit()), range(sys.getrecursionlimit() + 1), ]) def test_recursion_limit( trampoline_func: Callable[[Iterator[int]], int], given_range: range, ) -> None: """Test that accumulation is correct and no ``RecursionError`` happens.""" accumulated = trampoline_func(iter(given_range)) assert accumulated == sum(given_range) returns-0.24.0/tests/test_unsafe/000077500000000000000000000000001472312074000167615ustar00rootroot00000000000000returns-0.24.0/tests/test_unsafe/test_unsafe_perform_io.py000066400000000000000000000003751472312074000241010ustar00rootroot00000000000000from returns.io import IO from returns.unsafe import unsafe_perform_io def test_unsafe_perform_io(): """Ensures that unsafe_perform_io returns the object itself.""" id_object = object() assert unsafe_perform_io(IO(id_object)) is id_object returns-0.24.0/typesafety/000077500000000000000000000000001472312074000154745ustar00rootroot00000000000000returns-0.24.0/typesafety/test_context/000077500000000000000000000000001472312074000202175ustar00rootroot00000000000000returns-0.24.0/typesafety/test_context/test_requires_context/000077500000000000000000000000001472312074000246615ustar00rootroot00000000000000returns-0.24.0/typesafety/test_context/test_requires_context/test_context.yml000066400000000000000000000020341472312074000301260ustar00rootroot00000000000000- case: context_ask1 disable_cache: false main: | from returns.context import RequiresContext reveal_type(RequiresContext.ask()) # N: Revealed type is "returns.context.requires_context.RequiresContext[Never, Never]" - case: context_ask2 disable_cache: false main: | from returns.context import RequiresContext reveal_type(RequiresContext[int, str].ask()) # N: Revealed type is "returns.context.requires_context.RequiresContext[builtins.str, builtins.str]" - case: requires_context_from_value disable_cache: false main: | from returns.context import RequiresContext reveal_type(RequiresContext.from_value(1)) # N: Revealed type is "returns.context.requires_context.RequiresContext[builtins.int, Any]" - case: requires_context_from_context disable_cache: false main: | from returns.context import RequiresContext x: RequiresContext[int, str] reveal_type(RequiresContext.from_context(x)) # N: Revealed type is 
"returns.context.requires_context.RequiresContext[builtins.int, builtins.str]" returns-0.24.0/typesafety/test_context/test_requires_context/test_requires_context_cast.yml000066400000000000000000000040401472312074000330560ustar00rootroot00000000000000- case: context_correct_cast disable_cache: false main: | from returns.context import RequiresContext first: RequiresContext[TypeError, int] # we can only cast return type second: RequiresContext[Exception, int] = first reveal_type(second) # N: Revealed type is "returns.context.requires_context.RequiresContext[builtins.Exception, builtins.int]" - case: context_wrong_cast disable_cache: false main: | from returns.context import RequiresContext first: RequiresContext[ValueError, TypeError] # we can only cast return type second: RequiresContext[Exception, Exception] = first out: | main:4: error: Incompatible types in assignment (expression has type "RequiresContext[ValueError, TypeError]", variable has type "RequiresContext[Exception, Exception]") [assignment] - case: context_covariant_cast disable_cache: false main: | from returns.context import RequiresContext class A(object): a = 1 class B(A): b = 2 class C(A): c = 3 def func() -> RequiresContext[int, B]: return RequiresContext(lambda deps: deps.a + deps.b) def second(a: int) -> RequiresContext[int, A]: return RequiresContext(lambda deps: deps.a + a) def third(a: int) -> RequiresContext[int, C]: return RequiresContext(lambda deps: deps.c + a) reveal_type(func().bind(second)) reveal_type(func().bind(third)) out: | main:21: note: Revealed type is "returns.context.requires_context.RequiresContext[builtins.int, main.B]" main:21: error: Argument 1 to "bind" of "RequiresContext" has incompatible type "Callable[[int], RequiresContext[int, A]]"; expected "Callable[[int], KindN[RequiresContext[Any, Any], int, B, Any]]" [arg-type] main:22: note: Revealed type is "returns.context.requires_context.RequiresContext[builtins.int, main.B]" main:22: error: Argument 1 to "bind" of "RequiresContext" has incompatible type "Callable[[int], RequiresContext[int, C]]"; expected "Callable[[int], KindN[RequiresContext[Any, Any], int, B, Any]]" [arg-type] returns-0.24.0/typesafety/test_context/test_requires_context/test_requires_context_type.yml000066400000000000000000000076321472312074000331170ustar00rootroot00000000000000- case: requires_context_call_correct disable_cache: false main: | from returns.context import RequiresContext first: RequiresContext[str, int] reveal_type(first(1)) # N: Revealed type is "builtins.str" - case: requires_context_getattr disable_cache: false main: | from returns.context import RequiresContext x: RequiresContext[int, str] x.missing # E: "RequiresContext[int, str]" has no attribute "missing" [attr-defined] - case: requires_context_map_correct disable_cache: false main: | from returns.context import RequiresContext first: RequiresContext[str, int] reveal_type(first.map(lambda char: float(char))) # N: Revealed type is "returns.context.requires_context.RequiresContext[builtins.float, builtins.int]" - case: requires_context_apply_correct disable_cache: false main: | from typing import Callable from returns.context import RequiresContext first: RequiresContext[str, int] second: RequiresContext[Callable[[str], float], int] reveal_type(first.apply(second)) # N: Revealed type is "returns.context.requires_context.RequiresContext[builtins.float, builtins.int]" - case: requires_context_bind_correct disable_cache: false main: | from returns.context import RequiresContext first: RequiresContext[str, 
int] def function(arg: str) -> RequiresContext[float, int]: return RequiresContext.from_value(1.5) reveal_type(first.bind(function)) # N: Revealed type is "returns.context.requires_context.RequiresContext[builtins.float, builtins.int]" - case: requires_context_bind_context_correct disable_cache: false main: | from returns.context import RequiresContext first: RequiresContext[str, int] def function(arg: str) -> RequiresContext[float, int]: return RequiresContext.from_value(1.5) reveal_type(first.bind_context(function)) # N: Revealed type is "returns.context.requires_context.RequiresContext[builtins.float, builtins.int]" - case: requires_context_modify_env disable_cache: false main: | from returns.context import RequiresContext first: RequiresContext[float, int] reveal_type(first.modify_env(int)('1')) # N: Revealed type is "builtins.float" - case: requires_context_call_wrong disable_cache: false main: | from returns.context import RequiresContext first: RequiresContext[str, int] first('a') out: | main:4: error: Argument 1 to "__call__" of "RequiresContext" has incompatible type "str"; expected "int" [arg-type] - case: requires_context_map_wrong disable_cache: false main: | from returns.context import RequiresContext first: RequiresContext[str, int] def function(arg: int) -> int: return arg + 1 first.map(function) out: | main:8: error: Argument 1 to "map" of "RequiresContext" has incompatible type "Callable[[int], int]"; expected "Callable[[str], int]" [arg-type] - case: requires_context_bind_wrong1 disable_cache: false main: | from returns.context import RequiresContext first: RequiresContext[str, int] def function(arg: float) -> RequiresContext[float, int]: return RequiresContext.from_value(1.5) first.bind(function) out: | main:8: error: Argument 1 to "bind" of "RequiresContext" has incompatible type "Callable[[float], RequiresContext[float, int]]"; expected "Callable[[str], KindN[RequiresContext[Any, Any], float, int, Any]]" [arg-type] - case: requires_context_bind_wrong2 disable_cache: false main: | from returns.context import RequiresContext first: RequiresContext[str, int] def function(arg: str) -> RequiresContext[float, str]: return RequiresContext.from_value(1.5) first.bind(function) out: | main:8: error: Argument 1 to "bind" of "RequiresContext" has incompatible type "Callable[[str], RequiresContext[float, str]]"; expected "Callable[[str], KindN[RequiresContext[Any, Any], float, int, Any]]" [arg-type] returns-0.24.0/typesafety/test_context/test_requires_context/test_requires_context_typecast.yml000066400000000000000000000025301472312074000337620ustar00rootroot00000000000000- case: requires_context_from_requires_context_ioresult disable_cache: true main: | from returns.context import RequiresContext from returns.context import RequiresContextIOResult x: RequiresContextIOResult[int, float, str] reveal_type(RequiresContext.from_requires_context_ioresult(x)) # N: Revealed type is "returns.context.requires_context.RequiresContext[returns.io.IOResult[builtins.int, builtins.float], builtins.str]" - case: requires_context_from_requires_context_result disable_cache: true main: | from returns.context import RequiresContext from returns.context import RequiresContextResult x: RequiresContextResult[int, float, str] reveal_type(RequiresContext.from_requires_context_result(x)) # N: Revealed type is "returns.context.requires_context.RequiresContext[returns.result.Result[builtins.int, builtins.float], builtins.str]" - case: requires_context_from_requires_context_future_result disable_cache: true 
main: | from returns.context import RequiresContext from returns.context import RequiresContextFutureResult x: RequiresContextFutureResult[int, float, str] reveal_type(RequiresContext.from_requires_context_future_result(x)) # N: Revealed type is "returns.context.requires_context.RequiresContext[returns.future.FutureResult[builtins.int, builtins.float], builtins.str]" returns-0.24.0/typesafety/test_context/test_requires_context_future_result/000077500000000000000000000000001472312074000276515ustar00rootroot00000000000000test_context_future_result.yml000066400000000000000000000012021472312074000360230ustar00rootroot00000000000000returns-0.24.0/typesafety/test_context/test_requires_context_future_result- case: context_result_future_ask1 disable_cache: false main: | from returns.context import RequiresContextFutureResult reveal_type(RequiresContextFutureResult.ask()) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[Never, Never, Never]" - case: context_result_future_ask2 disable_cache: false main: | from returns.context import RequiresContextFutureResult reveal_type(RequiresContextFutureResult[int, bool, str].ask()) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.str, builtins.bool, builtins.str]" test_requires_context_future_result.yml000066400000000000000000000221501472312074000377470ustar00rootroot00000000000000returns-0.24.0/typesafety/test_context/test_requires_context_future_result- case: requires_context_future_result_call disable_cache: false main: | from returns.context import RequiresContextFutureResult x: RequiresContextFutureResult[int, float, str] reveal_type(x('a')) # N: Revealed type is "returns.future.FutureResult[builtins.int, builtins.float]" - case: requires_context_future_result_getattr disable_cache: false main: | from returns.context import RequiresContextFutureResult x: RequiresContextFutureResult[int, str, bool] x.missing # E: "RequiresContextFutureResult[int, str, bool]" has no attribute "missing" [attr-defined] - case: requires_context_future_result_swap disable_cache: false main: | from returns.context import RequiresContextFutureResult x: RequiresContextFutureResult[int, float, str] reveal_type(x.swap()) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.float, builtins.int, builtins.str]" - case: requires_context_future_result_map disable_cache: false main: | from returns.context import RequiresContextFutureResult x: RequiresContextFutureResult[int, float, str] reveal_type(x.map(bool)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.bool, builtins.float, builtins.str]" - case: requires_context_future_result_apply disable_cache: false main: | from typing import Callable from returns.context import RequiresContextFutureResult x: RequiresContextFutureResult[int, float, str] y: RequiresContextFutureResult[Callable[[int], bool], float, str] reveal_type(x.apply(y)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.bool, builtins.float, builtins.str]" - case: requires_context_future_result_bind disable_cache: false main: | from returns.context import RequiresContextFutureResult x: RequiresContextFutureResult[int, float, str] def test(param: int) -> RequiresContextFutureResult[bool, float, str]: ... 
reveal_type(x.bind(test)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.bool, builtins.float, builtins.str]" - case: requires_context_future_result_bind_awaitable disable_cache: false main: | from returns.context import RequiresContextFutureResult async def bind_awaitable(arg: int) -> float: ... first: RequiresContextFutureResult[int, str, bool] reveal_type(first.bind_awaitable(bind_awaitable)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.float, builtins.str, builtins.bool]" - case: requires_context_future_result_bind_async disable_cache: false main: | from returns.context import RequiresContextFutureResult async def bind_async(arg: int) -> RequiresContextFutureResult[float, str, bool]: ... first: RequiresContextFutureResult[int, str, bool] reveal_type(first.bind_async(bind_async)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.float, builtins.str, builtins.bool]" - case: requires_context_future_result_bind_result disable_cache: false main: | from returns.context import RequiresContextFutureResult from returns.result import Result x: RequiresContextFutureResult[int, float, str] def test(param: int) -> Result[bool, float]: ... reveal_type(x.bind_result(test)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.bool, builtins.float, builtins.str]" - case: requires_context_future_result_bind_ioresult disable_cache: false main: | from returns.context import RequiresContextFutureResult from returns.io import IOResult x: RequiresContextFutureResult[int, float, str] def test(param: int) -> IOResult[bool, float]: ... reveal_type(x.bind_ioresult(test)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.bool, builtins.float, builtins.str]" - case: requires_context_future_result_bind_io disable_cache: false main: | from returns.context import RequiresContextFutureResult from returns.io import IO x: RequiresContextFutureResult[int, float, str] def test(param: int) -> IO[bool]: ... reveal_type(x.bind_io(test)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.bool, builtins.float, builtins.str]" - case: requires_context_future_result_bind_future disable_cache: false main: | from returns.context import RequiresContextFutureResult from returns.future import Future x: RequiresContextFutureResult[int, float, str] def test(param: int) -> Future[bool]: ... reveal_type(x.bind_future(test)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.bool, builtins.float, builtins.str]" - case: requires_context_future_result_bind_future_result disable_cache: false main: | from returns.context import RequiresContextFutureResult from returns.future import FutureResult x: RequiresContextFutureResult[int, float, str] def test(param: int) -> FutureResult[bool, float]: ... reveal_type(x.bind_future_result(test)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.bool, builtins.float, builtins.str]" - case: requires_context_future_result_bind_async_future disable_cache: false main: | from returns.context import RequiresContextFutureResult from returns.future import Future x: RequiresContextFutureResult[int, float, str] async def test(param: int) -> Future[bool]: ... 
reveal_type(x.bind_async_future(test)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.bool, builtins.float, builtins.str]" - case: requires_context_future_result_bind_async_future_result disable_cache: false main: | from returns.context import RequiresContextFutureResult from returns.future import FutureResult x: RequiresContextFutureResult[int, float, str] async def test(param: int) -> FutureResult[bool, float]: ... reveal_type(x.bind_async_future_result(test)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.bool, builtins.float, builtins.str]" - case: requires_context_future_result_bind_context disable_cache: false main: | from returns.context import RequiresContextFutureResult, RequiresContext x: RequiresContextFutureResult[int, float, str] def test(param: int) -> RequiresContext[bool, str]: ... reveal_type(x.bind_context(test)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.bool, builtins.float, builtins.str]" - case: requires_context_future_result_bind_context_result disable_cache: false main: | from returns.context import RequiresContextFutureResult, RequiresContextResult x: RequiresContextFutureResult[int, float, str] def test(param: int) -> RequiresContextResult[bool, float, str]: ... reveal_type(x.bind_context_result(test)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.bool, builtins.float, builtins.str]" - case: requires_context_future_result_bind_context_ioresult disable_cache: false main: | from returns.context import RequiresContextFutureResult, RequiresContextIOResult x: RequiresContextFutureResult[int, float, str] def test(param: int) -> RequiresContextIOResult[bool, float, str]: ... reveal_type(x.bind_context_ioresult(test)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.bool, builtins.float, builtins.str]" - case: requires_context_future_result_lash disable_cache: false main: | from returns.context import RequiresContextFutureResult x: RequiresContextFutureResult[int, float, str] def test(param: float) -> RequiresContextFutureResult[int, bool, str]: ... reveal_type(x.lash(test)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.bool, builtins.str]" - case: requires_context_future_result_alt disable_cache: false main: | from returns.context import RequiresContextFutureResult x: RequiresContextFutureResult[int, float, str] def test(param: float) -> bool: ... 
reveal_type(x.alt(test)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.bool, builtins.str]" - case: requires_context_future_result_modify_env disable_cache: false main: | from returns.context import RequiresContextFutureResult first: RequiresContextFutureResult[float, bool, int] reveal_type(first.modify_env(int)('1')) # N: Revealed type is "returns.future.FutureResult[builtins.float, builtins.bool]" test_requires_context_future_result_aliases.yml000066400000000000000000000007071472312074000414540ustar00rootroot00000000000000returns-0.24.0/typesafety/test_context/test_requires_context_future_result- case: requires_context_future_result_aliases disable_cache: false main: | from returns.context import ( RequiresContextFutureResult, RequiresContextFutureResultE, ReaderFutureResult, ReaderFutureResultE, ) x: RequiresContextFutureResult[int, Exception, str] x1: RequiresContextFutureResultE[int, str] = x x2: ReaderFutureResult[int, Exception, str] = x x3: ReaderFutureResultE[int, str] = x test_requires_context_future_result_cast.yml000066400000000000000000000035171472312074000407670ustar00rootroot00000000000000returns-0.24.0/typesafety/test_context/test_requires_context_future_result- case: requires_context_future_result_success_cast disable_cache: false main: | from returns.context import RequiresContextFutureResult first: RequiresContextFutureResult[object, Exception, str] = RequiresContextFutureResult.from_value(1) reveal_type(first) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.object, builtins.Exception, builtins.str]" - case: requires_context_future_result_failure_cast disable_cache: false main: | from returns.context import RequiresContextFutureResult first: RequiresContextFutureResult[object, Exception, str] = RequiresContextFutureResult.from_failure(TypeError()) reveal_type(first) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.object, builtins.Exception, builtins.str]" - case: requires_context_future_result_env_cast disable_cache: false main: | from returns.context import RequiresContextFutureResult first: RequiresContextFutureResult[object, Exception, object] second: RequiresContextFutureResult[object, Exception, str] = first reveal_type(second) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.object, builtins.Exception, builtins.str]" - case: requires_context_future_result_wrong_cast disable_cache: false main: | from returns.context import RequiresContextFutureResult first: RequiresContextFutureResult[ValueError, TypeError, IndexError] second: RequiresContextFutureResult[Exception, Exception, Exception] = first out: | main:4: error: Incompatible types in assignment (expression has type "RequiresContextFutureResult[ValueError, TypeError, IndexError]", variable has type "RequiresContextFutureResult[Exception, Exception, Exception]") [assignment] test_requires_context_future_result_unit.yml000066400000000000000000000126651472312074000410200ustar00rootroot00000000000000returns-0.24.0/typesafety/test_context/test_requires_context_future_result- case: requires_context_future_result_success disable_cache: false main: | from returns.context import RequiresContextFutureResult reveal_type(RequiresContextFutureResult.from_value(1)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, Any, 
Any]" - case: requires_context_future_result_failure disable_cache: false main: | from returns.context import RequiresContextFutureResult reveal_type(RequiresContextFutureResult.from_failure(1)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[Any, builtins.int, Any]" - case: requires_context_future_result_result disable_cache: false main: | from returns.context import RequiresContextFutureResult from returns.result import Result r: Result[int, str] reveal_type(RequiresContextFutureResult.from_result(r)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.str, Any]" - case: requires_context_future_result_io disable_cache: false main: | from returns.context import RequiresContextFutureResult from returns.io import IO r: IO[int] reveal_type(RequiresContextFutureResult.from_io(r)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, Any, Any]" - case: requires_context_future_result_failed_io disable_cache: false main: | from returns.context import RequiresContextFutureResult from returns.io import IO r: IO[int] reveal_type(RequiresContextFutureResult.from_failed_io(r)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[Any, builtins.int, Any]" - case: requires_context_future_result_ioresult disable_cache: false main: | from returns.context import RequiresContextFutureResult from returns.io import IOResult r: IOResult[int, str] reveal_type(RequiresContextFutureResult.from_ioresult(r)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.str, Any]" - case: requires_context_future_result_future disable_cache: false main: | from returns.context import RequiresContextFutureResult from returns.future import Future r: Future[int] reveal_type(RequiresContextFutureResult.from_future(r)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, Any, Any]" - case: requires_context_future_result_failed_future disable_cache: false main: | from returns.context import RequiresContextFutureResult from returns.future import Future r: Future[int] reveal_type(RequiresContextFutureResult.from_failed_future(r)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[Any, builtins.int, Any]" - case: requires_context_future_result_future_result disable_cache: false main: | from returns.context import RequiresContextFutureResult from returns.future import FutureResult r: FutureResult[int, str] reveal_type(RequiresContextFutureResult.from_future_result(r)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.str, Any]" - case: requires_context_future_result_typecast disable_cache: false main: | from returns.context import RequiresContextFutureResult, RequiresContext from returns.future import FutureResult r: RequiresContext[FutureResult[int, str], float] reveal_type(RequiresContextFutureResult.from_typecast(r)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.str, builtins.float]" - case: requires_context_future_result_successful_context disable_cache: false main: | from returns.context import RequiresContextFutureResult, RequiresContext r: RequiresContext[str, float] 
reveal_type(RequiresContextFutureResult.from_context(r)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.str, Any, builtins.float]" - case: requires_context_future_result_failed_context disable_cache: false main: | from returns.context import RequiresContextFutureResult, RequiresContext r: RequiresContext[str, float] reveal_type(RequiresContextFutureResult.from_failed_context(r)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[Any, builtins.str, builtins.float]" - case: requires_context_future_result_from_result_context disable_cache: false main: | from returns.context import RequiresContextFutureResult, RequiresContextResult r: RequiresContextResult[int, str, float] reveal_type(RequiresContextFutureResult.from_result_context(r)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.str, builtins.float]" - case: requires_context_future_result_from_ioresult_context disable_cache: false main: | from returns.context import RequiresContextFutureResult, RequiresContextIOResult r: RequiresContextIOResult[int, str, float] reveal_type(RequiresContextFutureResult.from_ioresult_context(r)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.str, builtins.float]" returns-0.24.0/typesafety/test_context/test_requires_context_ioresult/000077500000000000000000000000001472312074000266075ustar00rootroot00000000000000returns-0.24.0/typesafety/test_context/test_requires_context_ioresult/test_context_ioresult.yml000066400000000000000000000011301472312074000337760ustar00rootroot00000000000000- case: context_result_io_ask1 disable_cache: false main: | from returns.context import RequiresContextIOResult reveal_type(RequiresContextIOResult.ask()) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[Never, Never, Never]" - case: context_result_io_ask2 disable_cache: false main: | from returns.context import RequiresContextIOResult reveal_type(RequiresContextIOResult[int, bool, str].ask()) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.str, builtins.bool, builtins.str]" test_requires_context_ioresult.yml000066400000000000000000000126411472312074000356470ustar00rootroot00000000000000returns-0.24.0/typesafety/test_context/test_requires_context_ioresult- case: requires_context_ioresult_call disable_cache: false main: | from returns.context import RequiresContextIOResult x: RequiresContextIOResult[int, float, str] reveal_type(x('a')) # N: Revealed type is "returns.io.IOResult[builtins.int, builtins.float]" - case: requires_context_ioresult_getattr disable_cache: false main: | from returns.context import RequiresContextIOResult x: RequiresContextIOResult[int, str, bool] x.missing # E: "RequiresContextIOResult[int, str, bool]" has no attribute "missing" [attr-defined] - case: requires_context_ioresult_swap disable_cache: false main: | from returns.context import RequiresContextIOResult x: RequiresContextIOResult[int, float, str] reveal_type(x.swap()) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.float, builtins.int, builtins.str]" - case: requires_context_ioresult_bind disable_cache: false main: | from returns.context import RequiresContextIOResult x: RequiresContextIOResult[int, float, str] def test(param: int) -> RequiresContextIOResult[bool, 
float, str]: ... reveal_type(x.bind(test)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.bool, builtins.float, builtins.str]" - case: requires_context_ioresult_bind_result disable_cache: false main: | from returns.context import RequiresContextIOResult from returns.result import Result x: RequiresContextIOResult[int, float, str] def test(param: int) -> Result[bool, float]: ... reveal_type(x.bind_result(test)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.bool, builtins.float, builtins.str]" - case: requires_context_ioresult_bind_ioresult disable_cache: false main: | from returns.context import RequiresContextIOResult from returns.io import IOResult x: RequiresContextIOResult[int, float, str] def test(param: int) -> IOResult[bool, float]: ... reveal_type(x.bind_ioresult(test)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.bool, builtins.float, builtins.str]" - case: requires_context_ioresult_bind_io disable_cache: false main: | from returns.context import RequiresContextIOResult from returns.io import IO x: RequiresContextIOResult[int, float, str] def test(param: int) -> IO[bool]: ... reveal_type(x.bind_io(test)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.bool, builtins.float, builtins.str]" - case: requires_context_ioresult_bind_context disable_cache: false main: | from returns.context import RequiresContextIOResult, RequiresContext x: RequiresContextIOResult[int, float, str] def test(param: int) -> RequiresContext[bool, str]: ... reveal_type(x.bind_context(test)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.bool, builtins.float, builtins.str]" - case: requires_context_ioresult_bind_context_result disable_cache: false main: | from returns.context import RequiresContextIOResult, RequiresContextResult x: RequiresContextIOResult[int, float, str] def test(param: int) -> RequiresContextResult[bool, float, str]: ... reveal_type(x.bind_context_result(test)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.bool, builtins.float, builtins.str]" - case: requires_context_ioresult_map disable_cache: false main: | from returns.context import RequiresContextIOResult x: RequiresContextIOResult[int, float, str] reveal_type(x.map(bool)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.bool, builtins.float, builtins.str]" - case: requires_context_ioresult_apply disable_cache: false main: | from typing import Callable from returns.context import RequiresContextIOResult x: RequiresContextIOResult[int, float, str] y: RequiresContextIOResult[Callable[[int], bool], float, str] reveal_type(x.apply(y)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.bool, builtins.float, builtins.str]" - case: requires_context_ioresult_lash disable_cache: false main: | from returns.context import RequiresContextIOResult x: RequiresContextIOResult[int, float, str] def test(param: float) -> RequiresContextIOResult[int, bool, str]: ... 
reveal_type(x.lash(test)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, builtins.bool, builtins.str]" - case: requires_context_ioresult_alt disable_cache: false main: | from returns.context import RequiresContextIOResult x: RequiresContextIOResult[int, float, str] def test(param: float) -> bool: ... reveal_type(x.alt(test)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, builtins.bool, builtins.str]" - case: requires_context_ioresult_modify_env disable_cache: false main: | from returns.context import RequiresContextIOResult first: RequiresContextIOResult[float, bool, int] reveal_type(first.modify_env(int)('1')) # N: Revealed type is "returns.io.IOResult[builtins.float, builtins.bool]" test_requires_context_ioresult_aliases.yml000066400000000000000000000006421472312074000373460ustar00rootroot00000000000000returns-0.24.0/typesafety/test_context/test_requires_context_ioresult- case: requires_context_ioresult_aliases disable_cache: false main: | from returns.context import ( RequiresContextIOResult, RequiresContextIOResultE, ReaderIOResult, ReaderIOResultE, ) x: RequiresContextIOResult[int, Exception, str] x1: RequiresContextIOResultE[int, str] = x x2: ReaderIOResult[int, Exception, str] = x x3: ReaderIOResultE[int, str] = x test_requires_context_ioresult_cast.yml000066400000000000000000000033501472312074000366560ustar00rootroot00000000000000returns-0.24.0/typesafety/test_context/test_requires_context_ioresult- case: requires_context_ioresult_success_cast disable_cache: false main: | from returns.context import RequiresContextIOResult first: RequiresContextIOResult[object, Exception, str] = RequiresContextIOResult.from_value(1) reveal_type(first) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.object, builtins.Exception, builtins.str]" - case: requires_context_ioresult_failure_cast disable_cache: false main: | from returns.context import RequiresContextIOResult first: RequiresContextIOResult[object, Exception, str] = RequiresContextIOResult.from_failure(TypeError()) reveal_type(first) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.object, builtins.Exception, builtins.str]" - case: requires_context_ioresult_env_cast disable_cache: false main: | from returns.context import RequiresContextIOResult first: RequiresContextIOResult[object, Exception, object] second: RequiresContextIOResult[object, Exception, str] = first reveal_type(second) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.object, builtins.Exception, builtins.str]" - case: requires_context_ioresult_wrong_cast disable_cache: false main: | from returns.context import RequiresContextIOResult first: RequiresContextIOResult[ValueError, TypeError, IndexError] second: RequiresContextIOResult[Exception, Exception, Exception] = first out: | main:4: error: Incompatible types in assignment (expression has type "RequiresContextIOResult[ValueError, TypeError, IndexError]", variable has type "RequiresContextIOResult[Exception, Exception, Exception]") [assignment] test_requires_context_ioresult_unit.yml000066400000000000000000000071321472312074000367050ustar00rootroot00000000000000returns-0.24.0/typesafety/test_context/test_requires_context_ioresult- case: requires_context_ioresult_success disable_cache: false main: | from returns.context import RequiresContextIOResult 
reveal_type(RequiresContextIOResult.from_value(1)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, Any, Any]" - case: requires_context_ioresult_failure disable_cache: false main: | from returns.context import RequiresContextIOResult reveal_type(RequiresContextIOResult.from_failure(1)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[Any, builtins.int, Any]" - case: requires_context_ioresult_result disable_cache: false main: | from returns.context import RequiresContextIOResult from returns.result import Result r: Result[int, str] reveal_type(RequiresContextIOResult.from_result(r)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, builtins.str, Any]" - case: requires_context_ioresult_io disable_cache: false main: | from returns.context import RequiresContextIOResult from returns.io import IO r: IO[int] reveal_type(RequiresContextIOResult.from_io(r)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, Any, Any]" - case: requires_context_ioresult_failed_io disable_cache: false main: | from returns.context import RequiresContextIOResult from returns.io import IO r: IO[int] reveal_type(RequiresContextIOResult.from_failed_io(r)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[Any, builtins.int, Any]" - case: requires_context_ioresult_ioresult disable_cache: false main: | from returns.context import RequiresContextIOResult from returns.io import IOResult r: IOResult[int, str] reveal_type(RequiresContextIOResult.from_ioresult(r)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, builtins.str, Any]" - case: requires_context_ioresult_typecast disable_cache: false main: | from returns.context import RequiresContextIOResult, RequiresContext from returns.io import IOResult r: RequiresContext[IOResult[int, str], float] reveal_type(RequiresContextIOResult.from_typecast(r)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, builtins.str, builtins.float]" - case: requires_context_ioresult_successful_context disable_cache: false main: | from returns.context import RequiresContextIOResult, RequiresContext r: RequiresContext[str, float] reveal_type(RequiresContextIOResult.from_context(r)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.str, Any, builtins.float]" - case: requires_context_ioresult_failed_context disable_cache: false main: | from returns.context import RequiresContextIOResult, RequiresContext r: RequiresContext[str, float] reveal_type(RequiresContextIOResult.from_failed_context(r)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[Any, builtins.str, builtins.float]" - case: requires_context_ioresult_from_result_context disable_cache: false main: | from returns.context import RequiresContextIOResult, RequiresContextResult r: RequiresContextResult[int, str, float] reveal_type(RequiresContextIOResult.from_result_context(r)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, builtins.str, builtins.float]" 
returns-0.24.0/typesafety/test_context/test_requires_context_result/000077500000000000000000000000001472312074000262575ustar00rootroot00000000000000returns-0.24.0/typesafety/test_context/test_requires_context_result/test_context_result.yml000066400000000000000000000010641472312074000331240ustar00rootroot00000000000000- case: context_ask1 disable_cache: false main: | from returns.context import RequiresContextResult reveal_type(RequiresContextResult.ask()) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[Never, Never, Never]" - case: context_ask2 disable_cache: false main: | from returns.context import RequiresContextResult reveal_type(RequiresContextResult[int, bool, str].ask()) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.str, builtins.bool, builtins.str]" returns-0.24.0/typesafety/test_context/test_requires_context_result/test_requires_context_cast.yml000066400000000000000000000032701472312074000344600ustar00rootroot00000000000000- case: requires_context_result_success_cast disable_cache: false main: | from returns.context import RequiresContextResult first: RequiresContextResult[object, Exception, str] = RequiresContextResult.from_value(1) reveal_type(first) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.object, builtins.Exception, builtins.str]" - case: requires_context_result_failure_cast disable_cache: false main: | from returns.context import RequiresContextResult first: RequiresContextResult[object, Exception, str] = RequiresContextResult.from_failure(TypeError()) reveal_type(first) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.object, builtins.Exception, builtins.str]" - case: requires_context_result_env_cast disable_cache: false main: | from returns.context import RequiresContextResult first: RequiresContextResult[object, Exception, object] second: RequiresContextResult[object, Exception, str] = first reveal_type(second) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.object, builtins.Exception, builtins.str]" - case: requires_context_result_wrong_cast disable_cache: false main: | from returns.context import RequiresContextResult first: RequiresContextResult[ValueError, TypeError, IndexError] second: RequiresContextResult[Exception, Exception, Exception] = first out: | main:4: error: Incompatible types in assignment (expression has type "RequiresContextResult[ValueError, TypeError, IndexError]", variable has type "RequiresContextResult[Exception, Exception, Exception]") [assignment] returns-0.24.0/typesafety/test_context/test_requires_context_result/test_requires_context_result.yml000066400000000000000000000077401472312074000350520ustar00rootroot00000000000000- case: requires_context_result_call disable_cache: false main: | from returns.context import RequiresContextResult x: RequiresContextResult[int, Exception, str] reveal_type(x('a')) # N: Revealed type is "returns.result.Result[builtins.int, builtins.Exception]" - case: requires_context_result_getattr disable_cache: false main: | from returns.context import RequiresContextResult x: RequiresContextResult[int, str, bool] x.missing # E: "RequiresContextResult[int, str, bool]" has no attribute "missing" [attr-defined] - case: requires_context_result_swap disable_cache: false main: | from returns.context import RequiresContextResult x: RequiresContextResult[int, float, str] reveal_type(x.swap()) # N: Revealed 
type is "returns.context.requires_context_result.RequiresContextResult[builtins.float, builtins.int, builtins.str]" - case: requires_context_result_bind disable_cache: false main: | from returns.context import RequiresContextResult x: RequiresContextResult[int, float, str] def test(param: int) -> RequiresContextResult[bool, float, str]: ... reveal_type(x.bind(test)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.bool, builtins.float, builtins.str]" - case: requires_context_result_bind_result disable_cache: false main: | from returns.context import RequiresContextResult from returns.result import Result x: RequiresContextResult[int, float, str] def test(param: int) -> Result[bool, float]: ... reveal_type(x.bind_result(test)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.bool, builtins.float, builtins.str]" - case: requires_context_result_bind_context disable_cache: false main: | from returns.context import RequiresContextResult, RequiresContext x: RequiresContextResult[int, float, str] def test(param: int) -> RequiresContext[bool, str]: ... reveal_type(x.bind_context(test)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.bool, builtins.float, builtins.str]" - case: requires_context_result_map disable_cache: false main: | from returns.context import RequiresContextResult x: RequiresContextResult[int, float, str] reveal_type(x.map(bool)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.bool, builtins.float, builtins.str]" - case: requires_context_result_apply disable_cache: false main: | from typing import Callable from returns.context import RequiresContextResult x: RequiresContextResult[int, float, str] y: RequiresContextResult[Callable[[int], bool], float, str] reveal_type(x.apply(y)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.bool, builtins.float, builtins.str]" - case: requires_context_result_lash disable_cache: false main: | from returns.context import RequiresContextResult x: RequiresContextResult[int, float, str] def test(param: float) -> RequiresContextResult[int, bool, str]: ... reveal_type(x.lash(test)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.int, builtins.bool, builtins.str]" - case: requires_context_result_alt disable_cache: false main: | from returns.context import RequiresContextResult x: RequiresContextResult[int, float, str] def test(param: float) -> bool: ... 
reveal_type(x.alt(test)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.int, builtins.bool, builtins.str]" - case: requires_context_result_modify_env disable_cache: false main: | from returns.context import RequiresContextResult first: RequiresContextResult[float, bool, int] reveal_type(first.modify_env(int)('1')) # N: Revealed type is "returns.result.Result[builtins.float, builtins.bool]" test_requires_context_result_unit.yml000066400000000000000000000041111472312074000360170ustar00rootroot00000000000000returns-0.24.0/typesafety/test_context/test_requires_context_result- case: requires_context_result_success disable_cache: false main: | from returns.context import RequiresContextResult reveal_type(RequiresContextResult.from_value(1)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.int, Any, Any]" - case: requires_context_result_failure disable_cache: false main: | from returns.context import RequiresContextResult reveal_type(RequiresContextResult.from_failure(1)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[Any, builtins.int, Any]" - case: requires_context_result_result disable_cache: false main: | from returns.context import RequiresContextResult from returns.result import Result r: Result[int, str] reveal_type(RequiresContextResult.from_result(r)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.int, builtins.str, Any]" - case: requires_context_result_typecast disable_cache: false main: | from returns.context import RequiresContextResult, RequiresContext from returns.result import Result r: RequiresContext[Result[int, str], float] reveal_type(RequiresContextResult.from_typecast(r)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.int, builtins.str, builtins.float]" - case: requires_context_result_successful_context disable_cache: false main: | from returns.context import RequiresContextResult, RequiresContext r: RequiresContext[str, float] reveal_type(RequiresContextResult.from_context(r)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.str, Any, builtins.float]" - case: requires_context_result_failed_context disable_cache: false main: | from returns.context import RequiresContextResult, RequiresContext r: RequiresContext[str, float] reveal_type(RequiresContextResult.from_failed_context(r)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[Any, builtins.str, builtins.float]" returns-0.24.0/typesafety/test_contrib/000077500000000000000000000000001472312074000201735ustar00rootroot00000000000000returns-0.24.0/typesafety/test_contrib/test_hypothesis/000077500000000000000000000000001472312074000234315ustar00rootroot00000000000000returns-0.24.0/typesafety/test_contrib/test_hypothesis/test_laws/000077500000000000000000000000001472312074000254365ustar00rootroot00000000000000returns-0.24.0/typesafety/test_contrib/test_hypothesis/test_laws/test_check_all_laws.yml000066400000000000000000000024041472312074000321530ustar00rootroot00000000000000- case: check_all_laws disable_cache: false parametrized: - container: Result - container: Maybe - container: IO - container: IOResult - container: Reader - container: ReaderResult - container: ReaderIOResult - container: ReaderFutureResult - container: Future - container: FutureResult main: | from returns.context import ( Reader, ReaderResult, ReaderIOResult, 
ReaderFutureResult, ) from returns.future import Future, FutureResult from returns.maybe import Maybe from returns.result import Result from returns.io import IO, IOResult from returns.primitives.laws import Lawful from typing import Type x: Type[Lawful] = {{ container }} - case: test_all_laws_sig disable_cache: false # TODO: remove this config after # mypy/typeshed/stdlib/unittest/mock.pyi:120: # error: Class cannot subclass "Any" (has type "Any") # is fixed. mypy_config: disallow_subclassing_any = False main: | from returns.contrib.hypothesis.laws import check_all_laws reveal_type(check_all_laws) # N: Revealed type is "def (container_type: Type[returns.primitives.laws.Lawful[Any]], *, settings_kwargs: Union[builtins.dict[builtins.str, Any], None] =, use_init: builtins.bool =)" returns-0.24.0/typesafety/test_converters/000077500000000000000000000000001472312074000207255ustar00rootroot00000000000000returns-0.24.0/typesafety/test_converters/test_flatten.yml000066400000000000000000000122111472312074000241410ustar00rootroot00000000000000- case: flatten_wrong_flatten_non_bindable disable_cache: false main: | from typing import Callable, TypeVar from returns.converters import flatten from returns.interfaces.mappable import Mappable1 from returns.primitives.hkt import Kind1, SupportsKind1 from returns.result import Result V = TypeVar('V') N = TypeVar('N') class Functor(SupportsKind1['Functor', V], Mappable1[V]): def map(self, function: Callable[[V], N]) -> Functor[N]: ... x: Functor[Functor[int]] flatten(x) # E: Value of type variable "_BindableKind" of "flatten" cannot be "Functor[Any]" [type-var] - case: flatten_wrong_error_type disable_cache: false main: | from returns.converters import flatten from returns.result import Result x: Result[Result[int, str], float] flatten(x) # E: Cannot infer type argument 3 of "flatten" [misc] - case: flatten_custom_type disable_cache: false main: | from typing import TypeVar from returns.converters import flatten from returns.interfaces.bindable import Bindable1 from returns.primitives.hkt import SupportsKind1 V = TypeVar('V') class MyClass(SupportsKind1['MyClass', V], Bindable1[V]): ... x: MyClass[MyClass[int]] reveal_type(flatten(x)) # N: Revealed type is "main.MyClass[builtins.int]" - case: flatten_wrong_flatten_error_type disable_cache: false main: | from returns.converters import flatten from returns.result import Result def returns_result() -> Result[int, Result[int, str]]: ... flatten(returns_result()) out: | main:7: error: Argument 1 to "flatten" has incompatible type "Result[int, Result[int, str]]"; expected "KindN[Result[Any, Any], KindN[Result[Any, Any], Never, Result[int, str], Never], Result[int, str], Never]" [arg-type] - case: flatten_io disable_cache: false main: | from returns.converters import flatten from returns.io import IO reveal_type(flatten(IO(IO(1)))) # N: Revealed type is "returns.io.IO[builtins.int]" - case: flatten_maybe disable_cache: false main: | from returns.converters import flatten from returns.maybe import Some reveal_type(flatten(Some(Some(1)))) # N: Revealed type is "returns.maybe.Maybe[builtins.int]" - case: flatten_result disable_cache: false main: | from returns.converters import flatten from returns.result import Result def returns_result() -> Result[Result[int, str], str]: ... 
reveal_type(flatten(returns_result())) # N: Revealed type is "returns.result.Result[builtins.int, builtins.str]" - case: flatten_ioresult disable_cache: false main: | from returns.converters import flatten from returns.io import IOResult def returns_ioresult() -> IOResult[IOResult[int, str], str]: ... reveal_type(flatten(returns_ioresult())) # N: Revealed type is "returns.io.IOResult[builtins.int, builtins.str]" - case: flatten_context disable_cache: false main: | from returns.converters import flatten from returns.context import RequiresContext x: RequiresContext[RequiresContext[str, int], int] reveal_type(flatten(x)) # N: Revealed type is "returns.context.requires_context.RequiresContext[builtins.str, builtins.int]" - case: flatten_context_result disable_cache: false main: | from returns.converters import flatten from returns.context import RequiresContextResult x: RequiresContextResult[RequiresContextResult[str, int, float], int, float] reveal_type(flatten(x)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.str, builtins.int, builtins.float]" - case: flatten_context_ioresult disable_cache: false main: | from returns.converters import flatten from returns.context import RequiresContextIOResult x: RequiresContextIOResult[RequiresContextIOResult[str, int, float], int, float] reveal_type(flatten(x)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.str, builtins.int, builtins.float]" - case: flatten_context_future_result disable_cache: false main: | from returns.converters import flatten from returns.context import ReaderFutureResult x: ReaderFutureResult[ReaderFutureResult[int, bool, str], bool, str] reveal_type(flatten(x)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.bool, builtins.str]" - case: flatten_future_result disable_cache: false main: | from returns.converters import flatten from returns.future import FutureResult x: FutureResult[FutureResult[int, str], str] reveal_type(flatten(x)) # N: Revealed type is "returns.future.FutureResult[builtins.int, builtins.str]" returns-0.24.0/typesafety/test_converters/test_maybe_to_result.yml000066400000000000000000000010611472312074000257020ustar00rootroot00000000000000- case: maybe_to_result disable_cache: false main: | from returns.converters import maybe_to_result from returns.maybe import Maybe reveal_type(maybe_to_result(Maybe.from_value(1))) # N: Revealed type is "returns.result.Result[builtins.int, None]" - case: maybe_to_result_default_error disable_cache: false main: | from returns.converters import maybe_to_result from returns.maybe import Maybe reveal_type(maybe_to_result(Maybe.from_value(1), 'a')) # N: Revealed type is "returns.result.Result[builtins.int, builtins.str]" returns-0.24.0/typesafety/test_converters/test_result_to_maybe.yml000066400000000000000000000004741472312074000257110ustar00rootroot00000000000000- case: result_to_maybe disable_cache: false main: | from returns.converters import result_to_maybe from returns.result import Result def returns_result() -> Result[int, str]: ... 
reveal_type(result_to_maybe(returns_result())) # N: Revealed type is "returns.maybe.Maybe[builtins.int]" returns-0.24.0/typesafety/test_curry/000077500000000000000000000000001472312074000176775ustar00rootroot00000000000000returns-0.24.0/typesafety/test_curry/test_curry/000077500000000000000000000000001472312074000221025ustar00rootroot00000000000000returns-0.24.0/typesafety/test_curry/test_curry/test_curry.yml000066400000000000000000000113131472312074000250270ustar00rootroot00000000000000- case: curry_zero_args disable_cache: false main: | from returns.curry import curry @curry def zero() -> str: ... reveal_type(zero) # N: Revealed type is "def () -> builtins.str" reveal_type(zero()) # N: Revealed type is "builtins.str" - case: curry_single_arg disable_cache: false main: | from returns.curry import curry @curry def zero(arg: int) -> str: ... reveal_type(zero) # N: Revealed type is "def (arg: builtins.int) -> builtins.str" reveal_type(zero(1)) # N: Revealed type is "builtins.str" - case: curry_two_args1 disable_cache: false main: | from returns.curry import curry @curry def zero(arg: int, other: float) -> str: ... reveal_type(zero) # N: Revealed type is "Overload(def (arg: builtins.int) -> def (other: builtins.float) -> builtins.str, def (arg: builtins.int, other: builtins.float) -> builtins.str)" reveal_type(zero(1)) # N: Revealed type is "def (other: builtins.float) -> builtins.str" reveal_type(zero(1, 2.0)) # N: Revealed type is "builtins.str" reveal_type(zero(1)(2.0)) # N: Revealed type is "builtins.str" - case: curry_two_args2 disable_cache: false main: | from returns.curry import curry def zero(arg: int, other: float) -> str: ... reveal_type(curry(zero)) # N: Revealed type is "Overload(def (arg: builtins.int) -> def (other: builtins.float) -> builtins.str, def (arg: builtins.int, other: builtins.float) -> builtins.str)" reveal_type(curry(zero)(1)) # N: Revealed type is "def (other: builtins.float) -> builtins.str" reveal_type(curry(zero)(1, 2.0)) # N: Revealed type is "builtins.str" reveal_type(curry(zero)(1)(2.0)) # N: Revealed type is "builtins.str" - case: curry_two_args3 disable_cache: false main: | from returns.curry import curry @curry def zero(arg: int, other: float) -> str: ... reveal_type(zero('a')) reveal_type(zero(1.0, 1)) reveal_type(zero(1, 2.0, 2.0)) out: | main:7: error: No overload variant of "zero" matches argument type "str" [call-overload] main:7: note: Possible overload variants: main:7: note: def zero(arg: int) -> Callable[..., str] main:7: note: def zero(arg: int, other: float) -> str main:7: note: Revealed type is "Any" main:8: error: No overload variant of "zero" matches argument types "float", "int" [call-overload] main:8: note: Possible overload variants: main:8: note: def zero(arg: int) -> Callable[..., str] main:8: note: def zero(arg: int, other: float) -> str main:8: note: Revealed type is "Any" main:9: error: No overload variant of "zero" matches argument types "int", "float", "float" [call-overload] main:9: note: Possible overload variants: main:9: note: def zero(arg: int) -> Callable[..., str] main:9: note: def zero(arg: int, other: float) -> str main:9: note: Revealed type is "Any" - case: curry_two_args_one_default disable_cache: false main: | from returns.curry import curry @curry def zero(arg: int, other: float = 1.0) -> str: ... 
reveal_type(zero) # N: Revealed type is "Overload(def (arg: builtins.int) -> def (other: builtins.float =) -> builtins.str, def (arg: builtins.int, other: builtins.float =) -> builtins.str)" reveal_type(zero(1)) # N: Revealed type is "def (other: builtins.float =) -> builtins.str" reveal_type(zero(1, 2.0)) # N: Revealed type is "builtins.str" reveal_type(zero(1)(2.0)) # N: Revealed type is "builtins.str" - case: curry_three_args disable_cache: false main: | from returns.curry import curry @curry def zero(arg: int, other: float, *, kw: bool) -> str: ... reveal_type(zero) # N: Revealed type is "Overload(def (arg: builtins.int) -> Overload(def (other: builtins.float, *, kw: builtins.bool) -> builtins.str, def (other: builtins.float) -> def (*, kw: builtins.bool) -> builtins.str), def (arg: builtins.int, other: builtins.float) -> def (*, kw: builtins.bool) -> builtins.str, def (arg: builtins.int, other: builtins.float, *, kw: builtins.bool) -> builtins.str)" reveal_type(zero(1)) # N: Revealed type is "Overload(def (other: builtins.float, *, kw: builtins.bool) -> builtins.str, def (other: builtins.float) -> def (*, kw: builtins.bool) -> builtins.str)" reveal_type(zero(1, 2.0)) # N: Revealed type is "def (*, kw: builtins.bool) -> builtins.str" reveal_type(zero(1)(2.0)) # N: Revealed type is "def (*, kw: builtins.bool) -> builtins.str" reveal_type(zero(1, 2.0)(kw=True)) # N: Revealed type is "builtins.str" reveal_type(zero(1)(2.0)(kw=True)) # N: Revealed type is "builtins.str" reveal_type(zero(1, 2.0, kw=True)) # N: Revealed type is "builtins.str" returns-0.24.0/typesafety/test_curry/test_curry/test_curry_args_kwargs.yml000066400000000000000000000011321472312074000274170ustar00rootroot00000000000000- case: curry_args disable_cache: false main: | from returns.curry import curry @curry def zero(*args) -> str: ... reveal_type(zero) # N: Revealed type is "Any" - case: curry_kwargs disable_cache: false main: | from returns.curry import curry @curry def zero(**kwargs) -> str: ... reveal_type(zero) # N: Revealed type is "Any" - case: curry_args_kwargs disable_cache: false main: | from returns.curry import curry @curry def zero(*args, **kwargs) -> str: ... reveal_type(zero) # N: Revealed type is "Any" returns-0.24.0/typesafety/test_curry/test_curry/test_curry_arguments.yml000066400000000000000000000234041472312074000271200ustar00rootroot00000000000000- case: curry_pos_only_args disable_cache: false main: | from returns.curry import curry @curry def multiple( a: int, b: int, c: int, /, d: int, ) -> str: ... reveal_type(multiple) # N: Revealed type is "Overload(def (builtins.int) -> Overload(def (builtins.int, builtins.int, d: builtins.int) -> builtins.str, def (builtins.int, builtins.int) -> def (d: builtins.int) -> builtins.str, def (builtins.int) -> Overload(def (builtins.int, d: builtins.int) -> builtins.str, def (builtins.int) -> def (d: builtins.int) -> builtins.str)), def (builtins.int, builtins.int) -> Overload(def (builtins.int, d: builtins.int) -> builtins.str, def (builtins.int) -> def (d: builtins.int) -> builtins.str), def (builtins.int, builtins.int, builtins.int) -> def (d: builtins.int) -> builtins.str, def (builtins.int, builtins.int, builtins.int, d: builtins.int) -> builtins.str)" - case: curry_nested_overload1 disable_cache: false main: | from typing import Generic, TypeVar from returns.curry import curry ValueType = TypeVar('ValueType') class MyClass(Generic[ValueType]): inner_value: ValueType def __init__(self, inner_value: ValueType) -> None: ... 
@curry def test(a: int, b: int) -> float: ... reveal_type(MyClass(test)) # N: Revealed type is "main.MyClass[Overload(def (a: builtins.int) -> def (b: builtins.int) -> builtins.float, def (a: builtins.int, b: builtins.int) -> builtins.float)]" - case: curry_nested_overload2 disable_cache: false main: | from typing import Generic, TypeVar from returns.curry import curry ValueType = TypeVar('ValueType') class MyClass(Generic[ValueType]): inner_value: ValueType def __init__(self, inner_value: ValueType) -> None: ... @curry def test(a: int, b: int, c: str) -> int: ... reveal_type(MyClass(test)) # N: Revealed type is "main.MyClass[Overload(def (a: builtins.int) -> Overload(def (b: builtins.int, c: builtins.str) -> builtins.int, def (b: builtins.int) -> def (c: builtins.str) -> builtins.int), def (a: builtins.int, b: builtins.int) -> def (c: builtins.str) -> builtins.int, def (a: builtins.int, b: builtins.int, c: builtins.str) -> builtins.int)]" # TODO: remove skip after this bug in `mypy` is fixed: # https://github.com/python/mypy/issues/8801 - case: curry_init_magic_method disable_cache: false skip: true main: | from returns.curry import curry class Test(object): @curry def __init__(self, arg: int, other: str) -> None: ... reveal_type(Test) # N: Revealed type is "Overload(def (arg: builtins.int) -> def (other: builtins.str) -> ex.Test, def (arg: builtins.int, other: builtins.str) -> ex.Test)" - case: curry_call_magic_method disable_cache: false main: | from returns.curry import curry class Test(object): @curry def __call__(self, arg: int, other: float, last: str) -> str: ... reveal_type(Test()(1)) # N: Revealed type is "Overload(def (other: builtins.float, last: builtins.str) -> builtins.str, def (other: builtins.float) -> def (last: builtins.str) -> builtins.str)" - case: curry_classmethod1 disable_cache: false main: | from returns.curry import curry class Test(object): @curry @classmethod def some(cls, arg: int, other: float, last: str) -> str: ... reveal_type(Test.some) # N: Revealed type is "Overload(def () -> Overload(def (arg: builtins.int, other: builtins.float, last: builtins.str) -> builtins.str, def (arg: builtins.int, other: builtins.float) -> def (last: builtins.str) -> builtins.str, def (arg: builtins.int) -> Overload(def (other: builtins.float, last: builtins.str) -> builtins.str, def (other: builtins.float) -> def (last: builtins.str) -> builtins.str)), def (arg: builtins.int) -> Overload(def (other: builtins.float, last: builtins.str) -> builtins.str, def (other: builtins.float) -> def (last: builtins.str) -> builtins.str), def (arg: builtins.int, other: builtins.float) -> def (last: builtins.str) -> builtins.str, def (arg: builtins.int, other: builtins.float, last: builtins.str) -> builtins.str)" reveal_type(Test.some(1)) # N: Revealed type is "Overload(def (other: builtins.float, last: builtins.str) -> builtins.str, def (other: builtins.float) -> def (last: builtins.str) -> builtins.str)" reveal_type(Test.some(1, 2.0, 'a')) # N: Revealed type is "builtins.str" - case: curry_classmethod2 disable_cache: false main: | from returns.curry import curry from typing import Callable class Test(object): @curry @classmethod def some(cls, arg: int, other: str) -> str: ... 
def test(c: Callable[[int, str], str]) -> str: return c(1, 'a') reveal_type(test(Test.some)) # N: Revealed type is "builtins.str" - case: curry_classmethod3 disable_cache: false main: | from returns.curry import curry from typing import Callable class Test(object): @curry @classmethod def some(cls, first:str, arg: int, other: str) -> str: ... def test(c: Callable[[int, str], str]) -> str: return c(1, 'a') reveal_type(test(Test.some('a'))) # N: Revealed type is "builtins.str" - case: curry_staticmethod disable_cache: false main: | from returns.curry import curry class Test(object): @curry @staticmethod def some(arg: int, other: float, last: str) -> str: ... reveal_type(Test.some) # N: Revealed type is "Overload(def (arg: builtins.int) -> Overload(def (other: builtins.float, last: builtins.str) -> builtins.str, def (other: builtins.float) -> def (last: builtins.str) -> builtins.str), def (arg: builtins.int, other: builtins.float) -> def (last: builtins.str) -> builtins.str, def (arg: builtins.int, other: builtins.float, last: builtins.str) -> builtins.str)" - case: curry_regular_method disable_cache: false main: | from returns.curry import curry class Test(object): @curry def some(self, arg: int, other: float, last: str) -> str: ... reveal_type(Test.some) # N: Revealed type is "Overload(def (self: main.Test) -> Overload(def (arg: builtins.int, other: builtins.float, last: builtins.str) -> builtins.str, def (arg: builtins.int, other: builtins.float) -> def (last: builtins.str) -> builtins.str, def (arg: builtins.int) -> Overload(def (other: builtins.float, last: builtins.str) -> builtins.str, def (other: builtins.float) -> def (last: builtins.str) -> builtins.str)), def (self: main.Test, arg: builtins.int) -> Overload(def (other: builtins.float, last: builtins.str) -> builtins.str, def (other: builtins.float) -> def (last: builtins.str) -> builtins.str), def (self: main.Test, arg: builtins.int, other: builtins.float) -> def (last: builtins.str) -> builtins.str, def (self: main.Test, arg: builtins.int, other: builtins.float, last: builtins.str) -> builtins.str)" reveal_type(Test.some(Test(), 1)) # N: Revealed type is "Overload(def (other: builtins.float, last: builtins.str) -> builtins.str, def (other: builtins.float) -> def (last: builtins.str) -> builtins.str)" reveal_type(Test().some) # N: Revealed type is "Overload(def () -> Overload(def (arg: builtins.int, other: builtins.float, last: builtins.str) -> builtins.str, def (arg: builtins.int, other: builtins.float) -> def (last: builtins.str) -> builtins.str, def (arg: builtins.int) -> Overload(def (other: builtins.float, last: builtins.str) -> builtins.str, def (other: builtins.float) -> def (last: builtins.str) -> builtins.str)), def (arg: builtins.int) -> Overload(def (other: builtins.float, last: builtins.str) -> builtins.str, def (other: builtins.float) -> def (last: builtins.str) -> builtins.str), def (arg: builtins.int, other: builtins.float) -> def (last: builtins.str) -> builtins.str, def (arg: builtins.int, other: builtins.float, last: builtins.str) -> builtins.str)" reveal_type(Test().some(1)) # N: Revealed type is "Overload(def (other: builtins.float, last: builtins.str) -> builtins.str, def (other: builtins.float) -> def (last: builtins.str) -> builtins.str)" - case: curry_match_callable_protocol1 disable_cache: false main: | from returns.curry import curry from typing import Callable class Test(object): @curry def some(self, a: int, arg: int, other: str) -> str: ... 
def test(c: Callable[[int, str], str]) -> str: return c(1, 'a') reveal_type(test(Test().some(1))) # N: Revealed type is "builtins.str" - case: curry_match_callable_protocol2 disable_cache: false main: | from returns.curry import curry from typing import Callable class Test(object): @curry def some(self, arg: int, other: str) -> str: ... def test(c: Callable[[int, str], str]) -> str: return c(1, 'a') reveal_type(test(Test().some)) # N: Revealed type is "builtins.str" - case: curry_match_callable_protocol3 disable_cache: false main: | from returns.curry import curry from typing import Callable class Test(object): @curry def some(self, arg: int, other: float) -> str: ... def test(c: Callable[[int], Callable[[float], str]]) -> str: return c(1)(5.0) reveal_type(test(Test().some)) # N: Revealed type is "builtins.str" - case: curry_match_callable_protocol4 disable_cache: false main: | from returns.curry import curry from typing import Callable class Test(object): @curry @classmethod def some(cls, arg: int, other: float) -> str: ... def test(c: Callable[[int], Callable[[float], str]]) -> str: return c(1)(5.0) reveal_type(test(Test.some)) # N: Revealed type is "builtins.str" returns-0.24.0/typesafety/test_curry/test_curry/test_curry_generics.yml000066400000000000000000000046171472312074000267170ustar00rootroot00000000000000- case: curry_single_generic_arg disable_cache: false main: | from returns.curry import curry from typing import List, TypeVar T = TypeVar('T') @curry def zero(arg: List[T]) -> T: ... x: List[int] reveal_type(zero) # N: Revealed type is "def [T] (arg: builtins.list[T`-1]) -> T`-1" reveal_type(zero(x)) # N: Revealed type is "builtins.int" - case: curry_two_generic_args1 disable_cache: false main: | from returns.curry import curry from typing import List, TypeVar T = TypeVar('T') @curry def zero(arg: List[T], other: int) -> T: ... x: List[int] reveal_type(zero) # N: Revealed type is "Overload(def [T] (arg: builtins.list[T`-1]) -> def (other: builtins.int) -> T`-1, def [T] (arg: builtins.list[T`-1], other: builtins.int) -> T`-1)" reveal_type(zero(x)) # N: Revealed type is "def (other: builtins.int) -> builtins.int" reveal_type(zero(x)(1)) # N: Revealed type is "builtins.int" reveal_type(zero(x, 1)) # N: Revealed type is "builtins.int" - case: curry_two_generic_args2 disable_cache: false main: | from returns.curry import curry from typing import List, TypeVar T = TypeVar('T') @curry def zero(arg: int, other: List[T]) -> T: ... x: List[int] reveal_type(zero) # N: Revealed type is "Overload(def (arg: builtins.int) -> def [T] (other: builtins.list[T`-1]) -> T`-1, def [T] (arg: builtins.int, other: builtins.list[T`-1]) -> T`-1)" reveal_type(zero(1)) # N: Revealed type is "def [T] (other: builtins.list[T`2]) -> T`2" reveal_type(zero(1)(x)) # N: Revealed type is "builtins.int" reveal_type(zero(1, x)) # N: Revealed type is "builtins.int" # TODO: enable and fix our plugin - case: curry_two_generic_args3 disable_cache: false skip: True main: | from returns.curry import curry from typing import List, TypeVar T = TypeVar('T') @curry def zero(arg: T, other: List[T]) -> T: ... 
x: List[int] reveal_type(zero) # N: Revealed type is "Overload(def [T] (arg: T`-1) -> def [T] (other: builtins.list[T`-1]) -> T`-1, def [T] (arg: T`-1, other: builtins.list[T`-1]) -> T`-1)" reveal_type(zero(1)) # N: Revealed type is "def [T] (other: builtins.list[builtins.int]) -> builtins.int" reveal_type(zero(1)(x)) # N: Revealed type is "builtins.int" reveal_type(zero(1, x)) # N: Revealed type is "builtins.int" returns-0.24.0/typesafety/test_curry/test_partial/000077500000000000000000000000001472312074000223725ustar00rootroot00000000000000returns-0.24.0/typesafety/test_curry/test_partial/test_partial.yml000066400000000000000000000072731472312074000256210ustar00rootroot00000000000000- case: partial_zero_args disable_cache: false main: | from returns.curry import partial def two_args(first: int, second: float) -> str: ... reveal_type(partial(two_args)) # N: Revealed type is "def (first: builtins.int, second: builtins.float) -> builtins.str" - case: partial_single_arg disable_cache: false main: | from returns.curry import partial def two_args(first: int, second: float) -> str: ... reveal_type(partial(two_args, 1)) # N: Revealed type is "def (second: builtins.float) -> builtins.str" - case: partial_all_args disable_cache: false main: | from returns.curry import partial def two_args(first: int, second: float) -> str: ... reveal_type(partial(two_args, 1, second=0.5)) # N: Revealed type is "def () -> builtins.str" - case: partial_single_named_arg disable_cache: false main: | from returns.curry import partial def two_args(first: int, second: float) -> str: ... reveal_type(partial(two_args, second=1.0)) # N: Revealed type is "def (first: builtins.int) -> builtins.str" - case: partial_multiple_args disable_cache: false main: | from returns.curry import partial def multiple( first: int, second: float, third: str, flag1: bool, flag2: bool, flag3: type, ) -> str: ... reveal_type(partial(multiple, 1, 0.4, flag3=int, flag2=True)) # N: Revealed type is "def (third: builtins.str, flag1: builtins.bool) -> builtins.str" - case: partial_not_callable_type disable_cache: false main: | from returns.curry import partial curried_int = partial(int, 10) reveal_type(curried_int) # N: Revealed type is "def () -> builtins.int" - case: partial_explicit_noreturn disable_cache: false main: | from returns.curry import partial from typing import NoReturn def exit(x: int) -> NoReturn: ... reveal_type(partial(exit, 1)) # N: Revealed type is "def () -> Never" - case: partial_wrong_argument_types disable_cache: false main: | from returns.curry import partial from typing_extensions import Literal def multiple( first: int, second: float, flag: Literal[True], ) -> str: ... partial(multiple, 'a', flag=False) out: | main:11: error: Argument 1 to "multiple" has incompatible type "str"; expected "int" [arg-type] main:11: error: Argument 2 to "multiple" has incompatible type "bool"; expected "Literal[True]" [arg-type] - case: partial_too_many_positional_args disable_cache: false main: | from returns.curry import partial from typing_extensions import Literal def multiple( first: int, second: float, ) -> str: ... partial(multiple, 1, 2.0, 3) out: | main:10: error: Too many arguments for "multiple" [call-arg] - case: partial_invalid_named_arg disable_cache: false main: | from returns.curry import partial from typing_extensions import Literal def multiple( first: int, second: float, ) -> str: ... 
partial(multiple, missing=1) out: | main:4: note: "multiple" defined here main:10: error: Unexpected keyword argument "missing" for "multiple" [call-arg] - case: partial_regression618 disable_cache: false main: | from typing import Callable, TypeVar from returns.curry import partial _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') def test( default: _SecondType, function: Callable[[_SecondType, _FirstType], _SecondType], ): reveal_type(partial(function, default)) # N: Revealed type is "def (_FirstType`-2) -> _SecondType`-1" returns-0.24.0/typesafety/test_curry/test_partial/test_partial_arguments.yml000066400000000000000000000154111472312074000276770ustar00rootroot00000000000000- case: partial_complex_args disable_cache: false main: | from returns.curry import partial def multiple( a: int, b: int, c: int = 0, *args: float, d: str, e: bool = True, **kwargs: str, ) -> str: ... reveal_type(partial(multiple)) reveal_type(partial(multiple, 1)) reveal_type(partial(multiple, 1, 2)) reveal_type(partial(multiple, 1, 2, c=3)) reveal_type(partial(multiple, 1, 2, c=3, d='a')) reveal_type(partial(multiple, 1, 2, c=3, e=False)) reveal_type(partial(multiple, 1, 2, d='a')) reveal_type(partial(multiple, 1, 2, e=False)) reveal_type(partial(multiple, 1, 2, 3, 4.0, 5.0)) reveal_type(partial(multiple, 1, 2, 3, m='m', q='q', long='long')) out: | main:14: note: Revealed type is "def (a: builtins.int, b: builtins.int, c: builtins.int =, *args: builtins.float, d: builtins.str, e: builtins.bool =, **kwargs: builtins.str) -> builtins.str" main:15: note: Revealed type is "def (b: builtins.int, c: builtins.int =, *args: builtins.float, d: builtins.str, e: builtins.bool =, **kwargs: builtins.str) -> builtins.str" main:16: note: Revealed type is "def (c: builtins.int =, *args: builtins.float, d: builtins.str, e: builtins.bool =, **kwargs: builtins.str) -> builtins.str" main:17: note: Revealed type is "def (*args: builtins.float, d: builtins.str, e: builtins.bool =, **kwargs: builtins.str) -> builtins.str" main:18: note: Revealed type is "def (*args: builtins.float, e: builtins.bool =, **kwargs: builtins.str) -> builtins.str" main:19: note: Revealed type is "def (*args: builtins.float, d: builtins.str, **kwargs: builtins.str) -> builtins.str" main:20: note: Revealed type is "def (c: builtins.int =, *args: builtins.float, e: builtins.bool =, **kwargs: builtins.str) -> builtins.str" main:21: note: Revealed type is "def (c: builtins.int =, *args: builtins.float, d: builtins.str, **kwargs: builtins.str) -> builtins.str" main:22: note: Revealed type is "def (*args: builtins.float, d: builtins.str, e: builtins.bool =, **kwargs: builtins.str) -> builtins.str" main:23: note: Revealed type is "def (*args: builtins.float, d: builtins.str, e: builtins.bool =, **kwargs: builtins.str) -> builtins.str" - case: partial_args_kwargs disable_cache: false main: | from returns.curry import partial def multiple( *args: int, **kwargs: str, ) -> str: ... reveal_type(partial(multiple, 1, 2, 3, x='x', y='y')(4, 5, z='z')) # N: Revealed type is "builtins.str" - case: partial_pos_only_args disable_cache: false main: | from returns.curry import partial def multiple( a: int, b: int, c: int, /, d: int, ) -> str: ... 
reveal_type(partial(multiple, 1)) # N: Revealed type is "def (builtins.int, builtins.int, d: builtins.int) -> builtins.str" reveal_type(partial(multiple, 1, 2)) # N: Revealed type is "def (builtins.int, d: builtins.int) -> builtins.str" reveal_type(partial(multiple, 1, 2, 3)) # N: Revealed type is "def (d: builtins.int) -> builtins.str" reveal_type(partial(multiple, 1, 2, d=4)) # N: Revealed type is "def (builtins.int) -> builtins.str" reveal_type(partial(multiple, 1, 2, 3, d=4)) # N: Revealed type is "def () -> builtins.str" - case: partial_object disable_cache: false main: | from returns.curry import partial class Inst(object): def __init__(self, arg: int) -> None: ... def __call__(self, other: int) -> int: ... reveal_type(partial(Inst)) reveal_type(partial(Inst, 1)) reveal_type(partial(Inst(1))) reveal_type(partial(Inst(1), 1)) out: | main:10: note: Revealed type is "def (arg: builtins.int) -> main.Inst" main:11: note: Revealed type is "def () -> main.Inst" main:12: note: Revealed type is "main.Inst" main:13: note: Revealed type is "def () -> builtins.int" - case: partial_classmethod disable_cache: false main: | from returns.curry import partial class Test(object): @classmethod def some(cls, arg: int, other: str) -> float: ... reveal_type(partial(Test.some, 1)) # N: Revealed type is "def (other: builtins.str) -> builtins.float" - case: partial_staticmethod disable_cache: false main: | from returns.curry import partial class Test(object): @staticmethod def some(arg: int, other: str) -> float: ... reveal_type(partial(Test.some, 1)) # N: Revealed type is "def (other: builtins.str) -> builtins.float" - case: partial_union disable_cache: false main: | from typing import Union from returns.curry import partial class Inst(object): def __call__(self, arg: int) -> None: ... class Other(object): def __call__(self, arg: int, other: str) -> None: ... x: Union[Inst, Other] reveal_type(partial(x)) # This does not work as well: reveal_type(partial(x, 1)) out: | main:13: note: Revealed type is "Union[main.Inst, main.Other]" main:15: note: Revealed type is "def (*Any, **Any)" - case: partial_type_var disable_cache: false main: | from typing import Callable, TypeVar from returns.curry import partial C = TypeVar('C', bound=Callable) def first(arg: int) -> int: return arg def test(func: C) -> C: # One can say, that this case is not supported, # but I don't know how to work with it reveal_type(partial(func, 1)) # N: Revealed type is "def (*Any, **Any) -> Any" return func test(first) - case: partial_type_type disable_cache: false main: | from typing import Type, TypeVar from returns.curry import partial class Inst(object): def __init__(self, arg: int) -> None: self.arg = arg I = TypeVar('I', bound=Inst) def receives_type(a: int, t: Type[I]) -> I: x = partial(t, a) reveal_type(x) # N: Revealed type is "def () -> I`-1" reveal_type(x().arg) # N: Revealed type is "builtins.int" return t(1) - case: partial_star_arg disable_cache: false main: | from returns.curry import partial def multiple(a: int, b: int) -> int: ... reveal_type(partial(multiple, *(1, 2))) # N: Revealed type is "def (*Any, **Any) -> builtins.int" - case: partial_star2_arg disable_cache: false main: | from returns.curry import partial def multiple(a: int, b: int) -> int: ... 
reveal_type(partial(multiple, **{'a': 1, 'b': 2})) # N: Revealed type is "def (*Any, **Any) -> builtins.int" - case: partial_lambda disable_cache: false main: | from returns.curry import partial reveal_type(partial((lambda x, y: str(x + y)), 1)) # N: Revealed type is "def (y: Any) -> builtins.str" returns-0.24.0/typesafety/test_curry/test_partial/test_partial_generic.yml000066400000000000000000000100441472312074000273030ustar00rootroot00000000000000- case: partial_wrong_generic disable_cache: false main: | from returns.curry import partial from typing import List, TypeVar T = TypeVar('T') def multiple( a: List[T], b: List[T], ) -> T: ... x: List[int] y: List[str] reveal_type(partial(multiple, x)(y)) out: | main:15: note: Revealed type is "builtins.int" main:15: error: Argument 1 to "multiple" has incompatible type "List[str]"; expected "List[int]" [arg-type] - case: partial_correct_generic disable_cache: false main: | from returns.curry import partial from typing import List, TypeVar T = TypeVar('T') def multiple( a: List[T], b: List[T], ) -> T: ... x: List[int] y: List[int] reveal_type(partial(multiple, x)(y)) out: | main:15: note: Revealed type is "builtins.int" - case: partial_single_generic disable_cache: false main: | from returns.curry import partial from typing import List, TypeVar T = TypeVar('T') def multiple( a: int, b: List[T], c: bool = False, ) -> T: ... x: List[int] reveal_type(partial(multiple)) reveal_type(partial(multiple, 1)) reveal_type(partial(multiple, 1)(x, True)) reveal_type(partial(multiple, 1)(x)) reveal_type(partial(multiple, 1)(x, c=True)) reveal_type(partial(multiple, 2, x)) reveal_type(partial(multiple, 2, x, True)) reveal_type(partial(multiple, 2, x)()) out: | main:15: note: Revealed type is "def [T] (a: builtins.int, b: builtins.list[T`-1], c: builtins.bool =) -> T`-1" main:16: note: Revealed type is "def [T] (b: builtins.list[T`-1], c: builtins.bool =) -> T`-1" main:17: note: Revealed type is "builtins.int" main:18: note: Revealed type is "builtins.int" main:19: note: Revealed type is "builtins.int" main:20: note: Revealed type is "def (c: builtins.bool =) -> builtins.int" main:21: note: Revealed type is "def () -> builtins.int" main:22: note: Revealed type is "builtins.int" # Python3.8+ sorts generic arguments differently: - case: partial_double_generic_complex38 disable_cache: false main: | from returns.curry import partial from typing import List, TypeVar, Union A = TypeVar('A') B = TypeVar('B') def multiple( a: int, *, b: List[B], c: List[A], ) -> Union[A, B]: ... 
x: List[int] y: List[str] reveal_type(partial(multiple)) reveal_type(partial(multiple, 1)) reveal_type(partial(multiple, 1, b=x)) reveal_type(partial(multiple, 1, c=x)) reveal_type(partial(multiple, 1, b=y)) reveal_type(partial(multiple, 1, c=y)) out: | main:18: note: Revealed type is "def [B, A] (a: builtins.int, *, b: builtins.list[B`-1], c: builtins.list[A`-2]) -> Union[A`-2, B`-1]" main:19: note: Revealed type is "def [B, A] (*, b: builtins.list[B`-1], c: builtins.list[A`-2]) -> Union[A`-2, B`-1]" main:20: note: Revealed type is "def [A] (*, c: builtins.list[A`-2]) -> Union[A`-2, builtins.int]" main:21: note: Revealed type is "def [B] (*, b: builtins.list[B`-1]) -> Union[builtins.int, B`-1]" main:22: note: Revealed type is "def [A] (*, c: builtins.list[A`-2]) -> Union[A`-2, builtins.str]" main:23: note: Revealed type is "def [B] (*, b: builtins.list[B`-1]) -> Union[builtins.str, B`-1]" - case: partial_double_generic disable_cache: false main: | from returns.curry import partial from typing import List, TypeVar, Union A = TypeVar('A') B = TypeVar('B') def multiple( a: int, *, b: List[B], c: List[A], ) -> Union[A, B]: ... x: List[int] y: List[str] reveal_type(partial(multiple, 1, c=y, b=x)) reveal_type(partial(multiple, 1, b=x)(c=y)) reveal_type(partial(multiple, 1, c=x)(b=y)) out: | main:17: note: Revealed type is "def () -> Union[builtins.str, builtins.int]" main:19: note: Revealed type is "Union[builtins.str, builtins.int]" main:20: note: Revealed type is "Union[builtins.int, builtins.str]" returns-0.24.0/typesafety/test_curry/test_partial/test_partial_overload.yml000066400000000000000000000127421472312074000275110ustar00rootroot00000000000000- case: partial_wrong_overload1 disable_cache: false main: | from typing import overload from returns.curry import partial @overload def two_args(a: int, b: int) -> int: ... @overload def two_args(a: str, b: str) -> str: ... def two_args(a, b): ... reveal_type(partial(two_args, None)) out: | main:15: error: No overload variant of "two_args" matches argument type "None" [call-overload] main:15: note: Possible overload variants: main:15: note: def two_args(a, a: int) -> int main:15: note: def two_args(a, a: str) -> str main:15: note: Revealed type is "def (*Any, **Any) -> builtins.int" - case: partial_wrong_overload2 disable_cache: false main: | from typing import overload from returns.curry import partial @overload def two_args(a: int, b: int) -> int: ... @overload def two_args(a: int, b: str) -> str: ... def two_args(a, b): ... reveal_type(partial(two_args, 1)(None)) out: | main:15: error: No overload variant of "two_args" matches argument type "None" [call-overload] main:15: note: Possible overload variants: main:15: note: def two_args(a, b: int) -> int main:15: note: def two_args(a, b: str) -> str main:15: note: Revealed type is "Any" - case: partial_regular_overload disable_cache: false main: | from typing import overload from returns.curry import partial @overload def two_args(a: int, b: int) -> int: ... @overload def two_args(a: int, b: str) -> str: ... @overload def two_args(a: str, b: str) -> str: ... def two_args(a, b): ... 
reveal_type(partial(two_args)) reveal_type(partial(two_args, 1)) reveal_type(partial(two_args, 1, 1)) reveal_type(partial(two_args, 1, 'a')) reveal_type(partial(two_args, 'a')) out: | main:19: note: Revealed type is "Overload(def (a: builtins.int, b: builtins.int) -> builtins.int, def (a: builtins.int, b: builtins.str) -> builtins.str, def (a: builtins.str, b: builtins.str) -> builtins.str)" main:20: note: Revealed type is "Overload(def (b: builtins.int) -> builtins.int, def (b: builtins.str) -> builtins.str)" main:21: note: Revealed type is "def () -> builtins.int" main:22: note: Revealed type is "def () -> builtins.str" main:23: note: Revealed type is "def (b: builtins.str) -> builtins.str" - case: partial_generic_overload_kind1 disable_cache: false main: | from typing import overload, TypeVar, List, Set from returns.curry import partial T = TypeVar('T') @overload def two_args(a: int, b: List[T]) -> T: ... @overload def two_args(a: int, b: Set[T]) -> T: ... @overload def two_args(a: List[T], b: Set[T]) -> T: ... def two_args(a, b): ... x: List[float] y: Set[float] reveal_type(partial(two_args)) reveal_type(partial(two_args, 1)) reveal_type(partial(two_args, 1, x)) reveal_type(partial(two_args, 1, y)) reveal_type(partial(two_args, x)) reveal_type(partial(two_args, x, y)) out: | main:24: note: Revealed type is "Overload(def [T] (a: builtins.int, b: builtins.list[T`-1]) -> T`-1, def [T] (a: builtins.int, b: builtins.set[T`-1]) -> T`-1, def [T] (a: builtins.list[T`-1], b: builtins.set[T`-1]) -> T`-1)" main:25: note: Revealed type is "Overload(def [T] (b: builtins.list[T`-1]) -> T`-1, def [T] (b: builtins.set[T`-1]) -> T`-1)" main:26: note: Revealed type is "def () -> builtins.float" main:27: note: Revealed type is "def () -> builtins.float" main:28: note: Revealed type is "def (b: builtins.set[builtins.float]) -> builtins.float" main:29: note: Revealed type is "def () -> builtins.float" - case: partial_generic_overload_kind2 disable_cache: false main: | from typing import overload, TypeVar, List, Union from returns.curry import partial A = TypeVar('A') B = TypeVar('B') @overload def two_args(a: int, b: List[A]) -> A: ... @overload def two_args(a: int, b: List[B]) -> B: ... @overload def two_args(a: List[A], b: List[B]) -> Union[A, B]: ... def two_args(a, b): ... 
a: List[float] b: List[str] reveal_type(partial(two_args)) reveal_type(partial(two_args, 1)) reveal_type(partial(two_args, 1, a)) reveal_type(partial(two_args, 1, b)) reveal_type(partial(two_args, a)) reveal_type(partial(two_args, b)) reveal_type(partial(two_args, a, b)) reveal_type(partial(two_args, b, a)) out: | main:25: note: Revealed type is "Overload(def [A] (a: builtins.int, b: builtins.list[A`-1]) -> A`-1, def [B] (a: builtins.int, b: builtins.list[B`-1]) -> B`-1, def [A, B] (a: builtins.list[A`-1], b: builtins.list[B`-2]) -> Union[A`-1, B`-2])" main:26: note: Revealed type is "Overload(def [A] (b: builtins.list[A`-1]) -> A`-1, def [B] (b: builtins.list[B`-1]) -> B`-1)" main:27: note: Revealed type is "Overload(def () -> builtins.float, def () -> builtins.float)" main:28: note: Revealed type is "Overload(def () -> builtins.str, def () -> builtins.str)" main:29: note: Revealed type is "def [B] (b: builtins.list[B`-2]) -> Union[builtins.float, B`-2]" main:30: note: Revealed type is "def [B] (b: builtins.list[B`-2]) -> Union[builtins.str, B`-2]" main:31: note: Revealed type is "def () -> Union[builtins.float, builtins.str]" main:32: note: Revealed type is "def () -> Union[builtins.str, builtins.float]" returns-0.24.0/typesafety/test_examples/000077500000000000000000000000001472312074000203515ustar00rootroot00000000000000returns-0.24.0/typesafety/test_examples/test_your_container/000077500000000000000000000000001472312074000244505ustar00rootroot00000000000000returns-0.24.0/typesafety/test_examples/test_your_container/test_pair4_def.yml000066400000000000000000000017571472312074000301010ustar00rootroot00000000000000- case: test_pair_type disable_cache: false env: # We only need this because we store this example in `tests/` # and not in our source code. Please, do not copy this line! - MYPYPATH=./tests/test_examples/test_your_container # TODO: remove this config after # mypy/typeshed/stdlib/unittest/mock.pyi:120: # error: Class cannot subclass "Any" (has type "Any") # is fixed. mypy_config: disallow_subclassing_any = False main: | # Let's import our `Pair` type we defined earlier: from test_pair4 import Pair reveal_type(Pair) def function(first: int, second: str) -> Pair[float, bool]: ... my_pair: Pair[int, str] = Pair.from_paired(1, 'a') reveal_type(my_pair.pair(function)) out: | main:4: note: Revealed type is "def [_FirstType, _SecondType] (inner_value: Tuple[_FirstType`1, _SecondType`2]) -> test_pair4.Pair[_FirstType`1, _SecondType`2]" main:10: note: Revealed type is "test_pair4.Pair[builtins.float, builtins.bool]" returns-0.24.0/typesafety/test_examples/test_your_container/test_pair4_error.yml000066400000000000000000000017461472312074000304720ustar00rootroot00000000000000- case: test_pair_error disable_cache: false env: # We only need this because we store this example in `tests/` # and not in our source code. Please, do not copy this line! - MYPYPATH=./tests/test_examples/test_your_container # TODO: remove this config after # mypy/typeshed/stdlib/unittest/mock.pyi:120: # error: Class cannot subclass "Any" (has type "Any") # is fixed. mypy_config: disallow_subclassing_any = False main: | # Let's import our `Pair` type we defined earlier: from test_pair4 import Pair # Oups! This function has first and second types swapped! def function(first: str, second: int) -> Pair[float, bool]: ... 
my_pair = Pair.from_paired(1, 'a') my_pair.pair(function) # this should and will error out: | main:9: error: Argument 1 to "pair" of "Pair" has incompatible type "Callable[[str, int], Pair[float, bool]]"; expected "Callable[[int, str], KindN[Pair[Any, Any], float, bool, Any]]" [arg-type] returns-0.24.0/typesafety/test_examples/test_your_container/test_pair4_reuse.yml000066400000000000000000000014751472312074000304630ustar00rootroot00000000000000- case: test_pair_map disable_cache: false env: # We only need this because we store this example in `tests/` # and not in our source code. Please, do not copy this line! - MYPYPATH=./tests/test_examples/test_your_container # TODO: remove this config after # mypy/typeshed/stdlib/unittest/mock.pyi:120: # error: Class cannot subclass "Any" (has type "Any") # is fixed. mypy_config: disallow_subclassing_any = False main: | from test_pair4 import Pair from returns.pointfree import map_ my_pair: Pair[int, int] = Pair.from_unpaired(1) reveal_type(my_pair.map(str)) reveal_type(map_(str)(my_pair)) out: | main:5: note: Revealed type is "test_pair4.Pair[builtins.str, builtins.int]" main:6: note: Revealed type is "test_pair4.Pair[builtins.str, builtins.int]" returns-0.24.0/typesafety/test_functions/000077500000000000000000000000001472312074000205435ustar00rootroot00000000000000returns-0.24.0/typesafety/test_functions/test_compose.yml000066400000000000000000000020671472312074000237770ustar00rootroot00000000000000- case: compose_two_correct_functions disable_cache: false main: | from returns.functions import compose def first(num: int) -> float: return float(num) def second(num: float) -> str: return str(num) reveal_type(compose(first, second)) # N: Revealed type is "def (builtins.int) -> builtins.str" - case: compose_two_wrong_functions main: | from returns.functions import compose def first(num: int) -> float: return float(num) def second(num: str) -> str: return str(num) reveal_type(compose(first, second)) out: | main:9: error: Cannot infer type argument 2 of "compose" [misc] main:9: note: Revealed type is "def (Any) -> Any" - case: compose_optional_functions mypy_config: no_implicit_optional = True main: | from returns.functions import compose def first(num: int = 1) -> float: return float(num) def second(num: float) -> str: return str(num) reveal_type(compose(first, second)) # N: Revealed type is "def (builtins.int) -> builtins.str" returns-0.24.0/typesafety/test_functions/test_identity.yml000066400000000000000000000002521472312074000241550ustar00rootroot00000000000000- case: identity_function disable_cache: false main: | from returns.functions import identity reveal_type(identity(1)) # N: Revealed type is "builtins.int" returns-0.24.0/typesafety/test_functions/test_not_.yml000066400000000000000000000011761472312074000232710ustar00rootroot00000000000000- case: function_with_one_argument disable_cache: false main: | from returns.functions import not_ def is_even(number: int) -> bool: return number % 2 == 0 reveal_type(not_(is_even)) # N: Revealed type is "def (number: builtins.int) -> builtins.bool" - case: function_with_two_arguments main: | from returns.functions import not_ from typing import List def number_is_in_list(number: int, list_: List[int]) -> bool: return number in list_ reveal_type(not_(number_is_in_list)) # N: Revealed type is "def (number: builtins.int, list_: builtins.list[builtins.int]) -> builtins.bool" 
returns-0.24.0/typesafety/test_functions/test_raise_exception.yml000066400000000000000000000002721472312074000255070ustar00rootroot00000000000000- case: raise_exception disable_cache: false main: | from returns.functions import raise_exception reveal_type(raise_exception(ValueError())) # N: Revealed type is "Never" returns-0.24.0/typesafety/test_functions/test_tap.yml000066400000000000000000000010511472312074000231060ustar00rootroot00000000000000- case: tap_single_function disable_cache: false main: | from returns.functions import tap def first(num: int) -> float: return float(num) reveal_type(tap(first)) # N: Revealed type is "def (builtins.int) -> builtins.int" - case: untap_single_function disable_cache: false main: | from returns.functions import untap def first(num: int) -> float: return float(num) reveal_type(untap(first)) # N: Revealed type is "def (builtins.int)" reveal_type(untap(first)(1)) # N: Revealed type is "None" returns-0.24.0/typesafety/test_future/000077500000000000000000000000001472312074000200455ustar00rootroot00000000000000returns-0.24.0/typesafety/test_future/test_future_container/000077500000000000000000000000001472312074000244605ustar00rootroot00000000000000returns-0.24.0/typesafety/test_future/test_future_container/test_asyncify_decorator.yml000066400000000000000000000007031472312074000321310ustar00rootroot00000000000000- case: asyncify_decorator_with_args disable_cache: false main: | from typing import Optional from returns.future import asyncify @asyncify def test( first: int, second: Optional[str] = None, *, kw: bool = True, ) -> int: return 1 reveal_type(test) # N: Revealed type is "def (first: builtins.int, second: Union[builtins.str, None] =, *, kw: builtins.bool =) -> typing.Coroutine[Any, Any, builtins.int]" returns-0.24.0/typesafety/test_future/test_future_container/test_do.yml000066400000000000000000000027321472312074000266500ustar00rootroot00000000000000- case: do_sync_error disable_cache: false main: | from returns.future import Future Future.do( x + 1 for x in Future.from_value(1) ) out: | main:3: error: Argument 1 to "do" of "Future" has incompatible type "Generator[Any, None, None]"; expected "AsyncGenerator[Never, None]" [arg-type] main:5: error: "Future[int]" has no attribute "__iter__"; maybe "__aiter__"? (not iterable) [attr-defined] main:5: note: Maybe you forgot to use "await"? - case: do_types_missmatch disable_cache: false main: | from returns.future import Future, FutureSuccess Future.do( x + y async for x in FutureSuccess(1) # E: Invalid type supplied in do-notation: expected "returns.future.Future[Any]", got "returns.future.FutureResult[builtins.int, Any]" [misc] async for y in Future.from_value(2.5) ) - case: do_with_if disable_cache: false main: | from returns.future import Future Future.do( # E: Using "if" conditions inside a generator is not allowed [misc] x + y async for x in Future.from_value(1) async for y in Future.from_value(2.5) if y > 5 ) - case: do_with_var disable_cache: false main: | from returns.future import Future x = ( x + y async for x in Future.from_value(1) async for y in Future.from_value(2.5) ) Future.do(x) # E: Literal generator expression is required, not a variable or function call [misc] returns-0.24.0/typesafety/test_future/test_future_container/test_future_base.yml000066400000000000000000000037261472312074000305560ustar00rootroot00000000000000- case: future_constructor disable_cache: false main: | from returns.future import Future async def test() -> int: ... 
reveal_type(Future(test())) # N: Revealed type is "returns.future.Future[builtins.int]" reveal_type(Future.from_value(1)) # N: Revealed type is "returns.future.Future[builtins.int]" - case: future_awaitable disable_cache: false main: | from returns.future import Future async def main() -> None: reveal_type(await Future.from_value(1)) # N: Revealed type is "returns.io.IO[builtins.int]" reveal_type(await Future.from_value(1).awaitable()) # N: Revealed type is "returns.io.IO[builtins.int]" - case: future_bind disable_cache: false main: | from returns.future import Future def bind_future(arg: int) -> Future[str]: ... reveal_type(Future.from_value(1).bind(bind_future)) # N: Revealed type is "returns.future.Future[builtins.str]" - case: future_bind_awaitable disable_cache: false main: | from returns.future import Future async def bind_awaitable(arg: int) -> str: ... reveal_type(Future.from_value(1).bind_awaitable(bind_awaitable)) # N: Revealed type is "returns.future.Future[builtins.str]" - case: future_bind_async disable_cache: false main: | from returns.future import Future async def bind_async(arg: int) -> Future[str]: ... reveal_type(Future.from_value(1).bind_async(bind_async)) # N: Revealed type is "returns.future.Future[builtins.str]" - case: future_map disable_cache: false main: | from returns.future import Future reveal_type(Future.from_value(1).map(str)) # N: Revealed type is "returns.future.Future[builtins.str]" - case: future_apply disable_cache: false main: | from returns.future import Future def transform(arg: int) -> str: ... reveal_type(Future.from_value(1).apply(Future.from_value(transform))) # N: Revealed type is "returns.future.Future[builtins.str]" returns-0.24.0/typesafety/test_future/test_future_container/test_future_decorator.yml000066400000000000000000000013121472312074000316130ustar00rootroot00000000000000- case: future_decorator_with_args disable_cache: false main: | from typing import Optional from returns.future import future @future async def test( first: int, second: Optional[str] = None, *, kw: bool = True, ) -> int: ... reveal_type(test) # N: Revealed type is "def (first: builtins.int, second: Union[builtins.str, None] =, *, kw: builtins.bool =) -> returns.future.Future[builtins.int]" - case: future_composition disable_cache: false main: | from returns.future import future async def test(first: int) -> str: ... 
reveal_type(future(test)) # N: Revealed type is "def (first: builtins.int) -> returns.future.Future[builtins.str]" returns-0.24.0/typesafety/test_future/test_future_container/test_future_typecast.yml000066400000000000000000000017461472312074000315000ustar00rootroot00000000000000- case: future_correct_cast disable_cache: false main: | from returns.future import Future first: Future[ValueError] second: Future[Exception] = first reveal_type(second) # N: Revealed type is "returns.future.Future[builtins.Exception]" - case: future_from_value disable_cache: false main: | from returns.future import Future reveal_type(Future.from_value(1)) # N: Revealed type is "returns.future.Future[builtins.int]" - case: future_from_io disable_cache: false main: | from returns.future import Future from returns.io import IO reveal_type(Future.from_io(IO(1))) # N: Revealed type is "returns.future.Future[builtins.int]" - case: future_from_downcast disable_cache: false main: | from returns.future import Future, FutureResult first: FutureResult[int, ValueError] reveal_type(Future.from_future_result(first)) # N: Revealed type is "returns.future.Future[returns.result.Result[builtins.int, builtins.ValueError]]" returns-0.24.0/typesafety/test_future/test_future_result_container/000077500000000000000000000000001472312074000260565ustar00rootroot00000000000000returns-0.24.0/typesafety/test_future/test_future_result_container/test_do.yml000066400000000000000000000060401472312074000302420ustar00rootroot00000000000000- case: do_sync_error disable_cache: false main: | from returns.future import FutureResult, FutureSuccess FutureResult.do( first / 2 for first in FutureSuccess(1) ) out: | main:3: error: Argument 1 to "do" of "FutureResult" has incompatible type "Generator[Any, None, None]"; expected "AsyncGenerator[Never, None]" [arg-type] main:5: error: "FutureResult[int, Any]" has no attribute "__iter__"; maybe "__aiter__"? (not iterable) [attr-defined] main:5: note: Maybe you forgot to use "await"? 
- case: do_all_errors disable_cache: false main: | from returns.future import FutureResult, FutureFailure reveal_type(FutureResult.do( # N: Revealed type is "returns.future.FutureResult[Any, Union[builtins.int, builtins.str]]" first / second async for first in FutureFailure(1) async for second in FutureFailure('a') )) - case: do_no_errors disable_cache: false main: | from returns.future import FutureSuccess, FutureResult reveal_type(FutureResult.do( # N: Revealed type is "returns.future.FutureResult[builtins.float, Never]" x + y async for x in FutureSuccess(1) async for y in FutureSuccess(2.5) )) - case: do_with_errors disable_cache: false main: | from returns.future import FutureSuccess, FutureResult a: FutureResult[int, str] b: FutureResult[float, bytes] reveal_type(FutureResult.do( # N: Revealed type is "returns.future.FutureResult[builtins.float, Union[builtins.str, builtins.bytes]]" x + y async for x in a async for y in b )) - case: do_types_missmatch disable_cache: false main: | from returns.future import FutureSuccess, FutureResult, Future FutureResult.do( x + y async for x in Future.from_value(1) # E: Invalid type supplied in do-notation: expected "returns.future.FutureResult[Any, Any]", got "returns.future.Future[builtins.int]" [misc] async for y in FutureSuccess(2.5) ) - case: do_types_converted disable_cache: false main: | from returns.future import FutureSuccess, FutureResult from returns.result import Result a: Result[int, str] reveal_type(FutureResult.do( # N: Revealed type is "returns.future.FutureResult[builtins.float, builtins.str]" x + y async for x in FutureResult.from_result(a) async for y in FutureSuccess(2.5) )) - case: do_with_if disable_cache: false main: | from returns.future import FutureSuccess, FutureResult FutureResult.do( # E: Using "if" conditions inside a generator is not allowed [misc] x + y async for x in FutureSuccess(1) async for y in FutureSuccess(2.5) if y > 5 ) - case: do_with_var disable_cache: false main: | from returns.future import FutureSuccess, FutureResult x = ( x + y async for x in FutureSuccess(1) async for y in FutureSuccess(2.5) ) FutureResult.do(x) # E: Literal generator expression is required, not a variable or function call [misc] returns-0.24.0/typesafety/test_future/test_future_result_container/test_future_result_base.yml000066400000000000000000000107221472312074000335440ustar00rootroot00000000000000- case: future_result_awaitable disable_cache: false main: | from returns.future import FutureResult async def main() -> None: reveal_type(await FutureResult.from_value(1)) # N: Revealed type is "returns.io.IOResult[builtins.int, Any]" reveal_type(await FutureResult.from_value(1).awaitable()) # N: Revealed type is "returns.io.IOResult[builtins.int, Any]" reveal_type(await FutureResult.from_failure(1)) # N: Revealed type is "returns.io.IOResult[Any, builtins.int]" reveal_type(await FutureResult.from_failure(1).awaitable()) # N: Revealed type is "returns.io.IOResult[Any, builtins.int]" - case: future_result_swap disable_cache: false main: | from returns.future import FutureResult x: FutureResult[int, str] reveal_type(x.swap()) # N: Revealed type is "returns.future.FutureResult[builtins.str, builtins.int]" - case: future_result_bind disable_cache: false main: | from returns.future import FutureResult def bind(arg: int) -> FutureResult[float, str]: ... 
first: FutureResult[int, str] reveal_type(first.bind(bind)) # N: Revealed type is "returns.future.FutureResult[builtins.float, builtins.str]" - case: future_result_bind_awaitable disable_cache: false main: | from returns.future import FutureResult async def bind_awaitable(arg: int) -> float: ... first: FutureResult[int, str] reveal_type(first.bind_awaitable(bind_awaitable)) # N: Revealed type is "returns.future.FutureResult[builtins.float, builtins.str]" - case: future_result_bind_async disable_cache: false main: | from returns.future import FutureResult async def bind_async(arg: int) -> FutureResult[float, str]: ... first: FutureResult[int, str] reveal_type(first.bind_async(bind_async)) # N: Revealed type is "returns.future.FutureResult[builtins.float, builtins.str]" - case: future_result_bind_result disable_cache: false main: | from returns.future import FutureResult from returns.result import Result def bind(arg: int) -> Result[float, str]: ... first: FutureResult[int, str] reveal_type(first.bind_result(bind)) # N: Revealed type is "returns.future.FutureResult[builtins.float, builtins.str]" - case: future_result_bind_ioresult disable_cache: false main: | from returns.future import FutureResult from returns.io import IOResult def bind(arg: int) -> IOResult[float, str]: ... first: FutureResult[int, str] reveal_type(first.bind_ioresult(bind)) # N: Revealed type is "returns.future.FutureResult[builtins.float, builtins.str]" - case: future_result_bind_future disable_cache: false main: | from returns.future import Future, FutureResult def bind_future(arg: int) -> Future[float]: ... first: FutureResult[int, str] reveal_type(first.bind_future(bind_future)) # N: Revealed type is "returns.future.FutureResult[builtins.float, builtins.str]" - case: future_result_bind_async_future disable_cache: false main: | from returns.future import Future, FutureResult async def bind_future(arg: int) -> Future[float]: ... first: FutureResult[int, str] reveal_type(first.bind_async_future(bind_future)) # N: Revealed type is "returns.future.FutureResult[builtins.float, builtins.str]" - case: future_result_map disable_cache: false main: | from returns.future import FutureResult first: FutureResult[int, str] reveal_type(first.map(float)) # N: Revealed type is "returns.future.FutureResult[builtins.float, builtins.str]" - case: future_result_apply disable_cache: false main: | from typing import Callable from returns.future import FutureResult first: FutureResult[int, str] second: FutureResult[Callable[[int], float], str] reveal_type(first.apply(second)) # N: Revealed type is "returns.future.FutureResult[builtins.float, builtins.str]" - case: future_result_alt disable_cache: false main: | from returns.future import FutureResult first: FutureResult[int, int] reveal_type(first.alt(float)) # N: Revealed type is "returns.future.FutureResult[builtins.int, builtins.float]" - case: future_result_lash disable_cache: false main: | from returns.future import FutureResult def bind(arg: str) -> FutureResult[int, float]: ... 
first: FutureResult[int, str] reveal_type(first.lash(bind)) # N: Revealed type is "returns.future.FutureResult[builtins.int, builtins.float]" returns-0.24.0/typesafety/test_future/test_future_result_container/test_future_result_typecast.yml000066400000000000000000000070211472312074000344640ustar00rootroot00000000000000- case: future_result_correct_cast disable_cache: false main: | from returns.future import FutureResult, FutureResultE first: FutureResult[int, Exception] second: FutureResultE[int] test1: FutureResultE[int] = first test2: FutureResult[int, Exception] = second reveal_type(first) # N: Revealed type is "returns.future.FutureResult[builtins.int, builtins.Exception]" reveal_type(second) # N: Revealed type is "returns.future.FutureResult[builtins.int, builtins.Exception]" - case: future_result_covariant_cast disable_cache: false main: | from returns.future import FutureResult first: FutureResult[TypeError, ValueError] # we cast both values second: FutureResult[Exception, Exception] = first reveal_type(second) # N: Revealed type is "returns.future.FutureResult[builtins.Exception, builtins.Exception]" - case: future_result_from_typecast disable_cache: false main: | from returns.future import Future, FutureResult from returns.result import Result first: Result[int, str] reveal_type(FutureResult.from_typecast(Future.from_value(first))) # N: Revealed type is "returns.future.FutureResult[builtins.int, builtins.str]" - case: future_result_constructor disable_cache: false main: | from returns.future import FutureResult from returns.result import Result async def test() -> Result[int, str]: ... reveal_type(FutureResult(test())) # N: Revealed type is "returns.future.FutureResult[builtins.int, builtins.str]" reveal_type(FutureResult.from_value(1)) # N: Revealed type is "returns.future.FutureResult[builtins.int, Any]" reveal_type(FutureResult.from_failure(1)) # N: Revealed type is "returns.future.FutureResult[Any, builtins.int]" - case: future_result_unit_functions disable_cache: false main: | from returns.future import FutureSuccess, FutureFailure reveal_type(FutureSuccess(1)) # N: Revealed type is "returns.future.FutureResult[builtins.int, Any]" reveal_type(FutureFailure(1)) # N: Revealed type is "returns.future.FutureResult[Any, builtins.int]" - case: future_result_from_result disable_cache: false main: | from returns.future import FutureResult from returns.result import Result, Success, Failure reveal_type(FutureResult.from_result(Success(1))) # N: Revealed type is "returns.future.FutureResult[builtins.int, Any]" reveal_type(FutureResult.from_result(Failure(1))) # N: Revealed type is "returns.future.FutureResult[Any, builtins.int]" - case: future_result_from_io disable_cache: false main: | from returns.future import FutureResult from returns.io import IO, IOSuccess, IOFailure reveal_type(FutureResult.from_ioresult(IOSuccess(1))) # N: Revealed type is "returns.future.FutureResult[builtins.int, Any]" reveal_type(FutureResult.from_ioresult(IOFailure(1))) # N: Revealed type is "returns.future.FutureResult[Any, builtins.int]" reveal_type(FutureResult.from_io(IO(1))) # N: Revealed type is "returns.future.FutureResult[builtins.int, Any]" reveal_type(FutureResult.from_failed_io(IO(1))) # N: Revealed type is "returns.future.FutureResult[Any, builtins.int]" - case: future_result_from_future disable_cache: false main: | from returns.future import Future, FutureResult reveal_type(FutureResult.from_future(Future.from_value(1))) # N: Revealed type is "returns.future.FutureResult[builtins.int, Any]" 
reveal_type(FutureResult.from_failed_future(Future.from_value(1))) # N: Revealed type is "returns.future.FutureResult[Any, builtins.int]" returns-0.24.0/typesafety/test_future/test_future_result_container/test_future_safe_decorator.yml000066400000000000000000000037351472312074000342220ustar00rootroot00000000000000- case: future_safe_decorator_with_args disable_cache: false main: | from typing import Optional from returns.future import future_safe @future_safe async def test( first: int, second: Optional[str] = None, *, kw: bool = True, ) -> int: return 1 reveal_type(test) # N: Revealed type is "def (first: builtins.int, second: Union[builtins.str, None] =, *, kw: builtins.bool =) -> returns.future.FutureResult[builtins.int, builtins.Exception]" - case: future_safe_composition_with_args disable_cache: false main: | from typing import Optional from returns.future import future_safe async def test( first: int, second: Optional[str] = None, *, kw: bool = True, ) -> int: return 1 reveal_type(future_safe(test)) # N: Revealed type is "def (first: builtins.int, second: Union[builtins.str, None] =, *, kw: builtins.bool =) -> returns.future.FutureResult[builtins.int, builtins.Exception]" - case: future_safe_decorator_with_pos_params disable_cache: false main: | from typing import Optional from returns.future import future_safe @future_safe((ValueError,)) async def test( first: int, second: Optional[str] = None, *, kw: bool = True, ) -> int: return 1 reveal_type(test) # N: Revealed type is "def (first: builtins.int, second: Union[builtins.str, None] =, *, kw: builtins.bool =) -> returns.future.FutureResult[builtins.int, builtins.ValueError]" - case: future_safe_decorator_with_named_params disable_cache: false main: | from typing import Optional from returns.future import future_safe @future_safe(exceptions=(ValueError,)) async def test( first: int, second: Optional[str] = None, *, kw: bool = True, ) -> int: return 1 reveal_type(test) # N: Revealed type is "def (first: builtins.int, second: Union[builtins.str, None] =, *, kw: builtins.bool =) -> returns.future.FutureResult[builtins.int, builtins.ValueError]" returns-0.24.0/typesafety/test_interfaces/000077500000000000000000000000001472312074000206565ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_altable/000077500000000000000000000000001472312074000233215ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_altable/test_inheritance.yml000066400000000000000000000100501472312074000273700ustar00rootroot00000000000000- case: altable_inheritance_correct2 disable_cache: false main: | from typing import Callable, TypeVar from returns.interfaces.altable import Altable2 from returns.primitives.hkt import SupportsKind2 V = TypeVar('V') E = TypeVar('E') N = TypeVar('N') class MyClass(SupportsKind2['MyClass', V, E], Altable2[V, E]): def __init__(self, value: V, error: E) -> None: self.value = value self.error = error def alt( self, function: Callable[[E], N], ) -> 'MyClass[V, N]': return MyClass(self.value, function(self.error)) def test(arg: str) -> int: ... 
reveal_type(MyClass(1, '1').alt(test)) # N: Revealed type is "main.MyClass[builtins.int, builtins.int]" - case: altable_inheritance_correct3 disable_cache: false main: | from typing import Callable, TypeVar from returns.interfaces.altable import Altable3 from returns.primitives.hkt import SupportsKind3 V = TypeVar('V') T = TypeVar('T') K = TypeVar('K') N = TypeVar('N') class MyClass(SupportsKind3['MyClass', V, T, K], Altable3[V, T, K]): def __init__(self, value: V, other: T, last: K) -> None: self.value = value self.other = other self.last = last def alt( self, function: Callable[[T], N], ) -> 'MyClass[V, N, K]': return MyClass(self.value, function(self.other), self.last) def test(arg: str) -> float: ... reveal_type(MyClass(1, 'a', True).alt(test)) # N: Revealed type is "main.MyClass[builtins.int, builtins.float, builtins.bool]" - case: altable_inheritance_missing disable_cache: false main: | from typing import TypeVar from returns.interfaces.altable import Altable2 from returns.primitives.hkt import SupportsKind2 V = TypeVar('V') N = TypeVar('N') class MyClass(SupportsKind2['MyClass', V, N], Altable2[V, N]): ... MyClass() out: | main:11: error: Cannot instantiate abstract class "MyClass" with abstract attribute "alt" [abstract] - case: altable_inheritance_wrong2 disable_cache: false main: | from typing import Callable, TypeVar from returns.interfaces.altable import Altable2 from returns.primitives.hkt import SupportsKind2 V = TypeVar('V') N = TypeVar('N') E = TypeVar('E') class MyClass(SupportsKind2['MyClass', V, E], Altable2[V, E]): def alt( self, function: Callable[[V], N], ) -> 'MyClass[N, E]': ... out: | main:10: error: Return type "MyClass[N, E]" of "alt" incompatible with return type "KindN[MyClass[V, E], V, _UpdatedType, Never]" in supertype "AltableN" [override] main:12: error: Argument 1 of "alt" is incompatible with supertype "AltableN"; supertype defines the argument type as "Callable[[E], _UpdatedType]" [override] main:12: note: This violates the Liskov substitution principle main:12: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides - case: altable_inheritance_wrong3 disable_cache: false main: | from typing import Callable, TypeVar from returns.interfaces.altable import Altable3 from returns.primitives.hkt import SupportsKind3 V = TypeVar('V') N = TypeVar('N') E = TypeVar('E') T = TypeVar('T') class MyClass(SupportsKind3['MyClass', V, E, T], Altable3[V, E, T]): def alt( self, function: Callable[[V], N], ) -> 'MyClass[N, E, T]': ... 
out: | main:11: error: Return type "MyClass[N, E, T]" of "alt" incompatible with return type "KindN[MyClass[V, E, T], V, _UpdatedType, T]" in supertype "AltableN" [override] main:13: error: Argument 1 of "alt" is incompatible with supertype "AltableN"; supertype defines the argument type as "Callable[[E], _UpdatedType]" [override] main:13: note: This violates the Liskov substitution principle main:13: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides returns-0.24.0/typesafety/test_interfaces/test_applicative/000077500000000000000000000000001472312074000242165ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_applicative/test_inheritance.yml000066400000000000000000000073131472312074000302750ustar00rootroot00000000000000- case: applicative_inheritance_correct1 disable_cache: false main: | from typing import Callable, TypeVar from returns.interfaces.applicative import Applicative1 from returns.primitives.hkt import Kind1, SupportsKind1, dekind V = TypeVar('V') N = TypeVar('N') class MyClass(SupportsKind1['MyClass', V], Applicative1[V]): def apply( self, container: Kind1['MyClass', Callable[[V], N]], ) -> 'MyClass[N]': ... @classmethod def from_value(cls, inner_value: N) -> MyClass[N]: ... - case: applicative_inheritance_correct2 disable_cache: false main: | from typing import Any, Callable, TypeVar from returns.interfaces.applicative import Applicative2 from returns.primitives.hkt import Kind2, SupportsKind2, dekind V = TypeVar('V') E = TypeVar('E') N = TypeVar('N') class MyClass(SupportsKind2['MyClass', V, E], Applicative2[V, E]): def apply( self, container: Kind2['MyClass', Callable[[V], N], E], ) -> 'MyClass[N, E]': ... @classmethod def from_value(cls, inner_value: N) -> MyClass[N, Any]: ... - case: applicative_inheritance_correct3 disable_cache: false main: | from typing import Any, Callable, TypeVar from returns.interfaces.applicative import Applicative3 from returns.primitives.hkt import Kind3, SupportsKind3, dekind V = TypeVar('V') E = TypeVar('E') T = TypeVar('T') N = TypeVar('N') class MyClass(SupportsKind3['MyClass', V, E, T], Applicative3[V, E, T]): def apply( self, container: Kind3['MyClass', Callable[[V], N], E, T], ) -> 'MyClass[N, E, T]': ... @classmethod def from_value(cls, inner_value: N) -> MyClass[N, Any, Any]: ... - case: applicative_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.applicative import Applicative1 from returns.primitives.hkt import SupportsKind1 V = TypeVar('V') @final class MyClass(SupportsKind1['MyClass', V], Applicative1[V]): ... out: | main:8: error: Final class main.MyClass has abstract attributes "apply", "from_value", "map" [misc] - case: applicative_inheritance_wrong disable_cache: false main: | from typing import Callable, TypeVar from returns.interfaces.applicative import Applicative1 from returns.primitives.hkt import SupportsKind1 V = TypeVar('V') N = TypeVar('N') class MyClass(SupportsKind1['MyClass', V], Applicative1[V]): def apply( self, container: 'MyClass[Callable[[V], N]]', ) -> 'MyClass[N]': ... @classmethod def from_value(cls, inner_value: V) -> None: ... 
out: | main:11: error: Argument 1 of "apply" is incompatible with supertype "ApplicativeN"; supertype defines the argument type as "KindN[MyClass[V], Callable[[V], _UpdatedType], Never, Never]" [override] main:11: note: This violates the Liskov substitution principle main:11: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides main:16: error: Return type "None" of "from_value" incompatible with return type "KindN[MyClass[V], _UpdatedType, Never, Never]" in supertype "ApplicativeN" [override] main:16: error: Argument 1 of "from_value" is incompatible with supertype "ApplicativeN"; supertype defines the argument type as "_UpdatedType" [override] main:16: note: This violates the Liskov substitution principle main:16: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides returns-0.24.0/typesafety/test_interfaces/test_bimappable/000077500000000000000000000000001472312074000240115ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_bimappable/test_bimappable_inheritance.yml000066400000000000000000000007101472312074000322360ustar00rootroot00000000000000- case: bimappable_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.bimappable import BiMappable2 from returns.primitives.hkt import SupportsKind2 V = TypeVar('V') T = TypeVar('T') @final class MyClass(SupportsKind2['MyClass', V, T], BiMappable2[V, T]): ... out: | main:9: error: Final class main.MyClass has abstract attributes "alt", "map" [misc] returns-0.24.0/typesafety/test_interfaces/test_bindable/000077500000000000000000000000001472312074000234555ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_bindable/test_inheritance.yml000066400000000000000000000137121472312074000275340ustar00rootroot00000000000000- case: bindable_inheritance_correct1 disable_cache: false main: | from typing import Callable, TypeVar from returns.interfaces.bindable import Bindable1 from returns.primitives.hkt import Kind1, SupportsKind1, dekind V = TypeVar('V') N = TypeVar('N') class MyClass(SupportsKind1['MyClass', V], Bindable1[V]): def __init__(self, value: V) -> None: self.value = value def bind( self, function: Callable[[V], Kind1['MyClass', N]], ) -> 'MyClass[N]': return dekind(function(self.value)) def test(arg: str) -> MyClass[int]: ... reveal_type(MyClass('1').bind(test)) # N: Revealed type is "main.MyClass[builtins.int]" - case: bindable_inheritance_correct2 disable_cache: false main: | from typing import Callable, TypeVar from returns.interfaces.bindable import Bindable2 from returns.primitives.hkt import Kind2, SupportsKind2, dekind V = TypeVar('V') T = TypeVar('T') N = TypeVar('N') class MyClass(SupportsKind2['MyClass', V, T], Bindable2[V, T]): def __init__(self, value: V, other: T) -> None: self.value = value self.other = other def bind( self, function: Callable[[V], Kind2['MyClass', N, T]], ) -> 'MyClass[N, T]': return dekind(function(self.value)) def test(arg: str) -> MyClass[int, str]: ... 
reveal_type(MyClass('1', 'a').bind(test)) # N: Revealed type is "main.MyClass[builtins.int, builtins.str]" - case: bindable_inheritance_correct3 disable_cache: false main: | from typing import Callable, TypeVar from returns.interfaces.bindable import Bindable3 from returns.primitives.hkt import Kind3, SupportsKind3, dekind V = TypeVar('V') T = TypeVar('T') K = TypeVar('K') N = TypeVar('N') class MyClass(SupportsKind3['MyClass', V, T, K], Bindable3[V, T, K]): def __init__(self, value: V, other: T, last: K) -> None: self.value = value self.other = other self.last = last def bind( self, function: Callable[[V], Kind3['MyClass', N, T, K]], ) -> 'MyClass[N, T, K]': return dekind(function(self.value)) def test(arg: str) -> MyClass[int, str, bool]: ... reveal_type(MyClass('1', 'a', True).bind(test)) # N: Revealed type is "main.MyClass[builtins.int, builtins.str, builtins.bool]" - case: bindable_inheritance_missing disable_cache: false main: | from typing import TypeVar from returns.interfaces.bindable import Bindable1 from returns.primitives.hkt import SupportsKind1 V = TypeVar('V') class MyClass(SupportsKind1['MyClass', V], Bindable1[V]): ... MyClass() out: | main:10: error: Cannot instantiate abstract class "MyClass" with abstract attribute "bind" [abstract] - case: bindable_inheritance_wrong1 disable_cache: false main: | from typing import Callable, TypeVar from returns.interfaces.bindable import Bindable1 from returns.primitives.hkt import Kind1, SupportsKind1 V = TypeVar('V') N = TypeVar('N') class MyClass(SupportsKind1['MyClass', V], Bindable1[V]): def bind( self, function: Callable[[V], Kind1['MyClass', V]], ) -> 'MyClass[V]': ... out: | main:9: error: Return type "MyClass[V]" of "bind" incompatible with return type "KindN[MyClass[V], _UpdatedType, Never, Never]" in supertype "BindableN" [override] main:11: error: Argument 1 of "bind" is incompatible with supertype "BindableN"; supertype defines the argument type as "Callable[[V], KindN[MyClass[V], _UpdatedType, Never, Never]]" [override] main:11: note: This violates the Liskov substitution principle main:11: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides - case: bindable_inheritance_wrong2 disable_cache: false main: | from typing import Callable, TypeVar from returns.interfaces.bindable import Bindable2 from returns.primitives.hkt import Kind2, SupportsKind2 V = TypeVar('V') T = TypeVar('T') N = TypeVar('N') class MyClass(SupportsKind2['MyClass', V, T], Bindable2[V, T]): def bind( self, function: Callable[[V], Kind2['MyClass', V, T]], ) -> 'MyClass[V, T]': ... 
out: | main:10: error: Return type "MyClass[V, T]" of "bind" incompatible with return type "KindN[MyClass[V, T], _UpdatedType, T, Never]" in supertype "BindableN" [override] main:12: error: Argument 1 of "bind" is incompatible with supertype "BindableN"; supertype defines the argument type as "Callable[[V], KindN[MyClass[V, T], _UpdatedType, T, Never]]" [override] main:12: note: This violates the Liskov substitution principle main:12: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides - case: bindable_inheritance_wrong3 disable_cache: false main: | from typing import Callable, TypeVar from returns.interfaces.bindable import Bindable3 from returns.primitives.hkt import Kind3, SupportsKind3 V = TypeVar('V') T = TypeVar('T') E = TypeVar('E') N = TypeVar('N') class MyClass(SupportsKind3['MyClass', V, T, E], Bindable3[V, T, E]): def bind( self, function: Callable[[V], Kind3['MyClass', V, T, N]], ) -> 'MyClass[V, T, E]': ... out: | main:11: error: Return type "MyClass[V, T, E]" of "bind" incompatible with return type "KindN[MyClass[V, T, E], _UpdatedType, T, E]" in supertype "BindableN" [override] main:13: error: Argument 1 of "bind" is incompatible with supertype "BindableN"; supertype defines the argument type as "Callable[[V], KindN[MyClass[V, T, E], _UpdatedType, T, E]]" [override] main:13: note: This violates the Liskov substitution principle main:13: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides returns-0.24.0/typesafety/test_interfaces/test_container/000077500000000000000000000000001472312074000236775ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_container/test_inheritance.yml000066400000000000000000000007321472312074000277540ustar00rootroot00000000000000- case: container_inheritance_missing disable_cache: false main: | from typing import TypeVar from returns.interfaces.container import Container1 from returns.primitives.hkt import SupportsKind1 V = TypeVar('V') class MyClass(SupportsKind1['MyClass', V], Container1[V]): ... MyClass() out: | main:10: error: Cannot instantiate abstract class "MyClass" with abstract attributes "apply", "bind", "from_value" and "map" [abstract] returns-0.24.0/typesafety/test_interfaces/test_equality/000077500000000000000000000000001472312074000235525ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_equality/test_inheritance.yml000066400000000000000000000051151472312074000276270ustar00rootroot00000000000000- case: equable_correct_inheritance1 disable_cache: false main: | from returns.primitives.hkt import SupportsKind1 from returns.interfaces.equable import Equable from returns.primitives.container import container_equality from typing import TypeVar T = TypeVar('T') class MyOwn( SupportsKind1['MyOwn', T], Equable, ): def __init__(self, value: T) -> None: ... equals = container_equality reveal_type(MyOwn(1).equals(MyOwn(1))) # N: Revealed type is "builtins.bool" reveal_type(MyOwn(1).equals(MyOwn('a'))) # N: Revealed type is "builtins.bool" MyOwn(1).equals(1) # E: Argument 1 has incompatible type "int"; expected "KindN[MyOwn[Any], Any, Any, Any]" [arg-type] - case: equable_correct_inheritance2 disable_cache: false main: | from returns.primitives.hkt import SupportsKind1 from returns.interfaces.equable import Equable from typing import TypeVar T = TypeVar('T') V = TypeVar('V') class MyOwn( SupportsKind1['MyOwn', T], Equable, ): def __init__(self, value: T) -> None: ... def equals(self, other: MyOwn[V]) -> bool: ... 
reveal_type(MyOwn(1).equals(MyOwn(1))) # N: Revealed type is "builtins.bool" reveal_type(MyOwn(1).equals(MyOwn('a'))) # N: Revealed type is "builtins.bool" - case: equable_inheritance_missing disable_cache: false main: | from returns.primitives.hkt import SupportsKind1 from returns.interfaces.equable import Equable from typing import TypeVar, final T = TypeVar('T') @final class MyOwn( SupportsKind1['MyOwn', T], Equable, ): ... out: | main:8: error: Final class main.MyOwn has abstract attributes "equals" [misc] - case: equable_inheritance_wrong disable_cache: false main: | from returns.primitives.hkt import SupportsKind1 from returns.interfaces.equable import Equable from typing import TypeVar, final T = TypeVar('T') @final class MyOwn( SupportsKind1['MyOwn', T], Equable, ): def equals(self, other: bool) -> T: ... out: | main:12: error: Return type "T" of "equals" incompatible with return type "bool" in supertype "Equable" [override] main:12: error: Argument 1 of "equals" is incompatible with supertype "Equable"; supertype defines the argument type as "MyOwn[T]" [override] main:12: note: This violates the Liskov substitution principle main:12: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides returns-0.24.0/typesafety/test_interfaces/test_failable/000077500000000000000000000000001472312074000234545ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_failable/test_diverse_failable.yml000066400000000000000000000112171472312074000305200ustar00rootroot00000000000000- case: diverse_failable_inheritance_correct2 disable_cache: false main: | from typing import TypeVar from returns.interfaces.failable import DiverseFailable2 from returns.primitives.hkt import SupportsKind2 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _UpdatedType = TypeVar('_UpdatedType') class MyClass( SupportsKind2['MyClass', _FirstType, _SecondType], DiverseFailable2[_FirstType, _SecondType], ): @classmethod def from_failure( cls, inner_value: _UpdatedType ) -> MyClass[_FirstType, _UpdatedType]: ... x: MyClass[str, int] reveal_type(MyClass.from_failure(10)) # N: Revealed type is "main.MyClass[Never, builtins.int]" - case: diverse_failable_inheritance_correct3 disable_cache: false main: | from typing import TypeVar from returns.interfaces.failable import DiverseFailable3 from returns.primitives.hkt import SupportsKind3 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') _UpdatedType = TypeVar('_UpdatedType') class MyClass( SupportsKind3['MyClass', _FirstType, _SecondType, _ThirdType], DiverseFailable3[_FirstType, _SecondType, _ThirdType], ): @classmethod def from_failure( cls, inner_value: _UpdatedType ) -> MyClass[_FirstType, _UpdatedType, _ThirdType]: ... x: MyClass[float, bool, str] reveal_type(MyClass.from_failure(10)) # N: Revealed type is "main.MyClass[Never, builtins.int, Never]" - case: diverse_failable_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.failable import DiverseFailable2 from returns.primitives.hkt import SupportsKind2 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') @final class MyClass( SupportsKind2['MyClass', _FirstType, _SecondType], DiverseFailable2[_FirstType, _SecondType], ): ... 
out: | main:9: error: Final class main.MyClass has abstract attributes "alt", "apply", "bind", "from_failure", "from_value", "lash", "map", "swap" [misc] - case: diverse_failable_inheritance_wrong2 disable_cache: false main: | from typing import TypeVar from returns.interfaces.failable import DiverseFailable2 from returns.primitives.hkt import SupportsKind2 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') class MyClass( SupportsKind2['MyClass', _FirstType, _SecondType], DiverseFailable2[_FirstType, _SecondType], ): @classmethod def from_failure( cls, inner_value: _FirstType ) -> MyClass[_FirstType, _FirstType]: ... out: | main:13: error: Return type "MyClass[_FirstType, _FirstType]" of "from_failure" incompatible with return type "KindN[MyClass[_FirstType, _SecondType], _FirstType, _UpdatedType, Never]" in supertype "DiverseFailableN" [override] main:15: error: Argument 1 of "from_failure" is incompatible with supertype "DiverseFailableN"; supertype defines the argument type as "_UpdatedType" [override] main:15: note: This violates the Liskov substitution principle main:15: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides - case: diverse_failable_inheritance_wrong3 disable_cache: false main: | from typing import TypeVar from returns.interfaces.failable import DiverseFailable3 from returns.primitives.hkt import SupportsKind3 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') class MyClass( SupportsKind3['MyClass', _FirstType, _SecondType, _ThirdType], DiverseFailable3[_FirstType, _SecondType, _ThirdType], ): @classmethod def from_failure( cls, inner_value: _SecondType ) -> MyClass[_FirstType, _FirstType, _FirstType]: ... out: | main:14: error: Return type "MyClass[_FirstType, _FirstType, _FirstType]" of "from_failure" incompatible with return type "KindN[MyClass[_FirstType, _SecondType, _ThirdType], _FirstType, _UpdatedType, _ThirdType]" in supertype "DiverseFailableN" [override] main:16: error: Argument 1 of "from_failure" is incompatible with supertype "DiverseFailableN"; supertype defines the argument type as "_UpdatedType" [override] main:16: note: This violates the Liskov substitution principle main:16: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides returns-0.24.0/typesafety/test_interfaces/test_failable/test_failable.yml000066400000000000000000000027461472312074000270060ustar00rootroot00000000000000- case: failable_inheritance_correct2 disable_cache: false main: | from typing import TypeVar from returns.interfaces.failable import Failable2 from returns.primitives.hkt import SupportsKind2 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') class MyClass( SupportsKind2['MyClass', _FirstType, _SecondType], Failable2[_FirstType, _SecondType], ): ... - case: failable_inheritance_correct3 disable_cache: false main: | from typing import TypeVar from returns.interfaces.failable import Failable3 from returns.primitives.hkt import SupportsKind3 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') class MyClass( SupportsKind3['MyClass', _FirstType, _SecondType, _ThirdType], Failable3[_FirstType, _SecondType, _ThirdType], ): ... 
- case: failable_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.failable import Failable2 from returns.primitives.hkt import SupportsKind2 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') @final class MyClass( SupportsKind2['MyClass', _FirstType, _SecondType], Failable2[_FirstType, _SecondType], ): ... out: | main:9: error: Final class main.MyClass has abstract attributes "apply", "bind", "from_value", "lash", "map" [misc] returns-0.24.0/typesafety/test_interfaces/test_failable/test_single_failable.yml000066400000000000000000000030501472312074000303340ustar00rootroot00000000000000- case: single_failable_inheritance_correct2 disable_cache: false main: | from typing import TypeVar from returns.interfaces.failable import SingleFailable2 from returns.primitives.hkt import SupportsKind2 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') class MyClass( SupportsKind2['MyClass', _FirstType, _SecondType], SingleFailable2[_FirstType, _SecondType], ): ... - case: single_failable_inheretance_correct3 disable_cache: false main: | from typing import TypeVar from returns.interfaces.failable import SingleFailable3 from returns.primitives.hkt import SupportsKind3 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _ThirdType = TypeVar('_ThirdType') class MyClass( SupportsKind3['MyClass', _FirstType, _SecondType, _ThirdType], SingleFailable3[_FirstType, _SecondType, _ThirdType], ): ... - case: single_failable_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.failable import SingleFailable2 from returns.primitives.hkt import SupportsKind2 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') @final class MyClass( SupportsKind2['MyClass', _FirstType, _SecondType], SingleFailable2[_FirstType, _SecondType], ): ... out: | main:9: error: Final class main.MyClass has abstract attributes "apply", "bind", "empty", "from_value", "lash", "map" [misc] returns-0.24.0/typesafety/test_interfaces/test_lashable/000077500000000000000000000000001472312074000234705ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_lashable/test_inheritance.yml000066400000000000000000000104111472312074000275400ustar00rootroot00000000000000- case: lashable_inheritance_correct2 disable_cache: false main: | from typing import Callable, TypeVar from returns.interfaces.lashable import Lashable2 from returns.primitives.hkt import SupportsKind2, Kind2, dekind V = TypeVar('V') E = TypeVar('E') N = TypeVar('N') class MyClass(SupportsKind2['MyClass', V, E], Lashable2[V, E]): def __init__(self, value: V, error: E) -> None: self.value = value self.error = error def lash( self, function: Callable[[E], Kind2['MyClass', V, N]], ) -> 'MyClass[V, N]': return dekind(function(self.error)) def test(arg: str) -> MyClass[int, int]: ... 
reveal_type(MyClass(1, '1').lash(test)) # N: Revealed type is "main.MyClass[builtins.int, builtins.int]" - case: lashable_inheritance_correct3 disable_cache: false main: | from typing import Callable, TypeVar from returns.interfaces.lashable import Lashable3 from returns.primitives.hkt import SupportsKind3, Kind3, dekind V = TypeVar('V') T = TypeVar('T') K = TypeVar('K') N = TypeVar('N') class MyClass(SupportsKind3['MyClass', V, T, K], Lashable3[V, T, K]): def __init__(self, value: V, other: T, last: K) -> None: self.value = value self.other = other self.last = last def lash( self, function: Callable[[T], Kind3['MyClass', V, N, K]], ) -> 'MyClass[V, N, K]': return dekind(function(self.other)) def test(arg: str) -> MyClass[int, float, bool]: ... reveal_type(MyClass(1, 'a', True).lash(test)) # N: Revealed type is "main.MyClass[builtins.int, builtins.float, builtins.bool]" - case: lashable_inheritance_missing disable_cache: false main: | from typing import TypeVar from returns.interfaces.lashable import Lashable2 from returns.primitives.hkt import SupportsKind2 V = TypeVar('V') N = TypeVar('N') class MyClass(SupportsKind2['MyClass', V, N], Lashable2[V, N]): ... MyClass() out: | main:11: error: Cannot instantiate abstract class "MyClass" with abstract attribute "lash" [abstract] - case: lashable_inheritance_wrong2 disable_cache: false main: | from typing import Callable, TypeVar from returns.interfaces.lashable import Lashable2 from returns.primitives.hkt import SupportsKind2, Kind2 V = TypeVar('V') N = TypeVar('N') E = TypeVar('E') class MyClass(SupportsKind2['MyClass', V, E], Lashable2[V, E]): def lash( self, function: Callable[[E], Kind2['MyClass', V, V]], ) -> MyClass[N, E]: ... out: | main:10: error: Return type "MyClass[N, E]" of "lash" incompatible with return type "KindN[MyClass[V, E], V, _UpdatedType, Never]" in supertype "LashableN" [override] main:12: error: Argument 1 of "lash" is incompatible with supertype "LashableN"; supertype defines the argument type as "Callable[[E], KindN[MyClass[V, E], V, _UpdatedType, Never]]" [override] main:12: note: This violates the Liskov substitution principle main:12: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides - case: lashable_inheritance_wrong3 disable_cache: false main: | from typing import Callable, TypeVar from returns.interfaces.lashable import Lashable3 from returns.primitives.hkt import SupportsKind3, Kind3 V = TypeVar('V') N = TypeVar('N') E = TypeVar('E') T = TypeVar('T') class MyClass(SupportsKind3['MyClass', V, E, T], Lashable3[V, E, T]): def lash( self, function: Callable[[E], Kind3['MyClass', V, V, V]], ) -> MyClass[N, E, T]: ... 
out: | main:11: error: Return type "MyClass[N, E, T]" of "lash" incompatible with return type "KindN[MyClass[V, E, T], V, _UpdatedType, T]" in supertype "LashableN" [override] main:13: error: Argument 1 of "lash" is incompatible with supertype "LashableN"; supertype defines the argument type as "Callable[[E], KindN[MyClass[V, E, T], V, _UpdatedType, T]]" [override] main:13: note: This violates the Liskov substitution principle main:13: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides returns-0.24.0/typesafety/test_interfaces/test_mappable/000077500000000000000000000000001472312074000234765ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_mappable/test_inheritance.yml000066400000000000000000000124671472312074000275630ustar00rootroot00000000000000- case: mappable_inheritance_correct1 disable_cache: false main: | from typing import Callable, TypeVar from returns.interfaces.mappable import Mappable1 from returns.primitives.hkt import SupportsKind1 V = TypeVar('V') N = TypeVar('N') class MyClass(SupportsKind1['MyClass', V], Mappable1[V]): def __init__(self, value: V) -> None: self.value = value def map(self, function: Callable[[V], N]) -> 'MyClass[N]': return MyClass(function(self.value)) reveal_type(MyClass('1').map(int)) # N: Revealed type is "main.MyClass[builtins.int]" - case: mappable_inheritance_correct2 disable_cache: false main: | from typing import Callable, TypeVar from returns.interfaces.mappable import Mappable2 from returns.primitives.hkt import SupportsKind2 V = TypeVar('V') E = TypeVar('E') N = TypeVar('N') class MyClass(SupportsKind2['MyClass', V, E], Mappable2[V, E]): def __init__(self, value: V, error: E) -> None: self.value = value self.error = error def map(self, function: Callable[[V], N]) -> 'MyClass[N, E]': return MyClass(function(self.value), self.error) reveal_type(MyClass('1', 1).map(int)) # N: Revealed type is "main.MyClass[builtins.int, builtins.int]" - case: mappable_inheritance_correct3 disable_cache: false main: | from typing import Callable, TypeVar from returns.interfaces.mappable import Mappable3 from returns.primitives.hkt import SupportsKind3 V = TypeVar('V') E = TypeVar('E') K = TypeVar('K') N = TypeVar('N') class MyClass(SupportsKind3['MyClass', V, E, K], Mappable3[V, E, K]): def __init__(self, value: V, error: E, last: K) -> None: self.value = value self.error = error self.last = last def map(self, function: Callable[[V], N]) -> 'MyClass[N, E, K]': return MyClass(function(self.value), self.error, self.last) reveal_type(MyClass('1', 1, True).map(int)) # N: Revealed type is "main.MyClass[builtins.int, builtins.int, builtins.bool]" - case: mappable_inheritance_missing disable_cache: false main: | from typing import TypeVar from returns.interfaces.mappable import Mappable1 from returns.primitives.hkt import SupportsKind1 V = TypeVar('V') class MyClass(SupportsKind1['MyClass', V], Mappable1[V]): ... MyClass() out: | main:10: error: Cannot instantiate abstract class "MyClass" with abstract attribute "map" [abstract] - case: mappable_inheritance_wrong1 disable_cache: false main: | from typing import Callable, TypeVar from returns.interfaces.mappable import Mappable1 from returns.primitives.hkt import SupportsKind1 V = TypeVar('V') N = TypeVar('N') class MyClass(SupportsKind1['MyClass', V], Mappable1[V]): def map(self, function: Callable[[V], int]) -> 'MyClass[V]': ... 
out: | main:9: error: Return type "MyClass[V]" of "map" incompatible with return type "KindN[MyClass[V], _UpdatedType, Never, Never]" in supertype "MappableN" [override] main:9: error: Argument 1 of "map" is incompatible with supertype "MappableN"; supertype defines the argument type as "Callable[[V], _UpdatedType]" [override] main:9: note: This violates the Liskov substitution principle main:9: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides - case: mappable_inheritance_wrong2 disable_cache: false main: | from typing import Callable, TypeVar from returns.interfaces.mappable import Mappable2 from returns.primitives.hkt import SupportsKind2 V = TypeVar('V') E = TypeVar('E') N = TypeVar('N') class MyClass(SupportsKind2['MyClass', V, E], Mappable2[V, E]): def map(self, function: Callable[[E], N]) -> 'MyClass[E, N]': ... out: | main:10: error: Return type "MyClass[E, N]" of "map" incompatible with return type "KindN[MyClass[V, E], _UpdatedType, E, Never]" in supertype "MappableN" [override] main:10: error: Argument 1 of "map" is incompatible with supertype "MappableN"; supertype defines the argument type as "Callable[[V], _UpdatedType]" [override] main:10: note: This violates the Liskov substitution principle main:10: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides - case: mappable_inheritance_wrong3 disable_cache: false main: | from typing import Callable, TypeVar from returns.interfaces.mappable import Mappable3 from returns.primitives.hkt import SupportsKind3 V = TypeVar('V') E = TypeVar('E') T = TypeVar('T') N = TypeVar('N') class MyClass(SupportsKind3['MyClass', V, E, T], Mappable3[V, E, T]): def map(self, function: Callable[[E], N]) -> 'MyClass[E, N, T]': ... out: | main:11: error: Return type "MyClass[E, N, T]" of "map" incompatible with return type "KindN[MyClass[V, E, T], _UpdatedType, E, T]" in supertype "MappableN" [override] main:11: error: Argument 1 of "map" is incompatible with supertype "MappableN"; supertype defines the argument type as "Callable[[V], _UpdatedType]" [override] main:11: note: This violates the Liskov substitution principle main:11: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides returns-0.24.0/typesafety/test_interfaces/test_specific/000077500000000000000000000000001472312074000235025ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_specific/test_future/000077500000000000000000000000001472312074000260535ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_specific/test_future/test_futurebased_inheritance.yml000066400000000000000000000110321472312074000345140ustar00rootroot00000000000000- case: future_inheritance_correct1 disable_cache: false main: | from typing import Callable, TypeVar, Generator, Awaitable from returns.interfaces.specific.future import FutureBased1 from returns.primitives.hkt import SupportsKind1, Kind1 from returns.io import IO from returns.future import Future T = TypeVar('T') N = TypeVar('N') class MyClass( SupportsKind1['MyClass', T], FutureBased1[T], ): def __await__(self) -> Generator[None, None, IO[T]]: ... async def awaitable(self) -> IO[T]: ... def bind_future( self, function: Callable[[T], Future[N]], ) -> MyClass[N]: ... def bind_async( self, function: Callable[[T], Awaitable[Kind1['MyClass', N]]], ) -> MyClass[N]: ... def bind_async_future( self, function: Callable[[T], Awaitable[Future[N]]], ) -> MyClass[N]: ... 
@classmethod def from_future( cls, inner_value: Future[N], ) -> MyClass[N]: ... def test1(arg: int) -> Future[float]: ... async def test2(arg: float) -> Future[bool]: ... async def test3(arg: bool) -> MyClass[str]: ... x: Future[int] reveal_type(MyClass.from_future(x).bind_future(test1).bind_async_future(test2).bind_async(test3)) # N: Revealed type is "main.MyClass[builtins.str]" - case: future_inheritance_correct2 disable_cache: false main: | from typing import Callable, TypeVar, Generator, Any, Awaitable from returns.interfaces.specific.future import FutureBased2 from returns.primitives.hkt import SupportsKind2, Kind2 from returns.io import IOResult from returns.future import Future T = TypeVar('T') E = TypeVar('E') N = TypeVar('N') class MyClass( SupportsKind2['MyClass', T, E], FutureBased2[T, E], ): def __await__(self) -> Generator[None, None, IOResult[T, E]]: ... async def awaitable(self) -> IOResult[T, E]: ... def bind_future( self, function: Callable[[T], Future[N]], ) -> MyClass[N, E]: ... def bind_async( self, function: Callable[[T], Awaitable[Kind2['MyClass', N, E]]], ) -> MyClass[N, E]: ... def bind_async_future( self, function: Callable[[T], Awaitable[Future[N]]], ) -> MyClass[N, E]: ... @classmethod def from_future( cls, inner_value: Future[N], ) -> MyClass[N, Any]: ... def test1(arg: int) -> Future[float]: ... async def test2(arg: float) -> Future[bool]: ... async def test3(arg: bool) -> MyClass[str, bool]: ... x: MyClass[int, bool] reveal_type(x.bind_future(test1).bind_async_future(test2).bind_async(test3)) # N: Revealed type is "main.MyClass[builtins.str, builtins.bool]" - case: future_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.specific.future import FutureBased1 from returns.primitives.hkt import SupportsKind1 T = TypeVar('T') @final class MyClass( SupportsKind1['MyClass', T], FutureBased1[T], ): ... out: | main:8: error: Final class main.MyClass has abstract attributes "__await__", "apply", "awaitable", "bind", "bind_async", "bind_async_future", "bind_awaitable", "bind_future", "bind_io", "from_future", "from_io", "from_value", "map" [misc] - case: future_inheritance_wrong disable_cache: false main: | from typing import TypeVar, Generator from returns.interfaces.specific.future import FutureBased1 from returns.primitives.hkt import SupportsKind1 T = TypeVar('T') N = TypeVar('N') class MyClass( SupportsKind1['MyClass', T], FutureBased1[T], ): def __await__(self) -> Generator[None, None, T]: ... async def awaitable(self) -> None: ... 
out: | main:12: error: Return type "Generator[None, None, T]" of "__await__" incompatible with return type "Generator[Any, Any, IOLikeN[T, Never, Never]]" in supertype "AwaitableFutureN" [override] main:15: error: Return type "Coroutine[Any, Any, None]" of "awaitable" incompatible with return type "Coroutine[Any, Any, IOLikeN[T, Never, Never]]" in supertype "AwaitableFutureN" [override] returns-0.24.0/typesafety/test_interfaces/test_specific/test_future/test_futurelike_inheritance.yml000066400000000000000000000155251472312074000343750ustar00rootroot00000000000000- case: future_inheritance_correct1 disable_cache: false main: | from typing import Callable, TypeVar, Generator, Awaitable from returns.interfaces.specific.future import FutureLike1 from returns.primitives.hkt import SupportsKind1, Kind1 from returns.io import IO from returns.future import Future T = TypeVar('T') N = TypeVar('N') class MyClass( SupportsKind1['MyClass', T], FutureLike1[T], ): def bind_future( self, function: Callable[[T], Future[N]], ) -> MyClass[N]: ... def bind_async( self, function: Callable[[T], Awaitable[Kind1['MyClass', N]]], ) -> MyClass[N]: ... def bind_async_future( self, function: Callable[[T], Awaitable[Future[N]]], ) -> MyClass[N]: ... def bind_awaitable( self, function: Callable[[T], Awaitable[N]], ) -> MyClass[N]: ... @classmethod def from_future( cls, inner_value: Future[N], ) -> MyClass[N]: ... def test1(arg: int) -> Future[float]: ... async def test2(arg: float) -> Future[bool]: ... async def test3(arg: bool) -> MyClass[str]: ... x: Future[int] reveal_type(MyClass.from_future(x).bind_future(test1).bind_async_future(test2).bind_async(test3)) # N: Revealed type is "main.MyClass[builtins.str]" - case: future_inheritance_correct2 disable_cache: false main: | from typing import Callable, TypeVar, Generator, Any, Awaitable from returns.interfaces.specific.future import FutureLike2 from returns.primitives.hkt import SupportsKind2, Kind2 from returns.io import IOResult from returns.future import Future T = TypeVar('T') E = TypeVar('E') N = TypeVar('N') class MyClass( SupportsKind2['MyClass', T, E], FutureLike2[T, E], ): def bind_future( self, function: Callable[[T], Future[N]], ) -> MyClass[N, E]: ... def bind_async( self, function: Callable[[T], Awaitable[Kind2['MyClass', N, E]]], ) -> MyClass[N, E]: ... def bind_async_future( self, function: Callable[[T], Awaitable[Future[N]]], ) -> MyClass[N, E]: ... def bind_awaitable( self, function: Callable[[T], Awaitable[N]], ) -> MyClass[N, E]: ... @classmethod def from_future( cls, inner_value: Future[N], ) -> MyClass[N, Any]: ... def test1(arg: int) -> Future[float]: ... async def test2(arg: float) -> Future[bool]: ... async def test3(arg: bool) -> MyClass[str, bool]: ... x: MyClass[int, bool] reveal_type(x.bind_future(test1).bind_async_future(test2).bind_async(test3)) # N: Revealed type is "main.MyClass[builtins.str, builtins.bool]" - case: future_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.specific.future import FutureLike1 from returns.primitives.hkt import SupportsKind1 T = TypeVar('T') @final class MyClass( SupportsKind1['MyClass', T], FutureLike1[T], ): ... 
out: | main:8: error: Final class main.MyClass has abstract attributes "apply", "bind", "bind_async", "bind_async_future", "bind_awaitable", "bind_future", "bind_io", "from_future", "from_io", "from_value", "map" [misc] - case: future_inheritance_wrong disable_cache: false main: | from typing import Callable, TypeVar, Generator, Awaitable from returns.interfaces.specific.future import FutureLike1 from returns.primitives.hkt import SupportsKind1, Kind1 from returns.future import Future T = TypeVar('T') N = TypeVar('N') class MyClass( SupportsKind1['MyClass', T], FutureLike1[T], ): def bind_future( self, function: Callable[[T], Future[N]], ) -> MyClass[T]: ... def bind_async( self, function: Callable[[T], Awaitable[Kind1['MyClass', T]]], ) -> MyClass[T]: ... def bind_awaitable( self, function: Callable[[T], Awaitable[int]], ) -> MyClass[N]: ... def bind_async_future( self, function: Callable[[T], Awaitable[Future[T]]], ) -> MyClass[T]: ... def from_future( cls, inner_value: Future[T], ) -> MyClass[T]: ... out: | main:13: error: Return type "MyClass[T]" of "bind_future" incompatible with return type "KindN[MyClass[T], _UpdatedType, Never, Never]" in supertype "FutureLikeN" [override] main:19: error: Return type "MyClass[T]" of "bind_async" incompatible with return type "KindN[MyClass[T], _UpdatedType, Never, Never]" in supertype "FutureLikeN" [override] main:21: error: Argument 1 of "bind_async" is incompatible with supertype "FutureLikeN"; supertype defines the argument type as "Callable[[T], Awaitable[KindN[MyClass[T], _UpdatedType, Never, Never]]]" [override] main:21: note: This violates the Liskov substitution principle main:21: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides main:27: error: Argument 1 of "bind_awaitable" is incompatible with supertype "FutureLikeN"; supertype defines the argument type as "Callable[[T], Awaitable[_UpdatedType]]" [override] main:27: note: This violates the Liskov substitution principle main:27: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides main:31: error: Return type "MyClass[T]" of "bind_async_future" incompatible with return type "KindN[MyClass[T], _UpdatedType, Never, Never]" in supertype "FutureLikeN" [override] main:33: error: Argument 1 of "bind_async_future" is incompatible with supertype "FutureLikeN"; supertype defines the argument type as "Callable[[T], Awaitable[Future[_UpdatedType]]]" [override] main:33: note: This violates the Liskov substitution principle main:33: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides main:37: error: Return type "MyClass[T]" of "from_future" incompatible with return type "KindN[MyClass[T], _UpdatedType, Never, Never]" in supertype "FutureLikeN" [override] main:39: error: Argument 1 of "from_future" is incompatible with supertype "FutureLikeN"; supertype defines the argument type as "Future[_UpdatedType]" [override] main:39: note: This violates the Liskov substitution principle main:39: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides returns-0.24.0/typesafety/test_interfaces/test_specific/test_future_result/000077500000000000000000000000001472312074000274515ustar00rootroot00000000000000test_future_result_based.yml000066400000000000000000000065431472312074000352320ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_specific/test_future_result- case: future_result_inheritance_correct2 disable_cache: false main: | from typing 
import Callable, TypeVar, Any, Awaitable, Generator from returns.interfaces.specific.future_result import FutureResultBased2 from returns.primitives.hkt import SupportsKind2 from returns.future import FutureResult, Future from returns.io import IOResult T = TypeVar('T') E = TypeVar('E') N = TypeVar('N') I = TypeVar('I') class MyClass( SupportsKind2['MyClass', T, E], FutureResultBased2[T, E], ): def __await__(self) -> Generator[None, None, IOResult[T, E]]: ... async def awaitable(self) -> IOResult[T, E]: ... def bind_future_result( self, function: Callable[[T], FutureResult[N, E]], ) -> MyClass[N, E]: ... def bind_async_future_result( self, function: Callable[[T], Awaitable[FutureResult[N, E]]], ) -> MyClass[N, E]: ... @classmethod def from_future_result( cls, inner_value: FutureResult[N, I], ) -> MyClass[N, I]: ... @classmethod def from_failed_future( cls, inner_value: Future[I], ) -> MyClass[Any, I]: ... def test(a: int) -> FutureResult[float, str]: ... x: MyClass[int, str] reveal_type(x.bind_future_result(test)) # N: Revealed type is "main.MyClass[builtins.float, builtins.str]" - case: future_result_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.specific.future_result import FutureResultBased2 from returns.primitives.hkt import SupportsKind2 T = TypeVar('T') E = TypeVar('E') @final class MyClass( SupportsKind2['MyClass', T, E], FutureResultBased2[T, E], ): ... out: | main:9: error: Final class main.MyClass has abstract attributes "__await__", "alt", "apply", "awaitable", "bind", "bind_async", "bind_async_future", "bind_async_future_result", "bind_awaitable", "bind_future", "bind_future_result", "bind_io", "bind_ioresult", "bind_result", "compose_result", "from_failed_future", "from_failed_io", "from_failure", "from_future", "from_io", "from_ioresult", "from_result", "from_value", "lash", "map", "swap" [misc] - case: future_result_inheritance_wrong2 disable_cache: false main: | from typing import TypeVar, Generator from returns.interfaces.specific.future_result import FutureResultBased2 from returns.primitives.hkt import SupportsKind2 from returns.io import IOResult from returns.result import Result T = TypeVar('T') E = TypeVar('E') N = TypeVar('N') class MyClass( SupportsKind2['MyClass', T, E], FutureResultBased2[T, E], ): def __await__(self) -> Generator[None, None, Result[T, E]]: ... async def awaitable(self) -> IOResult[int, E]: ... 
out: | main:15: error: Return type "Generator[None, None, Result[T, E]]" of "__await__" incompatible with return type "Generator[Any, Any, IOLikeN[T, E, Never]]" in supertype "AwaitableFutureN" [override] main:18: error: Return type "Coroutine[Any, Any, IOResult[int, E]]" of "awaitable" incompatible with return type "Coroutine[Any, Any, IOLikeN[T, E, Never]]" in supertype "AwaitableFutureN" [override] test_future_result_like.yml000066400000000000000000000126741472312074000351020ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_specific/test_future_result- case: future_result_inheritance_correct2 disable_cache: false main: | from typing import Callable, TypeVar, Any, Awaitable, Generator from returns.interfaces.specific.future_result import FutureResultLike2 from returns.primitives.hkt import SupportsKind2 from returns.future import FutureResult, Future T = TypeVar('T') E = TypeVar('E') N = TypeVar('N') I = TypeVar('I') class MyClass( SupportsKind2['MyClass', T, E], FutureResultLike2[T, E], ): def bind_future_result( self, function: Callable[[T], FutureResult[N, E]], ) -> MyClass[N, E]: ... def bind_async_future_result( self, function: Callable[[T], Awaitable[FutureResult[N, E]]], ) -> MyClass[N, E]: ... @classmethod def from_future_result( cls, inner_value: FutureResult[N, I], ) -> MyClass[N, I]: ... @classmethod def from_failed_future( cls, inner_value: Future[I], ) -> MyClass[Any, I]: ... def test(a: int) -> FutureResult[float, str]: ... x: MyClass[int, str] reveal_type(x.bind_future_result(test)) # N: Revealed type is "main.MyClass[builtins.float, builtins.str]" - case: future_result_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.specific.future_result import FutureResultLike2 from returns.primitives.hkt import SupportsKind2 T = TypeVar('T') E = TypeVar('E') @final class MyClass( SupportsKind2['MyClass', T, E], FutureResultLike2[T, E], ): ... out: | main:9: error: Final class main.MyClass has abstract attributes "alt", "apply", "bind", "bind_async", "bind_async_future", "bind_async_future_result", "bind_awaitable", "bind_future", "bind_future_result", "bind_io", "bind_ioresult", "bind_result", "compose_result", "from_failed_future", "from_failed_io", "from_failure", "from_future", "from_io", "from_ioresult", "from_result", "from_value", "lash", "map", "swap" [misc] - case: future_result_inheritance_wrong2 disable_cache: false main: | from typing import Callable, TypeVar, Generator, Awaitable, Any from returns.interfaces.specific.future_result import FutureResultBased2 from returns.primitives.hkt import SupportsKind2 from returns.io import IOResult from returns.result import Result from returns.future import Future, FutureResult T = TypeVar('T') E = TypeVar('E') N = TypeVar('N') class MyClass( SupportsKind2['MyClass', T, E], FutureResultBased2[T, E], ): def bind_future_result( self, function: Callable[[T], Awaitable[FutureResult[N, E]]], ) -> MyClass[E, N]: ... def bind_async_future_result( self, function: Callable[[T], FutureResult[N, E]], ) -> MyClass[E, N]: ... def from_future_result( cls, inner_value: FutureResult[T, E], ) -> MyClass[T, E]: ... @classmethod def from_failed_future( cls, inner_value: Future[E], ) -> FutureResult[Any, E]: ... 
out: | main:16: error: Return type "MyClass[E, N]" of "bind_future_result" incompatible with return type "KindN[MyClass[T, E], _UpdatedType, E, Never]" in supertype "FutureResultLikeN" [override] main:17: error: Argument 1 of "bind_future_result" is incompatible with supertype "FutureResultLikeN"; supertype defines the argument type as "Callable[[T], FutureResult[_UpdatedType, E]]" [override] main:17: note: This violates the Liskov substitution principle main:17: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides main:21: error: Return type "MyClass[E, N]" of "bind_async_future_result" incompatible with return type "KindN[MyClass[T, E], _UpdatedType, E, Never]" in supertype "FutureResultLikeN" [override] main:22: error: Argument 1 of "bind_async_future_result" is incompatible with supertype "FutureResultLikeN"; supertype defines the argument type as "Callable[[T], Awaitable[FutureResult[_UpdatedType, E]]]" [override] main:22: note: This violates the Liskov substitution principle main:22: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides main:26: error: Return type "MyClass[T, E]" of "from_future_result" incompatible with return type "KindN[MyClass[T, E], _ValueType, _ErrorType, Never]" in supertype "FutureResultLikeN" [override] main:27: error: Argument 1 of "from_future_result" is incompatible with supertype "FutureResultLikeN"; supertype defines the argument type as "FutureResult[_ValueType, _ErrorType]" [override] main:27: note: This violates the Liskov substitution principle main:27: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides main:32: error: Return type "FutureResult[Any, E]" of "from_failed_future" incompatible with return type "KindN[MyClass[T, E], T, _ErrorType, Never]" in supertype "FutureResultLikeN" [override] main:33: error: Argument 1 of "from_failed_future" is incompatible with supertype "FutureResultLikeN"; supertype defines the argument type as "Future[_ErrorType]" [override] main:33: note: This violates the Liskov substitution principle main:33: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides returns-0.24.0/typesafety/test_interfaces/test_specific/test_io/000077500000000000000000000000001472312074000251505ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_specific/test_io/test_io_based.yml000066400000000000000000000010311472312074000304720ustar00rootroot00000000000000- case: io_based_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.specific.io import IOBased1 from returns.primitives.hkt import SupportsKind1 _ValueType = TypeVar('_ValueType') @final class MyClass( SupportsKind1['MyClass', _ValueType], IOBased1[_ValueType], ): ... 
out: | main:8: error: Final class main.MyClass has abstract attributes "apply", "bind", "bind_io", "equals", "from_io", "from_value", "map" [misc] returns-0.24.0/typesafety/test_interfaces/test_specific/test_io/test_io_like.yml000066400000000000000000000060441472312074000303510ustar00rootroot00000000000000- case: io_inheritance_correct disable_cache: false main: | from typing import Callable, TypeVar from returns.interfaces.specific.io import IOLike1 from returns.primitives.hkt import SupportsKind1 from returns.io import IO _ValueType = TypeVar('_ValueType') _NewValueType = TypeVar('_NewValueType') class MyClass( SupportsKind1['MyClass', _ValueType], IOLike1[_ValueType], ): def bind_io( self, function: Callable[[_ValueType], IO[_NewValueType]], ) -> MyClass[_NewValueType]: ... @classmethod def from_io( self, inner_value: IO[_NewValueType], ) -> MyClass[_NewValueType]: ... def test(a: int) -> IO[float]: ... x: IO[int] reveal_type(MyClass.from_io(x).bind_io(test)) # N: Revealed type is "main.MyClass[builtins.float]" - case: io_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.specific.io import IOLike1 from returns.primitives.hkt import SupportsKind1 _ValueType = TypeVar('_ValueType') @final class MyClass( SupportsKind1['MyClass', _ValueType], IOLike1[_ValueType], ): ... out: | main:8: error: Final class main.MyClass has abstract attributes "apply", "bind", "bind_io", "from_io", "from_value", "map" [misc] - case: io_inheritance_wrong disable_cache: false main: | from typing import Callable, TypeVar from returns.interfaces.specific.io import IOLike1 from returns.primitives.hkt import SupportsKind1 from returns.io import IO _ValueType = TypeVar('_ValueType') _NewValueType = TypeVar('_NewValueType') class MyClass( SupportsKind1['MyClass', _ValueType], IOLike1[_ValueType], ): def bind_io( self, function: Callable[[_ValueType], IO[_ValueType]], ) -> MyClass[_ValueType]: ... def from_io( self, inner_value: IO[_NewValueType], ) -> MyClass[_NewValueType]: ... 
out: | main:13: error: Return type "MyClass[_ValueType]" of "bind_io" incompatible with return type "KindN[MyClass[_ValueType], _UpdatedType, Never, Never]" in supertype "IOLikeN" [override] main:15: error: Argument 1 of "bind_io" is incompatible with supertype "IOLikeN"; supertype defines the argument type as "Callable[[_ValueType], IO[_UpdatedType]]" [override] main:15: note: This violates the Liskov substitution principle main:15: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides main:19: error: Signature of "from_io" incompatible with supertype "IOLikeN" [override] main:19: note: Superclass: main:19: note: def [_UpdatedType] from_io(cls, inner_value: IO[_UpdatedType]) -> KindN[MyClass[_ValueType], _UpdatedType, Never, Never] main:19: note: Subclass: main:19: note: def [_NewValueType] from_io(self, inner_value: IO[_NewValueType]) -> MyClass[_NewValueType] returns-0.24.0/typesafety/test_interfaces/test_specific/test_ioresult/000077500000000000000000000000001472312074000264075ustar00rootroot00000000000000test_ioresultbased_inheritance.yml000066400000000000000000000217531472312074000353400ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_specific/test_ioresult- case: ioresult_inheritance_correct2 disable_cache: false main: | from typing import Callable, TypeVar, Any from returns.interfaces.specific.ioresult import IOResultBased2 from returns.primitives.hkt import SupportsKind2 from returns.io import IO, IOResult _ValueType = TypeVar('_ValueType') _NewValueType = TypeVar('_NewValueType') # Result related: _ErrorType = TypeVar('_ErrorType') _NewErrorType = TypeVar('_NewErrorType') class MyClass( SupportsKind2['MyClass', _ValueType, _ErrorType], IOResultBased2[_ValueType, _ErrorType], ): def bind_ioresult( self, function: Callable[ [_ValueType], IOResult[_NewValueType, _ErrorType], ], ) -> MyClass[_NewValueType, _ErrorType]: ... def unwrap(self) -> IO[_ValueType]: ... def failure(self) -> IO[_ErrorType]: ... @classmethod def from_ioresult( self, inner_value: IOResult[_NewValueType, _NewErrorType], ) -> MyClass[_NewValueType, _NewErrorType]: ... @classmethod def from_failed_io( cls, inner_value: IO[_NewErrorType], ) -> MyClass[Any, _NewErrorType]: ... def test(a: int) -> IOResult[float, str]: ... x: IOResult[int, str] reveal_type(MyClass.from_ioresult(x).bind_ioresult(test)) # N: Revealed type is "main.MyClass[builtins.float, builtins.str]" - case: ioresult_inheritance_correct3 disable_cache: false main: | from typing import Callable, TypeVar, Any from returns.interfaces.specific.ioresult import IOResultBased3 from returns.primitives.hkt import SupportsKind3 from returns.io import IO, IOResult _ValueType = TypeVar('_ValueType') _NewValueType = TypeVar('_NewValueType') _T = TypeVar('_T') # Result related: _ErrorType = TypeVar('_ErrorType') _NewErrorType = TypeVar('_NewErrorType') class MyClass( SupportsKind3['MyClass', _ValueType, _ErrorType, _T], IOResultBased3[_ValueType, _ErrorType, _T], ): def bind_ioresult( self, function: Callable[ [_ValueType], IOResult[_NewValueType, _ErrorType], ], ) -> MyClass[_NewValueType, _ErrorType, _T]: ... def unwrap(self) -> IO[_ValueType]: ... def failure(self) -> IO[_ErrorType]: ... @classmethod def from_ioresult( self, inner_value: IOResult[_NewValueType, _NewErrorType], ) -> MyClass[_NewValueType, _NewErrorType, Any]: ... @classmethod def from_failed_io( cls, inner_value: IO[_NewErrorType], ) -> MyClass[Any, _NewErrorType, Any]: ... def test(a: int) -> IOResult[float, str]: ... 
x: IOResult[int, str] reveal_type(MyClass.from_ioresult(x).bind_ioresult(test)) # N: Revealed type is "main.MyClass[builtins.float, builtins.str, Any]" - case: ioresult_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.specific.ioresult import IOResultBased2 from returns.primitives.hkt import SupportsKind2 _ValueType = TypeVar('_ValueType') _ErrorType = TypeVar('_ErrorType') @final class MyClass( SupportsKind2['MyClass', _ValueType, _ErrorType], IOResultBased2[_ValueType, _ErrorType], ): ... out: | main:9: error: Final class main.MyClass has abstract attributes "alt", "apply", "bind", "bind_io", "bind_ioresult", "bind_result", "compose_result", "equals", "failure", "from_failed_io", "from_failure", "from_io", "from_ioresult", "from_result", "from_value", "lash", "map", "swap", "unwrap" [misc] - case: ioresult_inheritance_wrong2 disable_cache: false # TODO: unskip, mypy fails sometimes on different python versions skip: True main: | from typing import Callable, TypeVar, Any from returns.interfaces.specific.ioresult import IOResultBased2 from returns.primitives.hkt import SupportsKind2 from returns.io import IO, IOResult from returns.result import Result _ValueType = TypeVar('_ValueType') _NewValueType = TypeVar('_NewValueType') # Result related: _ErrorType = TypeVar('_ErrorType') _NewErrorType = TypeVar('_NewErrorType') class MyClass( SupportsKind2['MyClass', _ValueType, _ErrorType], IOResultBased2[_ValueType, _ErrorType], ): def bind_ioresult( self, function: Callable[ [_ValueType], Result[_NewValueType, _ErrorType], ], ) -> int: ... def from_ioresult( self, inner_value: IOResult[_NewValueType, _NewErrorType], ) -> MyClass[_NewValueType, _NewErrorType]: ... @classmethod def from_failed_io( cls, inner_value: IO[_NewErrorType], ) -> MyClass[_NewErrorType, Any]: ... 
out: | main:18: error: Argument 1 of "bind_ioresult" is incompatible with supertype "IOResultLikeN"; supertype defines the argument type as "Callable[[_ValueType], IOResult[_UpdatedType, _ErrorType]]" main:18: note: This violates the Liskov substitution principle main:18: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides main:18: error: Return type "int" of "bind_ioresult" incompatible with return type "KindN[MyClass[_ValueType, _ErrorType], _UpdatedType, _ErrorType, Never]" in supertype "IOResultLikeN" main:27: error: Signature of "from_ioresult" incompatible with supertype "IOResultLikeN" main:27: note: Superclass: main:27: note: def [_ValueType, _ErrorType] from_ioresult(cls, inner_value: IOResult[_ValueType, _ErrorType]) -> KindN[MyClass[_ValueType, _ErrorType], _ValueType, _ErrorType, Never] main:27: note: Subclass: main:27: note: def [_NewValueType, _NewErrorType] from_ioresult(self, inner_value: IOResult[_NewValueType, _NewErrorType]) -> MyClass[_NewValueType, _NewErrorType] main:33: error: Signature of "from_failed_io" incompatible with supertype "IOResultLikeN" main:33: note: Superclass: main:33: note: def [_ErrorType] from_failed_io(cls, inner_value: IO[_ErrorType]) -> KindN[MyClass[_ValueType, _ErrorType], _ValueType, _ErrorType, Never] main:33: note: Subclass: main:33: note: @classmethod main:33: note: def [_NewErrorType] from_failed_io(cls, inner_value: IO[_NewErrorType]) -> MyClass[_NewErrorType, Any] - case: ioresult_inheritance_wrong3 disable_cache: false main: | from typing import Callable, TypeVar, Any from returns.interfaces.specific.ioresult import IOResultBased3 from returns.primitives.hkt import SupportsKind3 from returns.io import IO, IOResult _ValueType = TypeVar('_ValueType') _NewValueType = TypeVar('_NewValueType') _T = TypeVar('_T') # Result related: _ErrorType = TypeVar('_ErrorType') _NewErrorType = TypeVar('_NewErrorType') class MyClass( SupportsKind3['MyClass', _ValueType, _ErrorType, _T], IOResultBased3[_ValueType, _ErrorType, _T], ): def bind_ioresult( self, function: Callable[ [_ValueType], IOResult[_NewValueType, _ErrorType], ], ) -> MyClass[_NewValueType, _ErrorType, int]: ... def unwrap(self) -> IO[_ErrorType]: ... def failure(self) -> IO[_ValueType]: ... @classmethod def from_ioresult( self, inner_value: IOResult[_NewValueType, _NewErrorType], ) -> MyClass[_NewValueType, _NewErrorType, str]: ... @classmethod def from_failed_io( cls, inner_value: IO[_NewErrorType], ) -> MyClass[Any, _NewErrorType, Exception]: ... 
out: | main:18: error: Return type "MyClass[_NewValueType, _ErrorType, int]" of "bind_ioresult" incompatible with return type "KindN[MyClass[_ValueType, _ErrorType, _T], _UpdatedType, _ErrorType, _T]" in supertype "IOResultLikeN" [override] main:27: error: Return type "IO[_ErrorType]" of "unwrap" incompatible with return type "IO[_ValueType]" in supertype "Unwrappable" [override] main:30: error: Return type "IO[_ValueType]" of "failure" incompatible with return type "IO[_ErrorType]" in supertype "Unwrappable" [override] main:34: error: Return type "MyClass[_NewValueType, _NewErrorType, str]" of "from_ioresult" incompatible with return type "KindN[MyClass[_ValueType@MyClass, _ErrorType@MyClass, _T], _ValueType@from_ioresult, _ErrorType@from_ioresult, _T]" in supertype "IOResultLikeN" [override] main:40: error: Return type "MyClass[Any, _NewErrorType, Exception]" of "from_failed_io" incompatible with return type "KindN[MyClass[_ValueType, _ErrorType@MyClass, _T], _ValueType, _ErrorType@from_failed_io, _T]" in supertype "IOResultLikeN" [override] test_ioresultlike_inheritance.yml000066400000000000000000000204511472312074000352000ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_specific/test_ioresult- case: ioresult_inheritance_correct2 disable_cache: false main: | from typing import Callable, TypeVar, Any from returns.interfaces.specific.ioresult import IOResultLike2 from returns.primitives.hkt import SupportsKind2 from returns.io import IO, IOResult _ValueType = TypeVar('_ValueType') _NewValueType = TypeVar('_NewValueType') # Result related: _ErrorType = TypeVar('_ErrorType') _NewErrorType = TypeVar('_NewErrorType') class MyClass( SupportsKind2['MyClass', _ValueType, _ErrorType], IOResultLike2[_ValueType, _ErrorType], ): def bind_ioresult( self, function: Callable[ [_ValueType], IOResult[_NewValueType, _ErrorType], ], ) -> MyClass[_NewValueType, _ErrorType]: ... @classmethod def from_ioresult( self, inner_value: IOResult[_NewValueType, _NewErrorType], ) -> MyClass[_NewValueType, _NewErrorType]: ... @classmethod def from_failed_io( cls, inner_value: IO[_NewErrorType], ) -> MyClass[Any, _NewErrorType]: ... def test(a: int) -> IOResult[float, str]: ... x: IOResult[int, str] reveal_type(MyClass.from_ioresult(x).bind_ioresult(test)) # N: Revealed type is "main.MyClass[builtins.float, builtins.str]" - case: ioresult_inheritance_correct3 disable_cache: false main: | from typing import Callable, TypeVar, Any from returns.interfaces.specific.ioresult import IOResultLike3 from returns.primitives.hkt import SupportsKind3 from returns.io import IO, IOResult _ValueType = TypeVar('_ValueType') _NewValueType = TypeVar('_NewValueType') _T = TypeVar('_T') # Result related: _ErrorType = TypeVar('_ErrorType') _NewErrorType = TypeVar('_NewErrorType') class MyClass( SupportsKind3['MyClass', _ValueType, _ErrorType, _T], IOResultLike3[_ValueType, _ErrorType, _T], ): def bind_ioresult( self, function: Callable[ [_ValueType], IOResult[_NewValueType, _ErrorType], ], ) -> MyClass[_NewValueType, _ErrorType, _T]: ... @classmethod def from_ioresult( self, inner_value: IOResult[_NewValueType, _NewErrorType], ) -> MyClass[_NewValueType, _NewErrorType, Any]: ... @classmethod def from_failed_io( cls, inner_value: IO[_NewErrorType], ) -> MyClass[Any, _NewErrorType, Any]: ... def test(a: int) -> IOResult[float, str]: ... 
x: IOResult[int, str] reveal_type(MyClass.from_ioresult(x).bind_ioresult(test)) # N: Revealed type is "main.MyClass[builtins.float, builtins.str, Any]" - case: ioresult_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.specific.ioresult import IOResultLike2 from returns.primitives.hkt import SupportsKind2 _ValueType = TypeVar('_ValueType') _ErrorType = TypeVar('_ErrorType') @final class MyClass( SupportsKind2['MyClass', _ValueType, _ErrorType], IOResultLike2[_ValueType, _ErrorType], ): ... out: | main:9: error: Final class main.MyClass has abstract attributes "alt", "apply", "bind", "bind_io", "bind_ioresult", "bind_result", "compose_result", "from_failed_io", "from_failure", "from_io", "from_ioresult", "from_result", "from_value", "lash", "map", "swap" [misc] - case: ioresult_inheritance_wrong # TODO: unskip, mypy fails sometimes on different python versions skip: True disable_cache: false main: | from typing import Callable, TypeVar, Any from returns.interfaces.specific.ioresult import IOResultLike2 from returns.primitives.hkt import SupportsKind2 from returns.io import IO, IOResult from returns.result import Result _ValueType = TypeVar('_ValueType') _NewValueType = TypeVar('_NewValueType') # Result related: _ErrorType = TypeVar('_ErrorType') _NewErrorType = TypeVar('_NewErrorType') class MyClass( SupportsKind2['MyClass', _ValueType, _ErrorType], IOResultLike2[_ValueType, _ErrorType], ): def bind_ioresult( self, function: Callable[ [_ValueType], Result[_NewValueType, _ErrorType], ], ) -> int: ... def from_ioresult( self, inner_value: IOResult[_NewValueType, _NewErrorType], ) -> MyClass[_NewValueType, _NewErrorType]: ... @classmethod def from_failed_io( cls, inner_value: IO[_NewErrorType], ) -> MyClass[_NewErrorType, Any]: ... 
out: | main:18: error: Argument 1 of "bind_ioresult" is incompatible with supertype "IOResultLikeN"; supertype defines the argument type as "Callable[[_ValueType], IOResult[_UpdatedType, _ErrorType]]" main:18: note: This violates the Liskov substitution principle main:18: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides main:18: error: Return type "int" of "bind_ioresult" incompatible with return type "KindN[MyClass[_ValueType, _ErrorType], _UpdatedType, _ErrorType, Never]" in supertype "IOResultLikeN" main:27: error: Signature of "from_ioresult" incompatible with supertype "IOResultLikeN" main:27: note: Superclass: main:27: note: def [_ValueType, _ErrorType] from_ioresult(cls, inner_value: IOResult[_ValueType, _ErrorType]) -> KindN[MyClass[_ValueType, _ErrorType], _ValueType, _ErrorType, Never] main:27: note: Subclass: main:27: note: def [_NewValueType, _NewErrorType] from_ioresult(self, inner_value: IOResult[_NewValueType, _NewErrorType]) -> MyClass[_NewValueType, _NewErrorType] main:33: error: Signature of "from_failed_io" incompatible with supertype "IOResultLikeN" main:33: note: Superclass: main:33: note: def [_ErrorType] from_failed_io(cls, inner_value: IO[_ErrorType]) -> KindN[MyClass[_ValueType, _ErrorType], _ValueType, _ErrorType, Never] main:33: note: Subclass: main:33: note: @classmethod main:33: note: def [_NewErrorType] from_failed_io(cls, inner_value: IO[_NewErrorType]) -> MyClass[_NewErrorType, Any] - case: ioresult_inheritance_wrong3 disable_cache: false main: | from typing import Callable, TypeVar, Any from returns.interfaces.specific.ioresult import IOResultLike3 from returns.primitives.hkt import SupportsKind3 from returns.io import IO, IOResult _ValueType = TypeVar('_ValueType') _NewValueType = TypeVar('_NewValueType') _T = TypeVar('_T') # Result related: _ErrorType = TypeVar('_ErrorType') _NewErrorType = TypeVar('_NewErrorType') class MyClass( SupportsKind3['MyClass', _ValueType, _ErrorType, _T], IOResultLike3[_ValueType, _ErrorType, _T], ): def bind_ioresult( self, function: Callable[ [_ValueType], IOResult[_NewValueType, _ErrorType], ], ) -> MyClass[_NewValueType, _ErrorType, int]: ... @classmethod def from_ioresult( self, inner_value: IOResult[_NewValueType, _NewErrorType], ) -> MyClass[_NewValueType, _NewErrorType, str]: ... @classmethod def from_failed_io( cls, inner_value: IO[_NewErrorType], ) -> MyClass[Any, _NewErrorType, Exception]: ... 
out: | main:18: error: Return type "MyClass[_NewValueType, _ErrorType, int]" of "bind_ioresult" incompatible with return type "KindN[MyClass[_ValueType, _ErrorType, _T], _UpdatedType, _ErrorType, _T]" in supertype "IOResultLikeN" [override] main:28: error: Return type "MyClass[_NewValueType, _NewErrorType, str]" of "from_ioresult" incompatible with return type "KindN[MyClass[_ValueType@MyClass, _ErrorType@MyClass, _T], _ValueType@from_ioresult, _ErrorType@from_ioresult, _T]" in supertype "IOResultLikeN" [override] main:34: error: Return type "MyClass[Any, _NewErrorType, Exception]" of "from_failed_io" incompatible with return type "KindN[MyClass[_ValueType, _ErrorType@MyClass, _T], _ValueType, _ErrorType@from_failed_io, _T]" in supertype "IOResultLikeN" [override] returns-0.24.0/typesafety/test_interfaces/test_specific/test_maybe/000077500000000000000000000000001472312074000256365ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_specific/test_maybe/test_maybe_based.yml000066400000000000000000000037721472312074000316640ustar00rootroot00000000000000- case: maybe_based_inheritance_correct disable_cache: false main: | from typing import TypeVar, Callable, Union from returns.interfaces.specific.maybe import MaybeBased2 from returns.primitives.hkt import SupportsKind2 V = TypeVar('V') N = TypeVar('N') class MyClass(SupportsKind2['MyClass', V, None], MaybeBased2[V, None]): def or_else_call( self, function: Callable[[], N], ) -> Union[V, N]: ... - case: maybe_based_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.specific.maybe import MaybeBased2 from returns.primitives.hkt import SupportsKind2 V = TypeVar('V') @final class MyClass(SupportsKind2['MyClass', V, None], MaybeBased2[V, None]): ... out: | main:8: error: Final class main.MyClass has abstract attributes "apply", "bind", "bind_optional", "empty", "equals", "failure", "from_optional", "from_value", "lash", "map", "or_else_call", "unwrap" [misc] - case: maybe_based_inheritance_wrong disable_cache: false main: | from typing import TypeVar, Callable, Union from returns.interfaces.specific.maybe import MaybeBased2 from returns.primitives.hkt import SupportsKind2 V = TypeVar('V') N = TypeVar('N') class MyClass(SupportsKind2['MyClass', V, None], MaybeBased2[V, None]): def or_else_call( self, function: Callable[[], V], ) -> Union[None, V]: ... out: | main:9: error: Return type "Optional[V]" of "or_else_call" incompatible with return type "Union[V, _ValueType]" in supertype "MaybeBasedN" [override] main:11: error: Argument 1 of "or_else_call" is incompatible with supertype "MaybeBasedN"; supertype defines the argument type as "Callable[[], _ValueType]" [override] main:11: note: This violates the Liskov substitution principle main:11: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides returns-0.24.0/typesafety/test_interfaces/test_specific/test_maybe/test_maybe_like.yml000066400000000000000000000044121472312074000315220ustar00rootroot00000000000000- case: maybe_like_inheritance_correct disable_cache: false main: | from typing import TypeVar, Callable, Optional from returns.interfaces.specific.maybe import MaybeLike2 from returns.primitives.hkt import SupportsKind2 V = TypeVar('V') N = TypeVar('N') class MyClass(SupportsKind2['MyClass', V, None], MaybeLike2[V, None]): def bind_optional( self, function: Callable[[V], Optional[N]], ) -> MyClass[N]: ... @classmethod def from_optional(cls, inner_value: Optional[N]) -> MyClass[N]: ... 
- case: maybe_like_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.specific.maybe import MaybeLike2 from returns.primitives.hkt import SupportsKind2 V = TypeVar('V') @final class MyClass(SupportsKind2['MyClass', V, None], MaybeLike2[V, None]): ... out: | main:8: error: Final class main.MyClass has abstract attributes "apply", "bind", "bind_optional", "empty", "from_optional", "from_value", "lash", "map" [misc] - case: maybe_like_inheritance_wrong disable_cache: false main: | from typing import TypeVar, Callable, Optional from returns.interfaces.specific.maybe import MaybeLike2 from returns.primitives.hkt import SupportsKind2 V = TypeVar('V') N = TypeVar('N') class MyClass(SupportsKind2['MyClass', V, None], MaybeLike2[V, None]): def bind_optional( self, function: Callable[[V], N], ) -> MyClass[Optional[N]]: ... def from_optional(self, inner_value: Optional[N]) -> MyClass[N]: ... out: | main:9: error: Return type "MyClass[Optional[N]]" of "bind_optional" incompatible with return type "KindN[MyClass[V], _UpdatedType, None, Never]" in supertype "MaybeLikeN" [override] main:15: error: Signature of "from_optional" incompatible with supertype "MaybeLikeN" [override] main:15: note: Superclass: main:15: note: def [_ValueType] from_optional(cls, inner_value: Optional[_ValueType]) -> KindN[MyClass[V], _ValueType, None, Never] main:15: note: Subclass: main:15: note: def [N] from_optional(self, inner_value: Optional[N]) -> MyClass[N] returns-0.24.0/typesafety/test_interfaces/test_specific/test_reader/000077500000000000000000000000001472312074000260035ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_specific/test_reader/test_reader_based2.yml000066400000000000000000000131041472312074000322460ustar00rootroot00000000000000- case: reader_inheritance_correct disable_cache: false main: | from typing import Callable, TypeVar, ClassVar from returns.interfaces.specific.reader import ReaderBased2 from returns.primitives.hkt import SupportsKind2 from returns.context import Reader, NoDeps _EnvType = TypeVar('_EnvType', contravariant=True) _NewEnvType = TypeVar('_NewEnvType') _ReturnType = TypeVar('_ReturnType', covariant=True) _NewReturnType = TypeVar('_NewReturnType') class MyClass( # type: ignore[type-var] SupportsKind2['MyClass', _ReturnType, _EnvType], ReaderBased2[_ReturnType, _EnvType], ): no_args: ClassVar[NoDeps] = object() def __call__(self, deps: _EnvType) -> _ReturnType: ... def bind_context( self, function: Callable[ [_ReturnType], Reader[_NewReturnType, _EnvType], ], ) -> MyClass[_NewReturnType, _EnvType]: ... def modify_env( self, function: Callable[[_NewEnvType], _EnvType], ) -> MyClass[_ReturnType, _NewEnvType]: ... @classmethod def ask(cls) -> MyClass[_EnvType, _EnvType]: ... @classmethod def from_context( self, inner_value: Reader[_NewReturnType, _NewEnvType], ) -> MyClass[_NewReturnType, _NewEnvType]: ... def test(a: int) -> Reader[float, str]: ... x: Reader[int, str] reveal_type(MyClass.from_context(x).bind_context(test)) # N: Revealed type is "main.MyClass[builtins.float, builtins.str]" - case: reader_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.specific.reader import ReaderBased2 from returns.primitives.hkt import SupportsKind2 _EnvType = TypeVar('_EnvType') _ReturnType = TypeVar('_ReturnType') @final class MyClass( SupportsKind2['MyClass', _ReturnType, _EnvType], ReaderBased2[_ReturnType, _EnvType], ): ... 
out: | main:9: error: Final class main.MyClass has abstract attributes "__call__", "apply", "ask", "bind", "bind_context", "from_context", "from_value", "map", "modify_env", "no_args" [misc] - case: reader_inheritance_wrong disable_cache: false main: | from typing import Callable, TypeVar, ClassVar from returns.interfaces.specific.reader import ReaderBased2 from returns.primitives.hkt import SupportsKind2 from returns.context import Reader, NoDeps _EnvType = TypeVar('_EnvType', contravariant=True) _NewEnvType = TypeVar('_NewEnvType') _ReturnType = TypeVar('_ReturnType', covariant=True) _NewReturnType = TypeVar('_NewReturnType') class MyClass( # type: ignore[type-var] SupportsKind2['MyClass', _ReturnType, _EnvType], ReaderBased2[_ReturnType, _EnvType], ): def __call__(self, deps: _ReturnType) -> _EnvType: ... def bind_context( self, function: Callable[ [_ReturnType], Reader[_NewReturnType, _EnvType], ], ) -> MyClass[_ReturnType, _EnvType]: ... def modify_env( self, function: Callable[[_EnvType], _NewEnvType], ) -> MyClass[_ReturnType, _NewEnvType]: ... @classmethod def ask(cls) -> MyClass[_ReturnType, _ReturnType]: ... @classmethod def from_context( self, inner_value: Reader[_ReturnType, _EnvType], ) -> MyClass[_ReturnType, _EnvType]: ... out: | main:15: error: Return type "_EnvType" of "__call__" incompatible with return type "_ReturnType" in supertype "Contextable" [override] main:15: error: Argument 1 of "__call__" is incompatible with supertype "Contextable"; supertype defines the argument type as "_EnvType" [override] main:15: note: This violates the Liskov substitution principle main:15: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides main:15: error: Cannot use a covariant type variable as a parameter [misc] main:15: error: Cannot use a contravariant type variable as return type [misc] main:18: error: Return type "MyClass[_ReturnType, _EnvType]" of "bind_context" incompatible with return type "KindN[MyClass[_ReturnType, _EnvType], _UpdatedType, _EnvType, Any]" in supertype "ReaderLike2" [override] main:29: error: Argument 1 of "modify_env" is incompatible with supertype "ReaderLike2"; supertype defines the argument type as "Callable[[_UpdatedType], _EnvType]" [override] main:29: note: This violates the Liskov substitution principle main:29: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides main:34: error: Return type "MyClass[_ReturnType, _ReturnType]" of "ask" incompatible with return type "KindN[MyClass[_ReturnType, _EnvType], _EnvType, _EnvType, Any]" in supertype "ReaderLike2" [override] main:38: error: Return type "MyClass[_ReturnType, _EnvType@MyClass]" of "from_context" incompatible with return type "KindN[MyClass[_ReturnType, _EnvType@MyClass], _ValueType, _EnvType@from_context, Any]" in supertype "ReaderLike2" [override] main:39: error: Argument 1 of "from_context" is incompatible with supertype "ReaderLike2"; supertype defines the argument type as "RequiresContext[_ValueType, _EnvType]" [override] main:39: note: This violates the Liskov substitution principle main:39: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides returns-0.24.0/typesafety/test_interfaces/test_specific/test_reader/test_reader_like2.yml000066400000000000000000000114471472312074000321240ustar00rootroot00000000000000- case: reader_inheritance_correct disable_cache: false main: | from typing import Callable, TypeVar, ClassVar from returns.interfaces.specific.reader import ReaderLike2 from 
returns.primitives.hkt import SupportsKind2 from returns.context import Reader, NoDeps _EnvType = TypeVar('_EnvType', contravariant=True) _NewEnvType = TypeVar('_NewEnvType') _ReturnType = TypeVar('_ReturnType', covariant=True) _NewReturnType = TypeVar('_NewReturnType') class MyClass( # type: ignore[type-var] SupportsKind2['MyClass', _ReturnType, _EnvType], ReaderLike2[_ReturnType, _EnvType], ): no_args: ClassVar[NoDeps] = object() def bind_context( self, function: Callable[ [_ReturnType], Reader[_NewReturnType, _EnvType], ], ) -> MyClass[_NewReturnType, _EnvType]: ... def modify_env( self, function: Callable[[_NewEnvType], _EnvType], ) -> MyClass[_ReturnType, _NewEnvType]: ... @classmethod def ask(cls) -> MyClass[_EnvType, _EnvType]: ... @classmethod def from_context( self, inner_value: Reader[_NewReturnType, _NewEnvType], ) -> MyClass[_NewReturnType, _NewEnvType]: ... def test(a: int) -> Reader[float, str]: ... x: Reader[int, str] reveal_type(MyClass.from_context(x).bind_context(test)) # N: Revealed type is "main.MyClass[builtins.float, builtins.str]" - case: reader_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.specific.reader import ReaderLike2 from returns.primitives.hkt import SupportsKind2 _EnvType = TypeVar('_EnvType') _ReturnType = TypeVar('_ReturnType') @final class MyClass( SupportsKind2['MyClass', _ReturnType, _EnvType], ReaderLike2[_ReturnType, _EnvType], ): ... out: | main:9: error: Final class main.MyClass has abstract attributes "apply", "ask", "bind", "bind_context", "from_context", "from_value", "map", "modify_env", "no_args" [misc] - case: reader_inheritance_wrong disable_cache: false main: | from typing import Callable, TypeVar, ClassVar from returns.interfaces.specific.reader import ReaderLike2 from returns.primitives.hkt import SupportsKind2 from returns.context import Reader, NoDeps _EnvType = TypeVar('_EnvType', contravariant=True) _NewEnvType = TypeVar('_NewEnvType') _ReturnType = TypeVar('_ReturnType', covariant=True) _NewReturnType = TypeVar('_NewReturnType') class MyClass( # type: ignore[type-var] SupportsKind2['MyClass', _ReturnType, _EnvType], ReaderLike2[_ReturnType, _EnvType], ): def bind_context( self, function: Callable[ [_ReturnType], Reader[_NewReturnType, _EnvType], ], ) -> MyClass[_ReturnType, _EnvType]: ... def modify_env( self, function: Callable[[_EnvType], _NewEnvType], ) -> MyClass[_ReturnType, _NewEnvType]: ... @classmethod def ask(cls) -> MyClass[_ReturnType, _ReturnType]: ... @classmethod def from_context( self, inner_value: Reader[_ReturnType, _EnvType], ) -> MyClass[_ReturnType, _EnvType]: ... 
out: | main:15: error: Return type "MyClass[_ReturnType, _EnvType]" of "bind_context" incompatible with return type "KindN[MyClass[_ReturnType, _EnvType], _UpdatedType, _EnvType, Any]" in supertype "ReaderLike2" [override] main:26: error: Argument 1 of "modify_env" is incompatible with supertype "ReaderLike2"; supertype defines the argument type as "Callable[[_UpdatedType], _EnvType]" [override] main:26: note: This violates the Liskov substitution principle main:26: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides main:31: error: Return type "MyClass[_ReturnType, _ReturnType]" of "ask" incompatible with return type "KindN[MyClass[_ReturnType, _EnvType], _EnvType, _EnvType, Any]" in supertype "ReaderLike2" [override] main:35: error: Return type "MyClass[_ReturnType, _EnvType@MyClass]" of "from_context" incompatible with return type "KindN[MyClass[_ReturnType, _EnvType@MyClass], _ValueType, _EnvType@from_context, Any]" in supertype "ReaderLike2" [override] main:36: error: Argument 1 of "from_context" is incompatible with supertype "ReaderLike2"; supertype defines the argument type as "RequiresContext[_ValueType, _EnvType]" [override] main:36: note: This violates the Liskov substitution principle main:36: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides returns-0.24.0/typesafety/test_interfaces/test_specific/test_reader/test_reader_like3.yml000066400000000000000000000134621472312074000321240ustar00rootroot00000000000000- case: reader_inheritance_correct disable_cache: false main: | from typing import Callable, TypeVar, ClassVar, Union, Any from returns.interfaces.specific.reader import ReaderLike3 from returns.primitives.hkt import SupportsKind3 from returns.context import Reader, NoDeps _EnvType = TypeVar('_EnvType', contravariant=True) _NewEnvType = TypeVar('_NewEnvType') _ValueType = TypeVar('_ValueType', covariant=True) _NewValueType = TypeVar('_NewValueType') _ErrorType = TypeVar('_ErrorType', covariant=True) class MyClass( # type: ignore[type-var] SupportsKind3['MyClass', _ValueType, _ErrorType, _EnvType], ReaderLike3[_ValueType, _ErrorType, _EnvType], ): no_args: ClassVar[NoDeps] = object() def __call__(self, deps: _EnvType) -> Union[_ValueType, _ErrorType]: ... def bind_context( self, function: Callable[ [_ValueType], Reader[_NewValueType, _EnvType], ], ) -> MyClass[_NewValueType, _ErrorType, _EnvType]: ... def modify_env( self, function: Callable[[_NewEnvType], _EnvType], ) -> 'MyClass[_ValueType, _ErrorType, _NewEnvType]': ... @classmethod def ask(cls) -> 'MyClass[_EnvType, _ErrorType, _EnvType]': ... @classmethod def from_context( self, inner_value: Reader[_NewValueType, _NewEnvType], ) -> MyClass[_NewValueType, Any, _NewEnvType]: ... def test(a: int) -> Reader[float, str]: ... x: Reader[int, str] reveal_type(MyClass.from_context(x).bind_context(test)) # N: Revealed type is "main.MyClass[builtins.float, Any, builtins.str]" - case: reader_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.specific.reader import ReaderLike3 from returns.primitives.hkt import SupportsKind3 _EnvType = TypeVar('_EnvType') _ValueType = TypeVar('_ValueType') _ErrorType = TypeVar('_ErrorType') @final class MyClass( SupportsKind3['MyClass', _ValueType, _ErrorType, _EnvType], ReaderLike3[_ValueType, _ErrorType, _EnvType], ): ... 
out: | main:10: error: Final class main.MyClass has abstract attributes "apply", "ask", "bind", "bind_context", "from_context", "from_value", "map", "modify_env", "no_args" [misc] - case: reader_inheritance_wrong disable_cache: false main: | from typing import Callable, TypeVar, ClassVar, Any from returns.interfaces.specific.reader import ReaderLike3 from returns.primitives.hkt import SupportsKind3 from returns.context import Reader _EnvType = TypeVar('_EnvType', contravariant=True) _NewEnvType = TypeVar('_NewEnvType') _ValueType = TypeVar('_ValueType', covariant=True) _NewValueType = TypeVar('_NewValueType') _ErrorType = TypeVar('_ErrorType', covariant=True) class MyClass( # type: ignore[type-var] SupportsKind3['MyClass', _ValueType, _ErrorType, _EnvType], ReaderLike3[_ValueType, _ErrorType, _EnvType], ): def __call__(self, deps: _ValueType) -> Any: ... def bind_context( self, function: Callable[ [_ValueType], Reader[_ValueType, _EnvType], ], ) -> MyClass[_ValueType, _ErrorType, _EnvType]: ... def modify_env( self, function: Callable[[_EnvType], _NewEnvType], ) -> 'MyClass[_ValueType, _ErrorType, _NewEnvType]': ... @classmethod def ask(cls) -> 'MyClass[_ValueType, _ErrorType, _EnvType]': ... @classmethod def from_context( self, inner_value: Reader[_ValueType, _EnvType], ) -> MyClass[_ValueType, Exception, _EnvType]: ... out: | main:16: error: Cannot use a covariant type variable as a parameter [misc] main:19: error: Return type "MyClass[_ValueType, _ErrorType, _EnvType]" of "bind_context" incompatible with return type "KindN[MyClass[_ValueType, _ErrorType, _EnvType], _UpdatedType, _ErrorType, _EnvType]" in supertype "ReaderLike3" [override] main:21: error: Argument 1 of "bind_context" is incompatible with supertype "ReaderLike3"; supertype defines the argument type as "Callable[[_ValueType], RequiresContext[_UpdatedType, _EnvType]]" [override] main:21: note: This violates the Liskov substitution principle main:21: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides main:30: error: Argument 1 of "modify_env" is incompatible with supertype "ReaderLike3"; supertype defines the argument type as "Callable[[_UpdatedType], _EnvType]" [override] main:30: note: This violates the Liskov substitution principle main:30: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides main:35: error: Return type "MyClass[_ValueType, _ErrorType, _EnvType]" of "ask" incompatible with return type "KindN[MyClass[_ValueType, _ErrorType, _EnvType], _EnvType, _ErrorType, _EnvType]" in supertype "ReaderLike3" [override] main:39: error: Return type "MyClass[_ValueType@MyClass, Exception, _EnvType@MyClass]" of "from_context" incompatible with return type "KindN[MyClass[_ValueType@MyClass, _ErrorType, _EnvType@MyClass], _ValueType@from_context, _ErrorType, _EnvType@from_context]" in supertype "ReaderLike3" [override] main:40: error: Argument 1 of "from_context" is incompatible with supertype "ReaderLike3"; supertype defines the argument type as "RequiresContext[_ValueType, _EnvType]" [override] main:40: note: This violates the Liskov substitution principle main:40: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides 
returns-0.24.0/typesafety/test_interfaces/test_specific/test_reader_future_result/000077500000000000000000000000001472312074000307735ustar00rootroot00000000000000test_reader_future_result_based.yml000066400000000000000000000054421472312074000400730ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_specific/test_reader_future_result- case: reader_future_result_based_inheritance_correct disable_cache: false main: | from typing import TypeVar from returns.interfaces.specific.reader_future_result import ( ReaderFutureResultBased3, ) from returns.primitives.hkt import SupportsKind3 from returns.future import FutureResult T = TypeVar('T') V = TypeVar('V') U = TypeVar('U') class MyClass( SupportsKind3['MyClass', T, V, U], ReaderFutureResultBased3[T, V, U], ): def __call__(self, deps: U) -> FutureResult[T, V]: ... - case: reader_future_result_based_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.specific.reader_future_result import ( ReaderFutureResultBased3, ) from returns.primitives.hkt import SupportsKind3 T = TypeVar('T') V = TypeVar('V') U = TypeVar('U') @final class MyClass( SupportsKind3['MyClass', T, V, U], ReaderFutureResultBased3[T, V, U], ): ... out: | main:12: error: Final class main.MyClass has abstract attributes "__call__", "alt", "apply", "ask", "bind", "bind_async", "bind_async_context_future_result", "bind_async_future", "bind_async_future_result", "bind_awaitable", "bind_context", "bind_context_future_result", "bind_context_ioresult", "bind_context_result", "bind_future", "bind_future_result", "bind_io", "bind_ioresult", "bind_result", "compose_result", "from_context", "from_failed_context", "from_failed_future", "from_failed_io", "from_failure", "from_future", "from_future_result_context", "from_io", "from_ioresult", "from_ioresult_context", "from_result", "from_result_context", "from_value", "lash", "map", "modify_env", "no_args", "swap" [misc] - case: reader_future_result_based_inheritance_wrong disable_cache: false main: | from typing import TypeVar from returns.interfaces.specific.reader_future_result import ( ReaderFutureResultBased3, ) from returns.primitives.hkt import SupportsKind3 from returns.future import FutureResult T = TypeVar('T') V = TypeVar('V') U = TypeVar('U') class MyClass( SupportsKind3['MyClass', T, V, U], ReaderFutureResultBased3[T, V, U], ): def __call__(self, deps: T) -> FutureResult[V, U]: ... 
out: | main:16: error: Return type "FutureResult[V, U]" of "__call__" incompatible with return type "FutureResult[T, V]" in supertype "Contextable" [override] main:16: error: Argument 1 of "__call__" is incompatible with supertype "Contextable"; supertype defines the argument type as "U" [override] main:16: note: This violates the Liskov substitution principle main:16: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides test_reader_future_result_like.yml000066400000000000000000000120001472312074000377250ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_specific/test_reader_future_result- case: reader_future_result_like_inheritance_correct disable_cache: false main: | from typing import TypeVar, Callable, Awaitable from returns.interfaces.specific.reader_future_result import ( ReaderFutureResultLike3, ) from returns.primitives.hkt import SupportsKind3 from returns.context import ReaderFutureResult T = TypeVar('T') V = TypeVar('V') U = TypeVar('U') N = TypeVar('N') R1 = TypeVar('R1') R2 = TypeVar('R2') R3 = TypeVar('R3') class MyClass( SupportsKind3['MyClass', T, V, U], ReaderFutureResultLike3[T, V, U], ): def bind_context_future_result( self, function: Callable[[T], ReaderFutureResult[N, V, U]], ) -> MyClass[N, V, U]: ... def bind_async_context_future_result( self, function: Callable[[T], Awaitable[ReaderFutureResult[N, V, U]]], ) -> MyClass[N, V, U]: ... @classmethod def from_future_result_context( cls, inner_value: ReaderFutureResult[R1, R2, R3], ) -> MyClass[R1, R2, R3]: ... - case: reader_future_result_like_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.specific.reader_future_result import ( ReaderFutureResultLike3, ) from returns.primitives.hkt import SupportsKind3 T = TypeVar('T') V = TypeVar('V') U = TypeVar('U') @final class MyClass( SupportsKind3['MyClass', T, V, U], ReaderFutureResultLike3[T, V, U], ): ... out: | main:12: error: Final class main.MyClass has abstract attributes "alt", "apply", "ask", "bind", "bind_async", "bind_async_context_future_result", "bind_async_future", "bind_async_future_result", "bind_awaitable", "bind_context", "bind_context_future_result", "bind_context_ioresult", "bind_context_result", "bind_future", "bind_future_result", "bind_io", "bind_ioresult", "bind_result", "compose_result", "from_context", "from_failed_context", "from_failed_future", "from_failed_io", "from_failure", "from_future", "from_future_result_context", "from_io", "from_ioresult", "from_ioresult_context", "from_result", "from_result_context", "from_value", "lash", "map", "modify_env", "no_args", "swap" [misc] - case: reader_future_result_like_inheritance_wrong disable_cache: false main: | from typing import TypeVar, Callable, Awaitable from returns.interfaces.specific.reader_future_result import ( ReaderFutureResultLike3, ) from returns.primitives.hkt import SupportsKind3 from returns.context import ReaderFutureResult T = TypeVar('T') V = TypeVar('V') U = TypeVar('U') N = TypeVar('N') class MyClass( SupportsKind3['MyClass', T, V, U], ReaderFutureResultLike3[T, V, U], ): def bind_context_future_result( self, function: Callable[[T], ReaderFutureResult[T, V, U]], ) -> MyClass[T, V, U]: ... def bind_async_context_future_result( self, function: Callable[[T], ReaderFutureResult[N, V, U]], ) -> MyClass[N, V, U]: ... @classmethod def from_future_result_context( cls, inner_value: ReaderFutureResult[T, V, U], ) -> MyClass[T, V, U]: ... 
out: | main:17: error: Return type "MyClass[T, V, U]" of "bind_context_future_result" incompatible with return type "KindN[MyClass[T, V, U], _UpdatedType, V, U]" in supertype "ReaderFutureResultLikeN" [override] main:19: error: Argument 1 of "bind_context_future_result" is incompatible with supertype "ReaderFutureResultLikeN"; supertype defines the argument type as "Callable[[T], RequiresContextFutureResult[_UpdatedType, V, U]]" [override] main:19: note: This violates the Liskov substitution principle main:19: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides main:25: error: Argument 1 of "bind_async_context_future_result" is incompatible with supertype "ReaderFutureResultLikeN"; supertype defines the argument type as "Callable[[T], Awaitable[RequiresContextFutureResult[_UpdatedType, V, U]]]" [override] main:25: note: This violates the Liskov substitution principle main:25: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides main:30: error: Return type "MyClass[T, V, U]" of "from_future_result_context" incompatible with return type "KindN[MyClass[T, V, U], _ValueType, _ErrorType, _EnvType]" in supertype "ReaderFutureResultLikeN" [override] main:32: error: Argument 1 of "from_future_result_context" is incompatible with supertype "ReaderFutureResultLikeN"; supertype defines the argument type as "RequiresContextFutureResult[_ValueType, _ErrorType, _EnvType]" [override] main:32: note: This violates the Liskov substitution principle main:32: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides returns-0.24.0/typesafety/test_interfaces/test_specific/test_reader_ioresult/000077500000000000000000000000001472312074000277315ustar00rootroot00000000000000test_reader_ioresult_based.yml000066400000000000000000000041661472312074000357710ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_specific/test_reader_ioresult- case: reader_ioresult_based_inheritance_correct disable_cache: false main: | from typing import TypeVar, Callable from returns.interfaces.specific.reader_ioresult import ReaderIOResultBased3 from returns.primitives.hkt import SupportsKind3 from returns.io import IOResult T = TypeVar('T') V = TypeVar('V') U = TypeVar('U') class MyClass( SupportsKind3['MyClass', T, V, U], ReaderIOResultBased3[T, V, U], ): def __call__(self, deps: U) -> IOResult[T, V]: ... - case: reader_ioresult_based_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.specific.reader_ioresult import ReaderIOResultBased3 from returns.primitives.hkt import SupportsKind3 T = TypeVar('T') V = TypeVar('V') U = TypeVar('U') @final class MyClass( SupportsKind3['MyClass', T, V, U], ReaderIOResultBased3[T, V, U], ): ... 
out: | main:10: error: Final class main.MyClass has abstract attributes "__call__", "alt", "apply", "ask", "bind", "bind_context", "bind_context_ioresult", "bind_context_result", "bind_io", "bind_ioresult", "bind_result", "compose_result", "from_context", "from_failed_context", "from_failed_io", "from_failure", "from_io", "from_ioresult", "from_ioresult_context", "from_result", "from_result_context", "from_value", "lash", "map", "modify_env", "no_args", "swap" [misc] - case: reader_ioresult_based_inheritance_wrong disable_cache: false main: | from typing import TypeVar, Callable from returns.interfaces.specific.reader_ioresult import ReaderIOResultBased3 from returns.primitives.hkt import SupportsKind3 from returns.io import IOResult T = TypeVar('T') V = TypeVar('V') U = TypeVar('U') class MyClass( SupportsKind3['MyClass', T, V, U], ReaderIOResultBased3[T, V, U], ): def __call__(self, deps: U) -> IOResult[V, T]: ... out: | main:14: error: Return type "IOResult[V, T]" of "__call__" incompatible with return type "IOResult[T, V]" in supertype "Contextable" [override] test_reader_ioresult_like.yml000066400000000000000000000074271472312074000356420ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_specific/test_reader_ioresult- case: reader_ioresult_like_inheritance_correct disable_cache: false main: | from typing import TypeVar, Callable from returns.interfaces.specific.reader_ioresult import ReaderIOResultLike3 from returns.primitives.hkt import SupportsKind3 from returns.context import ReaderIOResult T = TypeVar('T') V = TypeVar('V') U = TypeVar('U') N = TypeVar('N') R1 = TypeVar('R1') R2 = TypeVar('R2') R3 = TypeVar('R3') class MyClass( SupportsKind3['MyClass', T, V, U], ReaderIOResultLike3[T, V, U], ): def bind_context_ioresult( self, function: Callable[[T], ReaderIOResult[N, V, U]], ) -> MyClass[N, V, U]: ... @classmethod def from_ioresult_context( cls, inner_value: ReaderIOResult[R1, R2, R3], ) -> MyClass[R1, R2, R3]: ... - case: reader_ioresult_like_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.specific.reader_ioresult import ReaderIOResultLike3 from returns.primitives.hkt import SupportsKind3 T = TypeVar('T') V = TypeVar('V') U = TypeVar('U') @final class MyClass( SupportsKind3['MyClass', T, V, U], ReaderIOResultLike3[T, V, U], ): ... out: | main:10: error: Final class main.MyClass has abstract attributes "alt", "apply", "ask", "bind", "bind_context", "bind_context_ioresult", "bind_context_result", "bind_io", "bind_ioresult", "bind_result", "compose_result", "from_context", "from_failed_context", "from_failed_io", "from_failure", "from_io", "from_ioresult", "from_ioresult_context", "from_result", "from_result_context", "from_value", "lash", "map", "modify_env", "no_args", "swap" [misc] - case: reader_ioresult_like_inheritance_wrong disable_cache: false main: | from typing import TypeVar, Callable from returns.interfaces.specific.reader_ioresult import ReaderIOResultLike3 from returns.primitives.hkt import SupportsKind3 from returns.context import ReaderIOResult T = TypeVar('T') V = TypeVar('V') U = TypeVar('U') N = TypeVar('N') class MyClass( SupportsKind3['MyClass', T, V, U], ReaderIOResultLike3[T, V, U], ): def bind_context_ioresult( self, function: Callable[[N], ReaderIOResult[T, V, U]], ) -> MyClass[N, U, V]: ... @classmethod def from_ioresult_context( cls, inner_value: ReaderIOResult[T, V, U], ) -> MyClass[T, V, U]: ... 
out: | main:15: error: Return type "MyClass[N, U, V]" of "bind_context_ioresult" incompatible with return type "KindN[MyClass[T, V, U], _UpdatedType, V, U]" in supertype "ReaderIOResultLikeN" [override] main:17: error: Argument 1 of "bind_context_ioresult" is incompatible with supertype "ReaderIOResultLikeN"; supertype defines the argument type as "Callable[[T], RequiresContextIOResult[_UpdatedType, V, U]]" [override] main:17: note: This violates the Liskov substitution principle main:17: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides main:22: error: Return type "MyClass[T, V, U]" of "from_ioresult_context" incompatible with return type "KindN[MyClass[T, V, U], _ValueType, _ErrorType, _EnvType]" in supertype "ReaderIOResultLikeN" [override] main:24: error: Argument 1 of "from_ioresult_context" is incompatible with supertype "ReaderIOResultLikeN"; supertype defines the argument type as "RequiresContextIOResult[_ValueType, _ErrorType, _EnvType]" [override] main:24: note: This violates the Liskov substitution principle main:24: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides returns-0.24.0/typesafety/test_interfaces/test_specific/test_reader_result/000077500000000000000000000000001472312074000274015ustar00rootroot00000000000000test_reader_result_based.yml000066400000000000000000000044101472312074000351010ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_specific/test_reader_result- case: reader_result_based_inheritance_correct disable_cache: false main: | from typing import TypeVar, Callable from returns.interfaces.specific.reader_result import ReaderResultBased3 from returns.primitives.hkt import SupportsKind3 from returns.result import Result T = TypeVar('T') V = TypeVar('V') U = TypeVar('U') class MyClass( SupportsKind3['MyClass', T, V, U], ReaderResultBased3[T, V, U], ): def __call__(self, deps: U) -> Result[T, V]: ... - case: reader_result_based_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.specific.reader_result import ReaderResultBased3 from returns.primitives.hkt import SupportsKind3 T = TypeVar('T') V = TypeVar('V') U = TypeVar('U') @final class MyClass( SupportsKind3['MyClass', T, V, U], ReaderResultBased3[T, V, U], ): ... out: | main:10: error: Final class main.MyClass has abstract attributes "__call__", "alt", "apply", "ask", "bind", "bind_context", "bind_context_result", "bind_result", "from_context", "from_failed_context", "from_failure", "from_result", "from_result_context", "from_value", "lash", "map", "modify_env", "no_args", "swap" [misc] - case: reader_result_based_inheritance_wrong disable_cache: false main: | from typing import TypeVar, Callable from returns.interfaces.specific.reader_result import ReaderResultBased3 from returns.primitives.hkt import SupportsKind3 from returns.result import Result T = TypeVar('T') V = TypeVar('V') U = TypeVar('U') class MyClass( SupportsKind3['MyClass', T, V, U], ReaderResultBased3[T, V, U], ): def __call__(self, deps: T) -> Result[V, U]: ... 
out: | main:14: error: Return type "Result[V, U]" of "__call__" incompatible with return type "Result[T, V]" in supertype "Contextable" [override] main:14: error: Argument 1 of "__call__" is incompatible with supertype "Contextable"; supertype defines the argument type as "U" [override] main:14: note: This violates the Liskov substitution principle main:14: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides test_reader_result_like.yml000066400000000000000000000107421472312074000347540ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_specific/test_reader_result- case: reader_result_like_inheritance_correct disable_cache: false main: | from typing import TypeVar, Callable from returns.interfaces.specific.reader_result import ReaderResultLike3 from returns.primitives.hkt import SupportsKind3 from returns.context import Reader, ReaderResult T = TypeVar('T') V = TypeVar('V') U = TypeVar('U') N = TypeVar('N') R1 = TypeVar('R1') R2 = TypeVar('R2') R3 = TypeVar('R3') class MyClass( SupportsKind3['MyClass', T, V, U], ReaderResultLike3[T, V, U], ): def bind_context_result( self, function: Callable[[T], ReaderResult[N, V, U]], ) -> MyClass[N, V, U]: ... @classmethod def from_failed_context( cls, inner_value: Reader[R1, R2], ) -> MyClass[T, R1, R2]: ... @classmethod def from_result_context( cls, inner_value: ReaderResult[R1, R2, R3], ) -> MyClass[R1, R2, R3]: ... - case: reader_result_like_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.specific.reader_result import ReaderResultLike3 from returns.primitives.hkt import SupportsKind3 T = TypeVar('T') V = TypeVar('V') U = TypeVar('U') @final class MyClass( SupportsKind3['MyClass', T, V, U], ReaderResultLike3[T, V, U], ): ... out: | main:10: error: Final class main.MyClass has abstract attributes "alt", "apply", "ask", "bind", "bind_context", "bind_context_result", "bind_result", "from_context", "from_failed_context", "from_failure", "from_result", "from_result_context", "from_value", "lash", "map", "modify_env", "no_args", "swap" [misc] - case: reader_result_like_inheritance_wrong disable_cache: false main: | from typing import TypeVar, Callable from returns.interfaces.specific.reader_result import ReaderResultLike3 from returns.primitives.hkt import SupportsKind3 from returns.context import Reader, ReaderResult T = TypeVar('T') V = TypeVar('V') U = TypeVar('U') N = TypeVar('N') R1 = TypeVar('R1') R2 = TypeVar('R2') class MyClass( SupportsKind3['MyClass', T, V, U], ReaderResultLike3[T, V, U], ): def bind_context_result( self, function: Callable[[V], ReaderResult[N, V, U]], ) -> MyClass[T, V, U]: ... @classmethod def from_failed_context( cls, inner_value: Reader[R1, R1], ) -> MyClass[R1, R1, R1]: ... @classmethod def from_result_context( cls, inner_value: ReaderResult[T, V, U], ) -> MyClass[T, V, U]: ... 
out: | main:18: error: Return type "MyClass[T, V, U]" of "bind_context_result" incompatible with return type "KindN[MyClass[T, V, U], _UpdatedType, V, U]" in supertype "ReaderResultLikeN" [override] main:20: error: Argument 1 of "bind_context_result" is incompatible with supertype "ReaderResultLikeN"; supertype defines the argument type as "Callable[[T], RequiresContextResult[_UpdatedType, V, U]]" [override] main:20: note: This violates the Liskov substitution principle main:20: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides main:25: error: Signature of "from_failed_context" incompatible with supertype "ReaderResultLikeN" [override] main:25: note: Superclass: main:25: note: def [_ErrorType, _EnvType] from_failed_context(cls, inner_value: RequiresContext[_ErrorType, _EnvType]) -> KindN[MyClass[T, V, U], T, _ErrorType, _EnvType] main:25: note: Subclass: main:25: note: @classmethod main:25: note: def [R1] from_failed_context(cls, inner_value: RequiresContext[R1, R1]) -> MyClass[R1, R1, R1] main:32: error: Return type "MyClass[T, V, U]" of "from_result_context" incompatible with return type "KindN[MyClass[T, V, U], _ValueType, _ErrorType, _EnvType]" in supertype "ReaderResultLikeN" [override] main:34: error: Argument 1 of "from_result_context" is incompatible with supertype "ReaderResultLikeN"; supertype defines the argument type as "RequiresContextResult[_ValueType, _ErrorType, _EnvType]" [override] main:34: note: This violates the Liskov substitution principle main:34: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides returns-0.24.0/typesafety/test_interfaces/test_specific/test_result/000077500000000000000000000000001472312074000260575ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_specific/test_result/test_resultbased_inheritance.yml000066400000000000000000000113161472312074000345310ustar00rootroot00000000000000- case: result_inheritance_correct2 disable_cache: false main: | from typing import Callable, TypeVar, Any from returns.interfaces.specific.result import ResultBased2 from returns.primitives.hkt import SupportsKind2 from returns.result import Result _ValueType = TypeVar('_ValueType') _NewValueType = TypeVar('_NewValueType') # Result related: _ErrorType = TypeVar('_ErrorType') _NewErrorType = TypeVar('_NewErrorType') class MyClass( SupportsKind2['MyClass', _ValueType, _ErrorType], ResultBased2[_ValueType, _ErrorType], ): def swap(self) -> MyClass[_ErrorType, _ValueType]: ... def bind_result( self, function: Callable[ [_ValueType], Result[_NewValueType, _ErrorType], ], ) -> MyClass[_NewValueType, _ErrorType]: ... def unwrap(self) -> _ValueType: ... def failure(self) -> _ErrorType: ... @classmethod def from_result( self, inner_value: Result[_NewValueType, _NewErrorType], ) -> MyClass[_NewValueType, _NewErrorType]: ... @classmethod def from_failure( cls, inner_value: _NewErrorType, ) -> MyClass[Any, _NewErrorType]: ... def test(a: int) -> Result[float, str]: ... 
x: Result[int, str] reveal_type(MyClass.from_result(x).bind_result(test)) # N: Revealed type is "main.MyClass[builtins.float, builtins.str]" - case: result_inheritance_correct3 disable_cache: false main: | from typing import Callable, TypeVar, Any from returns.interfaces.specific.result import ResultBased3 from returns.primitives.hkt import SupportsKind3 from returns.result import Result _ValueType = TypeVar('_ValueType') _NewValueType = TypeVar('_NewValueType') _T = TypeVar('_T') # Result related: _ErrorType = TypeVar('_ErrorType') _NewErrorType = TypeVar('_NewErrorType') class MyClass( SupportsKind3['MyClass', _ValueType, _ErrorType, _T], ResultBased3[_ValueType, _ErrorType, _T], ): def swap(self) -> MyClass[_ErrorType, _ValueType, _T]: ... def bind_result( self, function: Callable[ [_ValueType], Result[_NewValueType, _ErrorType], ], ) -> MyClass[_NewValueType, _ErrorType, _T]: ... def unwrap(self) -> _ValueType: ... def failure(self) -> _ErrorType: ... @classmethod def from_result( self, inner_value: Result[_NewValueType, _NewErrorType], ) -> MyClass[_NewValueType, _NewErrorType, Any]: ... @classmethod def from_failure( cls, inner_value: _NewErrorType, ) -> MyClass[Any, _NewErrorType, Any]: ... def test(a: int) -> Result[float, str]: ... x: Result[int, str] reveal_type(MyClass.from_result(x).bind_result(test)) # N: Revealed type is "main.MyClass[builtins.float, builtins.str, Any]" - case: result_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.specific.result import ResultBased2 from returns.primitives.hkt import SupportsKind2 V = TypeVar('V') N = TypeVar('N') @final class MyClass(SupportsKind2['MyClass', V, N], ResultBased2[V, N]): ... out: | main:9: error: Final class main.MyClass has abstract attributes "alt", "apply", "bind", "bind_result", "equals", "failure", "from_failure", "from_result", "from_value", "lash", "map", "swap", "unwrap" [misc] - case: result_inheritance_wrong disable_cache: false main: | from typing import Callable, TypeVar from returns.interfaces.specific.result import ResultBased2 from returns.primitives.hkt import SupportsKind2 from returns.result import Result _ValueType = TypeVar('_ValueType', covariant=True) _NewValueType = TypeVar('_NewValueType') # Result related: _ErrorType = TypeVar('_ErrorType', covariant=True) _NewErrorType = TypeVar('_NewErrorType') class MyClass( # type: ignore[type-var] SupportsKind2['MyClass', _ValueType, _ErrorType], ResultBased2[_ValueType, _ErrorType], ): def unwrap(self) -> _ErrorType: ... def failure(self) -> _ValueType: ... 
out: | main:17: error: Return type "_ErrorType" of "unwrap" incompatible with return type "_ValueType" in supertype "Unwrappable" [override] main:20: error: Return type "_ValueType" of "failure" incompatible with return type "_ErrorType" in supertype "Unwrappable" [override] returns-0.24.0/typesafety/test_interfaces/test_specific/test_result/test_resultlike_inheritance.yml000066400000000000000000000153521472312074000344030ustar00rootroot00000000000000- case: result_inheritance_correct2 disable_cache: false main: | from typing import Callable, TypeVar, Any from returns.interfaces.specific.result import ResultLike2 from returns.primitives.hkt import SupportsKind2 from returns.result import Result _ValueType = TypeVar('_ValueType') _NewValueType = TypeVar('_NewValueType') # Result related: _ErrorType = TypeVar('_ErrorType') _NewErrorType = TypeVar('_NewErrorType') class MyClass( SupportsKind2['MyClass', _ValueType, _ErrorType], ResultLike2[_ValueType, _ErrorType], ): def bind_result( self, function: Callable[ [_ValueType], Result[_NewValueType, _ErrorType], ], ) -> MyClass[_NewValueType, _ErrorType]: ... @classmethod def from_result( self, inner_value: Result[_NewValueType, _NewErrorType], ) -> MyClass[_NewValueType, _NewErrorType]: ... def test(a: int) -> Result[float, str]: ... x: Result[int, str] reveal_type(MyClass.from_result(x).bind_result(test)) # N: Revealed type is "main.MyClass[builtins.float, builtins.str]" - case: result_inheritance_correct3 disable_cache: false main: | from typing import Callable, TypeVar, Any from returns.interfaces.specific.result import ResultLike3 from returns.primitives.hkt import SupportsKind3 from returns.result import Result _ValueType = TypeVar('_ValueType') _NewValueType = TypeVar('_NewValueType') _T = TypeVar('_T') # Result related: _ErrorType = TypeVar('_ErrorType') _NewErrorType = TypeVar('_NewErrorType') class MyClass( SupportsKind3['MyClass', _ValueType, _ErrorType, _T], ResultLike3[_ValueType, _ErrorType, _T], ): def bind_result( self, function: Callable[ [_ValueType], Result[_NewValueType, _ErrorType], ], ) -> MyClass[_NewValueType, _ErrorType, _T]: ... @classmethod def from_result( self, inner_value: Result[_NewValueType, _NewErrorType], ) -> MyClass[_NewValueType, _NewErrorType, Any]: ... def test(a: int) -> Result[float, str]: ... x: Result[int, str] reveal_type(MyClass.from_result(x).bind_result(test)) # N: Revealed type is "main.MyClass[builtins.float, builtins.str, Any]" - case: result_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.specific.result import ResultLike2 from returns.primitives.hkt import SupportsKind2 V = TypeVar('V') N = TypeVar('N') @final class MyClass(SupportsKind2['MyClass', V, N], ResultLike2[V, N]): ... 
out: | main:9: error: Final class main.MyClass has abstract attributes "alt", "apply", "bind", "bind_result", "from_failure", "from_result", "from_value", "lash", "map", "swap" [misc] - case: result_inheritance_wrong2 disable_cache: false main: | from typing import Callable, TypeVar from returns.interfaces.specific.result import ResultLike2 from returns.primitives.hkt import SupportsKind2 from returns.result import Result _ValueType = TypeVar('_ValueType', covariant=True) _NewValueType = TypeVar('_NewValueType') # Result related: _ErrorType = TypeVar('_ErrorType', covariant=True) _NewErrorType = TypeVar('_NewErrorType') class MyClass( # type: ignore[type-var] SupportsKind2['MyClass', _ValueType, _ErrorType], ResultLike2[_ValueType, _ErrorType], ): def bind_result( self, function: Callable[ [_ValueType], Result[_ValueType, _ErrorType], ], ) -> MyClass[_ValueType, _ErrorType]: ... def from_result( self, inner_value: Result[_NewValueType, _NewErrorType], ) -> MyClass[_NewValueType, _NewErrorType]: ... out: | main:17: error: Return type "MyClass[_ValueType, _ErrorType]" of "bind_result" incompatible with return type "KindN[MyClass[_ValueType, _ErrorType], _UpdatedType, _ErrorType, Never]" in supertype "ResultLikeN" [override] main:19: error: Argument 1 of "bind_result" is incompatible with supertype "ResultLikeN"; supertype defines the argument type as "Callable[[_ValueType], Result[_UpdatedType, _ErrorType]]" [override] main:19: note: This violates the Liskov substitution principle main:19: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides main:26: error: Signature of "from_result" incompatible with supertype "ResultLikeN" [override] main:26: note: Superclass: main:26: note: def [_ValueType, _ErrorType] from_result(cls, inner_value: Result[_ValueType, _ErrorType]) -> KindN[MyClass[_ValueType@MyClass, _ErrorType@MyClass], _ValueType@from_result, _ErrorType@from_result, Never] main:26: note: Subclass: main:26: note: def [_NewValueType, _NewErrorType] from_result(self, inner_value: Result[_NewValueType, _NewErrorType]) -> MyClass[_NewValueType, _NewErrorType] - case: result_inheritance_wrong3 disable_cache: false main: | from typing import Callable, TypeVar, Any from returns.interfaces.specific.result import ResultLike3 from returns.primitives.hkt import SupportsKind3 from returns.result import Result _ValueType = TypeVar('_ValueType', covariant=True) _NewValueType = TypeVar('_NewValueType') _T = TypeVar('_T') # Result related: _ErrorType = TypeVar('_ErrorType', covariant=True) _NewErrorType = TypeVar('_NewErrorType') class MyClass( # type: ignore[type-var] SupportsKind3['MyClass', _ValueType, _ErrorType, _T], ResultLike3[_ValueType, _ErrorType, _T], ): def bind_result( self, function: Callable[ [_ValueType], Result[_NewValueType, _ErrorType], ], ) -> MyClass[_NewValueType, _ErrorType, str]: ... @classmethod def from_result( self, inner_value: Result[_NewValueType, _NewErrorType], ) -> MyClass[_NewValueType, _NewErrorType, bool]: ... 
out: | main:18: error: Return type "MyClass[_NewValueType, _ErrorType, str]" of "bind_result" incompatible with return type "KindN[MyClass[_ValueType, _ErrorType, _T], _UpdatedType, _ErrorType, _T]" in supertype "ResultLikeN" [override] main:28: error: Return type "MyClass[_NewValueType, _NewErrorType, bool]" of "from_result" incompatible with return type "KindN[MyClass[_ValueType@MyClass, _ErrorType@MyClass, _T], _ValueType@from_result, _ErrorType@from_result, _T]" in supertype "ResultLikeN" [override] returns-0.24.0/typesafety/test_interfaces/test_swappable/000077500000000000000000000000001472312074000236735ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_swappable/test_inheritance.yml000066400000000000000000000034541472312074000277540ustar00rootroot00000000000000- case: swappable_inheritance_correct2 disable_cache: false main: | from typing import TypeVar from returns.interfaces.swappable import Swappable2 from returns.primitives.hkt import SupportsKind2 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') class MyClass( SupportsKind2['MyClass', _FirstType, _SecondType], Swappable2[_FirstType, _SecondType], ): def swap(self) -> MyClass[_SecondType, _FirstType]: ... - case: swappable_inheritance_missing disable_cache: false main: | from typing import TypeVar, final from returns.interfaces.swappable import Swappable2 from returns.primitives.hkt import SupportsKind2 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') @final class MyClass( SupportsKind2['MyClass', _FirstType, _SecondType], Swappable2[_FirstType, _SecondType], ): ... out: | main:9: error: Final class main.MyClass has abstract attributes "alt", "map", "swap" [misc] - case: swappable_inheritance_incorrect2 disable_cache: false main: | from typing import TypeVar from returns.interfaces.swappable import Swappable2 from returns.primitives.hkt import SupportsKind2 _FirstType = TypeVar('_FirstType') _SecondType = TypeVar('_SecondType') _UpdatedType = TypeVar('_UpdatedType') class MyClass( SupportsKind2['MyClass', _FirstType, _SecondType], Swappable2[_FirstType, _SecondType], ): def swap(self) -> MyClass[_FirstType, _SecondType]: ... out: | main:13: error: Return type "MyClass[_FirstType, _SecondType]" of "swap" incompatible with return type "KindN[MyClass[_FirstType, _SecondType], _SecondType, _FirstType, Never]" in supertype "SwappableN" [override] returns-0.24.0/typesafety/test_interfaces/test_unwrappable/000077500000000000000000000000001472312074000242355ustar00rootroot00000000000000returns-0.24.0/typesafety/test_interfaces/test_unwrappable/test_inheritance.yml000066400000000000000000000036121472312074000303120ustar00rootroot00000000000000- case: unwrappable_correct_inheritance disable_cache: false main: | from returns.primitives.hkt import SupportsKind2 from returns.interfaces.unwrappable import Unwrappable from typing import TypeVar T = TypeVar('T') N = TypeVar('N') class MyOwn( SupportsKind2['MyOwn', T, N], Unwrappable[T, N], ): def __init__(self, value: T, error: N) -> None: ... def unwrap(self) -> T: ... def failure(self) -> N: ... 
x = MyOwn(1, 'a') reveal_type(x.unwrap()) # N: Revealed type is "builtins.int" reveal_type(x.failure()) # N: Revealed type is "builtins.str" - case: unwrappable_missing_inheritance disable_cache: false main: | from returns.primitives.hkt import SupportsKind2 from returns.interfaces.unwrappable import Unwrappable from typing import TypeVar T = TypeVar('T') N = TypeVar('N') class MyOwn( SupportsKind2['MyOwn', T, N], Unwrappable[T, N], ): ... MyOwn() # E: Cannot instantiate abstract class "MyOwn" with abstract attributes "failure" and "unwrap" [abstract] - case: unwrappable_wrong_inheritance disable_cache: false main: | from returns.primitives.hkt import SupportsKind2 from returns.interfaces.unwrappable import Unwrappable from typing import TypeVar T = TypeVar('T') N = TypeVar('N') class MyOwn( SupportsKind2['MyOwn', T, N], Unwrappable[T, N], ): def __init__(self, value: T, error: N) -> None: ... def unwrap(self) -> int: ... def failure(self) -> T: ... out: | main:15: error: Return type "int" of "unwrap" incompatible with return type "T" in supertype "Unwrappable" [override] main:18: error: Return type "T" of "failure" incompatible with return type "N" in supertype "Unwrappable" [override] returns-0.24.0/typesafety/test_io/000077500000000000000000000000001472312074000171425ustar00rootroot00000000000000returns-0.24.0/typesafety/test_io/test_io_container/000077500000000000000000000000001472312074000226525ustar00rootroot00000000000000returns-0.24.0/typesafety/test_io/test_io_container/test_do.yml000066400000000000000000000015311472312074000250360ustar00rootroot00000000000000- case: do_types_missmatch disable_cache: false main: | from returns.io import IO from returns.result import Success IO.do( x + y for x in Success(1) # E: Invalid type supplied in do-notation: expected "returns.io.IO[Any]", got "returns.result.Success[builtins.int]" [misc] for y in IO(2.5) ) - case: do_with_if disable_cache: false main: | from returns.io import IO IO.do( # E: Using "if" conditions inside a generator is not allowed [misc] x + y for x in IO(1) for y in IO(2.5) if y > 5 ) - case: do_with_var disable_cache: false main: | from returns.io import IO x = ( x + y for x in IO(1) for y in IO(2.5) ) IO.do(x) # E: Literal generator expression is required, not a variable or function call [misc] returns-0.24.0/typesafety/test_io/test_io_container/test_impure.yml000066400000000000000000000036041472312074000257400ustar00rootroot00000000000000- case: impure_decorator_no_params disable_cache: false main: | from returns.io import impure @impure def test() -> int: return 1 reveal_type(test) # N: Revealed type is "def () -> returns.io.IO[builtins.int]" - case: impure_composition_no_params disable_cache: false main: | from returns.io import impure def test() -> int: return 1 reveal_type(impure(test)) # N: Revealed type is "def () -> returns.io.IO[builtins.int]" - case: impure_decorator_with_args disable_cache: false main: | from typing import Optional from returns.io import impure @impure def test(first: int, second: Optional[str] = None, *, kw: bool = True) -> int: return 1 reveal_type(test) # N: Revealed type is "def (first: builtins.int, second: Union[builtins.str, None] =, *, kw: builtins.bool =) -> returns.io.IO[builtins.int]" - case: impure_composition_with_args disable_cache: false main: | from typing import Optional from returns.io import impure def test(first: int, second: Optional[str] = None, *, kw: bool = True) -> int: return 1 reveal_type(impure(test)) # N: Revealed type is "def (first: builtins.int, second: 
Union[builtins.str, None] =, *, kw: builtins.bool =) -> returns.io.IO[builtins.int]" - case: impure_decorator_with_args_kwargs disable_cache: false main: | from returns.io import impure @impure def test(*args, **kwargs) -> int: return 1 reveal_type(test) # N: Revealed type is "def (*args: Any, **kwargs: Any) -> returns.io.IO[builtins.int]" - case: impure_decorator_with_typed_args_kwargs disable_cache: false main: | from returns.io import impure @impure def test(*args: int, **kwargs: str) -> int: return 1 reveal_type(test) # N: Revealed type is "def (*args: builtins.int, **kwargs: builtins.str) -> returns.io.IO[builtins.int]" returns-0.24.0/typesafety/test_io/test_io_container/test_io_base.yml000066400000000000000000000025411472312074000260370ustar00rootroot00000000000000- case: io_constructor1 disable_cache: false main: | from returns.io import IO reveal_type(IO(1)) # N: Revealed type is "returns.io.IO[builtins.int]" - case: io_constructor2 disable_cache: false main: | from returns.io import IO reveal_type(IO.from_value(1)) # N: Revealed type is "returns.io.IO[builtins.int]" - case: io_constructor3 disable_cache: false main: | from returns.io import IO reveal_type(IO.from_io(IO(1))) # N: Revealed type is "returns.io.IO[builtins.int]" - case: io_bind disable_cache: false main: | from returns.io import IO def bind_io(input_io: int) -> IO[str]: ... reveal_type(IO(1).bind(bind_io)) # N: Revealed type is "returns.io.IO[builtins.str]" - case: io_bind_io disable_cache: false main: | from returns.io import IO def bind_io(input_io: int) -> IO[str]: ... reveal_type(IO(1).bind_io(bind_io)) # N: Revealed type is "returns.io.IO[builtins.str]" - case: io_map disable_cache: false main: | from returns.io import IO reveal_type(IO(1).map(str)) # N: Revealed type is "returns.io.IO[builtins.str]" - case: io_apply disable_cache: false main: | from returns.io import IO def transform(arg: int) -> str: ... reveal_type(IO(1).apply(IO(transform))) # N: Revealed type is "returns.io.IO[builtins.str]" returns-0.24.0/typesafety/test_io/test_io_container/test_io_type_cast.yml000066400000000000000000000012231472312074000271140ustar00rootroot00000000000000- case: io_correct_cast disable_cache: false main: | from returns.io import IO first: IO[ValueError] second: IO[Exception] = first reveal_type(second) # N: Revealed type is "returns.io.IO[builtins.Exception]" - case: io_from_ioresult disable_cache: false main: | from returns.io import IO, IOResult x: IOResult[int, str] reveal_type(IO.from_ioresult(x)) # N: Revealed type is "returns.io.IO[returns.result.Result[builtins.int, builtins.str]]" - case: io_getattr disable_cache: false main: | from returns.io import IO x: IO[int] x.missing # E: "IO[int]" has no attribute "missing" [attr-defined] returns-0.24.0/typesafety/test_io/test_ioresult_container/000077500000000000000000000000001472312074000241115ustar00rootroot00000000000000returns-0.24.0/typesafety/test_io/test_ioresult_container/test_construct_iofailure.yml000066400000000000000000000013451472312074000317610ustar00rootroot00000000000000- case: iofailure_lash disable_cache: false main: | from returns.io import IOFailure, IOResult def returns_result(param: int) -> IOResult[str, Exception]: ... 
first: IOResult[str, int] = IOFailure(1) reveal_type(first.lash(returns_result)) # N: Revealed type is "returns.io.IOResult[builtins.str, builtins.Exception]" - case: iofailure_alt disable_cache: false main: | from returns.io import IOFailure reveal_type(IOFailure(1).alt(str)) # N: Revealed type is "returns.io.IOResult[Any, builtins.str]" - case: iofailure_iofailure disable_cache: false main: | from returns.io import IOFailure reveal_type(IOFailure(1).failure()) # N: Revealed type is "returns.io.IO[builtins.int]" returns-0.24.0/typesafety/test_io/test_ioresult_container/test_construct_iosucess.yml000066400000000000000000000037071472312074000316430ustar00rootroot00000000000000- case: iosuccess_bind disable_cache: false main: | from returns.io import IOSuccess, IOResult def returns_result(param: int) -> IOResult[str, Exception]: ... first: IOResult[int, Exception] = IOSuccess(1) reveal_type(first.bind(returns_result)) # N: Revealed type is "returns.io.IOResult[builtins.str, builtins.Exception]" - case: iosuccess_bind_result disable_cache: false main: | from returns.io import IOSuccess, IOResult from returns.result import Result def returns_result(param: int) -> Result[str, Exception]: ... first: IOResult[int, Exception] = IOSuccess(1) reveal_type(first.bind_result(returns_result)) # N: Revealed type is "returns.io.IOResult[builtins.str, builtins.Exception]" - case: iosuccess_bind_io disable_cache: false main: | from returns.io import IO, IOSuccess, IOResult def returns_io(param: int) -> IO[str]: ... first: IOResult[int, Exception] = IOSuccess(1) reveal_type(first.bind_io(returns_io)) # N: Revealed type is "returns.io.IOResult[builtins.str, builtins.Exception]" - case: iosuccess_map disable_cache: false main: | from returns.io import IOSuccess, IOResult reveal_type(IOSuccess(1).map(str)) # N: Revealed type is "returns.io.IOResult[builtins.str, Any]" - case: iosuccess_apply disable_cache: false main: | from returns.io import IOSuccess, IOResult def transform(arg: int) -> str: ... 
reveal_type(IOSuccess(1).apply(IOSuccess(transform))) # N: Revealed type is "returns.io.IOResult[builtins.str, Any]" - case: iosuccess_value_or disable_cache: false main: | from returns.io import IOSuccess reveal_type(IOSuccess(1).value_or(None)) # N: Revealed type is "returns.io.IO[Union[builtins.int, None]]" - case: iosuccess_unwrap disable_cache: false main: | from returns.io import IOSuccess reveal_type(IOSuccess(1).unwrap()) # N: Revealed type is "returns.io.IO[builtins.int]" returns-0.24.0/typesafety/test_io/test_ioresult_container/test_do.yml000066400000000000000000000052521472312074000263010ustar00rootroot00000000000000- case: do_all_errors disable_cache: false main: | from returns.io import IOResult, IOFailure reveal_type(IOResult.do( # N: Revealed type is "returns.io.IOResult[Any, Union[builtins.int, builtins.str]]" first / second for first in IOFailure(1) for second in IOFailure('a') )) - case: do_no_errors disable_cache: false main: | from returns.io import IOSuccess, IOResult reveal_type(IOResult.do( # N: Revealed type is "returns.io.IOResult[builtins.float, Never]" x + y for x in IOSuccess(1) for y in IOSuccess(2.5) )) - case: do_with_errors disable_cache: false main: | from returns.io import IOSuccess, IOResult a: IOResult[int, str] b: IOResult[float, bytes] reveal_type(IOResult.do( # N: Revealed type is "returns.io.IOResult[builtins.float, Union[builtins.str, builtins.bytes]]" x + y for x in a for y in b )) - case: do_types_missmatch disable_cache: false main: | from returns.io import IOSuccess, IOResult from returns.result import Success IOResult.do( x + y for x in Success(1) # E: Invalid type supplied in do-notation: expected "returns.io.IOResult[Any, Any]", got "returns.result.Success[builtins.int]" [misc] for y in IOSuccess(2.5) ) - case: do_types_converted disable_cache: false main: | from returns.io import IOSuccess, IOResult from returns.result import Result a: Result[int, str] reveal_type(IOResult.do( # N: Revealed type is "returns.io.IOResult[builtins.float, builtins.str]" x + y for x in IOResult.from_result(a) for y in IOSuccess(2.5) )) - case: do_with_if disable_cache: false main: | from returns.io import IOSuccess, IOResult IOResult.do( # E: Using "if" conditions inside a generator is not allowed [misc] x + y for x in IOSuccess(1) for y in IOSuccess(2.5) if y > 5 ) - case: do_with_var disable_cache: false main: | from returns.io import IOSuccess, IOResult x = ( x + y for x in IOSuccess(1) for y in IOSuccess(2.5) ) IOResult.do(x) # E: Literal generator expression is required, not a variable or function call [misc] - case: do_with_var disable_cache: false main: | from returns.io import IOResult, IOResultE, IOSuccess x: IOResultE[int] reveal_type(IOResult.do( # N: Revealed type is "returns.io.IOResult[builtins.int, builtins.Exception]" a + 2 for a in x )) reveal_type(IOResultE.do( # N: Revealed type is "returns.io.IOResult[builtins.int, builtins.Exception]" a + 2 for a in x )) returns-0.24.0/typesafety/test_io/test_ioresult_container/test_impure_safe.yml000066400000000000000000000015431472312074000301750ustar00rootroot00000000000000- case: impure_decorator_no_params disable_cache: false main: | from returns.io import impure_safe @impure_safe def test(arg: str) -> int: return 1 reveal_type(test) # N: Revealed type is "def (arg: builtins.str) -> returns.io.IOResult[builtins.int, builtins.Exception]" - case: impure_decorator_passing_exceptions_no_params disable_cache: false main: | from returns.io import impure_safe @impure_safe((ValueError,)) def test1(arg: str) -> 
int: return 1 reveal_type(test1) # N: Revealed type is "def (arg: builtins.str) -> returns.io.IOResult[builtins.int, builtins.ValueError]" @impure_safe(exceptions=(ValueError,)) def test2(arg: str) -> int: return 1 reveal_type(test2) # N: Revealed type is "def (arg: builtins.str) -> returns.io.IOResult[builtins.int, builtins.ValueError]" returns-0.24.0/typesafety/test_io/test_ioresult_container/test_ioresult_helpers.yml000066400000000000000000000014221472312074000312620ustar00rootroot00000000000000- case: ioresult_from_typecast disable_cache: false main: | from returns.io import IO, IOResult from returns.result import Result container: IO[Result[int, str]] reveal_type(IOResult.from_typecast(container)) # N: Revealed type is "returns.io.IOResult[builtins.int, builtins.str]" - case: ioresult_from_io disable_cache: false main: | from returns.io import IO, IOResult container: IO[str] reveal_type(IOResult.from_io(container)) # N: Revealed type is "returns.io.IOResult[builtins.str, Any]" - case: ioresult_from_failed_io disable_cache: false main: | from returns.io import IO, IOResult container: IO[str] reveal_type(IOResult.from_failed_io(container)) # N: Revealed type is "returns.io.IOResult[Any, builtins.str]" returns-0.24.0/typesafety/test_io/test_ioresult_container/test_ioresult_typecast.yml000066400000000000000000000075511472312074000314650ustar00rootroot00000000000000- case: ioresult_success_cast disable_cache: false main: | from returns.io import IOResult, IOSuccess first: IOResult[int, Exception] = IOSuccess(1) reveal_type(first) # N: Revealed type is "returns.io.IOResult[builtins.int, builtins.Exception]" - case: ioresult_failure_cast1 disable_cache: false main: | from returns.io import IOResult, IOFailure first: IOResult[int, Exception] = IOFailure(Exception()) reveal_type(first) # N: Revealed type is "returns.io.IOResult[builtins.int, builtins.Exception]" - case: ioresult_failure_cast2 disable_cache: false main: | from returns.io import IOResult, IOFailure first: IOResult[int, Exception] = IOFailure(TypeError()) reveal_type(first) # N: Revealed type is "returns.io.IOResult[builtins.int, builtins.Exception]" - case: ioresult_swap disable_cache: false main: | from returns.io import IOResult x: IOResult[int, str] reveal_type(x.swap()) # N: Revealed type is "returns.io.IOResult[builtins.str, builtins.int]" - case: ioresult_getattr disable_cache: false main: | from returns.io import IOResult x: IOResult[int, str] x.missing # E: "IOResult[int, str]" has no attribute "missing" [attr-defined] - case: ioresult_from_value disable_cache: false main: | from returns.io import IOResult reveal_type(IOResult.from_value(1)) # N: Revealed type is "returns.io.IOResult[builtins.int, Any]" - case: ioresult_from_failure disable_cache: false main: | from returns.io import IOResult reveal_type(IOResult.from_failure(1)) # N: Revealed type is "returns.io.IOResult[Any, builtins.int]" - case: ioresult_covariant_cast disable_cache: false main: | from returns.io import IOResult first: IOResult[TypeError, ValueError] # we cast both values second: IOResult[Exception, Exception] = first reveal_type(second) # N: Revealed type is "returns.io.IOResult[builtins.Exception, builtins.Exception]" - case: ioresult_success_bind_contra1 disable_cache: false main: | from returns.io import IOResult, IOSuccess def test(some: int) -> IOResult[float, str]: ... 
first: IOResult[int, str] = IOSuccess(4) reveal_type(first.bind(test)) # N: Revealed type is "returns.io.IOResult[builtins.float, builtins.str]" - case: ioresult_success_bind_contra2 disable_cache: false main: | from returns.io import IOResult, IOSuccess def test(some: int) -> IOResult[int, ValueError]: ... first: IOResult[int, Exception] second = first.bind(test) reveal_type(second) # N: Revealed type is "returns.io.IOResult[builtins.int, builtins.Exception]" - case: ioresult_correct_usage disable_cache: false main: | from returns.io import IOResult, IOSuccess, IOFailure def factory(inner_value: int) -> IOResult[int, str]: if inner_value > 0: return IOSuccess(inner_value + 2) return IOFailure(str(inner_value)) reveal_type(factory(1)) # N: Revealed type is "returns.io.IOResult[builtins.int, builtins.str]" - case: ioresulte_typecast1 disable_cache: false main: | from returns.io import IOResult, IOResultE, IOSuccess, IOFailure def function(arg: int) -> IOResultE[int]: if arg > 0: return IOSuccess(arg + 1) return IOFailure(ValueError(arg)) result: IOResult[int, Exception] = function(1) reveal_type(result) # N: Revealed type is "returns.io.IOResult[builtins.int, builtins.Exception]" - case: ioresulte_typecast2 disable_cache: false main: | from returns.io import IOResult, IOResultE, IOSuccess, IOFailure def function(arg: int) -> IOResult[int, Exception]: if arg > 0: return IOSuccess(arg + 1) return IOFailure(ValueError(arg)) result: IOResultE[int] = function(1) reveal_type(result) # N: Revealed type is "returns.io.IOResult[builtins.int, builtins.Exception]" returns-0.24.0/typesafety/test_iterables/000077500000000000000000000000001472312074000205055ustar00rootroot00000000000000returns-0.24.0/typesafety/test_iterables/test_fold/000077500000000000000000000000001472312074000224705ustar00rootroot00000000000000returns-0.24.0/typesafety/test_iterables/test_fold/test_fold_collect.yml000066400000000000000000000141561472312074000267120ustar00rootroot00000000000000- case: fold_collect_iterable_types disable_cache: false main: | from typing import List, Iterable, Sequence, Iterator, Tuple, Generator from returns.result import Result from returns.iterables import Fold x1: List[Result[int, str]] x2: Iterable[Result[int, str]] x3: Sequence[Result[int, str]] x4: Iterator[Result[int, str]] x5: Tuple[Result[int, str], ...] 
x6: Tuple[Result[int, str]] x7: Generator[Result[int, str], None, None] acc: Result[Tuple[()], str] reveal_type(Fold.collect(x1, acc)) # N: Revealed type is "returns.result.Result[builtins.tuple[builtins.int, ...], builtins.str]" reveal_type(Fold.collect(x2, acc)) # N: Revealed type is "returns.result.Result[builtins.tuple[builtins.int, ...], builtins.str]" reveal_type(Fold.collect(x3, acc)) # N: Revealed type is "returns.result.Result[builtins.tuple[builtins.int, ...], builtins.str]" reveal_type(Fold.collect(x4, acc)) # N: Revealed type is "returns.result.Result[builtins.tuple[builtins.int, ...], builtins.str]" reveal_type(Fold.collect(x5, acc)) # N: Revealed type is "returns.result.Result[builtins.tuple[builtins.int, ...], builtins.str]" reveal_type(Fold.collect(x6, acc)) # N: Revealed type is "returns.result.Result[builtins.tuple[builtins.int, ...], builtins.str]" reveal_type(Fold.collect(x7, acc)) # N: Revealed type is "returns.result.Result[builtins.tuple[builtins.int, ...], builtins.str]" - case: fold_collect_io disable_cache: false main: | from returns.iterables import Fold from returns.io import IO from typing import Iterable acc = IO(()) x: Iterable[IO[float]] reveal_type(Fold.collect(x, acc)) # N: Revealed type is "returns.io.IO[builtins.tuple[builtins.float, ...]]" - case: fold_collect_maybe disable_cache: false main: | from returns.iterables import Fold from returns.maybe import Maybe from typing import Iterable acc = Maybe.from_value(()) x: Iterable[Maybe[float]] reveal_type(Fold.collect(x, acc)) # N: Revealed type is "returns.maybe.Maybe[builtins.tuple[builtins.float, ...]]" - case: fold_collect_result disable_cache: false main: | from returns.iterables import Fold from returns.result import Result from typing import Iterable, Tuple acc: Result[Tuple[()], str] x: Iterable[Result[float, str]] reveal_type(Fold.collect(x, acc)) # N: Revealed type is "returns.result.Result[builtins.tuple[builtins.float, ...], builtins.str]" - case: fold_collect_ioresult disable_cache: false main: | from returns.iterables import Fold from returns.io import IOResult from typing import Iterable, Tuple acc: IOResult[Tuple[()], str] x: Iterable[IOResult[float, str]] reveal_type(Fold.collect(x, acc)) # N: Revealed type is "returns.io.IOResult[builtins.tuple[builtins.float, ...], builtins.str]" - case: fold_collect_requires_context disable_cache: false main: | from returns.iterables import Fold from returns.context import RequiresContext from typing import Iterable, Tuple acc: RequiresContext[Tuple[()], str] x: Iterable[RequiresContext[float, str]] reveal_type(Fold.collect(x, acc)) # N: Revealed type is "returns.context.requires_context.RequiresContext[builtins.tuple[builtins.float, ...], builtins.str]" - case: fold_collect_requires_context_result disable_cache: false main: | from returns.iterables import Fold from returns.context import RequiresContextResult from typing import Iterable, Tuple acc: RequiresContextResult[Tuple[()], str, bool] x: Iterable[RequiresContextResult[float, str, bool]] reveal_type(Fold.collect(x, acc)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.tuple[builtins.float, ...], builtins.str, builtins.bool]" - case: fold_collect_requires_context_ioresult disable_cache: false main: | from returns.iterables import Fold from returns.context import RequiresContextIOResult from typing import Iterable, Tuple acc: RequiresContextIOResult[Tuple[()], str, bool] x: Iterable[RequiresContextIOResult[float, str, bool]] reveal_type(Fold.collect(x, 
acc)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.tuple[builtins.float, ...], builtins.str, builtins.bool]" - case: fold_collect_requires_context_future_result disable_cache: false main: | from returns.iterables import Fold from returns.context import RequiresContextFutureResult from typing import Iterable, Tuple acc: RequiresContextFutureResult[Tuple[()], str, bool] x: Iterable[RequiresContextFutureResult[float, str, bool]] reveal_type(Fold.collect(x, acc)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.tuple[builtins.float, ...], builtins.str, builtins.bool]" - case: fold_collect_future disable_cache: false main: | from returns.iterables import Fold from returns.future import Future from typing import Iterable, Tuple acc: Future[Tuple[()]] x: Iterable[Future[float]] reveal_type(Fold.collect(x, acc)) # N: Revealed type is "returns.future.Future[builtins.tuple[builtins.float, ...]]" - case: fold_collect_future_result disable_cache: false main: | from returns.iterables import Fold from returns.future import FutureResult from typing import Iterable, Tuple acc: FutureResult[Tuple[()], str] x: Iterable[FutureResult[float, str]] reveal_type(Fold.collect(x, acc)) # N: Revealed type is "returns.future.FutureResult[builtins.tuple[builtins.float, ...], builtins.str]" - case: fold_collect_custom_type disable_cache: false main: | from typing import TypeVar, Iterable, Tuple from returns.iterables import Fold from returns.interfaces.applicative import Applicative1 from returns.primitives.hkt import SupportsKind1 V = TypeVar('V') class MyClass(SupportsKind1['MyClass', V], Applicative1[V]): ... acc: MyClass[Tuple[()]] x: Iterable[MyClass[float]] reveal_type(Fold.collect(x, acc)) # N: Revealed type is "main.MyClass[builtins.tuple[builtins.float, ...]]" returns-0.24.0/typesafety/test_iterables/test_fold/test_fold_collect_all.yml000066400000000000000000000127661472312074000275470ustar00rootroot00000000000000- case: fold_collect_all_iterable_types disable_cache: false main: | from typing import List, Iterable, Sequence, Iterator, Tuple, Generator from returns.result import Result from returns.iterables import Fold x1: List[Result[int, str]] x2: Iterable[Result[int, str]] x3: Sequence[Result[int, str]] x4: Iterator[Result[int, str]] x5: Tuple[Result[int, str], ...] 
x6: Tuple[Result[int, str]] x7: Generator[Result[int, str], None, None] acc: Result[Tuple[()], str] reveal_type(Fold.collect_all(x1, acc)) # N: Revealed type is "returns.result.Result[builtins.tuple[builtins.int, ...], builtins.str]" reveal_type(Fold.collect_all(x2, acc)) # N: Revealed type is "returns.result.Result[builtins.tuple[builtins.int, ...], builtins.str]" reveal_type(Fold.collect_all(x3, acc)) # N: Revealed type is "returns.result.Result[builtins.tuple[builtins.int, ...], builtins.str]" reveal_type(Fold.collect_all(x4, acc)) # N: Revealed type is "returns.result.Result[builtins.tuple[builtins.int, ...], builtins.str]" reveal_type(Fold.collect_all(x5, acc)) # N: Revealed type is "returns.result.Result[builtins.tuple[builtins.int, ...], builtins.str]" reveal_type(Fold.collect_all(x6, acc)) # N: Revealed type is "returns.result.Result[builtins.tuple[builtins.int, ...], builtins.str]" reveal_type(Fold.collect_all(x7, acc)) # N: Revealed type is "returns.result.Result[builtins.tuple[builtins.int, ...], builtins.str]" - case: fold_collect_all_wrong_type disable_cache: false main: | from returns.iterables import Fold from returns.io import IO from typing import Iterable acc = IO(()) x: Iterable[IO[float]] Fold.collect_all(x, acc) # E: Value of type variable "_FailableKind" of "collect_all" of "AbstractFold" cannot be "IO[Any]" [type-var] - case: fold_collect_all_maybe disable_cache: false main: | from returns.iterables import Fold from returns.maybe import Maybe from typing import Iterable acc = Maybe.from_value(()) x: Iterable[Maybe[float]] reveal_type(Fold.collect_all(x, acc)) # N: Revealed type is "returns.maybe.Maybe[builtins.tuple[builtins.float, ...]]" - case: fold_collect_all_result disable_cache: false main: | from returns.iterables import Fold from returns.result import Result from typing import Iterable, Tuple acc: Result[Tuple[()], str] x: Iterable[Result[float, str]] reveal_type(Fold.collect_all(x, acc)) # N: Revealed type is "returns.result.Result[builtins.tuple[builtins.float, ...], builtins.str]" - case: fold_collect_all_ioresult disable_cache: false main: | from returns.iterables import Fold from returns.io import IOResult from typing import Iterable, Tuple acc: IOResult[Tuple[()], str] x: Iterable[IOResult[float, str]] reveal_type(Fold.collect_all(x, acc)) # N: Revealed type is "returns.io.IOResult[builtins.tuple[builtins.float, ...], builtins.str]" - case: fold_collect_all_requires_context_result disable_cache: false main: | from returns.iterables import Fold from returns.context import RequiresContextResult from typing import Iterable, Tuple acc: RequiresContextResult[Tuple[()], str, bool] x: Iterable[RequiresContextResult[float, str, bool]] reveal_type(Fold.collect_all(x, acc)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.tuple[builtins.float, ...], builtins.str, builtins.bool]" - case: fold_collect_all_requires_context_ioresult disable_cache: false main: | from returns.iterables import Fold from returns.context import RequiresContextIOResult from typing import Iterable, Tuple acc: RequiresContextIOResult[Tuple[()], str, bool] x: Iterable[RequiresContextIOResult[float, str, bool]] reveal_type(Fold.collect_all(x, acc)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.tuple[builtins.float, ...], builtins.str, builtins.bool]" - case: fold_collect_all_requires_context_future_result disable_cache: false main: | from returns.iterables import Fold from returns.context import 
RequiresContextFutureResult from typing import Iterable, Tuple acc: RequiresContextFutureResult[Tuple[()], str, bool] x: Iterable[RequiresContextFutureResult[float, str, bool]] reveal_type(Fold.collect_all(x, acc)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.tuple[builtins.float, ...], builtins.str, builtins.bool]" - case: fold_collect_all_future_result disable_cache: false main: | from returns.iterables import Fold from returns.future import FutureResult from typing import Iterable, Tuple acc: FutureResult[Tuple[()], str] x: Iterable[FutureResult[float, str]] reveal_type(Fold.collect_all(x, acc)) # N: Revealed type is "returns.future.FutureResult[builtins.tuple[builtins.float, ...], builtins.str]" - case: fold_collect_all_custom_type disable_cache: false main: | from typing import TypeVar, Iterable, Tuple from returns.iterables import Fold from returns.interfaces.failable import Failable2 from returns.primitives.hkt import SupportsKind2 V = TypeVar('V') N = TypeVar('N') class MyClass(SupportsKind2['MyClass', V, N], Failable2[V, N]): ... acc: MyClass[Tuple[()], str] x: Iterable[MyClass[float, str]] reveal_type(Fold.collect_all(x, acc)) # N: Revealed type is "main.MyClass[builtins.tuple[builtins.float, ...], builtins.str]" returns-0.24.0/typesafety/test_iterables/test_fold/test_fold_loop.yml000066400000000000000000000157521472312074000262410ustar00rootroot00000000000000- case: fold_loop_iterable_types disable_cache: false main: | from typing import ( List, Iterable, Sequence, Iterator, Tuple, Generator, Callable, ) from returns.result import Result from returns.iterables import Fold x1: List[Result[int, str]] x2: Iterable[Result[int, str]] x3: Sequence[Result[int, str]] x4: Iterator[Result[int, str]] x5: Tuple[Result[int, str], ...] 
x6: Tuple[Result[int, str]] x7: Generator[Result[int, str], None, None] acc: Result[float, str] def div(first: int) -> Callable[[float], float]: return lambda second: first / second reveal_type(Fold.loop(x1, acc, div)) # N: Revealed type is "returns.result.Result[builtins.float, builtins.str]" reveal_type(Fold.loop(x2, acc, div)) # N: Revealed type is "returns.result.Result[builtins.float, builtins.str]" reveal_type(Fold.loop(x3, acc, div)) # N: Revealed type is "returns.result.Result[builtins.float, builtins.str]" reveal_type(Fold.loop(x4, acc, div)) # N: Revealed type is "returns.result.Result[builtins.float, builtins.str]" reveal_type(Fold.loop(x5, acc, div)) # N: Revealed type is "returns.result.Result[builtins.float, builtins.str]" reveal_type(Fold.loop(x6, acc, div)) # N: Revealed type is "returns.result.Result[builtins.float, builtins.str]" reveal_type(Fold.loop(x7, acc, div)) # N: Revealed type is "returns.result.Result[builtins.float, builtins.str]" - case: fold_loop_io disable_cache: false main: | from returns.iterables import Fold from returns.io import IO from typing import Iterable, Callable def div(first: int) -> Callable[[float], float]: return lambda second: first / second acc: IO[float] x: Iterable[IO[int]] reveal_type(Fold.loop(x, acc, div)) # N: Revealed type is "returns.io.IO[builtins.float]" - case: fold_loop_maybe disable_cache: false main: | from returns.iterables import Fold from returns.maybe import Maybe from typing import Iterable, Callable def div(first: int) -> Callable[[float], float]: return lambda second: first / second acc: Maybe[float] x: Iterable[Maybe[int]] reveal_type(Fold.loop(x, acc, div)) # N: Revealed type is "returns.maybe.Maybe[builtins.float]" - case: fold_loop_result disable_cache: false main: | from returns.iterables import Fold from returns.result import Result from typing import Iterable, Tuple, Callable def div(first: int) -> Callable[[float], float]: return lambda second: first / second acc: Result[float, str] x: Iterable[Result[int, str]] reveal_type(Fold.loop(x, acc, div)) # N: Revealed type is "returns.result.Result[builtins.float, builtins.str]" - case: fold_loop_ioresult disable_cache: false main: | from returns.iterables import Fold from returns.io import IOResult from typing import Iterable, Tuple, Callable def div(first: int) -> Callable[[float], float]: return lambda second: first / second acc: IOResult[float, str] x: Iterable[IOResult[int, str]] reveal_type(Fold.loop(x, acc, div)) # N: Revealed type is "returns.io.IOResult[builtins.float, builtins.str]" - case: fold_loop_requires_context disable_cache: false main: | from returns.iterables import Fold from returns.context import RequiresContext from typing import Iterable, Tuple, Callable def div(first: int) -> Callable[[float], float]: return lambda second: first / second acc: RequiresContext[float, str] x: Iterable[RequiresContext[int, str]] reveal_type(Fold.loop(x, acc, div)) # N: Revealed type is "returns.context.requires_context.RequiresContext[builtins.float, builtins.str]" - case: fold_loop_requires_context_result disable_cache: false main: | from returns.iterables import Fold from returns.context import RequiresContextResult from typing import Iterable, Tuple, Callable def div(first: int) -> Callable[[float], float]: return lambda second: first / second acc: RequiresContextResult[float, str, bool] x: Iterable[RequiresContextResult[int, str, bool]] reveal_type(Fold.loop(x, acc, div)) # N: Revealed type is 
"returns.context.requires_context_result.RequiresContextResult[builtins.float, builtins.str, builtins.bool]" - case: fold_loop_requires_context_ioresult disable_cache: false main: | from returns.iterables import Fold from returns.context import RequiresContextIOResult from typing import Iterable, Tuple, Callable def div(first: int) -> Callable[[float], float]: return lambda second: first / second acc: RequiresContextIOResult[float, str, bool] x: Iterable[RequiresContextIOResult[int, str, bool]] reveal_type(Fold.loop(x, acc, div)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.float, builtins.str, builtins.bool]" - case: fold_loop_requires_context_future_result disable_cache: false main: | from returns.iterables import Fold from returns.context import RequiresContextFutureResult from typing import Iterable, Tuple, Callable def div(first: int) -> Callable[[float], float]: return lambda second: first / second acc: RequiresContextFutureResult[float, str, bool] x: Iterable[RequiresContextFutureResult[int, str, bool]] reveal_type(Fold.loop(x, acc, div)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.float, builtins.str, builtins.bool]" - case: fold_loop_future disable_cache: false main: | from returns.iterables import Fold from returns.future import Future from typing import Iterable, Tuple, Callable def div(first: int) -> Callable[[float], float]: return lambda second: first / second acc: Future[float] x: Iterable[Future[int]] reveal_type(Fold.loop(x, acc, div)) # N: Revealed type is "returns.future.Future[builtins.float]" - case: fold_loop_future_result disable_cache: false main: | from returns.iterables import Fold from returns.future import FutureResult from typing import Iterable, Tuple, Callable def div(first: int) -> Callable[[float], float]: return lambda second: first / second acc: FutureResult[float, str] x: Iterable[FutureResult[int, str]] reveal_type(Fold.loop(x, acc, div)) # N: Revealed type is "returns.future.FutureResult[builtins.float, builtins.str]" - case: fold_loop_custom_type disable_cache: false main: | from typing import TypeVar, Iterable, Tuple, Callable from returns.iterables import Fold from returns.interfaces.applicative import Applicative1 from returns.primitives.hkt import SupportsKind1 V = TypeVar('V') class MyClass(SupportsKind1['MyClass', V], Applicative1[V]): ... 
def div(first: int) -> Callable[[float], float]: return lambda second: first / second acc: MyClass[float] x: Iterable[MyClass[int]] reveal_type(Fold.loop(x, acc, div)) # N: Revealed type is "main.MyClass[builtins.float]" returns-0.24.0/typesafety/test_maybe/000077500000000000000000000000001472312074000176305ustar00rootroot00000000000000returns-0.24.0/typesafety/test_maybe/test_do.yml000066400000000000000000000031271472312074000220170ustar00rootroot00000000000000- case: do_all_errors disable_cache: false main: | from returns.maybe import Maybe reveal_type(Maybe.do( # N: Revealed type is "returns.maybe.Maybe[Any]" first / second for first in Maybe.empty for second in Maybe.empty )) - case: do_types_missmatch disable_cache: false main: | from returns.maybe import Maybe from returns.result import Success Maybe.do( x + y for x in Success(1) # E: Invalid type supplied in do-notation: expected "returns.maybe.Maybe[Any]", got "returns.result.Success[builtins.int]" [misc] for y in Maybe.from_value(2.5) ) - case: do_types_converted disable_cache: false main: | from returns.maybe import Maybe from returns.result import Result from returns.converters import result_to_maybe a: Result[int, str] reveal_type(Maybe.do( # N: Revealed type is "returns.maybe.Maybe[builtins.float]" x + y for x in result_to_maybe(a) for y in Maybe.from_value(2.5) )) - case: do_with_if disable_cache: false main: | from returns.maybe import Maybe Maybe.do( # E: Using "if" conditions inside a generator is not allowed [misc] x + y for x in Maybe.from_value(1) for y in Maybe.from_value(2.5) if y > 5 ) - case: do_with_var disable_cache: false main: | from returns.maybe import Maybe x = ( x + y for x in Maybe.from_value(1) for y in Maybe.from_value(2.5) ) Maybe.do(x) # E: Literal generator expression is required, not a variable or function call [misc] returns-0.24.0/typesafety/test_maybe/test_maybe_decorator.yml000066400000000000000000000055331472312074000245570ustar00rootroot00000000000000- case: maybe_decorator_no_params disable_cache: false main: | from returns.maybe import maybe @maybe def test() -> int: return 1 reveal_type(test) # N: Revealed type is "def () -> returns.maybe.Maybe[builtins.int]" - case: maybe_decorator_no_params_optional disable_cache: false main: | from typing import Optional from returns.maybe import maybe @maybe def test() -> Optional[int]: return 1 reveal_type(test) # N: Revealed type is "def () -> returns.maybe.Maybe[builtins.int]" - case: maybe_composition_no_params disable_cache: false main: | from returns.maybe import maybe def test() -> int: return 1 reveal_type(maybe(test)) # N: Revealed type is "def () -> returns.maybe.Maybe[builtins.int]" - case: maybe_decorator_with_args disable_cache: false main: | from typing import Optional from returns.maybe import maybe @maybe def test(first: int, second: Optional[str] = None, *, kw: bool = True) -> int: return 1 reveal_type(test) # N: Revealed type is "def (first: builtins.int, second: Union[builtins.str, None] =, *, kw: builtins.bool =) -> returns.maybe.Maybe[builtins.int]" - case: maybe_composition_with_args disable_cache: false main: | from typing import Optional from returns.maybe import maybe def test(first: int, second: Optional[str] = None, *, kw: bool = True) -> int: return 1 reveal_type(maybe(test)) # N: Revealed type is "def (first: builtins.int, second: Union[builtins.str, None] =, *, kw: builtins.bool =) -> returns.maybe.Maybe[builtins.int]" - case: maybe_decorator_with_args_kwargs disable_cache: false main: | from returns.maybe import maybe @maybe 
def test(*args, **kwargs) -> int: return 1 reveal_type(test) # N: Revealed type is "def (*args: Any, **kwargs: Any) -> returns.maybe.Maybe[builtins.int]" - case: maybe_decorator_with_typed_args_kwargs disable_cache: false main: | from returns.maybe import maybe @maybe def test(*args: int, **kwargs: str) -> int: return 1 reveal_type(test) # N: Revealed type is "def (*args: builtins.int, **kwargs: builtins.str) -> returns.maybe.Maybe[builtins.int]" - case: maybe_decorator_with_optional disable_cache: false main: | from typing import Optional from returns.maybe import maybe @maybe def test() -> Optional[int]: return 1 reveal_type(test) # N: Revealed type is "def () -> returns.maybe.Maybe[builtins.int]" - case: maybe_multiple_decorators disable_cache: false main: | from typing import Optional from returns.maybe import maybe from returns.io import impure @impure @maybe def test() -> Optional[int]: return 1 reveal_type(test) # N: Revealed type is "def () -> returns.io.IO[returns.maybe.Maybe[builtins.int]]" returns-0.24.0/typesafety/test_maybe/test_maybe_type.yml000066400000000000000000000076621472312074000235630ustar00rootroot00000000000000- case: maybe_from_value1 disable_cache: false main: | from returns.maybe import Maybe value: int reveal_type(Maybe.from_value(value)) # N: Revealed type is "returns.maybe.Maybe[builtins.int]" - case: maybe_from_value2 disable_cache: false main: | from typing import Optional from returns.maybe import Maybe value: Optional[int] reveal_type(Maybe.from_value(value)) # N: Revealed type is "returns.maybe.Maybe[Union[builtins.int, None]]" - case: maybe_from_optional1 disable_cache: false main: | from typing import Optional from returns.maybe import Maybe value: int reveal_type(Maybe.from_optional(value)) # N: Revealed type is "returns.maybe.Maybe[builtins.int]" - case: maybe_from_optional2 disable_cache: false main: | from typing import Optional from returns.maybe import Maybe value: Optional[int] reveal_type(Maybe.from_optional(value)) # N: Revealed type is "returns.maybe.Maybe[builtins.int]" - case: maybe_map_regular disable_cache: false main: | from returns.maybe import Maybe result = Maybe.from_value(1).map(lambda i: i / i) reveal_type(result) # N: Revealed type is "returns.maybe.Maybe[builtins.float]" - case: maybe_map_optional1 disable_cache: false main: | from returns.maybe import Maybe result = Maybe.from_value({'a': 'b'}).map(lambda d: d.get('a', None)) reveal_type(result) # N: Revealed type is "returns.maybe.Maybe[Union[builtins.str, None]]" - case: maybe_map_optional2 disable_cache: false main: | from returns.maybe import Maybe result = Maybe.from_value(1).bind(lambda d: Maybe.from_value(str(d))) reveal_type(result) # N: Revealed type is "returns.maybe.Maybe[builtins.str]" - case: maybe_apply disable_cache: false main: | from returns.maybe import Maybe result = Maybe.from_value(1).apply(Maybe.from_value(float)) reveal_type(result) # N: Revealed type is "returns.maybe.Maybe[builtins.float]" - case: maybe_bind1 disable_cache: false main: | from returns.maybe import Maybe def test(arg: int) -> Maybe[str]: ... reveal_type(Maybe.from_value(1).bind(test)) # N: Revealed type is "returns.maybe.Maybe[builtins.str]" - case: maybe_bind2 disable_cache: false main: | from returns.maybe import Maybe from typing import Optional def test(arg: int) -> Maybe[Optional[str]]: ... 
reveal_type(Maybe.from_value(1).bind(test)) # N: Revealed type is "returns.maybe.Maybe[Union[builtins.str, None]]" - case: maybe_bind_optional disable_cache: false main: | from returns.maybe import Maybe from typing import Optional def test(arg: int) -> Optional[str]: ... reveal_type(Maybe.from_value(1).bind_optional(test)) # N: Revealed type is "returns.maybe.Maybe[builtins.str]" - case: maybe_value_or disable_cache: false main: | from returns.maybe import Maybe result = Maybe.from_value(1).value_or(None) reveal_type(result) # N: Revealed type is "Union[builtins.int, None]" - case: maybe_or_else1 disable_cache: false main: | from returns.maybe import Maybe result = Maybe.from_value(1).or_else_call(lambda: 2) reveal_type(result) # N: Revealed type is "builtins.int" - case: maybe_or_else2 disable_cache: false main: | from returns.maybe import Maybe def fallback() -> str: ... result = Maybe.from_value(1).or_else_call(fallback) reveal_type(result) # N: Revealed type is "Union[builtins.int, builtins.str]" - case: maybe_or_else3 disable_cache: false main: | from returns.maybe import Maybe from typing_extensions import Never def fallback() -> Never: ... result = Maybe.from_value(1).or_else_call(fallback) reveal_type(result) # N: Revealed type is "builtins.int" - case: maybe_unwrap disable_cache: false main: | from returns.maybe import Some reveal_type(Some(1).unwrap()) # N: Revealed type is "builtins.int" returns-0.24.0/typesafety/test_maybe/test_maybe_type_cast.yml000066400000000000000000000014441472312074000245650ustar00rootroot00000000000000- case: maybe_correct_cast disable_cache: false main: | from returns.maybe import Maybe first: Maybe[ValueError] second: Maybe[Exception] = first reveal_type(second) # N: Revealed type is "returns.maybe.Maybe[builtins.Exception]" - case: maybe_getattr disable_cache: false main: | from returns.maybe import Maybe x: Maybe[int] x.missing # E: "Maybe[int]" has no attribute "missing" [attr-defined] - case: maybe_some_constructor disable_cache: false main: | from returns.maybe import Some reveal_type(Some(1)) # N: Revealed type is "returns.maybe.Some[builtins.int]" - case: maybe_nothing_const disable_cache: false main: | from returns.maybe import Nothing reveal_type(Nothing) # N: Revealed type is "returns.maybe.Maybe[Never]" returns-0.24.0/typesafety/test_methods/000077500000000000000000000000001472312074000201765ustar00rootroot00000000000000returns-0.24.0/typesafety/test_methods/test_cond.yml000066400000000000000000000052551472312074000227120ustar00rootroot00000000000000- case: cond_result disable_cache: false main: | from returns.methods import cond from returns.result import Result reveal_type(cond(Result, True, 42, '42')) # N: Revealed type is "returns.result.Result[builtins.int, builtins.str]" - case: cond_ioresult disable_cache: false main: | from returns.io import IOResult from returns.methods import cond reveal_type(cond(IOResult, False, 'success', 'failure')) # N: Revealed type is "returns.io.IOResult[builtins.str, builtins.str]" - case: cond_future_result disable_cache: false main: | from returns.future import FutureResult from returns.methods import cond reveal_type(cond(FutureResult, False, True, False)) # N: Revealed type is "returns.future.FutureResult[builtins.bool, builtins.bool]" - case: cond_reader_result disable_cache: false main: | from returns.methods import cond from returns.context import ReaderResult reveal_type(cond(ReaderResult, True, 1.0, False)) # N: Revealed type is 
"returns.context.requires_context_result.RequiresContextResult[builtins.float, builtins.bool, Any]" - case: cond_reader_ioresult disable_cache: false main: | from returns.methods import cond from returns.context import ReaderIOResult reveal_type(cond(ReaderIOResult, True, 1.0, False)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.float, builtins.bool, Any]" - case: cond_reader_future_result disable_cache: false main: | from returns.methods import cond from returns.context import ReaderFutureResult reveal_type(cond(ReaderFutureResult, True, 1, 1.0)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.float, Any]" - case: cond_maybe disable_cache: false main: | from returns.methods import cond from returns.maybe import Maybe reveal_type(cond(Maybe, True, 'test')) # N: Revealed type is "returns.maybe.Maybe[builtins.str]" - case: cond_custom_type disable_cache: false main: | from typing import TypeVar from returns.interfaces.specific.result import ResultLike2 from returns.methods import cond from returns.primitives.hkt import SupportsKind2 ValueType = TypeVar('ValueType') ErrorType = TypeVar('ErrorType') class MyOwn( SupportsKind2['MyOwn', ValueType, ErrorType], ResultLike2[ValueType, ErrorType] ): ... reveal_type(cond(MyOwn, True, 'test', 1.0)) out: | main:16: note: Revealed type is "main.MyOwn[builtins.str, builtins.float]" main:16: error: Only concrete class can be given where "Type[MyOwn[Any, Any]]" is expected [type-abstract] returns-0.24.0/typesafety/test_methods/test_partition.yml000066400000000000000000000020671472312074000237760ustar00rootroot00000000000000- case: partition_result disable_cache: false main: | from typing import List from returns.result import Success, Failure, Result from returns.methods import partition x: List[Result[int, str]] reveal_type(partition(x)) # N: Revealed type is "Tuple[builtins.list[builtins.int], builtins.list[builtins.str]]" - case: partition_io_results disable_cache: false main: | from typing import Tuple from returns.result import Success, Failure from returns.methods import partition from returns.io import IO, IOResult, IOSuccess x: Tuple[IOResult[int, str], IOResult[int, str]] reveal_type(partition(x)) # N: Revealed type is "Tuple[builtins.list[returns.io.IO[builtins.int]], builtins.list[returns.io.IO[builtins.str]]]" - case: partition_maybe disable_cache: false main: | from typing import List, Tuple from returns.maybe import Maybe from returns.methods import partition x: List[Maybe[int]] reveal_type(partition(x)) # N: Revealed type is "Tuple[builtins.list[builtins.int], builtins.list[None]]" returns-0.24.0/typesafety/test_methods/test_unwrap_or_failure.yml000066400000000000000000000020441472312074000255030ustar00rootroot00000000000000- case: unwrap_or_failure_result disable_cache: false main: | from returns.methods import unwrap_or_failure from returns.result import Result x: Result[int, str] reveal_type(unwrap_or_failure(x)) # N: Revealed type is "Union[builtins.int, builtins.str]" - case: unwrap_or_failure_ioresult disable_cache: false main: | from returns.methods import unwrap_or_failure from returns.io import IOResult x: IOResult[int, str] reveal_type(unwrap_or_failure(x)) # N: Revealed type is "Union[returns.io.IO[builtins.int], returns.io.IO[builtins.str]]" - case: unwrap_or_failure_custom_type disable_cache: false main: | from typing import TypeVar from returns.interfaces.unwrappable import Unwrappable from 
returns.methods import unwrap_or_failure ValueType = TypeVar('ValueType') ErrorType = TypeVar('ErrorType') class MyOwn(Unwrappable[ValueType, ErrorType]): ... x: MyOwn[int, str] reveal_type(unwrap_or_failure(x)) # N: Revealed type is "Union[builtins.int, builtins.str]" returns-0.24.0/typesafety/test_pipeline/000077500000000000000000000000001472312074000203405ustar00rootroot00000000000000returns-0.24.0/typesafety/test_pipeline/test_flow/000077500000000000000000000000001472312074000223465ustar00rootroot00000000000000returns-0.24.0/typesafety/test_pipeline/test_flow/test_flow_args.yml000066400000000000000000000021711472312074000261140ustar00rootroot00000000000000- case: flow_zero_args disable_cache: false main: | from returns.pipeline import flow reveal_type(flow()) out: | main:3: error: Missing positional argument "instance" in call to "flow" [call-arg] main:3: note: Revealed type is "Never" - case: flow_one_arg disable_cache: false main: | from returns.pipeline import flow reveal_type(flow(1)) out: | main:3: error: Too few arguments for "flow" [misc] main:3: note: Revealed type is "Never" - case: flow_star_args disable_cache: false main: | from returns.pipeline import flow from returns.functions import identity reveal_type( flow( # N: Revealed type is "builtins.int" 1, identity, identity, identity, identity, identity, identity, identity, identity, identity, identity, identity, identity, identity, identity, identity, identity, identity, identity, identity, ) ) returns-0.24.0/typesafety/test_pipeline/test_flow/test_flow_base.yml000066400000000000000000000111501472312074000260670ustar00rootroot00000000000000- case: flow_function disable_cache: false main: | from returns.pipeline import flow def inc(arg: int) -> int: ... reveal_type(flow(1, inc, inc, inc, inc, inc)) # N: Revealed type is "builtins.int" - case: flow_function_with_overloads1 disable_cache: false main: | from returns.pipeline import flow def convert(arg: str) -> float: ... reveal_type(flow('1.0', convert, int, bool)) # N: Revealed type is "builtins.bool" - case: flow_function_with_overloads2 disable_cache: false main: | from returns.pipeline import flow from returns.functions import identity def convert(arg: str) -> float: ... reveal_type(flow('1.0', identity, convert, identity, int, identity, bool, identity)) # N: Revealed type is "builtins.bool" - case: flow_with_object1 disable_cache: false main: | from returns.pipeline import flow class Test(object): def __call__(self, arg: int) -> float: ... reveal_type(flow(1, Test())) # N: Revealed type is "builtins.float" - case: flow_with_object2 disable_cache: false main: | from returns.pipeline import flow class Test(object): def __init__(self, arg: int) -> None: ... reveal_type(flow(1, Test)) # N: Revealed type is "main.Test" - case: flow_with_lambdas disable_cache: false main: | from returns.pipeline import flow reveal_type( flow( # N: Revealed type is "builtins.float" 1, lambda x: x, str, lambda y: y.split(' '), lambda z: ''.join(z), lambda x: x, lambda f: float(f), ) ) - case: flow_with_methods disable_cache: false main: | from returns.pipeline import flow class Test(object): def method(self, arg: int) -> int: ... @classmethod def method_class(cls, arg: int) -> int: ... @staticmethod def method_static(arg: int) -> int: ... reveal_type( flow( # N: Revealed type is "builtins.int" 1, Test().method, Test.method_class, Test.method_static, ) ) - case: flow_with_any disable_cache: false main: | from returns.pipeline import flow from typing import Any def test(arg: int) -> Any: ... 
reveal_type(flow(1, test)) # N: Revealed type is "Any" - case: flow_with_containers disable_cache: false main: | from returns.pipeline import flow from returns.pointfree import bind, map_ from returns.result import Result from returns.functions import identity x: Result[int, str] def bound(arg: int) -> Result[float, str]: ... def mappable(arg: float) -> bool: ... reveal_type(flow(x, bind(bound), identity, map_(mappable))) # N: Revealed type is "returns.result.Result[builtins.bool, builtins.str]" - case: bind_result_and_flow1 disable_cache: false main: | from returns.result import Result from returns.io import IOResult from returns.functions import identity from returns.pointfree import bind_result from returns.pipeline import flow def test(arg: int) -> Result[float, str]: ... def second(arg: float) -> Result[bool, str]: ... r: IOResult[int, str] reveal_type(flow(r, identity, bind_result(test), bind_result(second))) # N: Revealed type is "returns.io.IOResult[builtins.bool, builtins.str]" - case: bind_result_and_flow2 disable_cache: false main: | from returns.result import Result from returns.io import IOResult from returns.functions import identity from returns.pointfree import bind_result from returns.pipeline import flow def test(arg: int) -> Result[float, str]: ... def second(arg: float) -> Result[bool, str]: ... r: IOResult[int, str] reveal_type(flow(r, bind_result(test), identity, bind_result(second))) # N: Revealed type is "returns.io.IOResult[builtins.bool, builtins.str]" # Regression to # https://github.com/dry-python/returns/issues/461 - case: bind_result_and_flow3 disable_cache: false main: | from returns.result import Result from returns.io import IOResult from returns.functions import identity from returns.pointfree import bind_result from returns.pipeline import flow def test(arg: int) -> Result[float, str]: ... def second(arg: float) -> Result[bool, str]: ... r: IOResult[int, str] reveal_type(flow(r, bind_result(test), bind_result(second))) # N: Revealed type is "returns.io.IOResult[builtins.bool, builtins.str]" returns-0.24.0/typesafety/test_pipeline/test_flow/test_flow_curry.yml000066400000000000000000000012401472312074000263200ustar00rootroot00000000000000- case: flow_function_with_curried1 disable_cache: false main: | from returns.pipeline import flow from returns.curry import curry from returns.functions import identity @curry def curried(a: int, b: int) -> float: ... reveal_type(flow(1, curried)(1)) # N: Revealed type is "builtins.float" - case: flow_function_with_curried2 disable_cache: false main: | from returns.pipeline import flow from returns.curry import curry from returns.functions import identity @curry def curried(a: int, b: int) -> float: ... reveal_type(flow(1, curried, identity)(1)) # N: Revealed type is "builtins.float" returns-0.24.0/typesafety/test_pipeline/test_flow/test_flow_errors.yml000066400000000000000000000033201472312074000264710ustar00rootroot00000000000000- case: flow_function_error disable_cache: false main: | from returns.pipeline import flow def convert(arg: str) -> float: ... 
reveal_type(flow('1', int, convert)) out: | main:6: error: Argument 1 to "convert" has incompatible type "int"; expected "str" [arg-type] main:6: note: Revealed type is "builtins.float" - case: flow_wrong_steps_error disable_cache: false main: | from returns.pipeline import flow reveal_type(flow('a', [], int)) out: | main:3: error: "List[Never]" not callable [operator] main:3: note: Revealed type is "builtins.int" - case: flow_function_first_arg_error disable_cache: false main: | from returns.pipeline import flow def convert(arg: str) -> float: ... reveal_type(flow(1, convert)) out: | main:6: error: Argument 1 to "convert" has incompatible type "int"; expected "str" [arg-type] main:6: note: Revealed type is "builtins.float" - case: flow_function_without_args_error disable_cache: false main: | from returns.pipeline import flow def convert() -> float: ... reveal_type(flow(1, convert)) out: | main:6: error: Too many arguments for "convert" [call-arg] main:6: note: Revealed type is "builtins.float" - case: flow_function_with_too_many_args_error disable_cache: false main: | from returns.pipeline import flow def convert(first: str, other: int) -> float: ... reveal_type(flow(1, convert)) out: | main:6: error: Missing positional argument "other" in call to "convert" [call-arg] main:6: error: Argument 1 to "convert" has incompatible type "int"; expected "str" [arg-type] main:6: note: Revealed type is "builtins.float" returns-0.24.0/typesafety/test_pipeline/test_flow/test_flow_generics.yml000066400000000000000000000014011472312074000267520ustar00rootroot00000000000000- case: flow_generic_function disable_cache: false main: | from returns.pipeline import flow from returns.functions import identity from typing import TypeVar _NewValueType = TypeVar('_NewValueType') def test(arg: _NewValueType) -> _NewValueType: x = flow(arg, identity) reveal_type(x) # N: Revealed type is "_NewValueType`-1" return x - case: flow_generic_argument disable_cache: false main: | from returns.pipeline import flow from returns.functions import identity from typing import TypeVar _NewValueType = TypeVar('_NewValueType') def test(arg: _NewValueType) -> _NewValueType: x = flow(arg, str) reveal_type(x) # N: Revealed type is "builtins.str" return arg returns-0.24.0/typesafety/test_pipeline/test_is_successful.yml000066400000000000000000000035471472312074000250050ustar00rootroot00000000000000- case: result_is_successful disable_cache: false main: | from returns.pipeline import is_successful from returns.result import Result def returns_result() -> Result[int, str]: ... reveal_type(is_successful(returns_result())) # N: Revealed type is "builtins.bool" - case: ioresult_is_successful disable_cache: false main: | from returns.pipeline import is_successful from returns.io import IOResult def returns_ioresult() -> IOResult[int, str]: ... 
reveal_type(is_successful(returns_ioresult())) # N: Revealed type is "builtins.bool" - case: maybe_is_successful disable_cache: false main: | from returns.pipeline import is_successful from returns.maybe import Maybe reveal_type(is_successful(Maybe.from_value(1))) # N: Revealed type is "builtins.bool" - case: custom_type_is_successful disable_cache: false main: | from returns.pipeline import is_successful from returns.primitives.hkt import Kind2 from returns.primitives.exceptions import UnwrapFailedError from returns.interfaces.unwrappable import Unwrappable from typing import TypeVar T = TypeVar('T') N = TypeVar('N') class MyOwn( Kind2['MyOwn', T, N], Unwrappable[T, N], ): def __init__(self, value: T, error: N) -> None: self.value = value self.error = error def unwrap(self) -> T: if self.error: raise UnwrapFailedError(self) return self.value def failure(self) -> N: if self.value: raise UnwrapFailedError(self) return self.error x: MyOwn[int, str] reveal_type(x.unwrap()) # N: Revealed type is "builtins.int" reveal_type(x.failure()) # N: Revealed type is "builtins.str" reveal_type(is_successful(x)) # N: Revealed type is "builtins.bool" returns-0.24.0/typesafety/test_pipeline/test_managed/000077500000000000000000000000001472312074000227735ustar00rootroot00000000000000returns-0.24.0/typesafety/test_pipeline/test_managed/test_managed_errors.yml000066400000000000000000000033211472312074000275440ustar00rootroot00000000000000- case: managed_with_non_none_release disable_cache: false main: | from returns.io import IOResult from returns.pipeline import managed from returns.result import Result def use(acquired_value: int) -> IOResult[float, str]: ... def release( acquired_value: int, use_value: Result[float, str], ) -> IOResult[float, str]: ... x: IOResult[int, str] managed(use, release)(x) # E: Argument 2 to "managed" has incompatible type "Callable[[int, Result[float, str]], IOResult[float, str]]"; expected "Callable[[int, Result[float, str]], KindN[IOResult[Any, Any], None, str, Never]]" [arg-type] - case: managed_with_non_matching_use_release_types disable_cache: false main: | from returns.io import IOResult from returns.pipeline import managed from returns.result import Result def use(acquired_value: int) -> IOResult[float, str]: ... def release( acquired_value: int, use_value: Result[str, str], ) -> IOResult[None, str]: ... x: IOResult[int, str] managed(use, release)(x) # E: Cannot infer type argument 3 of "managed" [misc] - case: managed_with_wrong_container_input disable_cache: false main: | from returns.io import IOResult from returns.pipeline import managed from returns.result import Result def use(acquired_value: int) -> IOResult[float, str]: ... def release( acquired_value: int, use_value: Result[float, str], ) -> IOResult[None, str]: ... x: IOResult[str, str] managed(use, release)(x) # E: Argument 1 has incompatible type "IOResult[str, str]"; expected "KindN[IOResult[Any, Any], int, str, Never]" [arg-type] returns-0.24.0/typesafety/test_pipeline/test_managed/test_managed_types.yml000066400000000000000000000076071472312074000274070ustar00rootroot00000000000000- case: managed_with_ioresult disable_cache: false main: | from returns.io import IOResult from returns.pipeline import managed from returns.result import Result def use(acquired_value: int) -> IOResult[float, str]: ... def release( acquired_value: int, use_value: Result[float, str], ) -> IOResult[None, str]: ... 
x: IOResult[int, str] reveal_type(managed(use, release)(x)) # N: Revealed type is "returns.io.IOResult[builtins.float, builtins.str]" - case: managed_with_future_result disable_cache: false main: | from returns.future import FutureResult from returns.pipeline import managed from returns.result import Result def use(acquired_value: int) -> FutureResult[float, str]: ... def release( acquired_value: int, use_value: Result[float, str], ) -> FutureResult[None, str]: ... x: FutureResult[int, str] reveal_type(managed(use, release)(x)) # N: Revealed type is "returns.future.FutureResult[builtins.float, builtins.str]" - case: managed_with_reader_ioresult disable_cache: false main: | from returns.context import ReaderIOResult from returns.pipeline import managed from returns.result import Result def use(acquired_value: int) -> ReaderIOResult[float, str, bool]: ... def release( acquired_value: int, use_value: Result[float, str], ) -> ReaderIOResult[None, str, bool]: ... x: ReaderIOResult[int, str, bool] reveal_type(managed(use, release)(x)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.float, builtins.str, builtins.bool]" - case: managed_with_reader_future_result disable_cache: false main: | from returns.context import ReaderFutureResult from returns.pipeline import managed from returns.result import Result def use(acquired_value: int) -> ReaderFutureResult[float, str, bool]: ... def release( acquired_value: int, use_value: Result[float, str], ) -> ReaderFutureResult[None, str, bool]: ... x: ReaderFutureResult[int, str, bool] reveal_type(managed(use, release)(x)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.float, builtins.str, builtins.bool]" - case: managed_custom_type disable_cache: false main: | from typing import Callable, TypeVar, Any from returns.interfaces.specific.ioresult import IOResultBased2 from returns.primitives.hkt import SupportsKind2 from returns.io import IO, IOResult from returns.pipeline import managed from returns.result import Result _ValueType = TypeVar('_ValueType') _NewValueType = TypeVar('_NewValueType') # Result related: _ErrorType = TypeVar('_ErrorType') _NewErrorType = TypeVar('_NewErrorType') class MyClass( SupportsKind2['MyClass', _ValueType, _ErrorType], IOResultBased2[_ValueType, _ErrorType], ): def bind_ioresult( self, function: Callable[ [_ValueType], IOResult[_NewValueType, _ErrorType], ], ) -> MyClass[_NewValueType, _ErrorType]: ... @classmethod def from_ioresult( self, inner_value: IOResult[_NewValueType, _NewErrorType], ) -> MyClass[_NewValueType, _NewErrorType]: ... @classmethod def from_failed_io( cls, inner_value: IO[_NewErrorType], ) -> MyClass[Any, _NewErrorType]: ... def use(acquired_value: int) -> MyClass[float, str]: ... def release( acquired_value: int, use_value: Result[float, str], ) -> MyClass[None, str]: ... x: MyClass[int, str] reveal_type(managed(use, release)(x)) # N: Revealed type is "main.MyClass[builtins.float, builtins.str]" returns-0.24.0/typesafety/test_pipeline/test_pipe/000077500000000000000000000000001472312074000223345ustar00rootroot00000000000000returns-0.24.0/typesafety/test_pipeline/test_pipe/test_pipe_base.yml000066400000000000000000000100331472312074000260420ustar00rootroot00000000000000- case: pipe_function1 disable_cache: false main: | from returns.pipeline import pipe def convert(arg: str) -> float: ... 
predefined = pipe(convert, int, bool) reveal_type(predefined('1.0')) # N: Revealed type is "builtins.bool" - case: pipe_function2 disable_cache: false main: | from returns.pipeline import pipe from returns.functions import identity def convert(arg: str) -> float: ... predefined = pipe(identity, convert, identity, identity, int, identity, bool, identity) reveal_type(predefined('1.0')) # N: Revealed type is "builtins.bool" - case: pipe_callable_instances disable_cache: false main: | from returns.pipeline import pipe def convert(arg: str) -> float: ... class Test(object): def __init__(self, arg: int) -> None: ... def __call__(self, arg: float) -> bool: ... def with_instance(arg: Test) -> str: ... predefined = pipe(Test, with_instance, convert, Test(1)) reveal_type(predefined(1)) # N: Revealed type is "builtins.bool" - case: pipe_star_args disable_cache: false main: | from returns.pipeline import pipe from returns.functions import identity reveal_type( pipe( # N: Revealed type is "builtins.int" identity, identity, identity, identity, identity, identity, identity, identity, identity, identity, identity, identity, identity, identity, identity, # very long amount of args! )(1) ) - case: pipe_with_lambdas disable_cache: false main: | from returns.pipeline import pipe reveal_type( pipe( # N: Revealed type is "builtins.float" lambda x: x, str, lambda y: y.split(' '), lambda z: ''.join(z), lambda x: x, lambda f: float(f), )(1) ) - case: pipe_with_methods disable_cache: false main: | from returns.pipeline import pipe class Test(object): def method(self, arg: int) -> int: ... @classmethod def method_class(cls, arg: int) -> int: ... @staticmethod def method_static(arg: int) -> int: ... reveal_type( pipe( # N: Revealed type is "builtins.int" Test().method, Test.method_class, Test.method_static, )(1) ) - case: pipe_with_any disable_cache: false main: | from returns.pipeline import pipe from typing import Any def test(arg: int) -> Any: ... reveal_type(pipe(test)(1)) # N: Revealed type is "Any" - case: pipe_with_overloads disable_cache: false main: | from returns.pipeline import pipe x = pipe(int, str, int) reveal_type(x(1.0)) # N: Revealed type is "builtins.int" reveal_type(x('a')) # N: Revealed type is "builtins.int" - case: pipe_with_containers disable_cache: false main: | from returns.pipeline import pipe from returns.pointfree import bind, map_ from returns.result import Result from returns.functions import identity x: Result[int, str] def bound(arg: int) -> Result[float, str]: ... def mappable(arg: float) -> bool: ... reveal_type(pipe(bind(bound), identity, map_(mappable))(x)) # N: Revealed type is "returns.result.Result[builtins.bool, builtins.str]" - case: pipe_with_containers2 disable_cache: false main: | from returns.result import Result from returns.io import IOResult from returns.functions import identity from returns.pointfree import bind_result from returns.pipeline import pipe def test(arg: int) -> Result[float, str]: ... def second(arg: float) -> Result[bool, str]: ... r: IOResult[int, str] reveal_type(pipe(bind_result(test), bind_result(second))(r)) # N: Revealed type is "returns.io.IOResult[builtins.bool, builtins.str]" returns-0.24.0/typesafety/test_pipeline/test_pipe/test_pipe_callable_protocol.yml000066400000000000000000000024161472312074000306160ustar00rootroot00000000000000- case: pipe_regular_callable disable_cache: false main: | from typing import Callable from returns.pipeline import pipe def convert(arg: str) -> float: ... 
def callback(f: Callable[[str], bool]) -> bool: return f('a') predefined = pipe(convert, int, bool) reveal_type(callback(predefined)) # N: Revealed type is "builtins.bool" - case: pipe_generic_callable1 disable_cache: false main: | from typing import Callable, TypeVar from returns.pipeline import pipe T = TypeVar('T') R = TypeVar('R') def callback(f: Callable[[T], R], i: T) -> R: return f(i) def first(a: int) -> float: ... def second(a: float) -> str: ... predefined = pipe(first, second) reveal_type(callback(predefined, 1)) # N: Revealed type is "builtins.str" - case: pipe_generic_callable2 disable_cache: false main: | from typing import Callable, TypeVar from returns.pipeline import pipe T = TypeVar('T') R = TypeVar('R') def callback(f: Callable[[T], R]) -> R: ... def first(a: int) -> float: ... def second(a: float) -> str: ... predefined = pipe(first, second) reveal_type(callback(predefined)) # N: Revealed type is "builtins.str" returns-0.24.0/typesafety/test_pipeline/test_pipe/test_pipe_curry.yml000066400000000000000000000005231472312074000262770ustar00rootroot00000000000000- case: pipe_function_with_curried disable_cache: false main: | from returns.pipeline import pipe from returns.curry import curry from returns.functions import identity @curry def curried(a: int, b: int) -> float: ... reveal_type(pipe(curried, identity)(1)(2)) # N: Revealed type is "builtins.float" returns-0.24.0/typesafety/test_pipeline/test_pipe/test_pipe_errors.yml000066400000000000000000000043161472312074000264530ustar00rootroot00000000000000- case: pipe_function_error disable_cache: false main: | from returns.pipeline import pipe def convert(arg: str) -> float: ... reveal_type(pipe(int, convert)('a')) out: | main:6: error: Argument 1 to "convert" has incompatible type "int"; expected "str" [arg-type] main:6: note: Revealed type is "builtins.float" - case: pipe_wrong_steps_error disable_cache: false main: | from returns.pipeline import pipe pipe([], int)('a') out: | main:3: error: "List[Never]" not callable [operator] main:3: error: "Never" not callable [misc] main:3: error: Argument 1 to "__call__" of "_Pipe" has incompatible type "str"; expected "Never" [arg-type] - case: pipe_function_without_steps disable_cache: false main: | from returns.pipeline import pipe pipe() out: | main:3: error: Too few arguments for "pipe" [call-arg] - case: pipe_function_first_arg_error disable_cache: false main: | from returns.pipeline import pipe def convert(arg: str) -> float: ... reveal_type(pipe(convert)(1)) out: | main:6: error: Argument 1 to "convert" has incompatible type "int"; expected "str" [arg-type] main:6: note: Revealed type is "builtins.float" main:6: error: Argument 1 to "__call__" of "_Pipe" has incompatible type "int"; expected "str" [arg-type] - case: pipe_function_without_args_error disable_cache: false main: | from returns.pipeline import pipe def convert() -> float: ... reveal_type(pipe(convert)(1)) out: | main:6: error: Too many arguments for "convert" [call-arg] main:6: note: Revealed type is "builtins.float" - case: pipe_function_with_too_many_args_error disable_cache: false main: | from returns.pipeline import pipe def convert(first: str, other: int) -> float: ... 
reveal_type(pipe(convert)(1)) out: | main:6: error: Missing positional argument "other" in call to "convert" [call-arg] main:6: error: Argument 1 to "convert" has incompatible type "int"; expected "str" [arg-type] main:6: note: Revealed type is "builtins.float" main:6: error: Argument 1 to "__call__" of "_Pipe" has incompatible type "int"; expected "str" [arg-type] returns-0.24.0/typesafety/test_pipeline/test_pipe/test_pipe_generic.yml000066400000000000000000000014131472312074000265460ustar00rootroot00000000000000- case: pipe_generic_function disable_cache: false main: | from returns.pipeline import pipe from returns.functions import identity from typing import TypeVar _NewValueType = TypeVar('_NewValueType') def test(arg: _NewValueType) -> _NewValueType: x = pipe(identity)(arg) reveal_type(x) # N: Revealed type is "_NewValueType`-1" return x - case: pipe_generic_argument disable_cache: false main: | from returns.pipeline import pipe from returns.functions import identity from typing import TypeVar _NewValueType = TypeVar('_NewValueType') def test(arg: _NewValueType) -> _NewValueType: x = pipe(identity, str)(arg) reveal_type(x) # N: Revealed type is "builtins.str" return arg returns-0.24.0/typesafety/test_pointfree/000077500000000000000000000000001472312074000205265ustar00rootroot00000000000000returns-0.24.0/typesafety/test_pointfree/test_alt.yml000066400000000000000000000062001472312074000230660ustar00rootroot00000000000000- case: alt_and_flow disable_cache: false main: | from returns.result import Result from returns.pointfree import alt from returns.pipeline import flow def test(arg: int) -> float: ... def stringify(arg: float) -> str: ... r: Result[str, int] reveal_type(flow(r, alt(test), alt(stringify))) # N: Revealed type is "returns.result.Result[builtins.str, builtins.str]" - case: alt_result disable_cache: false main: | from returns.pointfree import alt from returns.result import Result def test(arg: float) -> int: ... x: Result[str, float] reveal_type(alt(test)(x)) # N: Revealed type is "returns.result.Result[builtins.str, builtins.int]" - case: alt_ioresult disable_cache: false main: | from returns.pointfree import alt from returns.io import IOResult def test(arg: float) -> int: ... x: IOResult[str, float] reveal_type(alt(test)(x)) # N: Revealed type is "returns.io.IOResult[builtins.str, builtins.int]" - case: alt_requires_context_result disable_cache: false main: | from returns.pointfree import alt from returns.context import RequiresContextResult def test(arg: float) -> int: ... x: RequiresContextResult[str, float, bool] reveal_type(alt(test)(x)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.str, builtins.int, builtins.bool]" - case: alt_requires_context_ioresult disable_cache: false main: | from returns.pointfree import alt from returns.context import RequiresContextIOResult def test(arg: float) -> int: ... x: RequiresContextIOResult[str, float, bool] reveal_type(alt(test)(x)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.str, builtins.int, builtins.bool]" - case: alt_requires_context_future_result disable_cache: false main: | from returns.pointfree import alt from returns.context import RequiresContextFutureResult def test(arg: float) -> int: ... 
x: RequiresContextFutureResult[str, float, bool] reveal_type(alt(test)(x)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.str, builtins.int, builtins.bool]" - case: alt_future_result disable_cache: false main: | from returns.pointfree import alt from returns.future import FutureResult def test(arg: float) -> int: ... x: FutureResult[str, float] reveal_type(alt(test)(x)) # N: Revealed type is "returns.future.FutureResult[builtins.str, builtins.int]" - case: alt_custom_type disable_cache: false main: | from returns.pointfree import alt from returns.primitives.hkt import SupportsKind2 from returns.interfaces.altable import Altable2 from typing import TypeVar T = TypeVar('T') E = TypeVar('E') class MyOwn( SupportsKind2['MyOwn', T, E], Altable2[T, E], ): ... def test(arg: float) -> int: ... x: MyOwn[str, float] reveal_type(alt(test)(x)) # N: Revealed type is "main.MyOwn[builtins.str, builtins.int]" returns-0.24.0/typesafety/test_pointfree/test_apply.yml000066400000000000000000000133111472312074000234340ustar00rootroot00000000000000- case: apply_result_reverse disable_cache: false main: | from returns.pointfree import apply from returns.result import Result from returns.pipeline import flow from typing import Callable test: Result[Callable[[float], int], str] x: Result[float, str] reveal_type(flow(test, x.apply)) # N: Revealed type is "returns.result.Result[builtins.int, builtins.str]" - case: apply_wrong_extra_types disable_cache: false main: | from returns.pointfree import apply from returns.context import RequiresContextFutureResult from typing import Callable test: RequiresContextFutureResult[Callable[[float], int], str, str] x: RequiresContextFutureResult[float, float, float] apply(test)(x) # E: Argument 1 has incompatible type "RequiresContextFutureResult[float, float, float]"; expected "KindN[RequiresContextFutureResult[Any, Any, Any], float, str, str]" [arg-type] - case: apply_wrong_value_type disable_cache: false main: | from returns.pointfree import apply from returns.context import RequiresContextFutureResult from typing import Callable test: RequiresContextFutureResult[Callable[[float], int], str, str] x: RequiresContextFutureResult[str, str, str] apply(test)(x) # E: Argument 1 has incompatible type "RequiresContextFutureResult[str, str, str]"; expected "KindN[RequiresContextFutureResult[Any, Any, Any], float, str, str]" [arg-type] - case: apply_io disable_cache: false main: | from returns.pointfree import apply from returns.io import IO def test(arg: float) -> int: ... x: IO[float] reveal_type(apply(IO(test))(x)) # N: Revealed type is "returns.io.IO[builtins.int]" - case: apply_maybe disable_cache: false main: | from returns.pointfree import apply from returns.maybe import Maybe def test(arg: float) -> int: ... 
x: Maybe[float] reveal_type(apply(Maybe.from_value(test))(x)) # N: Revealed type is "returns.maybe.Maybe[builtins.int]" - case: apply_result disable_cache: false main: | from returns.pointfree import apply from returns.result import Result from typing import Callable test: Result[Callable[[float], int], str] x: Result[float, str] reveal_type(apply(test)(x)) # N: Revealed type is "returns.result.Result[builtins.int, builtins.str]" - case: apply_ioresult disable_cache: false main: | from returns.pointfree import apply from returns.io import IOResult from typing import Callable test: IOResult[Callable[[float], int], str] x: IOResult[float, str] reveal_type(apply(test)(x)) # N: Revealed type is "returns.io.IOResult[builtins.int, builtins.str]" - case: apply_requires_context disable_cache: false main: | from returns.pointfree import apply from returns.context import RequiresContext from typing import Callable test: RequiresContext[Callable[[float], int], bool] x: RequiresContext[float, bool] reveal_type(apply(test)(x)) # N: Revealed type is "returns.context.requires_context.RequiresContext[builtins.int, builtins.bool]" - case: apply_requires_context_result disable_cache: false main: | from returns.pointfree import apply from returns.context import RequiresContextResult from typing import Callable test: RequiresContextResult[Callable[[float], int], str, bool] x: RequiresContextResult[float, str, bool] reveal_type(apply(test)(x)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.int, builtins.str, builtins.bool]" - case: apply_requires_context_ioresult disable_cache: false main: | from returns.pointfree import apply from returns.context import RequiresContextIOResult from typing import Callable test: RequiresContextIOResult[Callable[[float], int], str, bool] x: RequiresContextIOResult[float, str, bool] reveal_type(apply(test)(x)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, builtins.str, builtins.bool]" - case: apply_requires_context_future_result disable_cache: false main: | from returns.pointfree import apply from returns.context import RequiresContextFutureResult from typing import Callable test: RequiresContextFutureResult[Callable[[float], int], str, bool] x: RequiresContextFutureResult[float, str, bool] reveal_type(apply(test)(x)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.str, builtins.bool]" - case: apply_future disable_cache: false main: | from returns.pointfree import apply from returns.future import Future def test(arg: float) -> int: ... x: Future[float] reveal_type(apply(Future.from_value(test))(x)) # N: Revealed type is "returns.future.Future[builtins.int]" - case: apply_future_result disable_cache: false main: | from returns.pointfree import apply from returns.future import FutureResult from typing import Callable test: FutureResult[Callable[[float], int], str] x: FutureResult[float, str] reveal_type(apply(test)(x)) # N: Revealed type is "returns.future.FutureResult[builtins.int, builtins.str]" - case: apply_custom_type disable_cache: false main: | from typing import Callable, TypeVar from returns.pointfree import apply from returns.interfaces.applicative import Applicative1 from returns.primitives.hkt import SupportsKind1 V = TypeVar('V') N = TypeVar('N') class MyClass(SupportsKind1['MyClass', V], Applicative1[V]): ... 
test: MyClass[Callable[[float], int]] x: MyClass[float] reveal_type(apply(test)(x)) # N: Revealed type is "main.MyClass[builtins.int]" returns-0.24.0/typesafety/test_pointfree/test_bimap.yml000066400000000000000000000070701472312074000234040ustar00rootroot00000000000000- case: bimap_and_flow disable_cache: false main: | from returns.result import Result from returns.pointfree import bimap from returns.pipeline import flow def first(arg: float) -> str: ... def second(arg: int) -> str: ... r: Result[float, int] reveal_type(flow(r, bimap(first, second))) # N: Revealed type is "returns.result.Result[builtins.str, builtins.str]" - case: bimap_result disable_cache: false main: | from returns.pointfree import bimap from returns.result import Result def first(arg: float) -> str: ... def second(arg: int) -> str: ... x: Result[float, int] reveal_type(bimap(first, second)(x)) # N: Revealed type is "returns.result.Result[builtins.str, builtins.str]" - case: bimap_ioresult disable_cache: false main: | from returns.pointfree import bimap from returns.io import IOResult def first(arg: float) -> str: ... def second(arg: int) -> str: ... x: IOResult[float, int] reveal_type(bimap(first, second)(x)) # N: Revealed type is "returns.io.IOResult[builtins.str, builtins.str]" - case: bimap_requires_context_result disable_cache: false main: | from returns.pointfree import bimap from returns.context import RequiresContextResult def first(arg: float) -> str: ... def second(arg: int) -> str: ... x: RequiresContextResult[float, int, bool] reveal_type(bimap(first, second)(x)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.str, builtins.str, builtins.bool]" - case: bimap_requires_context_ioresult disable_cache: false main: | from returns.pointfree import bimap from returns.context import RequiresContextIOResult def first(arg: float) -> str: ... def second(arg: int) -> str: ... x: RequiresContextIOResult[float, int, bool] reveal_type(bimap(first, second)(x)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.str, builtins.str, builtins.bool]" - case: bimap_requires_context_future_result disable_cache: false main: | from returns.pointfree import bimap from returns.context import RequiresContextFutureResult def first(arg: float) -> str: ... def second(arg: int) -> str: ... x: RequiresContextFutureResult[float, int, bool] reveal_type(bimap(first, second)(x)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.str, builtins.str, builtins.bool]" - case: bimap_future_result disable_cache: false main: | from returns.pointfree import bimap from returns.future import FutureResult def first(arg: float) -> str: ... def second(arg: int) -> str: ... x: FutureResult[float, int] reveal_type(bimap(first, second)(x)) # N: Revealed type is "returns.future.FutureResult[builtins.str, builtins.str]" - case: bimap_custom_type disable_cache: false main: | from returns.pointfree import bimap from returns.primitives.hkt import SupportsKind2 from returns.interfaces.bimappable import BiMappable2 from typing import TypeVar T = TypeVar('T') E = TypeVar('E') class MyOwn( SupportsKind2['MyOwn', T, E], BiMappable2[T, E], ): ... def first(arg: float) -> str: ... def second(arg: int) -> str: ... 
x: MyOwn[float, int] reveal_type(bimap(first, second)(x)) # N: Revealed type is "main.MyOwn[builtins.str, builtins.str]" returns-0.24.0/typesafety/test_pointfree/test_bind.yml000066400000000000000000000131121472312074000232220ustar00rootroot00000000000000- case: bind_variance disable_cache: false main: | from returns.pointfree import bind from returns.maybe import Maybe class A: ... class B(A): ... class C(B): ... def test(a: A) -> Maybe[C]: ... x: Maybe[B] reveal_type(bind(test)(x)) # N: Revealed type is "returns.maybe.Maybe[main.C]" - case: bind_wrong_instance_type disable_cache: false main: | from returns.pointfree import bind from returns.result import Result def test(arg: float) -> Result[int, str]: ... x: Result[str, str] bind(test)(x) out: | main:8: error: Argument 1 has incompatible type "Result[str, str]"; expected "KindN[Result[Any, Any], float, str, Never]" [arg-type] - case: bind_wrong_error_type disable_cache: false main: | from returns.pointfree import bind from returns.result import Result def test(arg: float) -> Result[int, Exception]: ... x: Result[float, str] bind(test)(x) out: | main:8: error: Argument 1 has incompatible type "Result[float, str]"; expected "KindN[Result[Any, Any], float, Exception, Never]" [arg-type] - case: bind_with_flow disable_cache: false main: | from returns.pointfree import bind from returns.pipeline import flow from returns.result import Result def test(arg: float) -> Result[int, str]: ... x: Result[float, str] reveal_type(flow(x, bind(test))) # N: Revealed type is "returns.result.Result[builtins.int, builtins.str]" - case: bind_io disable_cache: false main: | from returns.pointfree import bind from returns.io import IO def test(arg: float) -> IO[int]: ... x: IO[float] reveal_type(bind(test)(x)) # N: Revealed type is "returns.io.IO[builtins.int]" - case: bind_maybe disable_cache: false main: | from returns.pointfree import bind from returns.maybe import Maybe def test(arg: float) -> Maybe[int]: ... x: Maybe[float] reveal_type(bind(test)(x)) # N: Revealed type is "returns.maybe.Maybe[builtins.int]" - case: bind_result disable_cache: false main: | from returns.pointfree import bind from returns.result import Result def test(arg: float) -> Result[int, str]: ... x: Result[float, str] reveal_type(bind(test)(x)) # N: Revealed type is "returns.result.Result[builtins.int, builtins.str]" - case: bind_ioresult disable_cache: false main: | from returns.pointfree import bind from returns.io import IOResult def test(arg: float) -> IOResult[int, str]: ... x: IOResult[float, str] reveal_type(bind(test)(x)) # N: Revealed type is "returns.io.IOResult[builtins.int, builtins.str]" - case: bind_requires_context disable_cache: false main: | from returns.pointfree import bind from returns.context import RequiresContext def test(arg: float) -> RequiresContext[int, str]: ... x: RequiresContext[float, str] reveal_type(bind(test)(x)) # N: Revealed type is "returns.context.requires_context.RequiresContext[builtins.int, builtins.str]" - case: bind_requires_context_result disable_cache: false main: | from returns.pointfree import bind from returns.context import RequiresContextResult def test(arg: float) -> RequiresContextResult[int, bool, str]: ... 
x: RequiresContextResult[float, bool, str] reveal_type(bind(test)(x)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.int, builtins.bool, builtins.str]" - case: bind_requires_context_ioresult disable_cache: false main: | from returns.pointfree import bind from returns.context import RequiresContextIOResult def test(arg: float) -> RequiresContextIOResult[int, bool, str]: ... x: RequiresContextIOResult[float, bool, str] reveal_type(bind(test)(x)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, builtins.bool, builtins.str]" - case: bind_requires_context_future_result disable_cache: false main: | from returns.pointfree import bind from returns.context import RequiresContextFutureResult def test(arg: float) -> RequiresContextFutureResult[int, bool, str]: ... x: RequiresContextFutureResult[float, bool, str] reveal_type(bind(test)(x)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.bool, builtins.str]" - case: bind_future disable_cache: false main: | from returns.pointfree import bind from returns.future import Future def test(arg: float) -> Future[int]: ... x: Future[float] reveal_type(bind(test)(x)) # N: Revealed type is "returns.future.Future[builtins.int]" - case: bind_future_result disable_cache: false main: | from returns.pointfree import bind from returns.future import FutureResult def test(arg: float) -> FutureResult[int, str]: ... x: FutureResult[float, str] reveal_type(bind(test)(x)) # N: Revealed type is "returns.future.FutureResult[builtins.int, builtins.str]" - case: bind_custom_type disable_cache: false main: | from returns.pointfree import bind from returns.primitives.hkt import SupportsKind1 from returns.interfaces.bindable import Bindable1 from typing import TypeVar T = TypeVar('T') N = TypeVar('N') class MyOwn( SupportsKind1['MyOwn', T], Bindable1[T], ): ... def test(arg: float) -> MyOwn[int]: ... x: MyOwn[float] reveal_type(bind(test)(x)) # N: Revealed type is "main.MyOwn[builtins.int]" returns-0.24.0/typesafety/test_pointfree/test_bind_async.yml000066400000000000000000000077121472312074000244300ustar00rootroot00000000000000- case: bind_async_wrong_value disable_cache: false main: | from returns.pointfree import bind_async from returns.future import Future async def test(arg: str) -> Future[int]: ... x: Future[float] bind_async(test)(x) # E: Argument 1 has incompatible type "Future[float]"; expected "KindN[Future[Any], str, Never, Never]" [arg-type] - case: bind_async_wrong_second_type disable_cache: false main: | from returns.pointfree import bind_async from returns.future import FutureResult async def test(arg: str) -> FutureResult[int, str]: ... x: FutureResult[str, bool] bind_async(test)(x) # E: Argument 1 has incompatible type "FutureResult[str, bool]"; expected "KindN[FutureResult[Any, Any], str, str, Never]" [arg-type] - case: bind_async_wrong_instance_type disable_cache: false main: | from returns.pointfree import bind_async from returns.io import IO async def test(arg: str) -> IO[int]: ... 
x: IO[float] bind_async(test)(x) out: | main:8: error: Value of type variable "_FutureKind" of "bind_async" cannot be "IO[Any]" [type-var] main:8: error: Argument 1 has incompatible type "IO[float]"; expected "KindN[IO[Any], str, Never, Never]" [arg-type] - case: bind_async_wrong_function_type disable_cache: false main: | from returns.pointfree import bind_async from returns.future import Future def test(arg: float) -> Future[int]: ... x: Future[float] bind_async(test)(x) out: | main:8: error: Value of type variable "_FutureKind" of "bind_async" cannot be "IO[Any]" [type-var] main:8: error: Argument 1 has incompatible type "Future[float]"; expected "KindN[IO[Any], float, Never, Never]" [arg-type] main:8: note: Maybe you forgot to use "await"? - case: bind_async_with_flow disable_cache: false main: | from returns.pointfree import bind_async from returns.pipeline import flow from returns.future import Future async def test1(arg: float) -> Future[int]: ... async def test2(arg: int) -> Future[str]: ... x: Future[float] reveal_type(flow(x, bind_async(test1), bind_async(test2))) # N: Revealed type is "returns.future.Future[builtins.str]" - case: bind_async_requires_context_future_result disable_cache: false main: | from returns.pointfree import bind_async from returns.context import RequiresContextFutureResult async def test(arg: float) -> RequiresContextFutureResult[int, bool, str]: ... x: RequiresContextFutureResult[float, bool, str] reveal_type(bind_async(test)(x)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.bool, builtins.str]" - case: bind_async_future disable_cache: false main: | from returns.pointfree import bind_async from returns.future import Future async def test(arg: float) -> Future[int]: ... x: Future[float] reveal_type(bind_async(test)(x)) # N: Revealed type is "returns.future.Future[builtins.int]" - case: bind_async_future_result disable_cache: false main: | from returns.pointfree import bind_async from returns.future import FutureResult async def test(arg: float) -> FutureResult[int, str]: ... x: FutureResult[float, str] reveal_type(bind_async(test)(x)) # N: Revealed type is "returns.future.FutureResult[builtins.int, builtins.str]" - case: bind_async_custom_type disable_cache: false main: | from returns.pointfree import bind_async from returns.primitives.hkt import SupportsKind1 from returns.interfaces.specific.future import FutureLike1 from typing import TypeVar T = TypeVar('T') class MyClass( SupportsKind1['MyClass', T], FutureLike1[T], ): ... async def test(arg: float) -> MyClass[int]: ... x: MyClass[float] reveal_type(bind_async(test)(x)) # N: Revealed type is "main.MyClass[builtins.int]" returns-0.24.0/typesafety/test_pointfree/test_bind_async_context_future_result.yml000066400000000000000000000022101472312074000311500ustar00rootroot00000000000000- case: bind_async_context_future_result_wrong disable_cache: false main: | from returns.pointfree import bind_async_context_future_result from returns.context import RequiresContextFutureResult async def test(arg: float) -> RequiresContextFutureResult[int, str, str]: ... 
x: RequiresContextFutureResult[float, bool, bool] bind_async_context_future_result(test)(x) # E: Argument 1 has incompatible type "RequiresContextFutureResult[float, bool, bool]"; expected "KindN[RequiresContextFutureResult[Any, Any, Any], float, str, str]" [arg-type] - case: bind_async_context_future_result_requires_context_future_result disable_cache: false main: | from returns.pointfree import bind_async_context_future_result from returns.context import RequiresContextFutureResult async def test(arg: float) -> RequiresContextFutureResult[int, str, bool]: ... x: RequiresContextFutureResult[float, str, bool] reveal_type(bind_async_context_future_result(test)(x)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.str, builtins.bool]" returns-0.24.0/typesafety/test_pointfree/test_bind_async_future.yml000066400000000000000000000046071472312074000260220ustar00rootroot00000000000000- case: bind_async_future_with_flow disable_cache: false main: | from returns.pointfree import bind_async_future from returns.pipeline import flow from returns.future import Future, FutureResult async def test1(arg: float) -> Future[int]: ... async def test2(arg: int) -> Future[str]: ... x: FutureResult[float, str] reveal_type(flow(x, bind_async_future(test1), bind_async_future(test2))) # N: Revealed type is "returns.future.FutureResult[builtins.str, builtins.str]" - case: bind_async_future_requires_context_future_result disable_cache: false main: | from returns.pointfree import bind_async_future from returns.context import RequiresContextFutureResult from returns.future import Future async def test(arg: float) -> Future[int]: ... x: RequiresContextFutureResult[float, bool, str] reveal_type(bind_async_future(test)(x)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.bool, builtins.str]" - case: bind_async_future_future disable_cache: false main: | from returns.pointfree import bind_async_future from returns.future import Future async def test(arg: float) -> Future[int]: ... x: Future[float] reveal_type(bind_async_future(test)(x)) # N: Revealed type is "returns.future.Future[builtins.int]" - case: bind_async_future_future_result disable_cache: false main: | from returns.pointfree import bind_async_future from returns.future import Future, FutureResult async def test(arg: float) -> Future[int]: ... x: FutureResult[float, str] reveal_type(bind_async_future(test)(x)) # N: Revealed type is "returns.future.FutureResult[builtins.int, builtins.str]" - case: bind_async_future_custom_type disable_cache: false main: | from returns.pointfree import bind_async_future from returns.primitives.hkt import SupportsKind1 from returns.interfaces.specific.future import FutureLike1 from returns.future import Future from typing import TypeVar T = TypeVar('T') N = TypeVar('N') class MyClass( SupportsKind1['MyClass', T], FutureLike1[T], ): ... async def test(arg: float) -> Future[int]: ... x: MyClass[float] reveal_type(bind_async_future(test)(x)) # N: Revealed type is "main.MyClass[builtins.int]" returns-0.24.0/typesafety/test_pointfree/test_bind_async_future_result.yml000066400000000000000000000051211472312074000274100ustar00rootroot00000000000000- case: bind_async_future_result_wrong_value disable_cache: false main: | from returns.pointfree import bind_async_future_result from returns.future import FutureResult async def test(arg: str) -> FutureResult[int, str]: ... 
x: FutureResult[float, str] bind_async_future_result(test)(x) # E: Argument 1 has incompatible type "FutureResult[float, str]"; expected "KindN[FutureResult[Any, Any], str, str, Never]" [arg-type] - case: bind_async_future_result_wrong_error disable_cache: false main: | from returns.pointfree import bind_async_future_result from returns.future import FutureResult async def test(arg: float) -> FutureResult[int, str]: ... x: FutureResult[float, bool] bind_async_future_result(test)(x) # E: Argument 1 has incompatible type "FutureResult[float, bool]"; expected "KindN[FutureResult[Any, Any], float, str, Never]" [arg-type] - case: bind_async_future_result_requires_context_future_result disable_cache: false main: | from returns.pointfree import bind_async_future_result from returns.context import RequiresContextFutureResult from returns.future import FutureResult async def test(arg: float) -> FutureResult[int, bool]: ... x: RequiresContextFutureResult[float, bool, str] reveal_type(bind_async_future_result(test)(x)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.bool, builtins.str]" - case: bind_async_future_result_future_result disable_cache: false main: | from returns.pointfree import bind_async_future_result from returns.future import FutureResult async def test(arg: float) -> FutureResult[int, str]: ... x: FutureResult[float, str] reveal_type(bind_async_future_result(test)(x)) # N: Revealed type is "returns.future.FutureResult[builtins.int, builtins.str]" - case: bind_async_future_result_custom_type disable_cache: false main: | from returns.pointfree import bind_async_future_result from returns.primitives.hkt import SupportsKind2 from returns.interfaces.specific.future_result import FutureResultLike2 from returns.future import FutureResult from typing import TypeVar T = TypeVar('T') N = TypeVar('N') class MyClass( SupportsKind2['MyClass', T, N], FutureResultLike2[T, N], ): ... async def test(arg: float) -> FutureResult[int, str]: ... x: MyClass[float, str] reveal_type(bind_async_future_result(test)(x)) # N: Revealed type is "main.MyClass[builtins.int, builtins.str]" returns-0.24.0/typesafety/test_pointfree/test_bind_awaitable.yml000066400000000000000000000066731472312074000252510ustar00rootroot00000000000000- case: bind_awaitable_wrong_value disable_cache: false main: | from returns.pointfree import bind_awaitable from returns.future import Future async def test(arg: str) -> int: ... x: Future[float] bind_awaitable(test)(x) # E: Argument 1 has incompatible type "Future[float]"; expected "KindN[Future[Any], str, Never, Never]" [arg-type] - case: bind_awaitable_wrong_instance_type disable_cache: false main: | from returns.pointfree import bind_awaitable from returns.io import IO async def test(arg: str) -> int: ... x: IO[float] bind_awaitable(test)(x) out: | main:8: error: Value of type variable "_FutureKind" of function cannot be "IO[Any]" [type-var] main:8: error: Argument 1 has incompatible type "IO[float]"; expected "KindN[IO[Any], str, Never, Never]" [arg-type] - case: bind_awaitable_wrong_function_type disable_cache: false main: | from returns.pointfree import bind_awaitable from returns.future import Future def test(arg: float) -> int: ... 
x: Future[float] bind_awaitable(test)(x) # E: Argument 1 to "bind_awaitable" has incompatible type "Callable[[float], int]"; expected "Callable[[float], Awaitable[Never]]" [arg-type] - case: bind_awaitable_with_flow disable_cache: false main: | from returns.pointfree import bind_awaitable from returns.pipeline import flow from returns.future import Future async def test1(arg: float) -> int: ... async def test2(arg: int) -> str: ... x: Future[float] reveal_type(flow(x, bind_awaitable(test1), bind_awaitable(test2))) # N: Revealed type is "returns.future.Future[builtins.str]" - case: bind_awaitable_requires_context_future_result disable_cache: false main: | from returns.pointfree import bind_awaitable from returns.context import RequiresContextFutureResult async def test(arg: float) -> int: ... x: RequiresContextFutureResult[float, bool, str] reveal_type(bind_awaitable(test)(x)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.bool, builtins.str]" - case: bind_awaitable_future disable_cache: false main: | from returns.pointfree import bind_awaitable from returns.future import Future async def test(arg: float) -> int: ... x: Future[float] reveal_type(bind_awaitable(test)(x)) # N: Revealed type is "returns.future.Future[builtins.int]" - case: bind_awaitable_future_result disable_cache: false main: | from returns.pointfree import bind_awaitable from returns.future import FutureResult async def test(arg: float) -> int: ... x: FutureResult[float, str] reveal_type(bind_awaitable(test)(x)) # N: Revealed type is "returns.future.FutureResult[builtins.int, builtins.str]" - case: bind_awaitable_custom_type disable_cache: false main: | from returns.pointfree import bind_awaitable from returns.primitives.hkt import SupportsKind1 from returns.interfaces.specific.future import FutureLike1 from returns.future import Future from typing import TypeVar T = TypeVar('T') N = TypeVar('N') class MyClass( SupportsKind1['MyClass', T], FutureLike1[T], ): ... async def test(arg: float) -> int: ... x: MyClass[float] reveal_type(bind_awaitable(test)(x)) # N: Revealed type is "main.MyClass[builtins.int]" returns-0.24.0/typesafety/test_pointfree/test_bind_context2.yml000066400000000000000000000054371472312074000250630ustar00rootroot00000000000000- case: bind_context2_wrong_env disable_cache: false main: | from returns.pointfree import bind_context2 from returns.context import RequiresContext def test(arg: float) -> RequiresContext[str, int]: ... x: RequiresContext[float, str] bind_context2(test)(x) out: | main:8: error: Argument 1 has incompatible type "RequiresContext[float, str]"; expected "KindN[RequiresContext[Any, Any], float, int, Any]" [arg-type] - case: bind_context2_wrong_type disable_cache: false main: | from returns.pointfree import bind_context2 from returns.context import RequiresContext, RequiresContextResult def test(arg: float) -> RequiresContext[str, int]: ... 
x: RequiresContextResult[float, str, int] bind_context2(test)(x) out: | main:8: error: Value of type variable "_Reader2Kind" of function cannot be "RequiresContextResult[Any, Any, Any]" [type-var] main:8: error: Argument 1 has incompatible type "RequiresContextResult[float, str, int]"; expected "KindN[RequiresContextResult[Any, Any, Any], float, int, Any]" [arg-type] - case: bind_context2_and_flow disable_cache: false main: | from returns.context import RequiresContext from returns.pointfree import bind_context2 from returns.pipeline import flow def test(arg: int) -> RequiresContext[float, str]: ... def second(arg: float) -> RequiresContext[bool, str]: ... r: RequiresContext[int, str] reveal_type(flow(r, bind_context2(test), bind_context2(second))) # N: Revealed type is "returns.context.requires_context.RequiresContext[builtins.bool, builtins.str]" - case: bind_context2_requires_context disable_cache: false main: | from returns.pointfree import bind_context2 from returns.context import RequiresContext, RequiresContextResult def test(arg: float) -> RequiresContext[int, str]: ... x: RequiresContext[float, str] reveal_type(bind_context2(test)(x)) # N: Revealed type is "returns.context.requires_context.RequiresContext[builtins.int, builtins.str]" - case: bind_context2_custom_type disable_cache: false main: | from typing import TypeVar from returns.interfaces.specific.reader import ReaderBased2 from returns.primitives.hkt import SupportsKind2 from returns.context import Reader from returns.pointfree import bind_context2 _EnvType = TypeVar('_EnvType') _ReturnType = TypeVar('_ReturnType') _NewReturnType = TypeVar('_NewReturnType') class MyClass( SupportsKind2['MyClass', _ReturnType, _EnvType], ReaderBased2[_ReturnType, _EnvType], ): ... def test(a: int) -> Reader[float, str]: ... x: MyClass[int, str] reveal_type(bind_context2(test)(x)) # N: Revealed type is "main.MyClass[builtins.float, builtins.str]" returns-0.24.0/typesafety/test_pointfree/test_bind_context3.yml000066400000000000000000000103731472312074000250570ustar00rootroot00000000000000- case: bind_context_wrong_env disable_cache: false main: | from returns.pointfree import bind_context3 from returns.context import RequiresContext, RequiresContextResult def test(arg: float) -> RequiresContext[str, int]: ... x: RequiresContextResult[float, bool, str] bind_context3(test)(x) out: | main:8: error: Argument 1 has incompatible type "RequiresContextResult[float, bool, str]"; expected "KindN[RequiresContextResult[Any, Any, Any], float, bool, int]" [arg-type] - case: bind_context_wrong_type disable_cache: false main: | from returns.pointfree import bind_context3 from returns.context import RequiresContext def test(arg: float) -> RequiresContext[str, int]: ... x: RequiresContext[float, int] bind_context3(test)(x) out: | main:8: error: Value of type variable "_Reader3Kind" of function cannot be "RequiresContext[Any, Any]" [type-var] - case: bind_context_alias disable_cache: false main: | from returns.pointfree import bind_context3 from returns.context import RequiresContext, RequiresContextResult def test(arg: float) -> RequiresContext[int, str]: ... 
x: RequiresContextResult[float, Exception, str] reveal_type(bind_context3(test)(x)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.int, builtins.Exception, builtins.str]" - case: bind_context_and_flow disable_cache: false main: | from returns.context import RequiresContext, RequiresContextResult from returns.pointfree import bind_context from returns.pipeline import flow def test(arg: int) -> RequiresContext[float, str]: ... def second(arg: float) -> RequiresContext[bool, str]: ... r: RequiresContextResult[int, Exception, str] reveal_type(flow(r, bind_context(test), bind_context(second))) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.bool, builtins.Exception, builtins.str]" - case: bind_context_requires_context_result disable_cache: false main: | from returns.pointfree import bind_context from returns.context import RequiresContext, RequiresContextResult def test(arg: float) -> RequiresContext[int, str]: ... x: RequiresContextResult[float, Exception, str] reveal_type(bind_context(test)(x)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.int, builtins.Exception, builtins.str]" - case: bind_context_requires_context_ioresult disable_cache: false main: | from returns.pointfree import bind_context from returns.context import RequiresContext, RequiresContextIOResult def test(arg: float) -> RequiresContext[int, str]: ... x: RequiresContextIOResult[float, Exception, str] reveal_type(bind_context(test)(x)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, builtins.Exception, builtins.str]" - case: bind_context_requires_context_future_result disable_cache: false main: | from returns.pointfree import bind_context from returns.context import RequiresContext, RequiresContextFutureResult def test(arg: float) -> RequiresContext[int, str]: ... x: RequiresContextFutureResult[float, Exception, str] reveal_type(bind_context(test)(x)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.Exception, builtins.str]" - case: bind_context_custom_type disable_cache: false main: | from typing import TypeVar from returns.interfaces.specific.reader import ReaderLike3 from returns.primitives.hkt import SupportsKind3 from returns.context import Reader from returns.pointfree import bind_context _EnvType = TypeVar('_EnvType') _ValueType = TypeVar('_ValueType') _ErrorType = TypeVar('_ErrorType') class MyClass( SupportsKind3['MyClass', _ValueType, _ErrorType, _EnvType], ReaderLike3[_ValueType, _ErrorType, _EnvType], ): ... def test(a: int) -> Reader[float, str]: ... x: MyClass[int, Exception, str] reveal_type(bind_context(test)(x)) # N: Revealed type is "main.MyClass[builtins.float, builtins.Exception, builtins.str]" returns-0.24.0/typesafety/test_pointfree/test_bind_context_future_result.yml000066400000000000000000000021301472312074000277540ustar00rootroot00000000000000- case: bind_context_future_result_wrong disable_cache: false main: | from returns.pointfree import bind_context_future_result from returns.context import RequiresContextFutureResult def test(arg: float) -> RequiresContextFutureResult[int, str, str]: ... 
x: RequiresContextFutureResult[float, bool, bool] bind_context_future_result(test)(x) # E: Argument 1 has incompatible type "RequiresContextFutureResult[float, bool, bool]"; expected "KindN[RequiresContextFutureResult[Any, Any, Any], float, str, str]" [arg-type] - case: bind_context_future_result_requires_context_future_result disable_cache: false main: | from returns.pointfree import bind_context_future_result from returns.context import RequiresContextFutureResult def test(arg: float) -> RequiresContextFutureResult[int, str, bool]: ... x: RequiresContextFutureResult[float, str, bool] reveal_type(bind_context_future_result(test)(x)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.str, builtins.bool]" returns-0.24.0/typesafety/test_pointfree/test_bind_context_ioresult.yml000066400000000000000000000031001472312074000267100ustar00rootroot00000000000000- case: bind_context_ioresult_wrong disable_cache: false main: | from returns.pointfree import bind_context_ioresult from returns.context import RequiresContextIOResult def test(arg: float) -> RequiresContextIOResult[int, str, str]: ... x: RequiresContextIOResult[float, bool, bool] bind_context_ioresult(test)(x) # E: Argument 1 has incompatible type "RequiresContextIOResult[float, bool, bool]"; expected "KindN[RequiresContextIOResult[Any, Any, Any], float, str, str]" [arg-type] - case: bind_context_ioresult_requires_context_ioresult disable_cache: false main: | from returns.pointfree import bind_context_ioresult from returns.context import RequiresContextIOResult def test(arg: float) -> RequiresContextIOResult[int, str, bool]: ... x: RequiresContextIOResult[float, str, bool] reveal_type(bind_context_ioresult(test)(x)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, builtins.str, builtins.bool]" - case: bind_context_ioresult_requires_context_future_result disable_cache: false main: | from returns.pointfree import bind_context_ioresult from returns.context import RequiresContextIOResult, RequiresContextFutureResult def test(arg: float) -> RequiresContextIOResult[int, str, bool]: ... x: RequiresContextFutureResult[float, str, bool] reveal_type(bind_context_ioresult(test)(x)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.str, builtins.bool]" returns-0.24.0/typesafety/test_pointfree/test_bind_context_result.yml000066400000000000000000000040461472312074000263720ustar00rootroot00000000000000- case: bind_context_result_wrong disable_cache: false main: | from returns.pointfree import bind_context_result from returns.context import RequiresContextResult def test(arg: float) -> RequiresContextResult[int, str, str]: ... x: RequiresContextResult[float, str, bool] bind_context_result(test)(x) # E: Argument 1 has incompatible type "RequiresContextResult[float, str, bool]"; expected "KindN[RequiresContextResult[Any, Any, Any], float, str, str]" [arg-type] - case: bind_context_result_requires_context_result disable_cache: false main: | from returns.pointfree import bind_context_result from returns.context import RequiresContextResult def test(arg: float) -> RequiresContextResult[int, str, bool]: ... 
x: RequiresContextResult[float, str, bool] reveal_type(bind_context_result(test)(x)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.int, builtins.str, builtins.bool]" - case: bind_context_result_requires_context_ioresult disable_cache: false main: | from returns.pointfree import bind_context_result from returns.context import RequiresContextResult, RequiresContextIOResult def test(arg: float) -> RequiresContextResult[int, str, bool]: ... x: RequiresContextIOResult[float, str, bool] reveal_type(bind_context_result(test)(x)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, builtins.str, builtins.bool]" - case: bind_context_result_requires_context_future_result disable_cache: false main: | from returns.pointfree import bind_context_result from returns.context import RequiresContextResult, RequiresContextFutureResult def test(arg: float) -> RequiresContextResult[int, str, bool]: ... x: RequiresContextFutureResult[float, str, bool] reveal_type(bind_context_result(test)(x)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.str, builtins.bool]" returns-0.24.0/typesafety/test_pointfree/test_bind_future.yml000066400000000000000000000052301472312074000246160ustar00rootroot00000000000000- case: bind_async_future_with_flow disable_cache: false main: | from returns.pointfree import bind_future from returns.pipeline import flow from returns.future import Future, FutureResult def test1(arg: float) -> Future[int]: ... def test2(arg: int) -> Future[str]: ... x: FutureResult[float, str] reveal_type(flow(x, bind_future(test1), bind_future(test2))) # N: Revealed type is "returns.future.FutureResult[builtins.str, builtins.str]" - case: bind_future_wrong_function disable_cache: false main: | from returns.pointfree import bind_future from returns.future import Future async def test(arg: str) -> Future[int]: ... x: Future[str] bind_future(test)(x) # E: Argument 1 to "bind_future" has incompatible type "Callable[[str], Coroutine[Any, Any, Future[int]]]"; expected "Callable[[str], Future[Never]]" [arg-type] - case: bind_future_requires_context_future_result disable_cache: false main: | from returns.pointfree import bind_future from returns.context import RequiresContextFutureResult from returns.future import Future def test(arg: float) -> Future[int]: ... x: RequiresContextFutureResult[float, bool, str] reveal_type(bind_future(test)(x)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.bool, builtins.str]" - case: bind_future_future disable_cache: false main: | from returns.pointfree import bind_future from returns.future import Future def test(arg: float) -> Future[int]: ... x: Future[float] reveal_type(bind_future(test)(x)) # N: Revealed type is "returns.future.Future[builtins.int]" - case: bind_future_future_result disable_cache: false main: | from returns.pointfree import bind_future from returns.future import FutureResult, Future def test(arg: float) -> Future[int]: ... 
x: FutureResult[float, str] reveal_type(bind_future(test)(x)) # N: Revealed type is "returns.future.FutureResult[builtins.int, builtins.str]" - case: bind_future_custom_type disable_cache: false main: | from returns.pointfree import bind_future from returns.primitives.hkt import SupportsKind1 from returns.interfaces.specific.future import FutureLike1 from returns.future import Future from typing import TypeVar T = TypeVar('T') class MyClass( SupportsKind1['MyClass', T], FutureLike1[T], ): ... def test(arg: float) -> Future[int]: ... x: MyClass[float] reveal_type(bind_future(test)(x)) # N: Revealed type is "main.MyClass[builtins.int]" returns-0.24.0/typesafety/test_pointfree/test_bind_future_result.yml000066400000000000000000000047311472312074000262210ustar00rootroot00000000000000- case: bind_future_result_wrong_value disable_cache: false main: | from returns.pointfree import bind_future_result from returns.future import FutureResult def test(arg: str) -> FutureResult[int, str]: ... x: FutureResult[float, str] bind_future_result(test)(x) # E: Argument 1 has incompatible type "FutureResult[float, str]"; expected "KindN[FutureResult[Any, Any], str, str, Never]" [arg-type] - case: bind_future_result_wrong_error disable_cache: false main: | from returns.pointfree import bind_future_result from returns.future import FutureResult def test(arg: float) -> FutureResult[int, str]: ... x: FutureResult[float, bool] bind_future_result(test)(x) # E: Argument 1 has incompatible type "FutureResult[float, bool]"; expected "KindN[FutureResult[Any, Any], float, str, Never]" [arg-type] - case: bind_future_result_requires_context_future_result disable_cache: false main: | from returns.pointfree import bind_future_result from returns.context import RequiresContextFutureResult from returns.future import FutureResult def test(arg: float) -> FutureResult[int, bool]: ... x: RequiresContextFutureResult[float, bool, str] reveal_type(bind_future_result(test)(x)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.bool, builtins.str]" - case: bind_future_result_future_result disable_cache: false main: | from returns.pointfree import bind_future_result from returns.future import FutureResult def test(arg: float) -> FutureResult[int, str]: ... x: FutureResult[float, str] reveal_type(bind_future_result(test)(x)) # N: Revealed type is "returns.future.FutureResult[builtins.int, builtins.str]" - case: bind_future_result_custom_type disable_cache: false main: | from returns.pointfree import bind_future_result from returns.primitives.hkt import SupportsKind2 from returns.interfaces.specific.future_result import FutureResultLike2 from returns.future import FutureResult from typing import TypeVar T = TypeVar('T') N = TypeVar('N') class MyClass( SupportsKind2['MyClass', T, N], FutureResultLike2[T, N], ): ... def test(arg: float) -> FutureResult[int, str]: ... x: MyClass[float, str] reveal_type(bind_future_result(test)(x)) # N: Revealed type is "main.MyClass[builtins.int, builtins.str]" returns-0.24.0/typesafety/test_pointfree/test_bind_io.yml000066400000000000000000000104671472312074000237230ustar00rootroot00000000000000- case: bind_io_variance disable_cache: false main: | from returns.pointfree import bind_io from returns.io import IO class A: ... class B(A): ... class C(B): ... def test(a: A) -> IO[C]: ... 
x: IO[B] reveal_type(bind_io(test)(x)) # N: Revealed type is "returns.io.IO[main.C]" - case: bind_io_with_flow disable_cache: false main: | from returns.pointfree import bind_io from returns.pipeline import flow from returns.io import IO def test(arg: float) -> IO[int]: ... x: IO[float] reveal_type(flow(x, bind_io(test))) # N: Revealed type is "returns.io.IO[builtins.int]" - case: bind_io_wrong_instance_type disable_cache: false main: | from returns.pointfree import bind_io from returns.io import IO def test(arg: float) -> IO[int]: ... x: IO[str] bind_io(test)(x) out: | main:8: error: Argument 1 has incompatible type "IO[str]"; expected "KindN[IO[Any], float, Never, Never]" [arg-type] - case: bind_io_wrong_unsupported_type disable_cache: false main: | from returns.pointfree import bind_io from returns.maybe import Maybe from returns.io import IO def test(arg: float) -> IO[int]: ... x: Maybe[str] bind_io(test)(x) out: | main:9: error: Value of type variable "_IOLikeKind" of function cannot be "Maybe[Any]" [type-var] main:9: error: Argument 1 has incompatible type "Maybe[str]"; expected "KindN[Maybe[Any], float, Never, Never]" [arg-type] - case: bind_io_io disable_cache: false main: | from returns.pointfree import bind_io from returns.io import IO def test(arg: float) -> IO[int]: ... x: IO[float] reveal_type(bind_io(test)(x)) # N: Revealed type is "returns.io.IO[builtins.int]" - case: bind_io_ioresult disable_cache: false main: | from returns.pointfree import bind_io from returns.io import IO, IOResult def test(arg: float) -> IO[int]: ... x: IOResult[float, str] reveal_type(bind_io(test)(x)) # N: Revealed type is "returns.io.IOResult[builtins.int, builtins.str]" - case: bind_io_requires_context_ioresult disable_cache: false main: | from returns.pointfree import bind_io from returns.io import IO from returns.context import RequiresContextIOResult def test(arg: float) -> IO[int]: ... x: RequiresContextIOResult[float, bool, str] reveal_type(bind_io(test)(x)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, builtins.bool, builtins.str]" - case: bind_io_requires_context_future_result disable_cache: false main: | from returns.pointfree import bind_io from returns.io import IO from returns.context import RequiresContextFutureResult def test(arg: float) -> IO[int]: ... x: RequiresContextFutureResult[float, bool, str] reveal_type(bind_io(test)(x)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.bool, builtins.str]" - case: bind_io_future disable_cache: false main: | from returns.pointfree import bind_io from returns.io import IO from returns.future import Future def test(arg: float) -> IO[int]: ... x: Future[float] reveal_type(bind_io(test)(x)) # N: Revealed type is "returns.future.Future[builtins.int]" - case: bind_io_future_result disable_cache: false main: | from returns.pointfree import bind_io from returns.io import IO from returns.future import FutureResult def test(arg: float) -> IO[int]: ... 
x: FutureResult[float, str] reveal_type(bind_io(test)(x)) # N: Revealed type is "returns.future.FutureResult[builtins.int, builtins.str]" - case: bind_io_custom_type disable_cache: false main: | from typing import TypeVar from returns.interfaces.specific.io import IOLike1 from returns.primitives.hkt import SupportsKind1 from returns.io import IO from returns.pointfree import bind_io _ValueType = TypeVar('_ValueType') class MyClass( SupportsKind1['MyClass', _ValueType], IOLike1[_ValueType], ): ... def test(arg: float) -> IO[int]: ... x: MyClass[float] reveal_type(bind_io(test)(x)) # N: Revealed type is "main.MyClass[builtins.int]" returns-0.24.0/typesafety/test_pointfree/test_bind_ioresult.yml000066400000000000000000000110461472312074000251540ustar00rootroot00000000000000- case: bind_ioresult_and_flow disable_cache: false main: | from returns.future import FutureResult from returns.io import IOResult from returns.functions import identity from returns.pointfree import bind_ioresult from returns.pipeline import flow def test(arg: int) -> IOResult[float, str]: ... def second(arg: float) -> IOResult[bool, str]: ... r: FutureResult[int, str] reveal_type(flow(r, bind_ioresult(test), bind_ioresult(second))) # N: Revealed type is "returns.future.FutureResult[builtins.bool, builtins.str]" - case: bind_ioresult_wrong_first_type disable_cache: false main: | from returns.pointfree import bind_ioresult from returns.context import RequiresContextFutureResult from returns.io import IOResult def test(arg: float) -> IOResult[int, str]: ... x: RequiresContextFutureResult[str, str, bool] bind_ioresult(test)(x) # E: Argument 1 has incompatible type "RequiresContextFutureResult[str, str, bool]"; expected "KindN[RequiresContextFutureResult[Any, Any, Any], float, str, bool]" [arg-type] - case: bind_ioresult_wrong_second_type disable_cache: false main: | from returns.pointfree import bind_ioresult from returns.context import RequiresContextFutureResult from returns.io import IOResult def test(arg: float) -> IOResult[int, str]: ... x: RequiresContextFutureResult[float, int, bool] bind_ioresult(test)(x) # E: Argument 1 has incompatible type "RequiresContextFutureResult[float, int, bool]"; expected "KindN[RequiresContextFutureResult[Any, Any, Any], float, str, bool]" [arg-type] - case: bind_ioresult_wrong_type disable_cache: false main: | from returns.pointfree import bind_ioresult from returns.context import RequiresContextResult from returns.io import IOResult def test(arg: float) -> IOResult[int, str]: ... x: RequiresContextResult[float, str, bool] bind_ioresult(test)(x) # E: Value of type variable "_IOResultLikeKind" of function cannot be "RequiresContextResult[Any, Any, Any]" [type-var] - case: bind_ioresult_ioresult disable_cache: false main: | from returns.pointfree import bind_ioresult from returns.io import IOResult def test(arg: float) -> IOResult[int, str]: ... x: IOResult[float, str] reveal_type(bind_ioresult(test)(x)) # N: Revealed type is "returns.io.IOResult[builtins.int, builtins.str]" - case: bind_ioresult_requires_context_ioresult disable_cache: false main: | from returns.pointfree import bind_ioresult from returns.context import RequiresContextIOResult from returns.io import IOResult def test(arg: float) -> IOResult[int, str]: ... 
x: RequiresContextIOResult[float, str, bool] reveal_type(bind_ioresult(test)(x)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, builtins.str, builtins.bool]" - case: bind_ioresult_requires_context_future_result disable_cache: false main: | from returns.pointfree import bind_ioresult from returns.context import RequiresContextFutureResult from returns.io import IOResult def test(arg: float) -> IOResult[int, str]: ... x: RequiresContextFutureResult[float, str, bool] reveal_type(bind_ioresult(test)(x)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.str, builtins.bool]" - case: bind_ioresult_future_result disable_cache: false main: | from returns.pointfree import bind_ioresult from returns.future import FutureResult from returns.io import IOResult def test(arg: float) -> IOResult[int, str]: ... x: FutureResult[float, str] reveal_type(bind_ioresult(test)(x)) # N: Revealed type is "returns.future.FutureResult[builtins.int, builtins.str]" - case: bind_ioresult_custom_type disable_cache: false main: | from typing import TypeVar from returns.interfaces.specific.ioresult import IOResultBased2 from returns.primitives.hkt import SupportsKind2 from returns.io import IOResult from returns.pointfree import bind_ioresult _ValueType = TypeVar('_ValueType') _ErrorType = TypeVar('_ErrorType') class MyClass( SupportsKind2['MyClass', _ValueType, _ErrorType], IOResultBased2[_ValueType, _ErrorType], ): ... def test(a: int) -> IOResult[float, str]: ... x: MyClass[int, str] reveal_type(bind_ioresult(test)(x)) # N: Revealed type is "main.MyClass[builtins.float, builtins.str]" returns-0.24.0/typesafety/test_pointfree/test_bind_optional.yml000066400000000000000000000014211472312074000251270ustar00rootroot00000000000000- case: bind_optional_wrong_instance_type disable_cache: false main: | from returns.pointfree import bind_optional from returns.maybe import Maybe from typing import Optional def test(arg: float) -> Optional[int]: ... x: Maybe[str] bind_optional(test)(x) out: | main:9: error: Argument 1 has incompatible type "Maybe[str]"; expected "KindN[Maybe[Any], float, Never, Never]" [arg-type] - case: bind_optional_maybe disable_cache: false main: | from returns.pointfree import bind_optional from returns.maybe import Maybe from typing import Optional def test(arg: float) -> Optional[str]: ... x: Maybe[float] reveal_type(bind_optional(test)(x)) # N: Revealed type is "returns.maybe.Maybe[builtins.str]" returns-0.24.0/typesafety/test_pointfree/test_bind_result.yml000066400000000000000000000115611472312074000246260ustar00rootroot00000000000000- case: bind_result_and_flow disable_cache: false main: | from returns.result import Result from returns.io import IOResult from returns.functions import identity from returns.pointfree import bind_result from returns.pipeline import flow def test(arg: int) -> Result[float, str]: ... def second(arg: float) -> Result[bool, str]: ... r: IOResult[int, str] reveal_type(flow(r, bind_result(test), bind_result(second))) # N: Revealed type is "returns.io.IOResult[builtins.bool, builtins.str]" - case: bind_result_wrong_first_type disable_cache: false main: | from returns.pointfree import bind_result from returns.context import RequiresContextFutureResult from returns.result import Result def test(arg: float) -> Result[int, str]: ... 
x: RequiresContextFutureResult[str, str, bool] bind_result(test)(x) # E: Argument 1 has incompatible type "RequiresContextFutureResult[str, str, bool]"; expected "KindN[RequiresContextFutureResult[Any, Any, Any], float, str, bool]" [arg-type] - case: bind_result_wrong_second_type disable_cache: false main: | from returns.pointfree import bind_result from returns.context import RequiresContextFutureResult from returns.result import Result def test(arg: float) -> Result[int, str]: ... x: RequiresContextFutureResult[float, int, bool] bind_result(test)(x) # E: Argument 1 has incompatible type "RequiresContextFutureResult[float, int, bool]"; expected "KindN[RequiresContextFutureResult[Any, Any, Any], float, str, bool]" [arg-type] - case: bind_result_result disable_cache: false main: | from returns.pointfree import bind_result from returns.result import Result def test(arg: float) -> Result[int, str]: ... x: Result[float, str] reveal_type(bind_result(test)(x)) # N: Revealed type is "returns.result.Result[builtins.int, builtins.str]" - case: bind_result_ioresult disable_cache: false main: | from returns.pointfree import bind_result from returns.io import IOResult from returns.result import Result def test(arg: float) -> Result[int, str]: ... x: IOResult[float, str] reveal_type(bind_result(test)(x)) # N: Revealed type is "returns.io.IOResult[builtins.int, builtins.str]" - case: bind_result_requires_context_result disable_cache: false main: | from returns.pointfree import bind_result from returns.context import RequiresContextResult from returns.result import Result def test(arg: float) -> Result[int, str]: ... x: RequiresContextResult[float, str, bool] reveal_type(bind_result(test)(x)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.int, builtins.str, builtins.bool]" - case: bind_result_requires_context_ioresult disable_cache: false main: | from returns.pointfree import bind_result from returns.context import RequiresContextIOResult from returns.result import Result def test(arg: float) -> Result[int, str]: ... x: RequiresContextIOResult[float, str, bool] reveal_type(bind_result(test)(x)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, builtins.str, builtins.bool]" - case: bind_result_requires_context_future_result disable_cache: false main: | from returns.pointfree import bind_result from returns.context import RequiresContextFutureResult from returns.result import Result def test(arg: float) -> Result[int, str]: ... x: RequiresContextFutureResult[float, str, bool] reveal_type(bind_result(test)(x)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.str, builtins.bool]" - case: bind_result_future_result disable_cache: false main: | from returns.pointfree import bind_result from returns.future import FutureResult from returns.result import Result def test(arg: float) -> Result[int, str]: ... 
x: FutureResult[float, str] reveal_type(bind_result(test)(x)) # N: Revealed type is "returns.future.FutureResult[builtins.int, builtins.str]" - case: bind_result_custom_type disable_cache: false main: | from typing import TypeVar from returns.interfaces.specific.result import ResultBased2 from returns.primitives.hkt import SupportsKind2 from returns.result import Result from returns.pointfree import bind_result _ValueType = TypeVar('_ValueType') _ErrorType = TypeVar('_ErrorType') class MyClass( SupportsKind2['MyClass', _ValueType, _ErrorType], ResultBased2[_ValueType, _ErrorType], ): ... def test(a: int) -> Result[float, str]: ... x: MyClass[int, str] reveal_type(bind_result(test)(x)) # N: Revealed type is "main.MyClass[builtins.float, builtins.str]" returns-0.24.0/typesafety/test_pointfree/test_compose_result.yml000066400000000000000000000052311472312074000253540ustar00rootroot00000000000000- case: compose_result_ioresult disable_cache: false main: | from returns.pointfree import compose_result from returns.io import IOResult from returns.result import Result def test(arg: Result[str, int]) -> IOResult[float, int]: ... x: IOResult[str, int] reveal_type(compose_result(test)(x)) # N: Revealed type is "returns.io.IOResult[builtins.float, builtins.int]" - case: compose_result_requires_context_ioresult disable_cache: false main: | from returns.pointfree import compose_result from returns.context import RequiresContextIOResult from returns.result import Result def test(arg: Result[str, bool]) -> RequiresContextIOResult[int, bool, float]: ... x: RequiresContextIOResult[str, bool, float] reveal_type(compose_result(test)(x)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, builtins.bool, builtins.float]" - case: compose_result_future disable_cache: false main: | from returns.pointfree import compose_result from returns.future import FutureResult from returns.result import Result def test(arg: Result[str, float]) -> FutureResult[str, float]: ... x: FutureResult[str, float] reveal_type(compose_result(test)(x)) # N: Revealed type is "returns.future.FutureResult[builtins.str, builtins.float]" - case: compose_result_requires_context_future_result disable_cache: false main: | from returns.pointfree import compose_result from returns.context import NoDeps, RequiresContextFutureResult from returns.result import Result def test(arg: Result[str, float]) -> RequiresContextFutureResult[str, float, NoDeps]: ... x: RequiresContextFutureResult[str, float, NoDeps] reveal_type(compose_result(test)(x)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.str, builtins.float, Any]" - case: compose_result_custom_type disable_cache: false main: | from typing import TypeVar from returns.pointfree import compose_result from returns.interfaces.specific.ioresult import IOResultLike2 from returns.primitives.hkt import SupportsKind2 from returns.result import Result _ValueType = TypeVar('_ValueType') _ErrorType = TypeVar('_ErrorType') class MyClass( SupportsKind2['MyClass', _ValueType, _ErrorType], IOResultLike2[_ValueType, _ErrorType], ): ... def test(arg: Result[str, float]) -> MyClass[bool, float]: ... 
x: MyClass[str, float] reveal_type(compose_result(test)(x)) # N: Revealed type is "main.MyClass[builtins.bool, builtins.float]" returns-0.24.0/typesafety/test_pointfree/test_cond.yml000066400000000000000000000045711472312074000232420ustar00rootroot00000000000000- case: cond_result disable_cache: false main: | from returns.pointfree import cond from returns.result import Result reveal_type(cond(Result, 42, '42')(True)) # N: Revealed type is "returns.result.Result[builtins.int, builtins.str]" - case: cond_ioresult disable_cache: false main: | from returns.io import IOResult from returns.pointfree import cond reveal_type(cond(IOResult, 'success', 'failure')(False)) # N: Revealed type is "returns.io.IOResult[builtins.str, builtins.str]" - case: cond_future_result disable_cache: false main: | from returns.future import FutureResult from returns.pointfree import cond reveal_type(cond(FutureResult, True, False)(False)) # N: Revealed type is "returns.future.FutureResult[builtins.bool, builtins.bool]" - case: cond_reader_ioresult disable_cache: false main: | from returns.pointfree import cond from returns.context import ReaderIOResult reveal_type(cond(ReaderIOResult, 1.0, False)(True)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.float, builtins.bool, Any]" - case: cond_reader_future_result disable_cache: false main: | from returns.pointfree import cond from returns.context import ReaderFutureResult reveal_type(cond(ReaderFutureResult, 1, 1.0)(True)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.float, Any]" - case: cond_maybe disable_cache: false main: | from returns.pointfree import cond from returns.maybe import Maybe reveal_type(cond(Maybe, True)(False)) # N: Revealed type is "returns.maybe.Maybe[builtins.bool]" - case: cond_custom_type disable_cache: false main: | from typing import TypeVar from returns.interfaces.specific.result import ResultLike2 from returns.pointfree import cond from returns.primitives.hkt import SupportsKind2 ValueType = TypeVar('ValueType') ErrorType = TypeVar('ErrorType') class MyOwn( SupportsKind2['MyOwn', ValueType, ErrorType], ResultLike2[ValueType, ErrorType] ): ... reveal_type(cond(MyOwn, 'test', 1.0)(True)) out: | main:16: note: Revealed type is "main.MyOwn[builtins.str, builtins.float]" main:16: error: Only concrete class can be given where "Type[MyOwn[Any, Any]]" is expected [type-abstract] returns-0.24.0/typesafety/test_pointfree/test_map.yml000066400000000000000000000135521472312074000230730ustar00rootroot00000000000000- case: map_variance disable_cache: false main: | from returns.pointfree import map_ from returns.maybe import Maybe class A: ... class B(A): ... class C(B): ... x: Maybe[B] def test(a: A) -> C: ... 
reveal_type(map_(test)(x)) # N: Revealed type is "returns.maybe.Maybe[main.C]" - case: map_callback_protocol disable_cache: false main: | from typing import Callable, TypeVar from returns.pointfree import map_ from returns.primitives.hkt import KindN, kinded from returns.interfaces.mappable import MappableN from returns.maybe import Maybe _T1 = TypeVar('_T1') _T2 = TypeVar('_T2') _T3 = TypeVar('_T3') _U1 = TypeVar('_U1') _Inst = TypeVar('_Inst', bound=MappableN) @kinded def ensure_callback( f: Callable[[KindN[_Inst, _T1, _T2, _T3]], KindN[_Inst, _U1, _T2, _T3]], c: KindN[_Inst, _T1, _T2, _T3], ) -> KindN[_Inst, _U1, _T2, _T3]: return f(c) def test(arg: int) -> float: return arg + 1.5 reveal_type(ensure_callback(map_(test), Maybe.from_value(1))) # N: Revealed type is "returns.maybe.Maybe[builtins.float]" - case: map_and_flow disable_cache: false main: | from returns.result import Result from returns.functions import identity from returns.pointfree import map_ from returns.pipeline import flow def test(arg: int) -> float: ... def stringify(arg: float) -> str: ... r: Result[int, str] reveal_type(flow(r, map_(test), map_(stringify), identity)) # N: Revealed type is "returns.result.Result[builtins.str, builtins.str]" - case: map_and_bind disable_cache: false main: | from returns.io import IO from returns.pointfree import map_, bind from returns.result import Result, Success def test(arg: int) -> Result[float, str]: ... reveal_type(map_(bind(test))(IO(Success(1)))) # N: Revealed type is "returns.io.IO[returns.result.Result[builtins.float, builtins.str]]" - case: map_io disable_cache: false main: | from returns.pointfree import map_ from returns.io import IO def test(arg: float) -> int: ... reveal_type(map_(test)(IO(1.5))) # N: Revealed type is "returns.io.IO[builtins.int]" - case: map_maybe disable_cache: false main: | from returns.pointfree import map_ from returns.maybe import Maybe def test(arg: float) -> int: ... reveal_type(map_(test)(Maybe.from_value(1.5))) # N: Revealed type is "returns.maybe.Maybe[builtins.int]" - case: map_result disable_cache: false main: | from returns.pointfree import map_ from returns.result import Result def test(arg: float) -> int: ... x: Result[float, str] reveal_type(map_(test)(x)) # N: Revealed type is "returns.result.Result[builtins.int, builtins.str]" - case: map_ioresult disable_cache: false main: | from returns.pointfree import map_ from returns.io import IOResult def test(arg: float) -> int: ... x: IOResult[float, str] reveal_type(map_(test)(x)) # N: Revealed type is "returns.io.IOResult[builtins.int, builtins.str]" - case: map_requires_context disable_cache: false main: | from returns.pointfree import map_ from returns.context import RequiresContext def test(arg: float) -> int: ... x: RequiresContext[float, str] reveal_type(map_(test)(x)) # N: Revealed type is "returns.context.requires_context.RequiresContext[builtins.int, builtins.str]" - case: map_requires_context_result disable_cache: false main: | from returns.pointfree import map_ from returns.context import RequiresContextResult def test(arg: float) -> int: ... x: RequiresContextResult[float, str, bool] reveal_type(map_(test)(x)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.int, builtins.str, builtins.bool]" - case: map_requires_context_ioresult disable_cache: false main: | from returns.pointfree import map_ from returns.context import RequiresContextIOResult def test(arg: float) -> int: ... 
x: RequiresContextIOResult[float, str, bool] reveal_type(map_(test)(x)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, builtins.str, builtins.bool]" - case: map_requires_context_future_result disable_cache: false main: | from returns.pointfree import map_ from returns.context import RequiresContextFutureResult def test(arg: float) -> int: ... x: RequiresContextFutureResult[float, str, bool] reveal_type(map_(test)(x)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, builtins.str, builtins.bool]" - case: map_future disable_cache: false main: | from returns.pointfree import map_ from returns.future import Future def test(arg: float) -> int: ... reveal_type(map_(test)(Future.from_value(1.5))) # N: Revealed type is "returns.future.Future[builtins.int]" - case: map_future_result disable_cache: false main: | from returns.pointfree import map_ from returns.future import FutureResult def test(arg: float) -> int: ... x: FutureResult[float, str] reveal_type(map_(test)(x)) # N: Revealed type is "returns.future.FutureResult[builtins.int, builtins.str]" - case: map_custom_type disable_cache: false main: | from returns.pointfree import map_ from returns.primitives.hkt import SupportsKind1 from returns.interfaces.mappable import Mappable1 from typing import TypeVar T = TypeVar('T') N = TypeVar('N') class MyOwn( SupportsKind1['MyOwn', T], Mappable1[T], ): ... def test(arg: float) -> int: ... x: MyOwn[float] reveal_type(map_(test)(x)) # N: Revealed type is "main.MyOwn[builtins.int]" returns-0.24.0/typesafety/test_pointfree/test_modify_env2.yml000066400000000000000000000046721472312074000245420ustar00rootroot00000000000000- case: modify_env2_wrong_env disable_cache: false main: | from returns.pointfree import modify_env2 from returns.context import RequiresContext x: RequiresContext[float, str] modify_env2(int)(x) out: | main:5: error: Argument 1 has incompatible type "RequiresContext[float, str]"; expected "KindN[RequiresContext[Any, Any], float, int, Any]" [arg-type] - case: modify_env2_wrong_type disable_cache: false main: | from returns.pointfree import modify_env2 from returns.context import RequiresContextResult x: RequiresContextResult[float, str, int] modify_env2(int)(x) out: | main:5: error: Value of type variable "_Reader2Kind" of function cannot be "RequiresContextResult[Any, Any, Any]" [type-var] main:5: error: Argument 1 has incompatible type "RequiresContextResult[float, str, int]"; expected "KindN[RequiresContextResult[Any, Any, Any], float, int, Any]" [arg-type] - case: modify_env2_and_flow disable_cache: false main: | from returns.context import RequiresContext from returns.pointfree import modify_env2, bind from returns.pipeline import flow def modify(str) -> int: ... def test(arg: int) -> RequiresContext[float, str]: ... r: RequiresContext[int, int] reveal_type(flow(r, modify_env2(modify), bind(test))) # N: Revealed type is "returns.context.requires_context.RequiresContext[builtins.float, builtins.str]" - case: modify_env2_requires_context disable_cache: false main: | from returns.pointfree import modify_env2 from returns.context import RequiresContext class A: ... def mod(arg: A) -> bool: ... 
x: RequiresContext[float, bool] reveal_type(modify_env2(mod)(x)) # N: Revealed type is "returns.context.requires_context.RequiresContext[builtins.float, main.A]" - case: modify_env2_custom_type disable_cache: false main: | from typing import TypeVar from returns.interfaces.specific.reader import ReaderBased2 from returns.primitives.hkt import SupportsKind2 from returns.pointfree import modify_env2 _EnvType = TypeVar('_EnvType') _ReturnType = TypeVar('_ReturnType') class MyClass( SupportsKind2['MyClass', _ReturnType, _EnvType], ReaderBased2[_ReturnType, _EnvType], ): ... class A: ... def mod(arg: A) -> bool: ... x: MyClass[float, bool] reveal_type(modify_env2(mod)(x)) # N: Revealed type is "main.MyClass[builtins.float, main.A]" returns-0.24.0/typesafety/test_pointfree/test_modify_env3.yml000066400000000000000000000076271472312074000245460ustar00rootroot00000000000000- case: modify_env_wrong_env disable_cache: false main: | from returns.pointfree import modify_env from returns.context import RequiresContextResult def test(arg: float) -> int: ... x: RequiresContextResult[float, bool, str] modify_env(test)(x) out: | main:8: error: Argument 1 has incompatible type "RequiresContextResult[float, bool, str]"; expected "KindN[RequiresContextResult[Any, Any, Any], float, bool, int]" [arg-type] - case: modify_env_wrong_type disable_cache: false main: | from returns.pointfree import modify_env from returns.context import RequiresContext def test(arg: str) -> int: ... x: RequiresContext[float, int] modify_env(test)(x) out: | main:8: error: Value of type variable "_Reader3Kind" of function cannot be "RequiresContext[Any, Any]" [type-var] - case: modify_env_alias disable_cache: false main: | from returns.pointfree import modify_env3 from returns.context import RequiresContextResult def test(arg: str) -> int: ... x: RequiresContextResult[float, Exception, int] reveal_type(modify_env3(test)(x)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.float, builtins.Exception, builtins.str]" - case: modify_env_and_flow disable_cache: false main: | from returns.context import RequiresContextResult from returns.pointfree import modify_env, bind from returns.pipeline import flow def modify(str) -> int: ... def test(arg: int) -> RequiresContextResult[float, Exception, str]: ... r: RequiresContextResult[int, Exception, int] reveal_type(flow(r, modify_env(modify), bind(test))) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.float, builtins.Exception, builtins.str]" - case: modify_env_requires_context_result disable_cache: false main: | from returns.pointfree import modify_env from returns.context import RequiresContextResult def test(arg: str) -> int: ... x: RequiresContextResult[float, Exception, int] reveal_type(modify_env(test)(x)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.float, builtins.Exception, builtins.str]" - case: modify_env_requires_context_ioresult disable_cache: false main: | from returns.pointfree import modify_env from returns.context import RequiresContextIOResult def test(arg: str) -> int: ... 
x: RequiresContextIOResult[float, Exception, int] reveal_type(modify_env(test)(x)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.float, builtins.Exception, builtins.str]" - case: modify_env_requires_context_future_result disable_cache: false main: | from returns.pointfree import modify_env from returns.context import RequiresContextFutureResult def test(arg: str) -> int: ... x: RequiresContextFutureResult[float, Exception, int] reveal_type(modify_env(test)(x)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.float, builtins.Exception, builtins.str]" - case: modify_env_custom_type disable_cache: false main: | from typing import TypeVar from returns.interfaces.specific.reader import ReaderLike3 from returns.primitives.hkt import SupportsKind3 from returns.pointfree import modify_env _EnvType = TypeVar('_EnvType') _ValueType = TypeVar('_ValueType') _ErrorType = TypeVar('_ErrorType') class MyClass( SupportsKind3['MyClass', _ValueType, _ErrorType, _EnvType], ReaderLike3[_ValueType, _ErrorType, _EnvType], ): ... def test(arg: str) -> int: ... x: MyClass[int, Exception, int] reveal_type(modify_env(test)(x)) # N: Revealed type is "main.MyClass[builtins.int, builtins.Exception, builtins.str]" returns-0.24.0/typesafety/test_pointfree/test_rescue.yml000066400000000000000000000101371472312074000236000ustar00rootroot00000000000000- case: lash_wrong_second_type disable_cache: false main: | from returns.pointfree import lash from returns.context import RequiresContextIOResult def test(arg: float) -> RequiresContextIOResult[float, str, int]: ... x: RequiresContextIOResult[float, str, int] lash(test)(x) # E: Argument 1 has incompatible type "RequiresContextIOResult[float, str, int]"; expected "KindN[RequiresContextIOResult[Any, Any, Any], float, float, int]" [arg-type] - case: lash_wrong_else_types disable_cache: false main: | from returns.pointfree import lash from returns.context import RequiresContextIOResult def test(arg: float) -> RequiresContextIOResult[float, str, int]: ... x: RequiresContextIOResult[str, float, str] lash(test)(x) # E: Argument 1 has incompatible type "RequiresContextIOResult[str, float, str]"; expected "KindN[RequiresContextIOResult[Any, Any, Any], float, float, int]" [arg-type] - case: lash_maybe disable_cache: false main: | from returns.pointfree import lash from returns.maybe import Maybe def test(arg=None) -> Maybe[int]: ... x: Maybe[int] reveal_type(lash(test)(x)) # N: Revealed type is "returns.maybe.Maybe[builtins.int]" - case: lash_result disable_cache: false main: | from returns.pointfree import lash from returns.result import Result def test(arg: float) -> Result[int, str]: ... x: Result[int, float] reveal_type(lash(test)(x)) # N: Revealed type is "returns.result.Result[builtins.int, builtins.str]" - case: lash_ioresult disable_cache: false main: | from returns.pointfree import lash from returns.io import IOResult def test(arg: float) -> IOResult[int, str]: ... x: IOResult[int, float] reveal_type(lash(test)(x)) # N: Revealed type is "returns.io.IOResult[builtins.int, builtins.str]" - case: lash_context_result disable_cache: false main: | from returns.pointfree import lash from returns.context import RequiresContextResult def test(arg: float) -> RequiresContextResult[float, str, int]: ... 
x: RequiresContextResult[float, float, int] reveal_type(lash(test)(x)) # N: Revealed type is "returns.context.requires_context_result.RequiresContextResult[builtins.float, builtins.str, builtins.int]" - case: lash_context_ioresult disable_cache: false main: | from returns.pointfree import lash from returns.context import RequiresContextIOResult def test(arg: float) -> RequiresContextIOResult[float, str, int]: ... x: RequiresContextIOResult[float, float, int] reveal_type(lash(test)(x)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.float, builtins.str, builtins.int]" - case: lash_context_future_result disable_cache: false main: | from returns.pointfree import lash from returns.context import RequiresContextFutureResult def test(arg: float) -> RequiresContextFutureResult[float, str, int]: ... x: RequiresContextFutureResult[float, float, int] reveal_type(lash(test)(x)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.float, builtins.str, builtins.int]" - case: lash_future_result disable_cache: false main: | from returns.pointfree import lash from returns.future import FutureResult def test(arg: float) -> FutureResult[int, str]: ... x: FutureResult[int, float] reveal_type(lash(test)(x)) # N: Revealed type is "returns.future.FutureResult[builtins.int, builtins.str]" - case: lash_custom_type disable_cache: false main: | from typing import TypeVar from returns.pointfree import lash from returns.interfaces.lashable import Lashable2 from returns.primitives.hkt import SupportsKind2 V = TypeVar('V') E = TypeVar('E') class MyClass(SupportsKind2['MyClass', V, E], Lashable2[V, E]): ... def test(arg: str) -> MyClass[int, int]: ... x: MyClass[int, str] reveal_type(lash(test)(x)) # N: Revealed type is "main.MyClass[builtins.int, builtins.int]" returns-0.24.0/typesafety/test_pointfree/test_unify.yml000066400000000000000000000062141472312074000234450ustar00rootroot00000000000000- case: unify_result disable_cache: false main: | from returns.pointfree import unify from returns.result import Result def test(arg: str) -> Result[str, int]: ... x: Result[str, AssertionError] reveal_type(unify(test)(x)) # N: Revealed type is "returns.result.Result[builtins.str, Union[builtins.AssertionError, builtins.int]]" - case: unify_ioresult disable_cache: false main: | from returns.io import IOResult from returns.pointfree import unify def test(arg: float) -> IOResult[str, bytes]: ... x: IOResult[float, bool] reveal_type(unify(test)(x)) # N: Revealed type is "returns.io.IOResult[builtins.str, Union[builtins.bool, builtins.bytes]]" - case: unify_future_result disable_cache: false main: | from returns.future import FutureResult from returns.pointfree import unify def test(arg: bool) -> FutureResult[bool, str]: ... x: FutureResult[bool, float] reveal_type(unify(test)(x)) # N: Revealed type is "returns.future.FutureResult[builtins.bool, Union[builtins.float, builtins.str]]" - case: unify_reader_ioresult disable_cache: false main: | from returns.pointfree import unify from returns.context import ReaderIOResult def test(arg: float) -> ReaderIOResult[bool, str, float]: ... 
x: ReaderIOResult[float, Exception, float] reveal_type(unify(test)(x)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.bool, Union[builtins.Exception, builtins.str], builtins.float]" - case: unify_reader_future_result1 disable_cache: false main: | from returns.pointfree import unify from returns.context import ReaderFutureResult, NoDeps def test(arg: int) -> ReaderFutureResult[int, bool, bool]: ... x: ReaderFutureResult[int, str, NoDeps] reveal_type(unify(test)(x)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, Union[builtins.str, builtins.bool], builtins.bool]" - case: unify_reader_future_result2 disable_cache: false main: | from returns.pointfree import unify from returns.context import ReaderFutureResult def test(arg: int) -> ReaderFutureResult[int, bool, float]: ... x: ReaderFutureResult[int, str, float] reveal_type(unify(test)(x)) # N: Revealed type is "returns.context.requires_context_future_result.RequiresContextFutureResult[builtins.int, Union[builtins.str, builtins.bool], builtins.float]" - case: unify_custom_type disable_cache: false main: | from typing import TypeVar from returns.interfaces.specific.result import ResultLike2 from returns.pointfree import unify from returns.primitives.hkt import SupportsKind2 ValueType = TypeVar('ValueType') ErrorType = TypeVar('ErrorType') class MyOwn( SupportsKind2['MyOwn', ValueType, ErrorType], ResultLike2[ValueType, ErrorType] ): ... def test(arg: str) -> MyOwn[str, bool]: ... x: MyOwn[str, ValueError] reveal_type(unify(test)(x)) # N: Revealed type is "main.MyOwn[builtins.str, Union[builtins.ValueError, builtins.bool]]" returns-0.24.0/typesafety/test_primitives/000077500000000000000000000000001472312074000207265ustar00rootroot00000000000000returns-0.24.0/typesafety/test_primitives/test_hkt/000077500000000000000000000000001472312074000225535ustar00rootroot00000000000000returns-0.24.0/typesafety/test_primitives/test_hkt/test_dekind/000077500000000000000000000000001472312074000250505ustar00rootroot00000000000000returns-0.24.0/typesafety/test_primitives/test_hkt/test_dekind/test_dekind.yml000066400000000000000000000041011472312074000300640ustar00rootroot00000000000000- case: dekind_correct_fully_known disable_cache: false main: | from returns.io import IO from returns.primitives.hkt import Kind1, dekind container: Kind1[IO, int] reveal_type(dekind(container)) # N: Revealed type is "returns.io.IO[builtins.int]" - case: dekind_bindable disable_cache: false main: | from returns.interfaces.bindable import Bindable1 from typing import Callable, TypeVar from returns.primitives.hkt import Kind1, dekind T = TypeVar('T') N = TypeVar('N') def bind( instance: Bindable1[T], function: Callable[[T], Kind1[Bindable1, N]], ) -> Bindable1[N]: x = dekind(instance.bind(function)) reveal_type(x) # N: Revealed type is "returns.interfaces.bindable.BindableN[N`-2, Never, Never]" return x - case: dekind_correct_two_type_args disable_cache: false main: | from returns.io import IOResult from returns.primitives.hkt import Kind2, dekind container: Kind2[IOResult, int, str] reveal_type(dekind(container)) # N: Revealed type is "returns.io.IOResult[builtins.int, builtins.str]" - case: dekind_correct_typevar disable_cache: false main: | from typing import Callable, TypeVar from returns.primitives.hkt import SupportsKind1, Kind1, dekind _ValueType = TypeVar('_ValueType') _NewValueType = TypeVar('_NewValueType') class IO(SupportsKind1['IO', _ValueType]): def apply( 
self, container: Kind1[ 'IO', Callable[[_ValueType], _NewValueType], ], ): reveal_type(dekind(container)) # N: Revealed type is "main.IO[def (_ValueType`1) -> _NewValueType`-1]" - case: dekind_wrong_non_instance disable_cache: false main: | from typing import TypeVar from returns.primitives.hkt import Kind2, dekind T = TypeVar('T') def some(arg: T): container: Kind2[T, int, str] reveal_type(dekind(container)) out: | main:8: error: dekind must be used with Instance as the first type argument [misc] main:8: note: Revealed type is "Any" returns-0.24.0/typesafety/test_primitives/test_hkt/test_kinded/000077500000000000000000000000001472312074000250505ustar00rootroot00000000000000returns-0.24.0/typesafety/test_primitives/test_hkt/test_kinded/test_kinded.yml000066400000000000000000000101051472312074000300650ustar00rootroot00000000000000- case: kinded_with_any disable_cache: false main: | from returns.primitives.hkt import KindN, kinded from typing import Any, TypeVar T1 = TypeVar('T1') T2 = TypeVar('T2') T3 = TypeVar('T3') I = TypeVar('I') @kinded def test(arg: KindN[I, T1, T2, T3]) -> KindN[I, T1, T2, T3]: ... container: Any reveal_type(test(container)) # N: Revealed type is "Any" - case: kinded_with_kind1 disable_cache: false main: | from returns.primitives.hkt import Kind1, kinded from returns.io import IO from typing import Any, TypeVar, Sequence T1 = TypeVar('T1') I = TypeVar('I') @kinded def test(arg: Kind1[I, T1]) -> Kind1[I, T1]: ... container: IO[str] reveal_type(test(container)) # N: Revealed type is "returns.io.IO[builtins.str]" - case: kinded_with_kind2 disable_cache: false main: | from returns.primitives.hkt import Kind2, kinded from returns.io import IOResult from typing import Any, TypeVar, Sequence T1 = TypeVar('T1') T2 = TypeVar('T2') I = TypeVar('I') @kinded def test(arg: Kind2[I, T1, T2]) -> Kind2[I, T1, T2]: ... container: IOResult[int, str] reveal_type(test(container)) # N: Revealed type is "returns.io.IOResult[builtins.int, builtins.str]" - case: kinded_with_kind3 disable_cache: false main: | from returns.primitives.hkt import Kind3, kinded from returns.context import ReaderIOResult from typing import Any, TypeVar, Sequence T1 = TypeVar('T1') T2 = TypeVar('T2') T3 = TypeVar('T3') I = TypeVar('I') @kinded def test(arg: Kind3[I, T1, T2, T3]) -> Kind3[I, T1, T2, T3]: ... container: ReaderIOResult[int, str, bool] reveal_type(test(container)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, builtins.str, builtins.bool]" - case: kinded_regression521 disable_cache: false main: | from returns.primitives.hkt import KindN, kinded from returns.context import ReaderIOResult from typing import Any, TypeVar, Sequence, Iterable T1 = TypeVar('T1') T2 = TypeVar('T2') T3 = TypeVar('T3') I = TypeVar('I') @kinded def test( arg: Iterable[KindN[I, T1, T2, T3]], ) -> KindN[I, Sequence[T1], T2, T3]: ... container: Iterable[ReaderIOResult[int, str, bool]] reveal_type(test(container)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[typing.Sequence[builtins.int], builtins.str, builtins.bool]" - case: kinded_with_container1 disable_cache: false main: | from returns.primitives.hkt import KindN, kinded from returns.io import IO from typing import TypeVar T1 = TypeVar('T1') T2 = TypeVar('T2') T3 = TypeVar('T3') I = TypeVar('I') @kinded def test(arg: KindN[I, T1, T2, T3]) -> KindN[I, T1, T2, T3]: ... 
container: IO[int] reveal_type(test(container)) # N: Revealed type is "returns.io.IO[builtins.int]" - case: kinded_with_container2 disable_cache: false main: | from returns.primitives.hkt import KindN, kinded from returns.io import IOResult from typing import TypeVar T1 = TypeVar('T1') T2 = TypeVar('T2') T3 = TypeVar('T3') I = TypeVar('I') @kinded def test(arg: KindN[I, T1, T2, T3]) -> KindN[I, T1, T2, T3]: ... container: IOResult[int, str] reveal_type(test(container)) # N: Revealed type is "returns.io.IOResult[builtins.int, builtins.str]" - case: kinded_with_container3 disable_cache: false main: | from returns.primitives.hkt import KindN, kinded from returns.context import ReaderIOResult from typing import TypeVar T1 = TypeVar('T1') T2 = TypeVar('T2') T3 = TypeVar('T3') I = TypeVar('I') @kinded def test(arg: KindN[I, T1, T2, T3]) -> KindN[I, T1, T2, T3]: ... container: ReaderIOResult[int, str, bool] reveal_type(test(container)) # N: Revealed type is "returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, builtins.str, builtins.bool]" returns-0.24.0/typesafety/test_primitives/test_hkt/test_kinded/test_kinded_methods.yml000066400000000000000000000125041472312074000316150ustar00rootroot00000000000000- case: kinded_with_method disable_cache: false main: | from typing import Callable, Generic, TypeVar from returns.primitives.hkt import Kind1, kinded _FirstType = TypeVar('_FirstType') _UpdatedType = TypeVar('_UpdatedType') _MappableType = TypeVar('_MappableType', bound='Mappable') class Mappable(Generic[_FirstType]): @kinded def map( self: _MappableType, function: Callable[[_FirstType], _UpdatedType], ) -> Kind1[_MappableType, _UpdatedType]: ... x: Mappable[int] reveal_type(x.map(str)) # N: Revealed type is "main.Mappable[builtins.str]" - case: kinded_with_unannotated_self_method disable_cache: false main: | from typing import Callable, Generic, TypeVar from returns.primitives.hkt import Kind1, kinded _FirstType = TypeVar('_FirstType') _UpdatedType = TypeVar('_UpdatedType') _MappableType = TypeVar('_MappableType', bound='Mappable') class Mappable(Generic[_FirstType]): @kinded def map( self, # not annotated, `_MappableType` is unbound function: Callable[[_FirstType], _UpdatedType], ) -> Kind1[_MappableType, _UpdatedType]: ... x: Mappable[int] reveal_type(x.map(str)) # N: Revealed type is "Any" - case: kinded_with_two_params disable_cache: false main: | from typing import Callable, TypeVar from returns.primitives.hkt import Kind1, SupportsKind1, kinded _FirstType = TypeVar('_FirstType') _UpdatedType = TypeVar('_UpdatedType') _MappableType = TypeVar('_MappableType', bound='Mappable') class Mappable(object): @kinded def map( self, container: Kind1[_MappableType, _FirstType], function: Callable[[_FirstType], _UpdatedType], ) -> Kind1[_MappableType, _UpdatedType]: ... class My(Mappable, SupportsKind1['My', _FirstType]): ... x: Mappable y: My[int] reveal_type(x.map(y, str)) # N: Revealed type is "main.My[builtins.str]" - case: kinded_classmethod_with_two_params1 disable_cache: false main: | from typing import Callable, TypeVar from returns.primitives.hkt import Kind1, SupportsKind1, kinded _FirstType = TypeVar('_FirstType') _UpdatedType = TypeVar('_UpdatedType') _MappableType = TypeVar('_MappableType', bound='Mappable') class Mappable(object): @kinded @classmethod def map( cls, container: Kind1[_MappableType, _FirstType], function: Callable[[_FirstType], _UpdatedType], ) -> Kind1[_MappableType, _UpdatedType]: ... class My(Mappable, SupportsKind1['My', _FirstType]): ... 
y: My[int] reveal_type(Mappable.map(y, str)) # N: Revealed type is "main.My[builtins.str]" reveal_type(My.map(y, str)) # N: Revealed type is "main.My[builtins.str]" reveal_type(y.map(y, str)) # N: Revealed type is "main.My[builtins.str]" - case: kinded_classmethod_with_two_params2 disable_cache: false main: | from typing import Callable, TypeVar, Type from returns.primitives.hkt import Kind1, SupportsKind1, kinded _FirstType = TypeVar('_FirstType') _UpdatedType = TypeVar('_UpdatedType') _MappableType = TypeVar('_MappableType', bound='Mappable') class Mappable(object): @kinded @classmethod def map( cls: Type[Mappable], container: Kind1[_MappableType, _FirstType], function: Callable[[_FirstType], _UpdatedType], ) -> Kind1[_MappableType, _UpdatedType]: ... class My(Mappable, SupportsKind1['My', _FirstType]): ... y: My[int] reveal_type(Mappable.map(y, str)) # N: Revealed type is "main.My[builtins.str]" reveal_type(My.map(y, str)) # N: Revealed type is "main.My[builtins.str]" reveal_type(y.map(y, str)) # N: Revealed type is "main.My[builtins.str]" - case: kinded_with_wrong_self_type1 disable_cache: false main: | from typing import Callable, TypeVar, Type from returns.primitives.hkt import Kind1, kinded _FirstType = TypeVar('_FirstType') _UpdatedType = TypeVar('_UpdatedType') _MappableType = TypeVar('_MappableType', bound='Mappable') class Mappable(object): @kinded def map( self: int, container: Kind1[_MappableType, _FirstType], function: Callable[[_FirstType], _UpdatedType], ) -> Kind1[_MappableType, _UpdatedType]: ... out: | main:10: error: The erased type of self "builtins.int" is not a supertype of its class "main.Mappable" [misc] - case: kinded_with_wrong_self_type2 disable_cache: false main: | from typing import Callable, TypeVar, Type from returns.primitives.hkt import Kind1, kinded _FirstType = TypeVar('_FirstType') _UpdatedType = TypeVar('_UpdatedType') _MappableType = TypeVar('_MappableType', bound='Mappable') class Mappable(object): @kinded @classmethod def map( cls: Type[int], container: Kind1[_MappableType, _FirstType], function: Callable[[_FirstType], _UpdatedType], ) -> Kind1[_MappableType, _UpdatedType]: ... out: | main:11: error: The erased type of self "Type[builtins.int]" is not a supertype of its class "Type[main.Mappable]" [misc] returns-0.24.0/typesafety/test_primitives/test_hkt/test_kinded/test_kinded_nested.yml000066400000000000000000000067341472312074000314440ustar00rootroot00000000000000- case: kinded_with_nested_kind_interface disable_cache: false main: | from returns.primitives.hkt import KindN, kinded from returns.interfaces.mappable import MappableN from returns.context import ReaderIOResult from typing import TypeVar, Sequence I = TypeVar('I', bound=MappableN) @kinded def test(arg: KindN[I, int, str, bool]) -> Sequence[KindN[I, int, str, bool]]: ... x: ReaderIOResult[int, str, bool] reveal_type(test(x)) # N: Revealed type is "typing.Sequence[returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, builtins.str, builtins.bool]]" - case: kinded_with_nested_kind_instance disable_cache: false main: | from returns.primitives.hkt import KindN, kinded from returns.context import ReaderIOResult from typing import Sequence @kinded def test() -> Sequence[KindN[ReaderIOResult, int, str, bool]]: ... 
reveal_type(test()) # N: Revealed type is "typing.Sequence[returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, builtins.str, builtins.bool]]" - case: kinded_with_nested_kind_callable disable_cache: false main: | from returns.primitives.hkt import KindN, kinded from returns.context import ReaderIOResult from typing import Sequence, Callable @kinded def test() -> Sequence[ Callable[ [KindN[ReaderIOResult, int, str, bool]], KindN[ReaderIOResult, int, str, bool] ], ]: ... reveal_type(test()) # N: Revealed type is "typing.Sequence[def (returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, builtins.str, builtins.bool]) -> returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, builtins.str, builtins.bool]]" - case: kinded_with_nested_kind_tuple disable_cache: false main: | from returns.primitives.hkt import KindN, kinded from returns.context import ReaderIOResult from typing import Tuple @kinded def test() -> Tuple[ KindN[ReaderIOResult, int, str, bool], KindN[ReaderIOResult, float, bytes, object], ]: ... reveal_type(test()) # N: Revealed type is "Tuple[returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, builtins.str, builtins.bool], returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.float, builtins.bytes, builtins.object]]" - case: kinded_with_nested_kind_union disable_cache: false main: | from returns.primitives.hkt import KindN, kinded from returns.context import ReaderIOResult from typing import Union @kinded def test() -> Union[ KindN[ReaderIOResult, int, str, bool], KindN[ReaderIOResult, float, bytes, object], ]: ... reveal_type(test()) # N: Revealed type is "Union[returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, builtins.str, builtins.bool], returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.float, builtins.bytes, builtins.object]]" - case: kinded_with_nested_kind_type disable_cache: false main: | from returns.primitives.hkt import KindN, kinded from returns.context import ReaderIOResult from typing import Type @kinded def test() -> Type[KindN[ReaderIOResult, int, str, bool]]: ... reveal_type(test()) # N: Revealed type is "Type[returns.context.requires_context_ioresult.RequiresContextIOResult[builtins.int, builtins.str, builtins.bool]]" returns-0.24.0/typesafety/test_primitives/test_hkt/test_kinded/test_kinded_overload.yml000066400000000000000000000020451472312074000317640ustar00rootroot00000000000000- case: kinded_with_any disable_cache: false main: | from typing import TypeVar, overload, Optional from returns.maybe import Maybe from returns.interfaces.mappable import MappableN from returns.primitives.hkt import Kind1, kinded _ValueType = TypeVar('_ValueType') _NewType = TypeVar('_NewType') _MappableKind = TypeVar('_MappableKind', bound=MappableN) @overload def _test( instance: Kind1[_MappableKind, _ValueType], ) -> Kind1[_MappableKind, None]: ... @overload def _test( instance: Kind1[_MappableKind, _ValueType], value: _NewType, ) -> Kind1[_MappableKind, _NewType]: ... def _test( instance: Kind1[_MappableKind, _ValueType], value: Optional[_NewType] = None, ) -> Kind1[_MappableKind, Optional[_NewType]]: ... 
test = kinded(_test) x: Maybe[int] reveal_type(test(x)) # N: Revealed type is "returns.maybe.Maybe[None]" reveal_type(test(x, 'a')) # N: Revealed type is "returns.maybe.Maybe[builtins.str]" returns-0.24.0/typesafety/test_primitives/test_hkt/test_kindn/000077500000000000000000000000001472312074000247155ustar00rootroot00000000000000returns-0.24.0/typesafety/test_primitives/test_hkt/test_kindn/test_kindn.yml000066400000000000000000000006171472312074000276060ustar00rootroot00000000000000- case: kind_params disable_cache: false parametrized: - count: 1 expected: 2 - count: 2 expected: 3 - count: 3 expected: 4 main: | from returns.primitives.hkt import Kind{{ count }} container: Kind{{ count }}[object, int, str, bool, float] out: | main:3: error: Bad number of arguments for type alias, expected {{ expected }}, given 5 [type-arg] returns-0.24.0/typesafety/test_primitives/test_hkt/test_kindn/test_kindn_getattr.yml000066400000000000000000000013571472312074000313420ustar00rootroot00000000000000- case: kind_existing_getattr disable_cache: false main: | from returns.primitives.hkt import Kind1 from typing import List container: Kind1[List, int] reveal_type(container.pop) # N: Revealed type is "def (typing.SupportsIndex =) -> builtins.int" - case: kind_missing_getattr disable_cache: false main: | from returns.primitives.hkt import Kind1 from typing import List container: Kind1[List, int] container.missing # E: "List[Any]" has no attribute "missing" [attr-defined] - case: kind_any_getattr disable_cache: false main: | from returns.primitives.hkt import Kind1 from typing import Any container: Kind1[Any, int] reveal_type(container.missing) # N: Revealed type is "Any" returns-0.24.0/typesafety/test_primitives/test_hkt/test_supports_kind.yml000066400000000000000000000021651472312074000272450ustar00rootroot00000000000000- case: supports_kind_getattr_existing disable_cache: false main: | from returns.primitives.hkt import SupportsKind1 from typing import List container: SupportsKind1[List, int] container.pop # E: "SupportsKindN[List[Any], int, Never, Never]" has no attribute "pop" [attr-defined] - case: supports_kind_getattr_missing disable_cache: false main: | from returns.primitives.hkt import SupportsKindN container: SupportsKindN[object, int, str, bool] container.missing out: | main:4: error: "SupportsKindN[object, int, str, bool]" has no attribute "missing" [attr-defined] - case: supports_kind_getattr_subclass disable_cache: false main: | from returns.primitives.hkt import SupportsKind1 from typing import TypeVar T = TypeVar('T') class Custom(SupportsKind1['Custom', T]): existing: T container: Custom[int] reveal_type(container.existing) reveal_type(container.missing) out: | main:10: note: Revealed type is "builtins.int" main:11: error: "Custom[int]" has no attribute "missing" [attr-defined] main:11: note: Revealed type is "Any" returns-0.24.0/typesafety/test_primitives/test_reawaitable/000077500000000000000000000000001472312074000242455ustar00rootroot00000000000000returns-0.24.0/typesafety/test_primitives/test_reawaitable/test_reawaitable_decorator.yml000066400000000000000000000007161472312074000323550ustar00rootroot00000000000000- case: reawaitable_decorator_with_args disable_cache: false main: | from typing import Optional from returns.primitives.reawaitable import reawaitable @reawaitable async def test(first: int, second: Optional[str] = None, *, kw: bool = True) -> int: ... 
reveal_type(test) # N: Revealed type is "def (first: builtins.int, second: Union[builtins.str, None] =, *, kw: builtins.bool =) -> typing.Coroutine[Any, Any, builtins.int]" returns-0.24.0/typesafety/test_primitives/test_tracing/000077500000000000000000000000001472312074000234145ustar00rootroot00000000000000returns-0.24.0/typesafety/test_primitives/test_tracing/test_collect_traces.yml000066400000000000000000000024331472312074000301660ustar00rootroot00000000000000- case: collect_traces_context_manager_return_type_one skip: sys.version_info[:2] >= (3, 13) disable_cache: false main: | from returns.primitives.tracing import collect_traces reveal_type(collect_traces) # N: Revealed type is "Overload(def () -> typing.ContextManager[None], def [_FunctionType <: def (*Any, **Any) -> Any] (function: _FunctionType`-1) -> _FunctionType`-1)" - case: collect_traces_context_manager_return_type_two skip: sys.version_info[:2] >= (3, 13) disable_cache: false main: | from returns.primitives.tracing import collect_traces with reveal_type(collect_traces()): # N: Revealed type is "typing.ContextManager[None]" pass - case: collect_traces_decorated_function_return_type disable_cache: false main: | from returns.primitives.tracing import collect_traces @collect_traces def function() -> int: return 0 reveal_type(function) # N: Revealed type is "def () -> builtins.int" - case: collect_traces_decorated_function_with_argument_return_type disable_cache: false main: | from returns.primitives.tracing import collect_traces @collect_traces def function(number: int) -> str: return str(number) reveal_type(function) # N: Revealed type is "def (number: builtins.int) -> builtins.str" returns-0.24.0/typesafety/test_result/000077500000000000000000000000001472312074000200515ustar00rootroot00000000000000returns-0.24.0/typesafety/test_result/test_attempt.yml000066400000000000000000000021471472312074000233150ustar00rootroot00000000000000- case: attempt_decorator_no_params disable_cache: false main: | from returns.result import attempt @attempt def test() -> int: return 1 out: | main:3: error: Argument 1 to "attempt" has incompatible type "Callable[[], int]"; expected "Callable[[_FirstType], int]" [arg-type] - case: attempt_decorator_one_param disable_cache: false main: | from returns.result import attempt @attempt def test(param: str) -> int: return 1 reveal_type(test) # N: Revealed type is "def (builtins.str) -> returns.result.Result[builtins.int, builtins.str]" def test2(param: int) -> str: return 'str' reveal_type(attempt(test2)) # N: Revealed type is "def (builtins.int) -> returns.result.Result[builtins.str, builtins.int]" - case: attempt_decorator_two_params disable_cache: false main: | from returns.result import attempt @attempt def test(first: str, second: float) -> int: return 1 out: | main:3: error: Argument 1 to "attempt" has incompatible type "Callable[[str, float], int]"; expected "Callable[[str], int]" [arg-type] returns-0.24.0/typesafety/test_result/test_construct_failure.yml000066400000000000000000000013141472312074000253650ustar00rootroot00000000000000- case: failure_lash disable_cache: false main: | from returns.result import Failure, Result def returns_result(param: int) -> Result[str, Exception]: ... 
first: Result[str, int] = Failure(1) reveal_type(first.lash(returns_result)) # N: Revealed type is "returns.result.Result[builtins.str, builtins.Exception]" - case: failure_alt disable_cache: false main: | from returns.result import Failure reveal_type(Failure(1).alt(str)) # N: Revealed type is "returns.result.Result[Any, builtins.str]" - case: failure_failure disable_cache: false main: | from returns.result import Failure reveal_type(Failure(1).failure()) # N: Revealed type is "builtins.int" returns-0.24.0/typesafety/test_result/test_construct_success.yml000066400000000000000000000035061472312074000254130ustar00rootroot00000000000000- case: success_bind disable_cache: false main: | from returns.result import Success, Result def returns_result(param: int) -> Result[str, Exception]: ... first: Result[int, Exception] = Success(1) reveal_type(first.bind(returns_result)) # N: Revealed type is "returns.result.Result[builtins.str, builtins.Exception]" - case: success_bind_result disable_cache: false main: | from returns.result import Success, Result def returns_result(param: int) -> Result[str, Exception]: ... first: Result[int, Exception] = Success(1) reveal_type(first.bind_result(returns_result)) # N: Revealed type is "returns.result.Result[builtins.str, builtins.Exception]" - case: success_map disable_cache: false main: | from returns.result import Success, Result reveal_type(Success(1).map(str)) # N: Revealed type is "returns.result.Result[builtins.str, Any]" - case: success_apply1 disable_cache: false main: | from returns.result import Success, Result reveal_type(Success(1).apply(Success(str))) # N: Revealed type is "returns.result.Result[builtins.str, Any]" - case: success_apply2 disable_cache: false main: | from returns.result import Success, Result from returns.curry import curry @curry def sum_two(first: int, second: float) -> str: ... 
reveal_type(Success(2.0).apply(Success(1).apply(Success(sum_two)))) # N: Revealed type is "returns.result.Result[builtins.str, Any]" - case: success_value_or disable_cache: false main: | from returns.result import Success reveal_type(Success(1).value_or(None)) # N: Revealed type is "Union[builtins.int, None]" - case: success_unwrap disable_cache: false main: | from returns.result import Success reveal_type(Success(1).unwrap()) # N: Revealed type is "builtins.int" returns-0.24.0/typesafety/test_result/test_do.yml000066400000000000000000000035511472312074000222410ustar00rootroot00000000000000- case: do_all_errors disable_cache: false main: | from returns.result import Result, Failure reveal_type(Result.do( # N: Revealed type is "returns.result.Result[Any, Union[builtins.int, builtins.str]]" first / second for first in Failure(1) for second in Failure('a') )) - case: do_no_errors disable_cache: false main: | from returns.result import Success, Result reveal_type(Result.do( # N: Revealed type is "returns.result.Result[builtins.float, Never]" x + y for x in Success(1) for y in Success(2.5) )) - case: do_with_errors disable_cache: false main: | from returns.result import Success, Result a: Result[int, str] b: Result[float, bytes] reveal_type(Result.do( # N: Revealed type is "returns.result.Result[builtins.float, Union[builtins.str, builtins.bytes]]" x + y for x in a for y in b )) - case: do_types_mismatch disable_cache: false main: | from returns.result import Success, Result from returns.io import IOSuccess Result.do( x + y for x in IOSuccess(1) # E: Invalid type supplied in do-notation: expected "returns.result.Result[Any, Any]", got "returns.io.IOSuccess[builtins.int]" [misc] for y in Success(2.5) ) - case: do_with_if disable_cache: false main: | from returns.result import Success, Result Result.do( # E: Using "if" conditions inside a generator is not allowed [misc] x + y for x in Success(1) for y in Success(2.5) if y > 5 ) - case: do_with_var disable_cache: false main: | from returns.result import Success, Result x = ( x + y for x in Success(1) for y in Success(2.5) ) Result.do(x) # E: Literal generator expression is required, not a variable or function call [misc] returns-0.24.0/typesafety/test_result/test_result_error.yml000066400000000000000000000005421472312074000243630ustar00rootroot00000000000000- case: failure_lash disable_cache: false main: | from returns.result import ResultE, Success, Failure def some(arg: int) -> ResultE[int]: if arg > 0: return Success(arg) return Failure(ValueError('test')) reveal_type(some(1)) # N: Revealed type is "returns.result.Result[builtins.int, builtins.Exception]" returns-0.24.0/typesafety/test_result/test_result_type_cast.yml000066400000000000000000000104521472312074000252260ustar00rootroot00000000000000- case: result_success_cast1 disable_cache: false main: | from returns.result import Result, Success first: Result[int, Exception] = Success(1) reveal_type(first) # N: Revealed type is "returns.result.Result[builtins.int, builtins.Exception]" - case: result_success_cast2 disable_cache: false main: | from returns.result import Result, Success first: Result[object, Exception] = Success(1) reveal_type(first) # N: Revealed type is "returns.result.Result[builtins.object, builtins.Exception]" - case: result_failure_cast1 disable_cache: false main: | from returns.result import Result, Failure first: Result[int, Exception] = Failure(Exception()) reveal_type(first) # N: Revealed type is "returns.result.Result[builtins.int, builtins.Exception]" - case: 
result_failure_cast2 disable_cache: false main: | from returns.result import Result, Failure first: Result[int, Exception] = Failure(TypeError()) reveal_type(first) # N: Revealed type is "returns.result.Result[builtins.int, builtins.Exception]" - case: result_swap disable_cache: false main: | from returns.result import Result x: Result[int, str] reveal_type(x.swap()) # N: Revealed type is "returns.result.Result[builtins.str, builtins.int]" - case: result_getattr disable_cache: false main: | from returns.result import Result x: Result[int, str] x.missing # E: "Result[int, str]" has no attribute "missing" [attr-defined] - case: result_from_value disable_cache: false main: | from returns.result import Result reveal_type(Result.from_value(1)) # N: Revealed type is "returns.result.Result[builtins.int, Any]" - case: result_from_failure disable_cache: false main: | from returns.result import Result reveal_type(Result.from_failure(1)) # N: Revealed type is "returns.result.Result[Any, builtins.int]" - case: result_from_result disable_cache: false main: | from returns.result import Result x: Result[int, str] reveal_type(Result.from_result(x)) # N: Revealed type is "returns.result.Result[builtins.int, builtins.str]" - case: result_covariant_cast disable_cache: false main: | from returns.result import Result first: Result[TypeError, ValueError] # we cast both values second: Result[Exception, Exception] = first reveal_type(second) # N: Revealed type is "returns.result.Result[builtins.Exception, builtins.Exception]" - case: result_success_bind_contra1 disable_cache: false main: | from returns.result import Result, Success def test(some: int) -> Result[float, str]: ... first: Result[int, str] = Success(4) reveal_type(first.bind(test)) # N: Revealed type is "returns.result.Result[builtins.float, builtins.str]" - case: result_success_bind_contra2 disable_cache: false main: | from returns.result import Result, Success def test(some: int) -> Result[int, ValueError]: ... 
first: Result[int, Exception] second = first.bind(test) reveal_type(second) # N: Revealed type is "returns.result.Result[builtins.int, builtins.Exception]" - case: result_correct_usage disable_cache: false main: | from returns.result import Result, Success, Failure def factory(inner_value: int) -> Result[int, str]: if inner_value > 0: return Success(inner_value + 2) return Failure(str(inner_value)) reveal_type(factory(1)) # N: Revealed type is "returns.result.Result[builtins.int, builtins.str]" - case: resulte_typecast1 disable_cache: false main: | from returns.result import Result, ResultE, Success, Failure def function(arg: int) -> ResultE[int]: if arg > 0: return Success(arg + 1) return Failure(ValueError(arg)) result: Result[int, Exception] = function(1) reveal_type(result) # N: Revealed type is "returns.result.Result[builtins.int, builtins.Exception]" - case: resulte_typecast2 disable_cache: false main: | from returns.result import Result, ResultE, Success, Failure def function(arg: int) -> Result[int, Exception]: if arg > 0: return Success(arg + 1) return Failure(ValueError(arg)) result: ResultE[int] = function(1) reveal_type(result) # N: Revealed type is "returns.result.Result[builtins.int, builtins.Exception]" returns-0.24.0/typesafety/test_result/test_safe.yml000066400000000000000000000156321472312074000225600ustar00rootroot00000000000000- case: safe_decorator_no_params disable_cache: false main: | from returns.result import safe @safe def test() -> int: return 1 reveal_type(test) # N: Revealed type is "def () -> returns.result.Result[builtins.int, builtins.Exception]" - case: safe_decorator_passing_exceptions_no_params disable_cache: false main: | from returns.result import safe @safe((ValueError,)) def test() -> int: return 1 reveal_type(test) # N: Revealed type is "def () -> returns.result.Result[builtins.int, builtins.ValueError]" @safe(exceptions=(ValueError,)) def test2() -> int: return 1 reveal_type(test2) # N: Revealed type is "def () -> returns.result.Result[builtins.int, builtins.ValueError]" - case: safe_composition_no_params disable_cache: false main: | from returns.result import safe def test() -> int: return 1 reveal_type(safe(test)) # N: Revealed type is "def () -> returns.result.Result[builtins.int, builtins.Exception]" - case: safe_composition_passing_exceptions_no_params disable_cache: false main: | from returns.result import safe def test() -> int: return 1 reveal_type(safe((EOFError,))(test)) # N: Revealed type is "def () -> returns.result.Result[builtins.int, builtins.EOFError]" - case: safe_decorator_with_args disable_cache: false main: | from typing import Optional from returns.result import safe @safe def test(first: int, second: Optional[str] = None, *, kw: bool = True) -> int: return 1 reveal_type(test) # N: Revealed type is "def (first: builtins.int, second: Union[builtins.str, None] =, *, kw: builtins.bool =) -> returns.result.Result[builtins.int, builtins.Exception]" - case: safe_decorator_passing_exceptions_with_args disable_cache: false main: | from typing import Optional from returns.result import safe @safe((ValueError, EOFError)) def test(first: int, second: Optional[str] = None, *, kw: bool = True) -> int: return 1 reveal_type(test) # N: Revealed type is "def (first: builtins.int, second: Union[builtins.str, None] =, *, kw: builtins.bool =) -> returns.result.Result[builtins.int, builtins.Exception]" - case: safe_composition_with_args disable_cache: false main: | from typing import Optional from returns.result import safe def test(first: int, 
second: Optional[str] = None, *, kw: bool = True) -> int: return 1 reveal_type(safe(test)) # N: Revealed type is "def (first: builtins.int, second: Union[builtins.str, None] =, *, kw: builtins.bool =) -> returns.result.Result[builtins.int, builtins.Exception]" - case: safe_composition_passing_exceptions_with_args disable_cache: false main: | from typing import Optional from returns.result import safe def test(first: int, second: Optional[str] = None, *, kw: bool = True) -> int: return 1 reveal_type(safe((ValueError,))(test)) # N: Revealed type is "def (first: builtins.int, second: Union[builtins.str, None] =, *, kw: builtins.bool =) -> returns.result.Result[builtins.int, builtins.ValueError]" - case: safe_regression333 disable_cache: false main: | from returns.result import safe from typing import Any @safe def send(text: str) -> Any: return "test" reveal_type(send) # N: Revealed type is "def (text: builtins.str) -> returns.result.Result[Any, builtins.Exception]" - case: safe_passing_exceptions_regression333 disable_cache: false main: | from returns.result import safe from typing import Any @safe((Exception,)) def send(text: str) -> Any: return "test" reveal_type(send) # N: Revealed type is "def (text: builtins.str) -> returns.result.Result[Any, builtins.Exception]" - case: safe_regression641 disable_cache: false main: | from returns.result import safe from returns.functions import tap class Response(object): def raise_for_status(self) -> None: ... reveal_type(safe(tap(Response.raise_for_status))) # N: Revealed type is "def (main.Response) -> returns.result.Result[main.Response, builtins.Exception]" - case: safe_passing_exceptions_regression641 disable_cache: false main: | from returns.result import safe from returns.functions import tap class Response(object): def raise_for_status(self) -> None: ... 
reveal_type(safe((EOFError,))(tap(Response.raise_for_status))) # N: Revealed type is "def (main.Response) -> returns.result.Result[main.Response, builtins.EOFError]" - case: safe_decorator_with_args_kwargs disable_cache: false main: | from returns.result import safe @safe def test(*args, **kwargs) -> int: return 1 reveal_type(test) # N: Revealed type is "def (*args: Any, **kwargs: Any) -> returns.result.Result[builtins.int, builtins.Exception]" - case: safe_decorator_passing_exceptions_with_args_kwargs disable_cache: false main: | from returns.result import safe @safe((EOFError,)) def test(*args, **kwargs) -> int: return 1 reveal_type(test) # N: Revealed type is "def (*args: Any, **kwargs: Any) -> returns.result.Result[builtins.int, builtins.EOFError]" - case: safe_decorator_with_typed_args_kwargs disable_cache: false main: | from returns.result import safe @safe def test(*args: int, **kwargs: str) -> int: return 1 reveal_type(test) # N: Revealed type is "def (*args: builtins.int, **kwargs: builtins.str) -> returns.result.Result[builtins.int, builtins.Exception]" - case: safe_decorator_passing_exceptions_with_typed_args_kwargs disable_cache: false main: | from returns.result import safe @safe((Exception,)) def test(*args: int, **kwargs: str) -> int: return 1 reveal_type(test) # N: Revealed type is "def (*args: builtins.int, **kwargs: builtins.str) -> returns.result.Result[builtins.int, builtins.Exception]" - case: safe_decorator_composition disable_cache: false main: | from returns.io import impure from returns.result import safe @impure @safe def test(*args: int, **kwargs: str) -> int: return 1 reveal_type(test) # N: Revealed type is "def (*args: builtins.int, **kwargs: builtins.str) -> returns.io.IO[returns.result.Result[builtins.int, builtins.Exception]]" - case: safe_decorator_passing_exceptions_composition disable_cache: false main: | from returns.io import impure from returns.result import safe @impure @safe((ValueError,)) def test(*args: int, **kwargs: str) -> int: return 1 reveal_type(test) # N: Revealed type is "def (*args: builtins.int, **kwargs: builtins.str) -> returns.io.IO[returns.result.Result[builtins.int, builtins.ValueError]]" - case: safe_decorator_wrong_exceptions_types disable_cache: false main: | from returns.result import safe safe((int,)) # E: Value of type variable "_ExceptionType" of "safe" cannot be "int" [type-var] returns-0.24.0/typesafety/test_trampolines/000077500000000000000000000000001472312074000210705ustar00rootroot00000000000000returns-0.24.0/typesafety/test_trampolines/test_trampoline.yml000066400000000000000000000025321472312074000250260ustar00rootroot00000000000000- case: trampoline_missing_args disable_cache: false main: | from typing import List, Union from returns.trampolines import Trampoline, trampoline @trampoline def _accumulate( numbers: List[int], acc: int = 0, ) -> Union[int, Trampoline[int]]: return Trampoline(_accumulate) out: | main:9: error: Missing positional argument "numbers" in call to "Trampoline" [call-arg] - case: trampoline_wrong_args disable_cache: false main: | from typing import List, Union from returns.trampolines import Trampoline, trampoline @trampoline def _accumulate( numbers: List[int], acc: int = 0, ) -> Union[int, Trampoline[int]]: return Trampoline(_accumulate, ['a'], 'b') out: | main:9: error: List item 0 has incompatible type "str"; expected "int" [list-item] main:9: error: Argument 3 to "Trampoline" has incompatible type "str"; expected "int" [arg-type] - case: trampoline_return_type disable_cache: false main: | from typing 
import List, Union from returns.trampolines import Trampoline, trampoline @trampoline def _accumulate( numbers: List[int], acc: int = 0, ) -> Union[int, Trampoline[int]]: return Trampoline(_accumulate, [1], 2) reveal_type(_accumulate([1, 2])) # N: Revealed type is "builtins.int" returns-0.24.0/typesafety/test_unsafe/000077500000000000000000000000001472312074000200145ustar00rootroot00000000000000returns-0.24.0/typesafety/test_unsafe/test_unsafe.yml000066400000000000000000000003331472312074000230560ustar00rootroot00000000000000- case: unsafe_perform_io disable_cache: false main: | from returns.io import IO from returns.unsafe import unsafe_perform_io reveal_type(unsafe_perform_io(IO(1))) # N: Revealed type is "builtins.int"