pax_global_header00006660000000000000000000000064145521574770014533gustar00rootroot0000000000000052 comment=7bd2f2a513aba394477c9f67b60ff1ba1b5120c9 matrix-nio-0.24.0/000077500000000000000000000000001455215747700137055ustar00rootroot00000000000000matrix-nio-0.24.0/.github/000077500000000000000000000000001455215747700152455ustar00rootroot00000000000000matrix-nio-0.24.0/.github/workflows/000077500000000000000000000000001455215747700173025ustar00rootroot00000000000000matrix-nio-0.24.0/.github/workflows/tests.yml000066400000000000000000000026121455215747700211700ustar00rootroot00000000000000name: Build Status on: push: branches: - main pull_request: jobs: pre-commit-check: runs-on: ubuntu-latest strategy: matrix: python: [ "3.12" ] steps: - uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v4 with: python-version: ${{ matrix.python }} cache: "pip" - name: Install pre-commit run: | pip install pre-commit - name: pre-commit run --all-files run: | pre-commit run --all-files build: runs-on: ubuntu-latest strategy: matrix: python: ["3.8", "3.9", "3.10", "3.11", "3.12"] steps: - uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v4 with: python-version: ${{ matrix.python }} cache: "pip" - name: Install Tox run: pip install tox - name: Run Tox # Run tox using the version of Python in `PATH` run: tox -e py coverage: runs-on: ubuntu-latest strategy: matrix: python: [ "3.12" ] steps: - uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v4 with: python-version: ${{ matrix.python }} cache: "pip" - name: Install Tox run: pip install tox - name: Run Tox run: tox -e coverage matrix-nio-0.24.0/.gitignore000066400000000000000000000002741455215747700157000ustar00rootroot00000000000000.coverage coverage.xml .mypy_cache/ .ropeproject/ .pytest_cache/ packages/ __pycache__ poetry.lock *.pyc .hypothesis/ .tox/ doc/build matrix_nio.egg-info/ dist doc/html *.swp build .idea/ 
matrix-nio-0.24.0/.nojekyll000066400000000000000000000000001455215747700155230ustar00rootroot00000000000000matrix-nio-0.24.0/.pre-commit-config.yaml000066400000000000000000000010561455215747700201700ustar00rootroot00000000000000exclude: ^tests/data repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.5.0 hooks: - id: check-yaml - id: end-of-file-fixer - id: trailing-whitespace - repo: https://github.com/sphinx-contrib/sphinx-lint rev: v0.9.1 hooks: - id: sphinx-lint - repo: https://github.com/psf/black rev: 23.12.1 hooks: - id: black - repo: https://github.com/astral-sh/ruff-pre-commit rev: v0.1.13 hooks: - id: ruff args: [ --fix, --exit-non-zero-on-fix ] matrix-nio-0.24.0/.readthedocs.yaml000066400000000000000000000010701455215747700171320ustar00rootroot00000000000000# .readthedocs.yaml # Read the Docs configuration file # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details # Required version: 2 # Set the version of Python and other tools you might need build: os: ubuntu-22.04 tools: python: "3.11" # Build documentation in the doc/ directory with Sphinx sphinx: configuration: doc/conf.py # We recommend specifying your dependencies to enable reproducible builds: # https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html python: install: - requirements: rtd-requirements.txt matrix-nio-0.24.0/CHANGELOG.md000066400000000000000000000416441455215747700155270ustar00rootroot00000000000000# Changelog All notable changes to this project will be documented in this file. 
## [0.24.0] - 2024-01-18 ### Miscellaneous Tasks - [[#473]] Update pre-commit hooks, fix issues with sphinx-lint - [[#472]] [[#475]] Add content to `built-with-nio` - [[#468]] Bump `aiohttp` from 3.8.6 to 3.9.0 - [[#461]] Support `python3.12` - [[#478]] Bump `pycryptodome` from 3.19.0 to 3.19.1 [#461]: https://github.com/poljar/matrix-nio/pull/461 [#468]: https://github.com/poljar/matrix-nio/pull/468 [#472]: https://github.com/poljar/matrix-nio/pull/472 [#473]: https://github.com/poljar/matrix-nio/pull/473 [#475]: https://github.com/poljar/matrix-nio/pull/475 [#478]: https://github.com/poljar/matrix-nio/pull/478 ## [0.23.0] - 2023-11-17 ### Bug Fixes - [[#460]] Allow custom `ToDeviceEvent`s via `UnknownToDeviceEvent` - [[#463]] Remove callback execution boilerplate + allow arbitrary callable/awaitable objects - [[#457]] Fix schemas for `m.room.avatar` and `m.room.canonical_alias` - [[#403]] Propagate `asyncio.CancelledError` in `sync_forever` ### Features - [[#451]] Introduce the DM room account data (`m.direct`) ### Miscellaneous Tasks - [[#458]] Update the `nio-bot` description - [[#462]] Don't manually build `libolm` during tests + `pre-commit autoupdate` - [[#464]] Bump `aiohttp` from 3.8.5 to 3.8.6 [#460]: https://github.com/poljar/matrix-nio/pull/460 [#458]: https://github.com/poljar/matrix-nio/pull/458 [#462]: https://github.com/poljar/matrix-nio/pull/462 [#451]: https://github.com/poljar/matrix-nio/pull/451 [#463]: https://github.com/poljar/matrix-nio/pull/463 [#464]: https://github.com/poljar/matrix-nio/pull/464 [#457]: https://github.com/poljar/matrix-nio/pull/457 [#403]: https://github.com/poljar/matrix-nio/pull/403 ## [0.22.1] - 2023-10-9 ### Bug Fixes - [[#453]] Fix `ImportError` from when e2e is not installed [#453]: https://github.com/poljar/matrix-nio/pull/453 ## [0.22.0] - 2023-10-6 ### Bug Fixes - [[#434]] Fix space handling to account for Matrix spec ambiguities. 
### Features - [[#426]] Add a simple streamed response to download to files - [[#436]] Add get space hierarchy capability - [[#437]] Support for Token-Authenticated Registration - [[#330]] Add `room_type` to `room_create` API function to allow for custom room types - [[#351]] Add support for `m.reaction` events (Closes [[#174]]) ### Miscellaneous Tasks - [[#427]], [[#446]] Add `.readthedocs.yaml` v2 to support ReadTheDocs migration - [[#440]] Remove `future` dependency - [[#438]] Fix `jsonschema` deprecations - [[#439]] Replace `cgi.parse_header()` - [[#441]] Run `pre-commit autoupdate` to fix deprecation - [[#442]] Introduce `ruff` as a `pre-commit` hook + run on whole codebase - [[#445]] Update `pre-commit` hooks - [[#447]] Replace ALL type comments with type hints - [[#448]] Add `pyupgrade`, `async`, various `flake8`, `Perflint`, and more `ruff` linting rules [#174]: https://github.com/poljar/matrix-nio/issues/174 [#434]: https://github.com/poljar/matrix-nio/pull/434 [#426]: https://github.com/poljar/matrix-nio/pull/426 [#436]: https://github.com/poljar/matrix-nio/pull/436 [#437]: https://github.com/poljar/matrix-nio/pull/437 [#330]: https://github.com/poljar/matrix-nio/pull/330 [#351]: https://github.com/poljar/matrix-nio/pull/351 [#427]: https://github.com/poljar/matrix-nio/pull/427 [#446]: https://github.com/poljar/matrix-nio/pull/446 [#440]: https://github.com/poljar/matrix-nio/pull/440 [#438]: https://github.com/poljar/matrix-nio/pull/438 [#439]: https://github.com/poljar/matrix-nio/pull/439 [#441]: https://github.com/poljar/matrix-nio/pull/441 [#442]: https://github.com/poljar/matrix-nio/pull/442 [#445]: https://github.com/poljar/matrix-nio/pull/445 [#447]: https://github.com/poljar/matrix-nio/pull/447 [#448]: https://github.com/poljar/matrix-nio/pull/448 ## [0.21.2] - 2023-7-17 ### Bug Fixes - [[#423]] Revert [[#411]] due to backwards-incompatibilities. 
[#423]: https://github.com/poljar/matrix-nio/pull/423 ## [0.21.1] - 2023-7-16 ### Bug Fixes - [[#422]] `async_client.whoami` will alter the state of `async_client` correctly, and accept all spec-compliant fields. ### Miscellaneous Tasks - [[#420]] Add `python3.8` tests to workflow. [#422]: https://github.com/poljar/matrix-nio/pull/422 [#420]: https://github.com/poljar/matrix-nio/pull/420 ## [0.21.0] - 2023-7-14 ### Breaking Changes - [[#416]] Drop support for end-of-life `python3.7` - [[#413]] Drop usage of `logbook` in favor of standard library `logging` - This fixes an issue where logging was effectively disabled by default. ### Features - [[#409]] Support m.space.parent and m.space.child events - [[#418]] Add ability to knock on a room, and enable knocking for a room ### Documentation - Add documentation on how to configure `logging` - Note in `README` that room upgrades/tombstone events *are* supported ### Miscellaneous Tasks - [[#401]] Removing skip for passing test - [[#417]] Add type hints - [[#406]] [[#407]] [[#414]] Add content to `built-with-nio` ### Bug Fixes - [[#408]] Properly generate code coverage - [[#411]] Fixed bug in Event Callbacks [#416]: https://github.com/poljar/matrix-nio/pull/416 [#413]: https://github.com/poljar/matrix-nio/pull/413 [#409]: https://github.com/poljar/matrix-nio/pull/409 [#418]: https://github.com/poljar/matrix-nio/pull/418 [#401]: https://github.com/poljar/matrix-nio/pull/401 [#417]: https://github.com/poljar/matrix-nio/pull/417 [#406]: https://github.com/poljar/matrix-nio/pull/406 [#407]: https://github.com/poljar/matrix-nio/pull/407 [#414]: https://github.com/poljar/matrix-nio/pull/414 [#408]: https://github.com/poljar/matrix-nio/pull/408 [#411]: https://github.com/poljar/matrix-nio/pull/411 ## [0.20.2] - 2023-3-26 ### Miscellaneous Tasks - Upgrade dependencies - Various test, formatting, type hinting fixes - Update GitHub Workflow Actions versions for CI - [[#384]] Add content to `built-with-nio` ### Bug Fixes - [[#335]] 
Default to the configured request timeout when syncing - [[#354]] Fix `first_sync_filter` parameter of `AsyncClient.sync_forever` - [[#357]] Element exports keys without required fields - [[#396]] Fix `timeline->limited` being required [#384]: https://github.com/poljar/matrix-nio/pull/384 [#335]: https://github.com/poljar/matrix-nio/pull/335 [#354]: https://github.com/poljar/matrix-nio/pull/354 [#357]: https://github.com/poljar/matrix-nio/pull/357 [#396]: https://github.com/poljar/matrix-nio/pull/396 ## [0.20.1] - 2022-11-09 ### Bug Fixes - Fix Python 3.11 compatibility ## [0.20.0] - 2022-09-28 ### Bug Fixes - Fix import sequence errors. - Exclude `tests/data/` from pre-commit workflow. - Only accept forwarded room keys from our own trusted devices ### Documentation - Mention that room key backups are unsupported. - Add matrix-webhook to built-with-nio - Add matrix-asgi to built-with-nio ### Features - Add `mxc` URI parameter to `AsyncClient.download` and deprecate `server_name` and `media_id`. ### Miscellaneous Tasks - Remove the usage of the imp module - Fix our import order - Fix a bunch of typos - Remove key re-sharing - Remove some unnecessary test code - Add poetry to the test requirements - Style fixes - Sort our imports ### Refactor - Clean up and make a bunch of tests more consistent ### Styling - Add config for `pre-commit`. - Fix formatting using `black` and `isort`. - Convert from `str.format` to f-strings. ### Testing - Update test for `AsyncClient.download`. - Fix our async tests ### Ci - Add `black` and `isort`. 
## 0.19.0 - 2022-02-04 - [[#296]] Allow creating spaces - [[#293]] Add special check for "room_id" in PushEventMatch - [[#291]] Send empty object with m.read receipt - [[#288]] Update aiohttp-socks dependency - [[#286]] Fix type annotation for async callbacks in add_event_callback - [[#285]] Remove chain_index field when sending room keys - [[#281]] Add support for room upgrades [#296]: https://github.com/poljar/matrix-nio/pull/296 [#293]: https://github.com/poljar/matrix-nio/pull/293 [#291]: https://github.com/poljar/matrix-nio/pull/291 [#288]: https://github.com/poljar/matrix-nio/pull/288 [#286]: https://github.com/poljar/matrix-nio/pull/286 [#285]: https://github.com/poljar/matrix-nio/pull/285 [#281]: https://github.com/poljar/matrix-nio/pull/281 ## 0.18.7 - 2021-09-27 - [[#277]] Allow setting custom headers with the client. - [[#276]] Allow logging in using an email. - [[#273]] Use the correct json format for login requests. [#277]: https://github.com/poljar/matrix-nio/pull/277 [#276]: https://github.com/poljar/matrix-nio/pull/276 [#273]: https://github.com/poljar/matrix-nio/pull/273 ## 0.18.6 - 2021-07-28 - [[#272]] Allow the mimetype to be in the info for encrypted images [#272]: https://github.com/poljar/matrix-nio/pull/272 ## 0.18.5 - 2021-07-26 - [[1f17a20]] Fix errors due to missing keys in syncs [1f17a20]: https://github.com/poljar/matrix-nio/commit/1f17a20ca818c1c3a0c2e75fdc64da9c629eb5f9 ## 0.18.4 - 2021-07-14 - [[#265]] Fix parsing syncs missing invite/join/leave rooms [#265]: https://github.com/poljar/matrix-nio/pull/265 ## 0.18.3 - 2021-06-21 - [[#264]] Allow for devices in keys query that have no signatures [#264]: https://github.com/poljar/matrix-nio/pull/264 ## 0.18.2 - 2021-06-03 - [[#261]] Use the IV as is when decrypting attachments - [[#260]] Always load the crypto data, even if a new account was made [#260]: https://github.com/poljar/matrix-nio/pull/260 [#261]: https://github.com/poljar/matrix-nio/pull/261 ## 0.18.1 - 2021-05-07 - [[#258]] 
Fix sticker event parsing [#258]: https://github.com/poljar/matrix-nio/pull/256 ## 0.18.0 - 2021-05-06 - [[#256]] Upgrade our dependencies - [[#255]] Relax the sync response json schema - [[#253]] Support the BytesIO type for uploads - [[#252]] Add a sticker events type [#256]: https://github.com/poljar/matrix-nio/pull/256 [#255]: https://github.com/poljar/matrix-nio/pull/255 [#253]: https://github.com/poljar/matrix-nio/pull/253 [#252]: https://github.com/poljar/matrix-nio/pull/252 ## 0.17.0 - 2021-03-01 - [[#228]] Add support for global account data - [[#222]] Add support for push rules events and API - [[#233]] Treat `device_lists` in `SyncResponse` as optional - [[#239]] Add support for authenticated `/profile` requests - [[#246]] Add support for SOCKS5 proxies [#228]: https://github.com/poljar/matrix-nio/pull/228 [#222]: https://github.com/poljar/matrix-nio/pull/222 [#233]: https://github.com/poljar/matrix-nio/pull/233 [#239]: https://github.com/poljar/matrix-nio/pull/239 [#246]: https://github.com/poljar/matrix-nio/pull/246 ## 0.16.0 - 2021-01-18 - [[#235]] Expose the whoami API endpoint in the AsyncClient. - [[#233]] Treat device lists as optional in the Sync response class. - [[#228]] Add support for account data in the AsyncClient. - [[#223]] Percent encode user IDs when they appear in an URL. [#235]: https://github.com/poljar/matrix-nio/pull/235 [#233]: https://github.com/poljar/matrix-nio/pull/233 [#228]: https://github.com/poljar/matrix-nio/pull/228 [#223]: https://github.com/poljar/matrix-nio/pull/223 ## 0.15.2 - 2020-10-29 ### Fixed - [[#220]] Copy the unencrypted `m.relates_to` part of an encrypted event into the decrypted event. 
[#220]: https://github.com/poljar/matrix-nio/pull/220 ## 0.15.1 - 2020-08-28 ### Fixed - [[#216]] `AsyncClient.room_get_state_event()`: return a `RoomGetStateEventError` if the server returns a 404 error for the request - [[ffc4228]] When fetching the full list of room members, discard the members we previously had that are absent from the full list - [[c123e24]] `MatrixRoom.members_synced`: instead of depending on the potentially outdated room summary member count, become `True` when the full member list has been fetched for the room. [#216]: https://github.com/poljar/matrix-nio/pull/216 [ffc4228]: https://github.com/poljar/matrix-nio/commit/ffc42287c22a1179a9be7d4e47555693417f715d [c123e24]: https://github.com/poljar/matrix-nio/commit/c123e24c8df81c55d40973470b825e78fd2f92a2 ## 0.15.0 - 2020-08-21 ### Added - [[#194]] Add server discovery info (.well-known API) support to AsyncClient - [[#206]] Add support for uploading sync filters to AsyncClient - New [examples] and documentation improvements ### Fixed - [[#206]] Fix `AsyncClient.room_messages()` to not accept filter IDs, using one results in a server error - [[4b6ea92]] Fix the `SqliteMemoryStore` constructor - [[4654c7a]] Wait for current session sharing operation to finish before starting a new one - [[fc9f5e3]] Fix `OverflowError` occurring in `AsyncClient.get_timeout_retry_wait_time()` after a thousand retries [#194]: https://github.com/poljar/matrix-nio/pull/194 [#206]: https://github.com/poljar/matrix-nio/pull/206 [4b6ea92]: https://github.com/poljar/matrix-nio/commit/4b6ea92cb69e445bb39bbfd83948b40adb8a23a5 [4654c7a]: https://github.com/poljar/matrix-nio/commit/4654c7a1a7e39b496b107337977421aeb5953974 [fc9f5e3]: https://github.com/poljar/matrix-nio/commit/fc9f5e3eda25ad65936aeb95412a26af73cedf6a [examples]: https://matrix-nio.readthedocs.io/en/latest/examples.html ## 0.14.1 - 2020-06-26 ### Fixed - [[238b6ad]] Fix the schema for the devices response. 
[238b6ad]: https://github.com/poljar/matrix-nio/commit/238b6addaaa85b994552e00007638b0170c47c43 ## 0.14.0 - 2020-06-21 ### Added - [[#166]] Add a method to restore the login with an access token. ### Changed - [[#159]] Allow whitespace in HTTP headers in the HttpClient. - [[42e70de]] Fix the creation of PresenceGetError responses. - [[bf60bd1]] Split out the bulk of the key verification events into a common module. - [[9a01396]] Don't require the presence dict to be in the sync response. ### Removed - [[cc789f6]] Remove the PartialSyncResponse. This is a breaking change, but hopefully nobody used this. [#166]: https://github.com/poljar/matrix-nio/pull/166 [#159]: https://github.com/poljar/matrix-nio/pull/159 [42e70de]: https://github.com/poljar/matrix-nio/commit/42e70dea945ae97b69b41d49cb57f64c3b6bd1c4 [cc789f6]: https://github.com/poljar/matrix-nio/commit/cc789f665063b38be5b4146855e5204e9bc5bdb6 [bf60bd1]: https://github.com/poljar/matrix-nio/commit/bf60bd19a15429dc03616b9be11c3a205768e5ad [9a01396]: https://github.com/poljar/matrix-nio/commit/9a0139673329fb82abc59496025d78a34b419b77 ## 0.13.0 - 2020-06-05 ### Added - [[#145]] Added the `room_get_event()` method to `AsyncClient`. - [[#151]] Added the `add_presence_callback` method to base `Client`. - [[#151]] Added the `get_presence()` and `set_presence()` methods to `AsyncClient`. - [[#151]] Added the `presence`, `last_active_ago`, `currently_active` and `status_msg` attributes to `MatrixUser` - [[#152]] Added a docker container with E2E dependencies pre-installed. - [[#153]] Added the `add_room_account_data_callback` method to base `Client`. - [[#153]] Added the `fully_read_marker` and `tags` attributes to `MatrixRoom`. - [[#156]] Added the `update_receipt_marker()` method to `AsyncClient`. - [[#156]] Added the `unread_notifications` and `unread_highlights` attributes to `MatrixRoom`. ### Changed - [[#141]] Improved the upload method to accept file objects directly. 
[#141]: https://github.com/poljar/matrix-nio/pull/141 [#145]: https://github.com/poljar/matrix-nio/pull/145 [#151]: https://github.com/poljar/matrix-nio/pull/151 [#152]: https://github.com/poljar/matrix-nio/pull/152 [#153]: https://github.com/poljar/matrix-nio/pull/153 [#156]: https://github.com/poljar/matrix-nio/pull/156 ## 0.12.0 - 2020-05-21 ### Added - [[#140]] Added the `update_device()` method to the `AsyncClient`. - [[#143]] Added the `login_info()` method to the `AsyncClient`. - [[c4f460f]] Added support for the new SAS key agreement protocol. ### Fixed - [[#146]] Fix room summary updates when new summary doesn't have any attributes. - [[#147]] Added missing requirements to the test requirements file. [#140]: https://github.com/poljar/matrix-nio/pull/140 [#143]: https://github.com/poljar/matrix-nio/pull/143 [#146]: https://github.com/poljar/matrix-nio/pull/146 [#147]: https://github.com/poljar/matrix-nio/pull/147 [c4f460f]: https://github.com/poljar/matrix-nio/commit/c4f460f62c9543a76eaf1dad4be8ff5ae9312243 ## 0.11.2 - 2020-05-11 ### Fixed - Fixed support to run nio without python-olm. - Fixed an incorrect raise in the group sessions sharing logic. - Handle 429 errors correctly even if they don't contain a json response. ## 0.11.1 - 2020-05-10 ### Fixed - Fix a wrong assertion resulting in errors when trying to send a message. ## 0.11.0 - 2020-05-10 ### Added - Kick, ban, unban support to the AsyncClient. - Read receipt sending support in the AsyncClient. - Read receipt parsing and emitting. - Support token login in the AsyncClient login method. - Support for user registration in the BaseClient and AsyncClient. - Support for ID based filters for the sync and room_messages methods. - Support filter uploading. ### Changed - Convert attrs classes to dataclasses. - Fire the `synced` asyncio event only in the sync forever loop. ### Fixed - Don't encrypt reactions. - Properly put event relationships into the unencrypted content. 
- Catch Too Many Requests errors more reliably. - Better room name calculation, now using the room summary. ### Removed - Removed the legacy store. matrix-nio-0.24.0/LICENSE.md000066400000000000000000000015001455215747700153050ustar00rootroot00000000000000Internet Systems Consortium license =================================== Copyright (c) `2018`, `Damir Jelić ` Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
matrix-nio-0.24.0/MANIFEST.in000066400000000000000000000000211455215747700154340ustar00rootroot00000000000000include Makefile matrix-nio-0.24.0/Makefile000066400000000000000000000007421455215747700153500ustar00rootroot00000000000000PYTHON ?= python all: init: pre-commit install --install-hooks test: python3 -m pytest --benchmark-disable typecheck: mypy -p nio --warn-redundant-casts coverage: python3 -m pytest --cov nio --benchmark-disable clean: -rm -r dist/ __pycache__/ -rm -r packages/ arch-git-pkg: -rm -r packages/ umask 0022 && poetry build --format sdist cp contrib/archlinux/pkgbuild/PKGBUILD.git dist/PKGBUILD cd dist && makepkg -ci .PHONY: all clean init test typecheck coverage matrix-nio-0.24.0/README.md000066400000000000000000000062651455215747700151750ustar00rootroot00000000000000nio === [![Build Status](https://img.shields.io/github/actions/workflow/status/poljar/matrix-nio/tests.yml?branch=main&style=flat-square)](https://github.com/poljar/matrix-nio/actions) [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/matrix-nio?style=flat-square)](https://pypi.org/project/matrix-nio/) [![codecov](https://img.shields.io/codecov/c/github/poljar/matrix-nio/master.svg?style=flat-square)](https://codecov.io/gh/poljar/matrix-nio) [![license](https://img.shields.io/badge/license-ISC-blue.svg?style=flat-square)](https://github.com/poljar/matrix-nio/blob/master/LICENSE.md) [![Documentation Status](https://readthedocs.org/projects/matrix-nio/badge/?version=latest&style=flat-square)](https://matrix-nio.readthedocs.io/en/latest/?badge=latest) [![#nio](https://img.shields.io/badge/matrix-%23nio:matrix.org-blue.svg?style=flat-square)](https://matrix.to/#/!JiiOHXrIUCtcOJsZCa:matrix.org?via=matrix.org&via=maunium.net&via=t2l.io) nio is a multilayered [Matrix](https://matrix.org/) client library. 
The underlying base layer doesn't do any network IO on its own, but on top of that is a full-fledged batteries-included asyncio layer using [aiohttp](https://github.com/aio-libs/aiohttp/). File IO is only done if you enable end-to-end encryption (E2EE). Documentation ------------- The full API documentation for nio can be found at [https://matrix-nio.readthedocs.io](https://matrix-nio.readthedocs.io/en/latest/#api-documentation) Features -------- nio has most of the features you'd expect in a Matrix library, but it's still a work in progress. - ✅ transparent end-to-end encryption (EE2E) - ✅ encrypted file uploads & downloads - ✅ space parents/children - ✅ manual and emoji verification - ✅ custom [authentication types](https://matrix.org/docs/spec/client_server/r0.6.0#id183) - ✅ well-integrated type system - ✅ knocking, kick, ban and unban - ✅ typing notifications - ✅ message redaction - ✅ token based login - ✅ user registration - ✅ read receipts - ✅ live syncing - ✅ `m.reaction`s - ✅ `m.tag`s - ❌ cross-signing support - ❌ server-side key backups (room key backup, "Secure Backup") - ❌ user deactivation ([#112](https://github.com/poljar/matrix-nio/issues/112)) - ❌ threading support - ❌ in-room emoji verification Installation ------------ To install nio, simply use pip: ```bash $ pip install matrix-nio ``` Note that this installs nio without end-to-end encryption support. For e2ee support, python-olm is needed which requires the [libolm](https://gitlab.matrix.org/matrix-org/olm) C library (version 3.x). On Debian and Ubuntu one can use `apt-get` to install package `libolm-dev`. On Fedora one can use `dnf` to install package `libolm-devel`. On MacOS one can use [brew](https://brew.sh/) to install package `libolm`. Make sure version 3 is installed. 
After libolm has been installed, the e2ee enabled version of nio can be installed using pip: ```bash $ pip install matrix-nio[e2e] ``` Additionally, a docker image with the e2ee enabled version of nio is provided in the `docker/` directory. Examples -------- For examples of how to use nio, and how others are using it, [read the docs](https://matrix-nio.readthedocs.io/en/latest/examples.html) matrix-nio-0.24.0/apache_license.txt000066400000000000000000000260741455215747700174020ustar00rootroot00000000000000Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. 
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. 
Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "{}" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. 
Copyright {yyyy} {name of copyright owner} Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. matrix-nio-0.24.0/contrib/000077500000000000000000000000001455215747700153455ustar00rootroot00000000000000matrix-nio-0.24.0/contrib/archlinux/000077500000000000000000000000001455215747700173425ustar00rootroot00000000000000matrix-nio-0.24.0/contrib/archlinux/pkgbuild/000077500000000000000000000000001455215747700211435ustar00rootroot00000000000000matrix-nio-0.24.0/contrib/archlinux/pkgbuild/PKGBUILD.git000066400000000000000000000017761455215747700230640ustar00rootroot00000000000000# $Id$ # Maintainer: Damir Jelić pkgbase=matrix-nio pkgname='python-matrix-nio-git' pkgver=0.24.0 _pkgver=0.24.0 pkgrel=1 pkgdesc='python no-IO library for the matrix chat protocol' arch=('any') url='https://github.com/poljar/matrix-nio' license=('ISC') makedepends=('python-setuptools') conflicts=('python-matrix-nio') provides=('python-matrix-nio') depends=('python' 'python-olm' 'python-h11' 'python-h2' 'python-jsonschema' 'python-logbook' 'python-peewee' 'python-atomicwrites' 'python-pycryptodome' 'python-unpaddedbase64') checkdepends=() source=("$pkgbase-$pkgver.tar.gz") sha512sums=('SKIP') pkgver() { cd "$pkgbase-$_pkgver" git describe | sed 's/\([^-]*-\)g/r\1/;s/-/./g' } prepare() { cd "$pkgbase-$_pkgver" dephell deps convert --from pyproject.toml --to setup.py } build() { cd "$pkgbase-$_pkgver" python setup.py build } package() { cd "$pkgbase-$_pkgver" python setup.py install --root="$pkgdir" --optimize=1 --skip-build } 
matrix-nio-0.24.0/doc/000077500000000000000000000000001455215747700144525ustar00rootroot00000000000000matrix-nio-0.24.0/doc/Makefile000066400000000000000000000011041455215747700161060ustar00rootroot00000000000000# Minimal makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build SOURCEDIR = . BUILDDIR = build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) matrix-nio-0.24.0/doc/built-with-nio.rst000066400000000000000000000050511455215747700200600ustar00rootroot00000000000000Projects built with nio ----------------------- - `pantalaimon `_ - e2ee-aware reverse proxy daemon - `nio-template `_ - template for creating bots - `matrix-tweetalong-bot `_ - Twitter-backed watchalongs - `weechat-matrix `_ - lets Weechat communicate over Matrix - `hemppa `_ - generic modular Matrix bot - `devops-bot `_ - sysadmin Swiss army knife bot for team efficiency - `podbot `_ - play podcasts sent from AntennaPod via Riot.im elsewhere - `delator `_ - bot with logs and modular commands - `cody `_ - REPL for your matrix chat rooms - `matrix-nio-send `_ - predecessor of matrix-commander - `Mirage `_ - feature-rich keyboard-operable chat client written in Qt/QML - `matrix-commander `_ - CLI-oriented integratable client - `matrix-archive `_ - creates a YAML log of all room messages and media - `matrix-eno-bot `_ - admin and personal assistence bot - `infinigpt-matrix `_ - OpenAI GPT chatbot with infinite personalities - `ollamarama-matrix `_ - AI chatbot with infinite personalities, using local LLMs via Ollama - `matrix-discord-bridge `_ - `Matrix Nio - HACS `_ - A HACS Integration to 
replace the core matrix Home Assistant integration - `Simple-Matrix-Bot-Lib `_ - `pushmatrix `_ - `matrix-webhook `_ - `matrix-asgi `_ - `opsdroid `_ - `niobot `_ - An extensive framework for building powerful Matrix bots with ease - `matrix-reminder-bot `_ - A bot to remind you about stuff Are we missing a project? Submit a pull request and we'll get you added! Just edit ``doc/built-with-nio.rst`` matrix-nio-0.24.0/doc/conf.py000066400000000000000000000126141455215747700157550ustar00rootroot00000000000000# # Configuration file for the Sphinx documentation builder. # # This file does only contain a selection of the most common options. For a # full list see the documentation: # http://www.sphinx-doc.org/en/master/config # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # import os import sys import sphinx # noqa: F401 sys.path.insert(0, os.path.abspath("../")) # -- Project information ----------------------------------------------------- project = "nio" copyright = "2020, Damir Jelić" author = "Damir Jelić, Paarth Shah" # The short X.Y version version = "" # The full version, including alpha/beta/rc tags release = "0.24.0" # -- General configuration --------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. # # needs_sphinx = '1.0' # Mock out the Olm module since it can't be installed without the C lib. autodoc_mock_imports = ["olm"] # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. 
extensions = [ "sphinx.ext.autodoc", "sphinx.ext.doctest", "sphinx.ext.coverage", "sphinx.ext.viewcode", "sphinx.ext.githubpages", "sphinx.ext.napoleon", "m2r2", ] # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = [".rst", ".md"] # source_suffix = '.rst' # The master toctree document. master_doc = "index" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] # The name of the Pygments (syntax highlighting) style to use. pygments_style = None # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = "sphinx_rtd_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # # html_theme_options = {} # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ["_static"] # Custom sidebar templates, must be a dictionary that maps document names # to template names. # # The default sidebars (for documents that don't match any pattern) are # defined by theme itself. 
Builtin themes are using these templates by # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', # 'searchbox.html']``. # # html_sidebars = {} # -- Options for HTMLHelp output --------------------------------------------- # Output file base name for HTML help builder. htmlhelp_basename = "niodoc" # -- Options for LaTeX output ------------------------------------------------ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, "nio.tex", "nio Documentation", "Damir Jelić", "manual"), ] # -- Options for manual page output ------------------------------------------ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [(master_doc, "nio", "nio Documentation", [author], 1)] # -- Options for Texinfo output ---------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ( master_doc, "nio", "nio Documentation", author, "nio", "One line description of project.", "Miscellaneous", ), ] # -- Options for Epub output ------------------------------------------------- # Bibliographic Dublin Core info. epub_title = project # The unique identifier of the text. This can be a ISBN number # or the project homepage. # # epub_identifier = '' # A unique identification for the text. # # epub_uid = '' # A list of files that should not be packed into the epub file. 
epub_exclude_files = ["search.html"] # -- Extension configuration ------------------------------------------------- matrix-nio-0.24.0/doc/contributing.rst000066400000000000000000000065601455215747700177220ustar00rootroot00000000000000Contributing ============ There are lots of ways to contribute, from adding new features via pull requests to opening issues for missing features. Check out our `issues list `_ and filter for good first issues to find something we think you could tackle. We recommend creating a Python 3 `virtual environment `_ (or using `pipenv `_) before you start coding to keep the packages you need for nio separate from the packages you might need for other projects. Throughout the rest of the document, we'll assume you're working in your virtual environment. Prerequisites ------------- You'll need to install the following: - ``make`` - `matrix-org/olm `_ version 3.x You'll also need to install some pip packages: .. code-block:: sh pip install -r test-requirements.txt pip install -r rtd-requirements.txt .. _Testing: Testing ------- As you write, you may want to test your changes. You can add new tests and test files in ``tests/`` to be picked up by pytest. To run the full test suite (please do this before submitting your pull request!), run .. code-block:: sh make test If you only want to test your changes (and not run all of the test suite), you can run the following: .. code-block:: sh python3 -m pytest --benchmark-disable tests/your-test.py Getting ready for a pull request -------------------------------- `Get early feedback. `_ You don't need to perfect your changes to submit them; early feedback can help guide you in the right direction, especially if you're struggling. Make sure any new classes or methods you've added are properly documented, and if you've changed any existing methods make sure their docstrings are still up-to-date. 
It's really important to have good documentation because you encourage other people to use that great feature you just added! Before you submit your code for discussion, please make sure your code passes the test suite by reading Testing_. Next, run ``make typecheck`` to verify that mypy is happy with the types in your code. Not sure about an error you got from either? No worries; submit your changes and we'll talk about it. When ready, push your changes to a fork of `poljar/matrix-nio `_ and open up a pull request. Give us a bit of a description of what changes you've made and why. If you are trying to close an open issue, you can link to it just by writing "Closes #104" in the description. Adding documentation -------------------- Writing might be hard, but with a few instructions we'll get you started. As you code, you can just write docstrings in your classes and methods, but if you want to document the architecture of nio (and we'd love if you did!) you can help us write documentation in the `reStructuredText `_ format. You'll need to install `Sphinx `_ if you'd like to preview your changes: .. code-block:: sh pip install sphinx Once that is done, edit the ``.rst`` files in ``doc/`` and run ``make html`` in the same directory. You'll now have HTML pages in ``doc/build/html`` you can review. matrix-nio-0.24.0/doc/examples.rst000066400000000000000000000172251455215747700170310ustar00rootroot00000000000000Examples ======== If you've built something with matrix-nio and want to support the project, add a shield! .. image:: https://img.shields.io/badge/built%20with-matrix--nio-brightgreen :target: https://github.com/poljar/matrix-nio :alt: Built with matrix-nio .. code-block:: [![Built with matrix-nio](https://img.shields.io/badge/built%20with-matrix--nio-brightgreen)](https://github.com/poljar/matrix-nio) To start making a chat bot quickly, considering using `nio-template `_. .. 
Attention:: For E2EE support, ``python-olm`` is needed, which requires the `libolm `_ C library (version 3.x). After libolm has been installed, the e2ee enabled version of nio can be installed using ``pip install "matrix-nio[e2e]"``. .. include:: built-with-nio.rst A basic client -------------- A basic client requires a few things before you start: - nio is installed - a Matrix homeserver URL (probably "https://matrix.example.org") - a username and password for an account on that homeserver - a room ID for a room on that homeserver. In Riot, this is found in the Room's settings page under "Advanced" By far the easiest way to use nio is using the asyncio layer, unless you have special restrictions that disallow the use of asyncio. All examples require Python 3.5+ for the ``async / await`` syntax. .. literalinclude:: ../examples/basic_client.py :language: python :linenos: Log in using a stored access_token ---------------------------------- Using access tokens requires that when you first log in you save a few values to use later. In this example, we're going to write them to disk as a JSON object, but you could also store them in a database, print them out and post them up on the wall beside your desk, text them to your sister in law, or anything else that allows you access to the values at a later date. We've tried to keep this example small enough that it's just enough to work; once you start writing your own programs with nio you may want to clean things up a bit. This example requires that the user running it has write permissions to the folder they're in. If you copied this repo to your computer, you probably have write permissions. Now run the program `restore_login.py` twice. First time around it will ask you for credentials like homeserver and password. On the second run, the program will log in for you automatically and it will send a "Hello World" message to the room you specify. .. 
literalinclude:: ../examples/restore_login.py :language: python :linenos: Sending an image ---------------------------------- Now that you have sent a first "Hello World" text message, how about going one step further and sending an image, like a photo from your last vacation. Run the `send_image.py` program and provide a filename to the photo. Voila, you have just sent your first image! .. literalinclude:: ../examples/send_image.py :language: python :linenos: Manual encryption key verification ---------------------------------- Below is a program that works through manual encryption of other users when you already know all of their device IDs. It's a bit dense but provides a good example in terms of being pythonic and using nio's design features purposefully. It is not designed to be a template that you can immediately extend to run your bot, it's designed to be an example of how to use nio. The overall structure is this: we subclass nio's ``AsyncClient`` class and add in our own handlers for a few things, namely: - automatically restoring login details from disk instead of creating new sessions each time we restart the process - callback for printing out any message we receive to stdout - callback for automatically joining any room @alice is invited to - a method for trusting devices using a user ID and (optionally) their list of trusted device IDs - a sample "hello world" encrypted message method In main, we make an instance of that subclass, attempt to login, then create an `asyncio coroutine `_ to run later that will trust the devices and send the hello world message. We then create `asyncio Tasks <>`_ to run that coroutine as well as the ``sync_forever()`` coroutine that nio provides, which does most of the handling of required work for communicating with Matrix: it uploads keys, checks for new messages, executes callbacks when events occur that trigger those callbacks, etc. Main executes the result of those Tasks. 
You'll need two accounts, which we'll call @alice:example.org and @bob:example.org. @alice will be your nio application and @bob will be your second user account. Before the script runs, **make a new room** with the @bob account, enable encryption and invite @alice. Note the room ID as you'll need it for this script. You'll also need **all** of @bob's device IDs, which you can get from within Riot under the profile settings > Advanced section. They may be called "session IDs". These are the device IDs that your program will trust, and getting them into nio is the manual part here. In another example we'll document automatic emoji verification. It may look long at first but much of the program is actually documentation explaining how it works. If you have questions about the example, please don't hesitate to ask them on `#nio:matrix.org `_. If you are stuck, it may be useful to read this primer from Matrix.org on implementing end-to-end encryption: https://matrix.org/docs/guides/end-to-end-encryption-implementation-guide To delete the store, or clear the trusted devices, simply remove "nio_store" in the working directory as well as "manual_encrypted_verify.json". Then the example script will log in (with a new session ID) and generate new keys. .. literalinclude:: ../examples/manual_encrypted_verify.py :language: python :linenos: Interactive encryption key verification --------------------------------------- One way to interactively verify a device is via emojis. On popular Matrix clients you will find that devices are flagged as `trusted` or `untrusted`. If a device is `untrusted` you can verify to make it `trusted`. Most clients have a red symbol for `untrusted` and a green icon for `trusted`. One can select un untrusted device and initiate a `verify by emoji` action. How would that look like in code? How can you add that to your application? Next we present a simple application that showcases emoji verification. Note, the app only accepts emoji verification. 
So, you have to start it on the other client (e.g. Element). Initiating an emoji verification is similar in code, consider doing it as "homework" if you feel up to it. But for now, let's have a look how emoji verification can be accepted and processed. .. literalinclude:: ../examples/verify_with_emoji.py :language: python :linenos: Further reading and exploration --------------------------------------- In an external repo, not maintained by us, is a simple Matrix client that includes sending, receiving and verification. It gives an example of - how to send text, images, audio, video, other text files - listen to messages forever - get just the newest unread messages - get the last N messages - perform emoji verification - etc. So, if you want more example code and want to explore further have a look at this external repo called `matrix-commander `_. And of course, you should check out all the other projects built with `matrix-nio`. To do so, check out our `built-with-marix-nio list `_. matrix-nio-0.24.0/doc/index.html000066400000000000000000000001071455215747700164450ustar00rootroot00000000000000 matrix-nio-0.24.0/doc/index.rst000066400000000000000000000006671455215747700163240ustar00rootroot00000000000000.. nio documentation master file, created by sphinx-quickstart on Tue Nov 27 15:15:35 2018. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. .. mdinclude:: ../README.md Api Documentation ================= .. toctree:: :maxdepth: 2 nio examples contributing Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` matrix-nio-0.24.0/doc/make.bat000066400000000000000000000014221455215747700160560ustar00rootroot00000000000000@ECHO OFF pushd %~dp0 REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set SOURCEDIR=. set BUILDDIR=build if "%1" == "" goto help %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. 
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% goto end :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% :end popd matrix-nio-0.24.0/doc/nio.rst000066400000000000000000000070621455215747700157760ustar00rootroot00000000000000API === This document details the API of nio. Logging ------- matrix-nio writes logs using python's standard `logging` module. In order to see these logs, you will need to configure `logging`. In order to see all logs matrix-nio produces, you can build off of the following snippet:: import logging logging.basicConfig(level=logging.DEBUG) This snippet is very loud, and will produce a lot of output. If you want to see less output, you can set the logging level to `INFO` or `WARNING`. For example:: import logging logging.basicConfig(level=logging.INFO) In production, it is recommended to use WARNING or higher, as INFO may still be too noisy. You can also attach your own logs to this system:: import logging logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger("my-app") logger.info("Hello, world!") For more information, seek the documentation for the `logging` module at https://docs.python.org/3/library/logging.html Api --- .. autoclass:: nio.Api :members: :undoc-members: nio Clients ----------- .. automodule:: nio.client .. autoclass:: nio.ClientConfig :members: Client ^^^^^^ .. autoclass:: nio.Client :members: AsyncClient ^^^^^^^^^^^ .. autoclass:: nio.AsyncClient :members: :undoc-members: :show-inheritance: .. autoclass:: nio.TransferMonitor :members: :undoc-members: :show-inheritance: HttpClient ^^^^^^^^^^ .. 
autoclass:: nio.HttpClient :members: :undoc-members: :show-inheritance: Rooms ----- .. automodule:: nio.rooms :members: :undoc-members: :show-inheritance: Events ------ .. automodule:: nio.events :members: :undoc-members: :show-inheritance: .. automodule:: nio.events.misc :members: :show-inheritance: Room Events ^^^^^^^^^^^ .. automodule:: nio.events.room_events :members: :undoc-members: :show-inheritance: Invite Room Events ^^^^^^^^^^^^^^^^^^ .. automodule:: nio.events.invite_events :members: :undoc-members: :show-inheritance: To-device Events ^^^^^^^^^^^^^^^^ .. automodule:: nio.events.to_device :members: :undoc-members: :show-inheritance: Ephemeral Events ^^^^^^^^^^^^^^^^ .. automodule:: nio.events.ephemeral :members: :undoc-members: :show-inheritance: Account Data ^^^^^^^^^^^^ .. automodule:: nio.events.account_data :members: :undoc-members: :show-inheritance: Building events --------------- .. automodule:: nio.event_builders :members: :show-inheritance: .. autoclass:: nio.event_builders.EventBuilder :members: :show-inheritance: Direct messages ^^^^^^^^^^^^^^^ .. automodule:: nio.event_builders.direct_messages :members: :show-inheritance: State events ^^^^^^^^^^^^ .. automodule:: nio.event_builders.state_events :members: :show-inheritance: Exceptions ---------- .. automodule:: nio.exceptions :members: :undoc-members: :show-inheritance: Responses --------- .. automodule:: nio.responses :members: :undoc-members: Storage ------- .. automodule:: nio.store :members: :undoc-members: .. autoclass:: nio.store.MatrixStore :members: .. autoclass:: nio.store.DefaultStore :show-inheritance: .. autoclass:: nio.store.SqliteStore :show-inheritance: .. autoclass:: nio.store.SqliteMemoryStore :show-inheritance: Encryption ---------- .. automodule:: nio.crypto .. autoclass:: nio.crypto.DeviceStore :members: .. autoclass:: nio.crypto.OlmDevice :members: .. autoclass:: nio.crypto.TrustState :members: .. 
autoclass:: nio.crypto.Sas :members: matrix-nio-0.24.0/docker/000077500000000000000000000000001455215747700151545ustar00rootroot00000000000000matrix-nio-0.24.0/docker/Dockerfile000066400000000000000000000041361455215747700171520ustar00rootroot00000000000000# To build the image, run `docker build` command from the root of the # repository: # # docker build -f docker/Dockerfile . # # There is an optional PYTHON_VERSION build argument which sets the # version of python to build against. For example: # # docker build -f docker/Dockerfile --build-arg PYTHON_VERSION=3.9 . # # # And an optional LIBOLM_VERSION build argument which sets the # version of libolm to build against. For example: # # docker build -f docker/Dockerfile --build-arg LIBOLM_VERSION=3.2.11 . # ## ## Creating a builder container ## # We use an initial docker container to build all of the runtime dependencies, # then transfer those dependencies to the container we're going to ship, # before throwing this one away ARG PYTHON_VERSION=3.8 FROM docker.io/python:${PYTHON_VERSION}-alpine3.16 as builder ## ## Build libolm for matrix-nio e2e support ## # Install libolm build dependencies ARG LIBOLM_VERSION=3.2.11 RUN apk add --no-cache \ make \ cmake \ gcc \ g++ \ git \ libffi-dev \ python3-dev # Build libolm at the specified version # # This will build libolm and place it at /libolm # This will also build the libolm python bindings and place them at /python-libs # We will later copy contents from both of these folders to the runtime container COPY docker/build_and_install_libolm.sh /scripts/ RUN /scripts/build_and_install_libolm.sh ${LIBOLM_VERSION} /python-libs # Now that libolm is installed, install matrix-nio with e2e dependencies # We again install to /python-libs RUN pip install --prefix="/python-libs" --no-warn-script-location \ "matrix-nio[e2e]" ## ## Creating the runtime container ## # Create the container we'll actually ship. 
We need to copy libolm and any # python dependencies that we built above to this container FROM docker.io/python:${PYTHON_VERSION}-alpine3.16 # Copy python dependencies from the "builder" container COPY --from=builder /python-libs /usr/local # Copy libolm from the "builder" container COPY --from=builder /usr/local/lib/libolm* /usr/local/lib/ # Install any native runtime dependencies RUN apk add --no-cache \ libstdc++ matrix-nio-0.24.0/docker/README.md000066400000000000000000000015121455215747700164320ustar00rootroot00000000000000# Docker The provided docker base image is based on alpine, and comes with libolm and libolm python3 bindings installed. This image can then be built on top of for projects that use matrix-nio. ## Building the Image To build the image from source, use the following `docker build` command from the repo's root: ```sh docker build -t poljar/matrix-nio:latest -f docker/Dockerfile . ``` You can also customise the version of libolm and python that is bundled in the container using the following build arguments. To customise the python version, set `PYTHON_VERSION`: ```sh docker build -t poljar/matrix-nio:latest -f docker/Dockerfile --build-arg PYTHON_VERSION=3.8 . ``` To customise the libolm version, set `LIBOLM_VERSION`: ```sh docker build -t poljar/matrix-nio:latest -f docker/Dockerfile --build-arg LIBOLM_VERSION=3.1.4 . ``` matrix-nio-0.24.0/docker/build_and_install_libolm.sh000077500000000000000000000012601455215747700225170ustar00rootroot00000000000000#!/usr/bin/env sh # # Call with the following arguments: # # ./build_and_install_libolm.sh # # Example: # # ./build_and_install_libolm.sh 3.1.4 /python-bindings # # Note that if a python bindings installation directory is not supplied, bindings will # be installed to the default directory. # set -ex # Download the specified version of libolm git clone -b "$1" https://gitlab.matrix.org/matrix-org/olm.git olm && cd olm # Build libolm cmake . 
-Bbuild cmake --build build # Install make install # Build the python3 bindings cd python && make olm-python3 # Install python3 bindings mkdir -p "$2" DESTDIR="$2" make install-python3 matrix-nio-0.24.0/examples/000077500000000000000000000000001455215747700155235ustar00rootroot00000000000000matrix-nio-0.24.0/examples/README.md000066400000000000000000000025321455215747700170040ustar00rootroot00000000000000# Matrix-nio Examples You can find the examples documented on the `matrix-nio` documentation page: https://matrix-nio.readthedocs.io/en/latest/examples.html It is recommended that you work through the examples *in a specific order*: 1. [`basic_client.py`](basic_client.py): first introduction to matrix-nio, Hello World example 2. [`restore_login.py`](restore_login.py): store credentials and send text message 3. [`send_image.py`](send_image.py): send an image 4. [`manual_encrypted_verify.py`](manual_encrypted_verify.py): trust devices and users, verify manually 5. [`verify_with_emoji.py`](verify_with_emoji.py): verify interactively with emojis Download the examples today and start playing with them in only a couple of minutes! Enjoy! Find more in the [matrix-nio documentation]( https://matrix-nio.readthedocs.io/en/latest/index.html). If you desire more sample code and want to do additional reading and exploration have a look at this external repo (not maintained by us) called [matrix-commander](). It showcases various types of sending, receiving and verification. And last but not least, you should definitely check out all the other projects built with `matrix-nio`. To do so, explore our [built-with-marix-nio list]( ). 
matrix-nio-0.24.0/examples/basic_client.py000066400000000000000000000020021455215747700205060ustar00rootroot00000000000000import asyncio from nio import AsyncClient, MatrixRoom, RoomMessageText async def message_callback(room: MatrixRoom, event: RoomMessageText) -> None: print( f"Message received in room {room.display_name}\n" f"{room.user_name(event.sender)} | {event.body}" ) async def main() -> None: client = AsyncClient("https://matrix.example.org", "@alice:example.org") client.add_event_callback(message_callback, RoomMessageText) print(await client.login("my-secret-password")) # "Logged in as @alice:example.org device id: RANDOMDID" # If you made a new room and haven't joined as that user, you can use # await client.join("your-room-id") await client.room_send( # Watch out! If you join an old room you'll see lots of old messages room_id="!my-fave-room:example.org", message_type="m.room.message", content={"msgtype": "m.text", "body": "Hello world!"}, ) await client.sync_forever(timeout=30000) # milliseconds asyncio.run(main()) matrix-nio-0.24.0/examples/manual_encrypted_verify.py000066400000000000000000000310131455215747700230110ustar00rootroot00000000000000import asyncio import json import os import sys from typing import Optional import aiofiles from nio import ( AsyncClient, ClientConfig, InviteEvent, LoginResponse, MatrixRoom, RoomMessageText, crypto, exceptions, ) # This is a fully-documented example of how to do manual verification with nio, # for when you already know the device IDs of the users you want to trust. If # you want live verification using emojis, the process is more complicated and # will be covered in another example. # We're building on the restore_login example here to preserve device IDs and # therefore preserve trust; if @bob trusts @alice's device ID ABC and @alice # restarts this program, loading the same keys, @bob will preserve trust. 
If # @alice logged in again @alice would have new keys and a device ID XYZ, and # @bob wouldn't trust it. # The store is where we want to place encryption details like our keys, trusted # devices and blacklisted devices. Here we place it in the working directory, # but if you deploy your program you might consider /var or /opt for storage STORE_FOLDER = "nio_store/" # This file is for restoring login details after closing the program, so you # can preserve your device ID. If @alice logged in every time instead, @bob # would have to re-verify. See the restoring login example for more into. SESSION_DETAILS_FILE = "credentials.json" # Only needed for this example, this is who @alice will securely # communicate with. We need all the device IDs of this user so we can consider # them "trusted". If an unknown device shows up (like @bob signs into their # account on another device), this program will refuse to send a message in the # room. Try it! BOB_ID = "@bob:example.org" BOB_DEVICE_IDS = [ # You can find these in Riot under Settings > Security & Privacy. # They may also be called "session IDs". You'll want to add ALL of them here # for the one other user in your encrypted room "URDEVICEID", ] # the ID of the room you want your bot to join and send commands in. # This can be a direct message or room; Matrix treats them the same ROOM_ID = "!myfavouriteroom:example.org" ALICE_USER_ID = "@alice:example.org" ALICE_HOMESERVER = "https://matrix.example.org" ALICE_PASSWORD = "hunter2" class CustomEncryptedClient(AsyncClient): def __init__( self, homeserver, user="", device_id="", store_path="", config=None, ssl=None, proxy=None, ): # Calling super.__init__ means we're running the __init__ method # defined in AsyncClient, which this class derives from. 
That does a # bunch of setup for us automatically super().__init__( homeserver, user=user, device_id=device_id, store_path=store_path, config=config, ssl=ssl, proxy=proxy, ) # if the store location doesn't exist, we'll make it if store_path and not os.path.isdir(store_path): os.mkdir(store_path) # auto-join room invites self.add_event_callback(self.cb_autojoin_room, InviteEvent) # print all the messages we receive self.add_event_callback(self.cb_print_messages, RoomMessageText) async def login(self) -> None: """Log in either using the global variables or (if possible) using the session details file. NOTE: This method kinda sucks. Don't use these kinds of global variables in your program; it would be much better to pass them around instead. They are only used here to minimise the size of the example. """ # Restore the previous session if we can # See the "restore_login.py" example if you're not sure how this works if os.path.exists(SESSION_DETAILS_FILE) and os.path.isfile( SESSION_DETAILS_FILE ): try: async with aiofiles.open(SESSION_DETAILS_FILE, "r") as f: contents = await f.read() config = json.loads(contents) self.access_token = config["access_token"] self.user_id = config["user_id"] self.device_id = config["device_id"] # This loads our verified/blacklisted devices and our keys self.load_store() print( f"Logged in using stored credentials: {self.user_id} on {self.device_id}" ) except OSError as err: print(f"Couldn't load session from file. Logging in. 
Error: {err}") except json.JSONDecodeError: print("Couldn't read JSON file; overwriting") # We didn't restore a previous session, so we'll log in with a password if not self.user_id or not self.access_token or not self.device_id: # this calls the login method defined in AsyncClient from nio resp = await super().login(ALICE_PASSWORD) if isinstance(resp, LoginResponse): print("Logged in using a password; saving details to disk") self.__write_details_to_disk(resp) else: print(f"Failed to log in: {resp}") sys.exit(1) def trust_devices(self, user_id: str, device_list: Optional[str] = None) -> None: """Trusts the devices of a user. If no device_list is provided, all of the users devices are trusted. If one is provided, only the devices with IDs in that list are trusted. Arguments: user_id {str} -- the user ID whose devices should be trusted. Keyword Arguments: device_list {Optional[str]} -- The full list of device IDs to trust from that user (default: {None}) """ print(f"{user_id}'s device store: {self.device_store[user_id]}") # The device store contains a dictionary of device IDs and known # OlmDevices for all users that share a room with us, including us. # We can only run this after a first sync. We have to populate our # device store and that requires syncing with the server. for device_id, olm_device in self.device_store[user_id].items(): if device_list and device_id not in device_list: # a list of trusted devices was provided, but this ID is not in # that list. That's an issue. print( f"Not trusting {device_id} as it's not in {user_id}'s pre-approved list." ) continue if user_id == self.user_id and device_id == self.device_id: # We cannot explicitly trust the device @alice is using continue self.verify_device(olm_device) print(f"Trusting {device_id} from user {user_id}") def cb_autojoin_room(self, room: MatrixRoom, event: InviteEvent): """Callback to automatically joins a Matrix room on invite. 
Arguments: room {MatrixRoom} -- Provided by nio event {InviteEvent} -- Provided by nio """ self.join(room.room_id) room = self.rooms[ROOM_ID] print(f"Room {room.name} is encrypted: {room.encrypted}") async def cb_print_messages(self, room: MatrixRoom, event: RoomMessageText): """Callback to print all received messages to stdout. Arguments: room {MatrixRoom} -- Provided by nio event {RoomMessageText} -- Provided by nio """ if event.decrypted: encrypted_symbol = "🛡 " else: encrypted_symbol = "⚠️ " print( f"{room.display_name} |{encrypted_symbol}| {room.user_name(event.sender)}: {event.body}" ) async def send_hello_world(self): # Now we send an encrypted message that @bob can read, although it will # appear to be "unverified" when they see it, because @bob has not verified # the device @alice is sending from. # We'll leave that as an exercise for the reader. try: await self.room_send( room_id=ROOM_ID, message_type="m.room.message", content={ "msgtype": "m.text", "body": "Hello, this message is encrypted", }, ) except exceptions.OlmUnverifiedDeviceError: print("These are all known devices:") device_store: crypto.DeviceStore = device_store # noqa: F821 [ print( f"\t{device.user_id}\t {device.device_id}\t {device.trust_state}\t {device.display_name}" ) for device in device_store ] sys.exit(1) @staticmethod def __write_details_to_disk(resp: LoginResponse) -> None: """Writes login details to disk so that we can restore our session later without logging in again and creating a new device ID. Arguments: resp {LoginResponse} -- the successful client login response. """ with open(SESSION_DETAILS_FILE, "w") as f: json.dump( { "access_token": resp.access_token, "device_id": resp.device_id, "user_id": resp.user_id, }, f, ) async def run_client(client: CustomEncryptedClient) -> None: """A basic encrypted chat application using nio.""" # This is our own custom login function that looks for a pre-existing config # file and, if it exists, logs in using those details. 
Otherwise it will log # in using a password. await client.login() # Here we create a coroutine that we can call in asyncio.gather later, # along with sync_forever and any other API-related coroutines you'd like # to do. async def after_first_sync(): # We'll wait for the first firing of 'synced' before trusting devices. # client.synced is an asyncio event that fires any time nio syncs. This # code doesn't run in a loop, so it only fires once print("Awaiting sync") await client.synced.wait() # In practice, you want to have a list of previously-known device IDs # for each user you want to trust. Here, we require that list as a # global variable client.trust_devices(BOB_ID, BOB_DEVICE_IDS) # In this case, we'll trust _all_ of @alice's devices. NOTE that this # is a SUPER BAD IDEA in practice, but for the purpose of this example # it'll be easier, since you may end up creating lots of sessions for # @alice as you play with the script client.trust_devices(ALICE_USER_ID) await client.send_hello_world() # We're creating Tasks here so that you could potentially write other # Python coroutines to do other work, like checking an API or using another # library. All of these Tasks will be run concurrently. # For more details, check out https://docs.python.org/3/library/asyncio-task.html # ensure_future() is for Python 3.5 and 3.6 compatibility. For 3.7+, use # asyncio.create_task() after_first_sync_task = asyncio.ensure_future(after_first_sync()) # We use full_state=True here to pull any room invites that occurred or # messages sent in rooms _before_ this program connected to the # Matrix server sync_forever_task = asyncio.ensure_future( client.sync_forever(30000, full_state=True) ) await asyncio.gather( # The order here IS significant! 
You have to register the task to trust # devices FIRST since it awaits the first sync after_first_sync_task, sync_forever_task, ) async def main(): # By setting `store_sync_tokens` to true, we'll save sync tokens to our # store every time we sync, thereby preventing reading old, previously read # events on each new sync. # For more info, check out https://matrix-nio.readthedocs.io/en/latest/nio.html#asyncclient config = ClientConfig(store_sync_tokens=True) client = CustomEncryptedClient( ALICE_HOMESERVER, ALICE_USER_ID, store_path=STORE_FOLDER, config=config, ssl=False, proxy="http://localhost:8080", ) try: await run_client(client) except (asyncio.CancelledError, KeyboardInterrupt): await client.close() # Run the main coroutine, which instantiates our custom subclass, trusts all the # devices, and syncs forever (or until your press Ctrl+C) if __name__ == "__main__": try: asyncio.run(main()) except KeyboardInterrupt: pass matrix-nio-0.24.0/examples/restore_login.py000066400000000000000000000065631455215747700207620ustar00rootroot00000000000000#!/usr/bin/env python3 import asyncio import getpass import json import os import sys import aiofiles from nio import AsyncClient, LoginResponse CONFIG_FILE = "credentials.json" # Check out main() below to see how it's done. def write_details_to_disk(resp: LoginResponse, homeserver) -> None: """Writes the required login details to disk so we can log in later without using a password. Arguments: resp {LoginResponse} -- the successful client login response. homeserver -- URL of homeserver, e.g. "https://matrix.example.org" """ # open the config file in write-mode with open(CONFIG_FILE, "w") as f: # write the login details to disk json.dump( { "homeserver": homeserver, # e.g. "https://matrix.example.org" "user_id": resp.user_id, # e.g. "@user:example.org" "device_id": resp.device_id, # device ID, 10 uppercase letters "access_token": resp.access_token, # cryptogr. 
access token }, f, ) async def main() -> None: # If there are no previously-saved credentials, we'll use the password if not os.path.exists(CONFIG_FILE): print( "First time use. Did not find credential file. Asking for " "homeserver, user, and password to create credential file." ) homeserver = "https://matrix.example.org" homeserver = input(f"Enter your homeserver URL: [{homeserver}] ") if not (homeserver.startswith("https://") or homeserver.startswith("http://")): homeserver = "https://" + homeserver user_id = "@user:example.org" user_id = input(f"Enter your full user ID: [{user_id}] ") device_name = "matrix-nio" device_name = input(f"Choose a name for this device: [{device_name}] ") client = AsyncClient(homeserver, user_id) pw = getpass.getpass() resp = await client.login(pw, device_name=device_name) # check that we logged in successfully if isinstance(resp, LoginResponse): write_details_to_disk(resp, homeserver) else: print(f'homeserver = "{homeserver}"; user = "{user_id}"') print(f"Failed to log in: {resp}") sys.exit(1) print( "Logged in using a password. Credentials were stored.", "Try running the script again to login with credentials.", ) # Otherwise the config file exists, so we'll use the stored credentials else: # open the file in read-only mode async with aiofiles.open(CONFIG_FILE, "r") as f: contents = await f.read() config = json.loads(contents) client = AsyncClient(config["homeserver"]) client.access_token = config["access_token"] client.user_id = config["user_id"] client.device_id = config["device_id"] # Now we can send messages as the user room_id = "!myfavouriteroomid:example.org" room_id = input(f"Enter room id for test message: [{room_id}] ") await client.room_send( room_id, message_type="m.room.message", content={"msgtype": "m.text", "body": "Hello world!"}, ) print("Logged in using stored credentials. 
Sent a test message.") # Either way we're logged in here, too await client.close() asyncio.run(main()) matrix-nio-0.24.0/examples/send_image.py000066400000000000000000000133141455215747700201720ustar00rootroot00000000000000#!/usr/bin/env python3 import asyncio import getpass import json import os import sys import aiofiles.os import magic from PIL import Image from nio import AsyncClient, LoginResponse, UploadResponse CONFIG_FILE = "credentials.json" # Check out main() below to see how it's done. def write_details_to_disk(resp: LoginResponse, homeserver) -> None: """Writes the required login details to disk so we can log in later without using a password. Arguments: resp {LoginResponse} -- the successful client login response. homeserver -- URL of homeserver, e.g. "https://matrix.example.org" """ # open the config file in write-mode with open(CONFIG_FILE, "w") as f: # write the login details to disk json.dump( { "homeserver": homeserver, # e.g. "https://matrix.example.org" "user_id": resp.user_id, # e.g. "@user:example.org" "device_id": resp.device_id, # device ID, 10 uppercase letters "access_token": resp.access_token, # cryptogr. access token }, f, ) async def send_image(client, room_id, image): """Send image to room. Arguments: --------- client : Client room_id : str image : str, file name of image This is a working example for a JPG image. "content": { "body": "someimage.jpg", "info": { "size": 5420, "mimetype": "image/jpeg", "thumbnail_info": { "w": 100, "h": 100, "mimetype": "image/jpeg", "size": 2106 }, "w": 100, "h": 100, "thumbnail_url": "mxc://example.com/SomeStrangeThumbnailUriKey" }, "msgtype": "m.image", "url": "mxc://example.com/SomeStrangeUriKey" } """ mime_type = magic.from_file(image, mime=True) # e.g. 
"image/jpeg" if not mime_type.startswith("image/"): print("Drop message because file does not have an image mime type.") return im = Image.open(image) (width, height) = im.size # im.size returns (width,height) tuple # first do an upload of image, then send URI of upload to room file_stat = await aiofiles.os.stat(image) async with aiofiles.open(image, "r+b") as f: resp, maybe_keys = await client.upload( f, content_type=mime_type, # image/jpeg filename=os.path.basename(image), filesize=file_stat.st_size, ) if isinstance(resp, UploadResponse): print("Image was uploaded successfully to server. ") else: print(f"Failed to upload image. Failure response: {resp}") content = { "body": os.path.basename(image), # descriptive title "info": { "size": file_stat.st_size, "mimetype": mime_type, "thumbnail_info": None, # TODO "w": width, # width in pixel "h": height, # height in pixel "thumbnail_url": None, # TODO }, "msgtype": "m.image", "url": resp.content_uri, } try: await client.room_send(room_id, message_type="m.room.message", content=content) print("Image was sent successfully") except Exception: print(f"Image send of file {image} failed.") async def main() -> None: # If there are no previously-saved credentials, we'll use the password if not os.path.exists(CONFIG_FILE): print( "First time use. Did not find credential file. Asking for " "homeserver, user, and password to create credential file." 
) homeserver = "https://matrix.example.org" homeserver = input(f"Enter your homeserver URL: [{homeserver}] ") if not (homeserver.startswith("https://") or homeserver.startswith("http://")): homeserver = "https://" + homeserver user_id = "@user:example.org" user_id = input(f"Enter your full user ID: [{user_id}] ") device_name = "matrix-nio" device_name = input(f"Choose a name for this device: [{device_name}] ") client = AsyncClient(homeserver, user_id) pw = getpass.getpass() resp = await client.login(pw, device_name=device_name) # check that we logged in successfully if isinstance(resp, LoginResponse): write_details_to_disk(resp, homeserver) else: print(f'homeserver = "{homeserver}"; user = "{user_id}"') print(f"Failed to log in: {resp}") sys.exit(1) print( "Logged in using a password. Credentials were stored.", "Try running the script again to login with credentials.", ) # Otherwise the config file exists, so we'll use the stored credentials else: # open the file in read-only mode async with aiofiles.open(CONFIG_FILE, "r") as f: contents = await f.read() config = json.loads(contents) client = AsyncClient(config["homeserver"]) client.access_token = config["access_token"] client.user_id = config["user_id"] client.device_id = config["device_id"] # Now we can send messages as the user room_id = "!myfavouriteroomid:example.org" room_id = input(f"Enter room id for image message: [{room_id}] ") image = "exampledir/samplephoto.jpg" image = input(f"Enter file name of image to send: [{image}] ") await send_image(client, room_id, image) print("Logged in using stored credentials. Sent a test message.") # Close the client connection after we are done with it. await client.close() asyncio.run(main()) matrix-nio-0.24.0/examples/verify_with_emoji.py000066400000000000000000000371551455215747700216320ustar00rootroot00000000000000#!/usr/bin/env python3 """verify_with_emoji.py A sample program to demo Emoji verification. 
# Objectives: - Showcase the emoji verification using matrix-nio SDK - This sample program tries to show the key steps involved in performing an emoji verification. - It does so only for incoming request, outgoing emoji verification request are similar but not shown in this sample program # Prerequisites: - You must have matrix-nio and components for end-to-end encryption installed See: https://github.com/poljar/matrix-nio - You must have created a Matrix account already, and have username and password ready - You must have already joined a Matrix room with someone, e.g. yourself - This other party initiates an emoji verification with you - You are using this sample program to accept this incoming emoji verification and follow the protocol to successfully verify the other party's device # Use Cases: - Apply similar code in your Matrix bot - Apply similar code in your Matrix client - Just to learn about Matrix and the matrix-nio SDK # Running the Program: - Change permissions to allow execution `chmod 755 ./verify_with_emoji.py` - Optionally create a store directory, if not it will be done for you `mkdir ./store/` - Run the program as-is, no changes needed `./verify_with_emoji.py` - Run it as often as you like # Sample Screen Output when Running Program: $ ./verify_with_emoji.py First time use. Did not find credential file. Asking for homeserver, user, and password to create credential file. Enter your homeserver URL: [https://matrix.example.org] matrix.example.org Enter your full user ID: [@user:example.org] @user:example.org Choose a name for this device: [matrix-nio] verify_with_emoji Password: Logged in using a password. Credentials were stored. On next execution the stored login credentials will be used. This program is ready and waiting for the other party to initiate an emoji verification with us by selecting "Verify by Emoji" in their Matrix client. 
[('⚓', 'Anchor'), ('☎️', 'Telephone'), ('😀', 'Smiley'), ('😀', 'Smiley'), ('☂️', 'Umbrella'), ('⚓', 'Anchor'), ('☎️', 'Telephone')] Do the emojis match? (Y/N) y Match! Device will be verified by accepting verification. sas.we_started_it = False sas.sas_accepted = True sas.canceled = False sas.timed_out = False sas.verified = True sas.verified_devices = ['DEVICEIDXY'] Emoji verification was successful. Hit Control-C to stop the program or initiate another Emoji verification from another device or room. """ import asyncio import getpass import json import os import sys import traceback import aiofiles from nio import ( AsyncClient, AsyncClientConfig, KeyVerificationCancel, KeyVerificationEvent, KeyVerificationKey, KeyVerificationMac, KeyVerificationStart, LocalProtocolError, LoginResponse, ToDeviceError, ) # file to store credentials in case you want to run program multiple times CONFIG_FILE = "credentials.json" # login credentials JSON file # directory to store persistent data for end-to-end encryption STORE_PATH = "./store/" # local directory class Callbacks: """Class to pass client to callback methods.""" def __init__(self, client): """Store AsyncClient.""" self.client = client async def to_device_callback(self, event): # noqa """Handle events sent to device.""" try: client = self.client if isinstance(event, KeyVerificationStart): # first step """first step: receive KeyVerificationStart KeyVerificationStart( source={'content': {'method': 'm.sas.v1', 'from_device': 'DEVICEIDXY', 'key_agreement_protocols': ['curve25519-hkdf-sha256', 'curve25519'], 'hashes': ['sha256'], 'message_authentication_codes': ['hkdf-hmac-sha256', 'hmac-sha256'], 'short_authentication_string': ['decimal', 'emoji'], 'transaction_id': 'SomeTxId' }, 'type': 'm.key.verification.start', 'sender': '@user2:example.org' }, sender='@user2:example.org', transaction_id='SomeTxId', from_device='DEVICEIDXY', method='m.sas.v1', key_agreement_protocols=[ 'curve25519-hkdf-sha256', 'curve25519'], 
hashes=['sha256'], message_authentication_codes=[ 'hkdf-hmac-sha256', 'hmac-sha256'], short_authentication_string=['decimal', 'emoji']) """ if "emoji" not in event.short_authentication_string: print( "Other device does not support emoji verification " f"{event.short_authentication_string}." ) return resp = await client.accept_key_verification(event.transaction_id) if isinstance(resp, ToDeviceError): print(f"accept_key_verification failed with {resp}") sas = client.key_verifications[event.transaction_id] todevice_msg = sas.share_key() resp = await client.to_device(todevice_msg) if isinstance(resp, ToDeviceError): print(f"to_device failed with {resp}") elif isinstance(event, KeyVerificationCancel): # anytime """at any time: receive KeyVerificationCancel KeyVerificationCancel(source={ 'content': {'code': 'm.mismatched_sas', 'reason': 'Mismatched authentication string', 'transaction_id': 'SomeTxId'}, 'type': 'm.key.verification.cancel', 'sender': '@user2:example.org'}, sender='@user2:example.org', transaction_id='SomeTxId', code='m.mismatched_sas', reason='Mismatched short authentication string') """ # There is no need to issue a # client.cancel_key_verification(tx_id, reject=False) # here. The SAS flow is already cancelled. # We only need to inform the user. print( f"Verification has been cancelled by {event.sender} " f'for reason "{event.reason}".' ) elif isinstance(event, KeyVerificationKey): # second step """Second step is to receive KeyVerificationKey KeyVerificationKey( source={'content': { 'key': 'SomeCryptoKey', 'transaction_id': 'SomeTxId'}, 'type': 'm.key.verification.key', 'sender': '@user2:example.org' }, sender='@user2:example.org', transaction_id='SomeTxId', key='SomeCryptoKey') """ sas = client.key_verifications[event.transaction_id] print(f"{sas.get_emoji()}") yn = input("Do the emojis match? (Y/N) (C for Cancel) ") if yn.lower() == "y": print( "Match! The verification for this " "device will be accepted." 
) resp = await client.confirm_short_auth_string(event.transaction_id) if isinstance(resp, ToDeviceError): print(f"confirm_short_auth_string failed with {resp}") elif yn.lower() == "n": # no, don't match, reject print( "No match! Device will NOT be verified " "by rejecting verification." ) resp = await client.cancel_key_verification( event.transaction_id, reject=True ) if isinstance(resp, ToDeviceError): print(f"cancel_key_verification failed with {resp}") else: # C or anything for cancel print("Cancelled by user! Verification will be " "cancelled.") resp = await client.cancel_key_verification( event.transaction_id, reject=False ) if isinstance(resp, ToDeviceError): print(f"cancel_key_verification failed with {resp}") elif isinstance(event, KeyVerificationMac): # third step """Third step is to receive KeyVerificationMac KeyVerificationMac( source={'content': { 'mac': {'ed25519:DEVICEIDXY': 'SomeKey1', 'ed25519:SomeKey2': 'SomeKey3'}, 'keys': 'SomeCryptoKey4', 'transaction_id': 'SomeTxId'}, 'type': 'm.key.verification.mac', 'sender': '@user2:example.org'}, sender='@user2:example.org', transaction_id='SomeTxId', mac={'ed25519:DEVICEIDXY': 'SomeKey1', 'ed25519:SomeKey2': 'SomeKey3'}, keys='SomeCryptoKey4') """ sas = client.key_verifications[event.transaction_id] try: todevice_msg = sas.get_mac() except LocalProtocolError as e: # e.g. it might have been cancelled by ourselves print( f"Cancelled or protocol error: Reason: {e}.\n" f"Verification with {event.sender} not concluded. " "Try again?" 
) else: resp = await client.to_device(todevice_msg) if isinstance(resp, ToDeviceError): print(f"to_device failed with {resp}") print( f"sas.we_started_it = {sas.we_started_it}\n" f"sas.sas_accepted = {sas.sas_accepted}\n" f"sas.canceled = {sas.canceled}\n" f"sas.timed_out = {sas.timed_out}\n" f"sas.verified = {sas.verified}\n" f"sas.verified_devices = {sas.verified_devices}\n" ) print( "Emoji verification was successful!\n" "Hit Control-C to stop the program or " "initiate another Emoji verification from " "another device or room." ) else: print( f"Received unexpected event type {type(event)}. " f"Event is {event}. Event will be ignored." ) except BaseException: print(traceback.format_exc()) def write_details_to_disk(resp: LoginResponse, homeserver) -> None: """Write the required login details to disk. It will allow following logins to be made without password. Arguments: --------- resp : LoginResponse - successful client login response homeserver : str - URL of homeserver, e.g. "https://matrix.example.org" """ # open the config file in write-mode with open(CONFIG_FILE, "w") as f: # write the login details to disk json.dump( { "homeserver": homeserver, # e.g. "https://matrix.example.org" "user_id": resp.user_id, # e.g. "@user:example.org" "device_id": resp.device_id, # device ID, 10 uppercase letters "access_token": resp.access_token, # cryptogr. access token }, f, ) async def login() -> AsyncClient: """Handle login with or without stored credentials.""" # Configuration options for the AsyncClient client_config = AsyncClientConfig( max_limit_exceeded=0, max_timeouts=0, store_sync_tokens=True, encryption_enabled=True, ) # If there are no previously-saved credentials, we'll use the password if not os.path.exists(CONFIG_FILE): print( "First time use. Did not find credential file. Asking for " "homeserver, user, and password to create credential file." 
) homeserver = "https://matrix.example.org" homeserver = input(f"Enter your homeserver URL: [{homeserver}] ") if not (homeserver.startswith("https://") or homeserver.startswith("http://")): homeserver = "https://" + homeserver user_id = "@user:example.org" user_id = input(f"Enter your full user ID: [{user_id}] ") device_name = "matrix-nio" device_name = input(f"Choose a name for this device: [{device_name}] ") if not os.path.exists(STORE_PATH): os.makedirs(STORE_PATH) # Initialize the matrix client client = AsyncClient( homeserver, user_id, store_path=STORE_PATH, config=client_config, ) pw = getpass.getpass() resp = await client.login(password=pw, device_name=device_name) # check that we logged in successfully if isinstance(resp, LoginResponse): write_details_to_disk(resp, homeserver) else: print(f'homeserver = "{homeserver}"; user = "{user_id}"') print(f"Failed to log in: {resp}") sys.exit(1) print( "Logged in using a password. Credentials were stored. " "On next execution the stored login credentials will be used." 
) # Otherwise the config file exists, so we'll use the stored credentials else: # open the file in read-only mode async with aiofiles.open(CONFIG_FILE, "r") as f: contents = await f.read() config = json.loads(contents) # Initialize the matrix client based on credentials from file client = AsyncClient( config["homeserver"], config["user_id"], device_id=config["device_id"], store_path=STORE_PATH, config=client_config, ) client.restore_login( user_id=config["user_id"], device_id=config["device_id"], access_token=config["access_token"], ) print("Logged in using stored credentials.") return client async def main() -> None: """Login and wait for and perform emoji verify.""" client = await login() # Set up event callbacks callbacks = Callbacks(client) client.add_to_device_callback(callbacks.to_device_callback, (KeyVerificationEvent,)) # Sync encryption keys with the server # Required for participating in encrypted rooms if client.should_upload_keys: await client.keys_upload() print( "This program is ready and waiting for the other party to initiate " 'an emoji verification with us by selecting "Verify by Emoji" ' "in their Matrix client." 
) await client.sync_forever(timeout=30000, full_state=True) try: asyncio.run(main()) except Exception: print(traceback.format_exc()) sys.exit(1) except KeyboardInterrupt: print("Received keyboard interrupt.") sys.exit(0) matrix-nio-0.24.0/index.html000066400000000000000000000001131455215747700156750ustar00rootroot00000000000000 matrix-nio-0.24.0/nio/000077500000000000000000000000001455215747700144725ustar00rootroot00000000000000matrix-nio-0.24.0/nio/__init__.py000066400000000000000000000004501455215747700166020ustar00rootroot00000000000000from .api import ( Api, MessageDirection, PushRuleKind, ResizingMethod, RoomPreset, RoomVisibility, ) from .client import * from .event_builders import * from .events import * from .exceptions import * from .monitors import * from .responses import * from .rooms import * matrix-nio-0.24.0/nio/_compat.py000066400000000000000000000016571455215747700164770ustar00rootroot00000000000000# Copyright © 2019 Damir Jelić # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
from importlib import util def package_installed(package_name): spec = util.find_spec(package_name) if spec is None: return False return True matrix-nio-0.24.0/nio/api.py000066400000000000000000002147031455215747700156240ustar00rootroot00000000000000# Copyright © 2018 Damir Jelić # Copyright © 2020-2021 Famedly GmbH # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. """nio api module. This module contains primitives to build Matrix API http requests. In general these functions are not directly called. One should use an existing client like AsyncClient or HttpClient. 
""" from __future__ import annotations import json import os from collections import defaultdict from collections.abc import Iterable from enum import Enum, unique from typing import ( TYPE_CHECKING, Any, DefaultDict, Dict, List, Optional, Sequence, Tuple, Union, ) from uuid import UUID if TYPE_CHECKING: from .events.account_data import PushAction, PushCondition try: from urllib.parse import quote, urlencode, urlparse except ImportError: from urllib import quote, urlencode # type: ignore from urlparse import urlparse # type: ignore MATRIX_API_PATH: str = "/_matrix/client/r0" MATRIX_MEDIA_API_PATH: str = "/_matrix/media/r0" _FilterT = Union[None, str, Dict[Any, Any]] @unique class MessageDirection(Enum): """Enum representing the direction messages should be fetched from.""" back = 0 front = 1 @unique class ResizingMethod(Enum): """Enum representing the desired resizing method for a thumbnail. "scale" maintains the original aspect ratio of the image, "crop" provides an image in the aspect ratio of the requested size. """ scale = "scale" crop = "crop" @unique class RoomVisibility(Enum): """Enum representing the desired visibility when creating a room. "public" means the room will be shown in the server's room directory. "private" will hide the room from the server's room directory. """ private = "private" public = "public" @unique class RoomPreset(Enum): """Enum representing the available rule presets when creating a room. "private_chat" makes the room invite-only and allows guests. "trusted_private_chat" is the same as above, but also gives all invitees the same power level as the room's creator. "public_chat" makes the room joinable by anyone without invitations, and forbid guests. """ private_chat = "private_chat" trusted_private_chat = "trusted_private_chat" public_chat = "public_chat" @unique class EventFormat(Enum): """Available formats in which a filter can make the server return events. "client" will return the events in a format suitable for clients. 
"federation" will return the raw event as received over federation. """ client = "client" federation = "federation" @unique class PushRuleKind(Enum): """Push rule kinds defined by the Matrix spec, ordered by priority.""" override = "override" content = "content" room = "room" sender = "sender" underride = "underride" class Api: """Matrix API class. Static methods reflecting the Matrix REST API. """ @staticmethod def to_json(content_dict: Dict[Any, Any]) -> str: """Turn a dictionary into a json string.""" return json.dumps(content_dict, separators=(",", ":")) @staticmethod def to_canonical_json(content_dict: Dict[Any, Any]) -> str: """Turn a dictionary into a canonical json string.""" return json.dumps( content_dict, ensure_ascii=False, separators=(",", ":"), sort_keys=True, ) @staticmethod def mimetype_to_msgtype(mimetype: str) -> str: """Turn a mimetype into a matrix message type.""" if mimetype.startswith("image"): return "m.image" elif mimetype.startswith("video"): return "m.video" elif mimetype.startswith("audio"): return "m.audio" return "m.file" @staticmethod def mxc_to_http(mxc: str, homeserver: Optional[str] = None) -> Optional[str]: """Convert a matrix content URI to a HTTP URI.""" url = urlparse(mxc) if url.scheme != "mxc": return None if not url.netloc or not url.path: return None parsed_homeserver = urlparse(homeserver) if homeserver else None http_url = ( "{homeserver}/_matrix/media/r0/download/" "{server_name}{mediaId}" ).format( homeserver=( parsed_homeserver.geturl() if parsed_homeserver else f"https://{url.netloc}" ), server_name=url.hostname, mediaId=url.path, ) return http_url @staticmethod def encrypted_mxc_to_plumb( mxc, key: str, hash: str, iv: str, homeserver: Optional[str] = None, mimetype: Optional[str] = None, ) -> Optional[str]: """Convert a matrix content URI to a encrypted mxc URI. The return value of this function will have a URI schema of emxc://. 
The path of the URI will be converted just like the mxc_to_http() function does, but it will also contain query parameters that are necessary to decrypt the payload the URI is pointing to. This function is useful to present a clickable URI that can be passed to a plumber program that will download and decrypt the content that the matrix content URI is pointing to. The returned URI should never be converted to http and opened directly, as that would expose the decryption parameters to any middleman or ISP. Args: mxc (str): The matrix content URI. key (str): The encryption key that can be used to decrypt the payload the URI is pointing to. hash (str): The hash of the payload. iv (str): The initial value needed to decrypt the payload. mimetype (str): The mimetype of the payload. """ url = urlparse(mxc) if url.scheme != "mxc": return None if not url.netloc or not url.path: return None parsed_homeserver = urlparse(homeserver) if homeserver else None host = ( parsed_homeserver._replace(scheme="emxc").geturl() if parsed_homeserver else None ) plumb_url = ( "{homeserver}/_matrix/media/r0/download/" "{server_name}{mediaId}" ).format( homeserver=host if host else f"emxc://{url.netloc}", server_name=url.hostname, mediaId=url.path, ) query_parameters = { "key": key, "hash": hash, "iv": iv, } if mimetype is not None: query_parameters["mimetype"] = mimetype plumb_url += f"?{urlencode(query_parameters)}" return plumb_url @staticmethod def _build_path( path: List[str], query_parameters: Optional[Dict] = None, base_path: str = MATRIX_API_PATH, ) -> str: """Builds a percent-encoded path from a list of strings. For example, turns ["hello", "wo/rld"] into "/hello/wo%2Frld". All special characters are percent encoded, including the forward slash (/). Args: path (List[str]): the list of path elements. query_parameters (Dict, optional): [description]. Defaults to None. base_path (str, optional): A base path to be prepended to path. Defaults to MATRIX_API_PATH. 
Returns: str: [description] """ quoted_path = "" if isinstance(path, str): quoted_path = quote(path, safe="") elif isinstance(path, List): quoted_path = "/".join([quote(str(part), safe="") for part in path]) else: raise AssertionError( f"'path' must be of type List[str] or str, got {type(path)}" ) built_path = f"{base_path}/{quoted_path}" built_path = built_path.rstrip("/") if query_parameters: built_path += f"?{urlencode(query_parameters)}" return built_path @staticmethod def discovery_info() -> Tuple[str, str]: """Get discovery information about a domain. Returns the HTTP method and HTTP path for the request. """ path = Api._build_path(path=[".well-known", "matrix", "client"], base_path="") return ("GET", path) @staticmethod def login_info() -> Tuple[str, str]: """Get the homeserver's supported login types Returns the HTTP method and HTTP path for the request. """ path = Api._build_path(path=["login"]) return "GET", path @staticmethod def register( user: str, password: Optional[str] = None, device_name: Optional[str] = "", device_id: Optional[str] = "", auth_dict: Optional[dict[str, Any]] = None, ): """Register a new user. Args: user (str): The fully qualified user ID or just local part of the user ID, to log in. password (str): The user's password. device_name (str): A display name to assign to a newly-created device. Ignored if device_id corresponds to a known device device_id (str): ID of the client device. If this does not correspond to a known client device, a new device will be created. auth_dict (Dict[str, Any, optional): The authentication dictionary containing the elements for a particular registration flow. If not provided, then m.login.dummy is used. 
See the example below and here https://spec.matrix.org/latest/client-server-api/#account-registration-and-management for detailed documentation Example: >>> auth_dict = { >>> "type": "m.login.registration_token", >>> "registration_token": "REGISTRATIONTOKEN", >>> "session": "session-id-from-homeserver" >>> } """ path = Api._build_path(["register"]) content_dict = { "username": user, "password": password, "auth": auth_dict or {"type": "m.login.dummy"}, } if device_id: content_dict["device_id"] = device_id if device_name: content_dict["initial_device_display_name"] = device_name return "POST", path, Api.to_json(content_dict) @staticmethod def login( user: str, password: Optional[str] = None, device_name: Optional[str] = "", device_id: Optional[str] = "", token: Optional[str] = None, ) -> Tuple[str, str, str]: """Authenticate the user. Returns the HTTP method, HTTP path and data for the request. Args: user (str): The fully qualified user ID or just local part of the user ID, to log in. If the user ID contains an '@', but no ':', the user ID will be considered to be an email address. password (str): The user's password. device_name (str): A display name to assign to a newly-created device. Ignored if device_id corresponds to a known device device_id (str): ID of the client device. If this does not correspond to a known client device, a new device will be created. token (str): Token for token-based login. """ path = Api._build_path(path=["login"]) if password is not None: identifier = {} if "@" in user and not user.startswith("@"): identifier = { "type": "m.id.thirdparty", "medium": "email", "address": user, } else: # As per spec, a user can login with either their localpart (that # cannot contain an @) or their full Matrix ID, starting with an @. 
identifier = { "type": "m.id.user", "user": user, } content_dict = { "type": "m.login.password", "identifier": identifier, "password": password, } elif token is not None: content_dict = { "type": "m.login.token", "token": token, } else: raise ValueError("Neither a password nor a token was provided") if device_id: content_dict["device_id"] = device_id if device_name: content_dict["initial_device_display_name"] = device_name return "POST", path, Api.to_json(content_dict) @staticmethod def login_raw( auth_dict: Dict[str, Any], ) -> Tuple[str, str, str]: """Login to the homeserver using a raw dictionary. Returns the HTTP method, HTTP path and data for the request. Args: auth_dict (Dict[str, Any): The authentication dictionary containing the elements for the logon. See the example below and here https://matrix.org/docs/spec/client_server/r0.6.0#authentication-types for detailed documentation Example: >>> auth_dict = { >>> "type": "m.login.password", >>> "identifier": { >>> "type": "m.id.thirdparty", >>> "medium": "email", >>> "address": "testemail@mail.org" >>> }, >>> "password": "PASSWORDABCD", >>> "initial_device_display_name": "Test user" >>> } """ if auth_dict is None or auth_dict == {}: raise ValueError("Auth dictionary shall not be empty") path = Api._build_path(path=["login"]) return "POST", path, Api.to_json(auth_dict) @staticmethod def logout( access_token: str, all_devices: bool = False, ): """Logout the session. Returns nothing. Args: access_token (str): the access token to be used with the request. all_devices (bool): Logout all sessions from all devices if set to True. 
""" query_parameters = {"access_token": access_token} if all_devices: api_path = ["logout", "all"] else: api_path = ["logout"] content_dict: Dict = {} return ( "POST", Api._build_path(api_path, query_parameters), Api.to_json(content_dict), ) @staticmethod def sync( access_token: str, since: Optional[str] = None, timeout: Optional[int] = None, filter: Optional[_FilterT] = None, full_state: Optional[bool] = None, set_presence: Optional[str] = None, ) -> Tuple[str, str]: """Synchronise the client's state with the latest state on the server. Returns the HTTP method and HTTP path for the request. Args: access_token (str): The access token to be used with the request. since (str): The room id of the room where the event will be sent to. timeout (int): The maximum time to wait, in milliseconds, before returning this request. filter (Union[None, str, Dict[Any, Any]): A filter ID or dict that should be used for this sync request. full_state (bool, optional): Controls whether to include the full state for all rooms the user is a member of. If this is set to true, then all state events will be returned, even if since is non-empty. The timeline will still be limited by the since parameter. set_presence (str, optional): Controls whether the client is automatically marked as online by polling this API. If this parameter is omitted then the client is automatically marked as online when it uses this API. Otherwise if the parameter is set to "offline" then the client is not marked as being online when it uses this API. When set to "unavailable", the client is marked as being idle. 
One of: ["offline", "online", "unavailable"] """ query_parameters = {"access_token": access_token} if since: query_parameters["since"] = since if full_state is not None: query_parameters["full_state"] = str(full_state).lower() if timeout is not None: query_parameters["timeout"] = str(timeout) if set_presence: query_parameters["set_presence"] = set_presence if isinstance(filter, dict): filter_json = json.dumps(filter, separators=(",", ":")) query_parameters["filter"] = filter_json elif isinstance(filter, str): query_parameters["filter"] = filter return "GET", Api._build_path(["sync"], query_parameters) @staticmethod def room_send( access_token: str, room_id: str, event_type: str, body: Dict[Any, Any], tx_id: Union[str, UUID], ) -> Tuple[str, str, str]: """Send a message event to a room. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. room_id (str): The room id of the room where the event will be sent to. event_type (str): The type of the message that will be sent. body(Dict): The body of the event. The fields in this object will vary depending on the type of event. tx_id (str): The transaction ID for this event. """ query_parameters = {"access_token": access_token} path = ["rooms", room_id, "send", event_type, str(tx_id)] return ("PUT", Api._build_path(path, query_parameters), Api.to_json(body)) @staticmethod def space_get_hierarchy( access_token: str, space_id: str, from_page: Optional[str] = None, limit: Optional[int] = None, max_depth: Optional[int] = None, suggested_only: bool = False, ) -> Tuple[str, str]: """Get rooms/spaces that are a part of the provided space. Returns the HTTP method and HTTP path for the request. Args: access_token (str): The access token to be used with the request. space_id (str): The ID of the space to get the hierarchy for. from_page (str, optional): Pagination token from a previous request to this endpoint. 
limit (int, optional): The maximum number of rooms to return. max_depth (int, optional): The maximum depth of the returned tree. suggested_only (bool, optional): Whether to only return rooms that are considered suggested. Defaults to False. """ query_parameters = {"access_token": access_token} if from_page: query_parameters["from"] = from_page if limit: query_parameters["limit"] = limit if max_depth: query_parameters["max_depth"] = max_depth if suggested_only: query_parameters["suggested_only"] = suggested_only path = ["rooms", space_id, "hierarchy"] return ("GET", Api._build_path(path, query_parameters, "/_matrix/client/v1")) @staticmethod def direct_room_list(access_token: str, user_id: str) -> Tuple[str, str]: """Lists all rooms flagged as direct the client is participating in. Returns the HTTP method and HTTP path for the request. Args: access_token (str): The access token to be used within the request user_id (str): The user id of the user to get the direct rooms for """ query_parameters = {"access_token": access_token} path = ["user", user_id, "account_data", "m.direct"] return ("GET", Api._build_path(path, query_parameters)) @staticmethod def room_get_event( access_token: str, room_id: str, event_id: str ) -> Tuple[str, str]: """Get a single event based on roomId/eventId. Returns the HTTP method and HTTP path for the request. Args: access_token (str): The access token to be used with the request. room_id (str): The room id of the room where the event is in. event_id (str): The event id to get. """ query_parameters = {"access_token": access_token} path = ["rooms", room_id, "event", event_id] return ("GET", Api._build_path(path, query_parameters)) @staticmethod def room_put_state( access_token: str, room_id: str, event_type: str, body: Dict[Any, Any], state_key: str = "", ) -> Tuple[str, str, str]: """Send a state event. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. 
room_id (str): The room id of the room where the event will be sent to. event_type (str): The type of the event that will be sent. body(Dict): The body of the event. The fields in this object will vary depending on the type of event. state_key: The key of the state to look up. Defaults to an empty string. """ query_parameters = {"access_token": access_token} path = ["rooms", room_id, "state", event_type, state_key] return ("PUT", Api._build_path(path, query_parameters), Api.to_json(body)) @staticmethod def room_get_state_event( access_token, room_id: str, event_type: str, state_key: str = "" ) -> Tuple[str, str]: """Fetch a state event. Returns the HTTP method and HTTP path for the request. Args: access_token (str): The access token to be used with the request. room_id (str): The room id of the room where the state is fetched from. event_type (str): The type of the event that will be fetched. state_key: The key of the state to look up. Defaults to an empty string. """ query_parameters = {"access_token": access_token} path = ["rooms", room_id, "state", event_type, state_key] return ("GET", Api._build_path(path, query_parameters)) @staticmethod def room_get_state(access_token: str, room_id: str) -> Tuple[str, str]: """Fetch the current state for a room. Returns the HTTP method and HTTP path for the request. Args: access_token (str): The access token to be used with the request. room_id (str): The room id of the room where the state is fetched from. """ query_parameters = {"access_token": access_token} path = ["rooms", room_id, "state"] return ("GET", Api._build_path(path, query_parameters)) @staticmethod def room_redact( access_token: str, room_id: str, event_id: str, tx_id: Union[str, UUID], reason: Optional[str] = None, ) -> Tuple[str, str, str]: """Strip information out of an event. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. 
room_id (str): The room id of the room that contains the event that will be redacted. event_id (str): The ID of the event that will be redacted. tx_id (str/UUID, optional): A transaction ID for this event. reason(str, optional): A description explaining why the event was redacted. """ query_parameters = {"access_token": access_token} body = {} if reason: body["reason"] = reason path = ["rooms", room_id, "redact", event_id, str(tx_id)] return ("PUT", Api._build_path(path, query_parameters), Api.to_json(body)) @staticmethod def room_kick( access_token: str, room_id: str, user_id: str, reason: Optional[str] = None ) -> Tuple[str, str, str]: """Kick a user from a room, or withdraw their invitation. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. room_id (str): The room id of the room that the user will be kicked from. user_id (str): The user_id of the user that should be kicked. reason (str, optional): A reason for which the user is kicked. """ query_parameters = {"access_token": access_token} body = {"user_id": user_id} if reason: body["reason"] = reason path = ["rooms", room_id, "kick"] return ("POST", Api._build_path(path, query_parameters), Api.to_json(body)) @staticmethod def room_ban( access_token: str, room_id: str, user_id: str, reason: Optional[str] = None, ) -> Tuple[str, str, str]: """Ban a user from a room. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. room_id (str): The room id of the room that the user will be banned from. user_id (str): The user_id of the user that should be banned. reason (str, optional): A reason for which the user is banned. 
""" path = ["rooms", room_id, "ban"] query_parameters = {"access_token": access_token} body = {"user_id": user_id} if reason: body["reason"] = reason return ( "POST", Api._build_path(path, query_parameters), Api.to_json(body), ) @staticmethod def room_unban( access_token: str, room_id: str, user_id: str, ) -> Tuple[str, str, str]: """Unban a user from a room. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. room_id (str): The room id of the room that the user will be unbanned from. user_id (str): The user_id of the user that should be unbanned. """ path = ["rooms", room_id, "unban"] query_parameters = {"access_token": access_token} body = {"user_id": user_id} return ( "POST", Api._build_path(path, query_parameters), Api.to_json(body), ) @staticmethod def room_knock( access_token: str, room_id: str, reason: Optional[str] = None, ) -> Tuple[str, str, str]: """Knocks on a room for the user. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. room_id (str): The room id of the room that the user will be knocking on. reason (str, optional): The reason the user is knocking. """ path = ["knock", room_id] query_parameters = {"access_token": access_token} body = {} if reason: body["reason"] = reason return ( "POST", Api._build_path(path, query_parameters), Api.to_json(body), ) @staticmethod def room_invite( access_token: str, room_id: str, user_id: str ) -> Tuple[str, str, str]: """Invite a user to a room. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. room_id (str): The room id of the room that the user will be invited to. user_id (str): The user id of the user that should be invited. 
""" path = ["rooms", room_id, "invite"] query_parameters = {"access_token": access_token} body = {"user_id": user_id} return ("POST", Api._build_path(path, query_parameters), Api.to_json(body)) @staticmethod def room_create( access_token: str, visibility: RoomVisibility = RoomVisibility.private, alias: Optional[str] = None, name: Optional[str] = None, topic: Optional[str] = None, room_version: Optional[str] = None, room_type: Optional[str] = None, federate: bool = True, is_direct: bool = False, preset: Optional[RoomPreset] = None, invite: Sequence[str] = (), initial_state: Sequence[Dict[str, Any]] = (), power_level_override: Optional[Dict[str, Any]] = None, predecessor: Optional[Dict[str, Any]] = None, space: bool = False, ) -> Tuple[str, str, str]: """Create a new room. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. visibility (RoomVisibility): whether to have the room published in the server's room directory or not. Defaults to ``RoomVisibility.private``. alias (str, optional): The desired canonical alias local part. For example, if set to "foo" and the room is created on the "example.com" server, the room alias will be "#foo:example.com". name (str, optional): A name to set for the room. topic (str, optional): A topic to set for the room. room_version (str, optional): The room version to set. If not specified, the homeserver will use its default setting. If a version not supported by the homeserver is specified, a 400 ``M_UNSUPPORTED_ROOM_VERSION`` error will be returned. room_type (str, optional): The room type to set. If not specified, the homeserver will use its default setting. In spec v1.2 the following room types are specified: - ``m.space`` Unspecified room types are permitted through the use of Namespaced Identifiers. federate (bool): Whether to allow users from other homeservers from joining the room. Defaults to ``True``. Cannot be changed later. 
is_direct (bool): If this should be considered a direct messaging room. If ``True``, the server will set the ``is_direct`` flag on ``m.room.member events`` sent to the users in ``invite``. Defaults to ``False``. preset (RoomPreset, optional): The selected preset will set various rules for the room. If unspecified, the server will choose a preset from the ``visibility``: ``RoomVisibility.public`` equates to ``RoomPreset.public_chat``, and ``RoomVisibility.private`` equates to a ``RoomPreset.private_chat``. invite (list): A list of user id to invite to the room. initial_state (list): A list of state event dicts to send when the room is created. For example, a room could be made encrypted immediately by having a ``m.room.encryption`` event dict. power_level_override (dict): A ``m.room.power_levels content`` dict to override the default. The dict will be applied on top of the generated ``m.room.power_levels`` event before it is sent to the room. space (bool): Create as a Space (defaults to False). """ path = ["createRoom"] query_parameters = {"access_token": access_token} body = { "visibility": visibility.value, "creation_content": {"m.federate": federate}, "is_direct": is_direct, } if alias: body["room_alias_name"] = alias if name: body["name"] = name if topic: body["topic"] = topic if room_version: body["room_version"] = room_version if room_type: body["creation_content"]["type"] = room_type if preset: body["preset"] = preset.value if invite: body["invite"] = list(invite) if initial_state: body["initial_state"] = list(initial_state) if power_level_override: body["power_level_content_override"] = power_level_override if predecessor: body["creation_content"]["predecessor"] = predecessor if space: body["creation_content"]["type"] = "m.space" return ("POST", Api._build_path(path, query_parameters), Api.to_json(body)) @staticmethod def join(access_token: str, room_id: str) -> Tuple[str, str, str]: """Join a room. 
Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. room_id (str): The room identifier or alias to join. """ path = ["join", room_id] query_parameters = {"access_token": access_token} body = {} return ("POST", Api._build_path(path, query_parameters), Api.to_json(body)) @staticmethod def room_leave(access_token: str, room_id: str) -> Tuple[str, str, str]: """Leave a room. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. room_id (str): The room id of the room that will be left. """ path = ["rooms", room_id, "leave"] query_parameters = {"access_token": access_token} body = {} return ("POST", Api._build_path(path, query_parameters), Api.to_json(body)) @staticmethod def room_forget(access_token: str, room_id: str) -> Tuple[str, str, str]: """Forget a room. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. room_id (str): The room id of the room that will be forgotten. """ path = ["rooms", room_id, "forget"] query_parameters = {"access_token": access_token} body = {} return ("POST", Api._build_path(path, query_parameters), Api.to_json(body)) @staticmethod def room_messages( access_token: str, room_id: str, start: str, end: Optional[str] = None, direction: MessageDirection = MessageDirection.back, limit: int = 10, message_filter: Optional[Dict[Any, Any]] = None, ) -> Tuple[str, str]: """Get room messages. Returns the HTTP method and HTTP path for the request. Args: access_token (str): The access token to be used with the request. room_id (str): room id of the room for which to download the messages start (str): The token to start returning events from. end (str): The token to stop returning events at. direction (MessageDirection): The direction to return events from. limit (int): The maximum number of events to return. 
message_filter (Optional[Dict[Any, Any]]): A filter dict that should be used for this room messages request. """ query_parameters = { "access_token": access_token, "from": start, "limit": limit, } if end: query_parameters["to"] = end if isinstance(direction, str): if direction in ("b", "back"): direction = MessageDirection.back elif direction in ("f", "front"): direction = MessageDirection.front else: raise ValueError("Invalid direction") if direction is MessageDirection.front: query_parameters["dir"] = "f" else: query_parameters["dir"] = "b" if isinstance(message_filter, dict): filter_json = json.dumps(message_filter, separators=(",", ":")) query_parameters["filter"] = filter_json path = ["rooms", room_id, "messages"] return "GET", Api._build_path(path, query_parameters) @staticmethod def keys_upload( access_token: str, key_dict: Dict[str, Any] ) -> Tuple[str, str, str]: """Publish end-to-end encryption keys. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. key_dict (Dict): The dictionary containing device and one-time keys that will be published to the server. """ query_parameters = {"access_token": access_token} body = key_dict path = ["keys", "upload"] return ("POST", Api._build_path(path, query_parameters), Api.to_json(body)) @staticmethod def keys_query( access_token: str, user_set: Iterable[str], token: Optional[str] = None ) -> Tuple[str, str, str]: """Query the current devices and identity keys for the given users. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. user_set (Set[str]): The users for which the keys should be downloaded. token (Optional[str]): If the client is fetching keys as a result of a device update received in a sync request, this should be the 'since' token of that sync request, or any later sync token. 
""" query_parameters = {"access_token": access_token} path = ["keys", "query"] content: Dict[str, Dict[str, List]] = { "device_keys": {user: [] for user in user_set} } if token: content["token"] = token # type: ignore return ("POST", Api._build_path(path, query_parameters), Api.to_json(content)) @staticmethod def keys_claim( access_token: str, user_set: Dict[str, Iterable[str]] ) -> Tuple[str, str, str]: """Claim one-time keys for use in Olm pre-key messages. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. user_set (Dict[str, List[str]]): The users and devices for which to claim one-time keys to be claimed. A map from user ID, to a list of device IDs. """ query_parameters = {"access_token": access_token} path = ["keys", "claim"] payload: DefaultDict[str, Dict[str, str]] = defaultdict(dict) for user_id, device_list in user_set.items(): for device_id in device_list: payload[user_id][device_id] = "signed_curve25519" content = {"one_time_keys": payload} return ("POST", Api._build_path(path, query_parameters), Api.to_json(content)) @staticmethod def to_device( access_token: str, event_type: str, content: Dict[Any, Any], tx_id: Union[str, UUID], ) -> Tuple[str, str, str]: r"""Send to-device events to a set of client devices. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. event_type (str): The type of the event which will be sent. content (Dict): The messages to send. A map from user ID, to a map from device ID to message body. The device ID may also be \*, meaning all known devices for the user. tx_id (str): The transaction ID for this event. 
""" query_parameters = {"access_token": access_token} path = ["sendToDevice", event_type, str(tx_id)] return ("PUT", Api._build_path(path, query_parameters), Api.to_json(content)) @staticmethod def devices(access_token: str) -> Tuple[str, str]: """Get the list of devices for the current user. Returns the HTTP method and HTTP path for the request. Args: access_token (str): The access token to be used with the request. """ query_parameters = {"access_token": access_token} path = ["devices"] return "GET", Api._build_path(path, query_parameters) @staticmethod def update_device( access_token: str, device_id: str, content: Dict[str, str] ) -> Tuple[str, str, str]: """Update the metadata of the given device. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. device_id (str): The device for which the metadata will be updated. content (Dict): A dictionary of metadata values that will be updated for the device. """ query_parameters = {"access_token": access_token} path = ["devices", device_id] return ("PUT", Api._build_path(path, query_parameters), Api.to_json(content)) @staticmethod def delete_devices( access_token: str, devices: List[str], auth_dict: Optional[Dict[str, str]] = None, ) -> Tuple[str, str, str]: """Delete a device. This API endpoint uses the User-Interactive Authentication API. This tells the server to delete the given devices and invalidate their associated access tokens. Should first be called with no additional authentication information. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. devices (List[str]): A list of devices which will be deleted. auth_dict (Dict): Additional authentication information for the user-interactive authentication API. 
""" query_parameters = {"access_token": access_token} path = ["delete_devices"] content: Dict[str, Any] = {"devices": devices} if auth_dict: content["auth"] = auth_dict return ("POST", Api._build_path(path, query_parameters), Api.to_json(content)) @staticmethod def joined_members(access_token: str, room_id: str) -> Tuple[str, str]: """Get the list of joined members for a room. Returns the HTTP method and HTTP path for the request. Args: access_token (str): The access token to be used with the request. room_id (str): Room id of the room where the user is typing. """ query_parameters = {"access_token": access_token} path = ["rooms", room_id, "joined_members"] return "GET", Api._build_path(path, query_parameters) @staticmethod def joined_rooms(access_token: str) -> Tuple[str, str]: """Get the list of joined rooms for the logged in account. Returns the HTTP method and HTTP path for the request. Args: access_token (str): The access token to be used with the request. """ query_parameters = {"access_token": access_token} path = ["joined_rooms"] return "GET", Api._build_path(path, query_parameters) @staticmethod def room_resolve_alias(room_alias: str) -> Tuple[str, str]: """Resolve a room alias to a room ID. Returns the HTTP method and HTTP path for the request. Args: room_alias (str): The alias to resolve """ path = ["directory", "room", room_alias] return "GET", Api._build_path(path) @staticmethod def room_delete_alias(access_token: str, room_alias: str) -> Tuple[str, str]: """Delete a room alias. Returns the HTTP method and HTTP path for the request. Args: access_token (str): The access token to be used with the request. room_alias (str): The alias to delete """ query_parameters = {"access_token": access_token} path = ["directory", "room", room_alias] return "DELETE", Api._build_path(path, query_parameters) @staticmethod def room_put_alias( access_token: str, room_alias: str, room_id: str ) -> Tuple[str, str, str]: """Add a room alias. 
        Returns the HTTP method, HTTP path and data for the request.

        Args:
            access_token (str): The access token to be used with the request.
            room_alias (str): The alias to add
            room_id (str): The room ID to map to
        """
        query_parameters = {"access_token": access_token}

        path = ["directory", "room", room_alias]

        body = {
            "room_id": room_id,
        }

        return "PUT", Api._build_path(path, query_parameters), Api.to_json(body)

    @staticmethod
    def room_get_visibility(room_id: str) -> Tuple[str, str]:
        """Get visibility of a room in the directory.

        Returns the HTTP method and HTTP path for the request.

        Args:
            room_id (str): The room ID to query.
        """
        # No access token: directory visibility is a publicly readable
        # endpoint, so the request is unauthenticated.
        path = ["directory", "list", "room", room_id]

        return "GET", Api._build_path(path)

    @staticmethod
    def room_typing(
        access_token: str,
        room_id: str,
        user_id: str,
        typing_state: bool = True,
        timeout: int = 30000,
    ) -> Tuple[str, str, str]:
        """Send a typing notice to the server.

        This tells the server that the user is typing for the next N
        milliseconds or that the user has stopped typing.

        Returns the HTTP method, HTTP path and data for the request.

        Args:
            access_token (str): The access token to be used with the request.
            room_id (str): Room id of the room where the user is typing.
            user_id (str): The user who has started to type.
            typing_state (bool): A flag representing whether the user started
                or stopped typing
            timeout (int): For how long should the new typing notice be
                valid for in milliseconds.
        """
        query_parameters = {"access_token": access_token}
        path = ["rooms", room_id, "typing", user_id]

        content = {"typing": typing_state}

        if typing_state:
            # A timeout only makes sense while the user is typing; it is
            # omitted from "stopped typing" notices.
            content["timeout"] = timeout  # type: ignore

        return ("PUT", Api._build_path(path, query_parameters), Api.to_json(content))

    @staticmethod
    def update_receipt_marker(
        access_token: str,
        room_id: str,
        event_id: str,
        receipt_type: str = "m.read",
    ) -> Tuple[str, str]:
        """Update the marker of given `receipt_type` to specified `event_id`.

        Returns the HTTP method and HTTP path for the request.
Args: access_token (str): The access token to be used with the request. room_id (str): Room id of the room where the marker should be updated event_id (str): The event ID the read marker should be located at receipt_type (str): The type of receipt to send. Currently, only `m.read` is supported by the Matrix specification. """ query_parameters = {"access_token": access_token} path = ["rooms", room_id, "receipt", receipt_type, event_id] return ("POST", Api._build_path(path, query_parameters)) @staticmethod def room_read_markers( access_token: str, room_id: str, fully_read_event: str, read_event: Optional[str] = None, ) -> Tuple[str, str, str]: """Update fully read marker and optionally read marker for a room. This sets the position of the read marker for a given room, and optionally the read receipt's location. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. room_id (str): Room id of the room where the read markers should be updated fully_read_event (str): The event ID the read marker should be located at. read_event (Optional[str]): The event ID to set the read receipt location at. """ query_parameters = {"access_token": access_token} path = ["rooms", room_id, "read_markers"] content = {"m.fully_read": fully_read_event} if read_event: content["m.read"] = read_event return ("POST", Api._build_path(path, query_parameters), Api.to_json(content)) @staticmethod def content_repository_config(access_token: str) -> Tuple[str, str]: """Get the content repository configuration, such as upload limits. Returns the HTTP method and HTTP path for the request. Args: access_token (str): The access token to be used with the request. 
""" query_parameters = {"access_token": access_token} path = ["config"] return ( "GET", Api._build_path(path, query_parameters, MATRIX_MEDIA_API_PATH), ) @staticmethod def upload( access_token: str, filename: Optional[str] = None, ) -> Tuple[str, str, str]: """Upload a file's content to the content repository. Returns the HTTP method, HTTP path and empty data for the request. The real data should be read from the file that should be uploaded. Note: This requests also requires the Content-Type http header to be set. Args: access_token (str): The access token to be used with the request. filename (str): The name of the file being uploaded """ query_parameters = {"access_token": access_token} path = ["upload"] if filename: query_parameters["filename"] = filename return ( "POST", Api._build_path(path, query_parameters, MATRIX_MEDIA_API_PATH), "", ) @staticmethod def download( server_name: str, media_id: str, filename: Optional[str] = None, allow_remote: bool = True, file: Optional[os.PathLike] = None, ) -> Tuple[str, str]: """Get the content of a file from the content repository. Returns the HTTP method and HTTP path for the request. Args: server_name (str): The server name from the mxc:// URI. media_id (str): The media ID from the mxc:// URI. filename (str, optional): A filename to be returned in the response by the server. If None (default), the original name of the file will be returned instead, if there is one. allow_remote (bool): Indicates to the server that it should not attempt to fetch the media if it is deemed remote. This is to prevent routing loops where the server contacts itself. file (os.PathLike): The file to stream the downloaded content to. 
""" query_parameters = { "allow_remote": "true" if allow_remote else "false", } end = "" if filename: end = filename path = ["download", server_name, media_id, end] return ("GET", Api._build_path(path, query_parameters, MATRIX_MEDIA_API_PATH)) @staticmethod def thumbnail( server_name: str, media_id: str, width: int, height: int, method=ResizingMethod.scale, # ŧype: ResizingMethod allow_remote: bool = True, ) -> Tuple[str, str]: """Get the thumbnail of a file from the content repository. Returns the HTTP method and HTTP path for the request. Note: The actual thumbnail may be larger than the size specified. Args: server_name (str): The server name from the mxc:// URI. media_id (str): The media ID from the mxc:// URI. width (int): The desired width of the thumbnail. height (int): The desired height of the thumbnail. method (ResizingMethod): The desired resizing method. allow_remote (bool): Indicates to the server that it should not attempt to fetch the media if it is deemed remote. This is to prevent routing loops where the server contacts itself. """ query_parameters = { "width": width, "height": height, "method": method.value, "allow_remote": "true" if allow_remote else "false", } path = ["thumbnail", server_name, media_id] return ("GET", Api._build_path(path, query_parameters, MATRIX_MEDIA_API_PATH)) @staticmethod def profile_get( user_id: str, access_token: Optional[str] = None ) -> Tuple[str, str]: """Get the combined profile information for a user. Returns the HTTP method and HTTP path for the request. Args: user_id (str): User id to get the profile for. access_token (str): The access token to be used with the request. If omitted, an unauthenticated request is performed. 
""" assert user_id query_parameters = {} if access_token is not None: query_parameters["access_token"] = access_token path = ["profile", user_id] return "GET", Api._build_path(path, query_parameters) @staticmethod def profile_get_displayname( user_id: str, access_token: Optional[str] = None ) -> Tuple[str, str]: """Get display name. Returns the HTTP method and HTTP path for the request. Args: user_id (str): User id to get display name for. access_token (str): The access token to be used with the request. If omitted, an unauthenticated request is performed. """ query_parameters = {} if access_token is not None: query_parameters["access_token"] = access_token path = ["profile", user_id, "displayname"] return "GET", Api._build_path(path, query_parameters) @staticmethod def profile_set_displayname( access_token: str, user_id: str, display_name: str ) -> Tuple[str, str, str]: """Set display name. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. user_id (str): User id to set display name for. display_name (str): Display name for user to set. """ query_parameters = {"access_token": access_token} content = {"displayname": display_name} path = ["profile", user_id, "displayname"] return ("PUT", Api._build_path(path, query_parameters), Api.to_json(content)) @staticmethod def profile_get_avatar( user_id: str, access_token: Optional[str] = None ) -> Tuple[str, str]: """Get avatar URL. Returns the HTTP method and HTTP path for the request. Args: user_id (str): User id to get avatar for. access_token (str): The access token to be used with the request. If omitted, an unauthenticated request is performed. 
""" query_parameters = {} if access_token is not None: query_parameters["access_token"] = access_token path = ["profile", user_id, "avatar_url"] return "GET", Api._build_path(path, query_parameters) @staticmethod def profile_set_avatar( access_token: str, user_id: str, avatar_url: str ) -> Tuple[str, str, str]: """Set avatar url. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. user_id (str): User id to set display name for. avatar_url (str): matrix content URI of the avatar to set. """ query_parameters = {"access_token": access_token} content = {"avatar_url": avatar_url} path = ["profile", user_id, "avatar_url"] return ("PUT", Api._build_path(path, query_parameters), Api.to_json(content)) @staticmethod def get_presence(access_token: str, user_id: str) -> Tuple[str, str]: """Get the given user's presence state. Returns the HTTP method and HTTP path for the request. Args: access_token (str): The access token to be used with the request. user_id (str): User id whose presence state to get. """ query_parameters = {"access_token": access_token} path = ["presence", user_id, "status"] return ( "GET", Api._build_path(path, query_parameters), ) @staticmethod def set_presence( access_token: str, user_id: str, presence: str, status_msg: Optional[str] = None ): """This API sets the given user's presence state. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. user_id (str): User id whose presence state to get. presence (str): The new presence state. status_msg (str, optional): The status message to attach to this state. 
""" query_parameters = {"access_token": access_token} content = {"presence": presence} if status_msg: content["status_msg"] = status_msg path = ["presence", user_id, "status"] return ("PUT", Api._build_path(path, query_parameters), Api.to_json(content)) @staticmethod def whoami(access_token: str) -> Tuple[str, str]: """Get information about the owner of a given access token. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. """ query_parameters = {"access_token": access_token} path = ["account", "whoami"] return "GET", Api._build_path(path, query_parameters) @staticmethod def room_context( access_token: str, room_id: str, event_id: str, limit: Optional[int] = None ) -> Tuple[str, str]: """Fetch a number of events that happened before and after an event. This allows clients to get the context surrounding an event. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. room_id (str): The room_id of the room that contains the event and its context. event_id (str): The event_id of the event that we wish to get the context for. limit(int, optional): The maximum number of events to request. """ query_parameters = {"access_token": access_token} if limit: query_parameters["limit"] = limit path = ["rooms", room_id, "context", event_id] return "GET", Api._build_path(path, query_parameters) @staticmethod def upload_filter( access_token: str, user_id: str, event_fields: Optional[List[str]] = None, event_format: EventFormat = EventFormat.client, presence: Optional[Dict[str, Any]] = None, account_data: Optional[Dict[str, Any]] = None, room: Optional[Dict[str, Any]] = None, ) -> Tuple[str, str, str]: """Upload a new filter definition to the homeserver. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. 
user_id (str): ID of the user uploading the filter. event_fields (Optional[List[str]]): List of event fields to include. If this list is absent then all fields are included. The entries may include '.' characters to indicate sub-fields. A literal '.' character in a field name may be escaped using a '\'. event_format (EventFormat): The format to use for events. presence (Dict[str, Any]): The presence updates to include. The dict corresponds to the `EventFilter` type described in https://matrix.org/docs/spec/client_server/latest#id240 account_data (Dict[str, Any]): The user account data that isn't associated with rooms to include. The dict corresponds to the `EventFilter` type described in https://matrix.org/docs/spec/client_server/latest#id240 room (Dict[str, Any]): Filters to be applied to room data. The dict corresponds to the `RoomFilter` type described in https://matrix.org/docs/spec/client_server/latest#id240 """ path = ["user", user_id, "filter"] query_parameters = {"access_token": access_token} content = { "event_fields": event_fields, "event_format": event_format.value, "presence": presence, "account_data": account_data, "room": room, } content = {k: v for k, v in content.items() if v is not None} return ( "POST", Api._build_path(path, query_parameters), Api.to_json(content), ) @staticmethod def get_openid_token(access_token: str, user_id: str): """Gets an OpenID token object that the requester may supply to another service to verify their identity in matrix. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. 
user_id (str): The user who requested the OpenID token """ path = ["user", user_id, "openid", "request_token"] query_parameters = {"access_token": access_token} body = {} return ( "POST", Api._build_path(path, query_parameters), Api.to_json(body), ) @staticmethod def set_pushrule( access_token: str, scope: str, kind: PushRuleKind, rule_id: str, before: Optional[str] = None, after: Optional[str] = None, actions: Sequence[PushAction] = (), conditions: Optional[Sequence[PushCondition]] = None, pattern: Optional[str] = None, ) -> Tuple[str, str, str]: """Create or modify an existing user-created push rule. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. scope (str): The scope of this rule, e.g. ``"global"``. Homeservers currently only process ``global`` rules for event matching, while ``device`` rules are a planned feature. It is up to clients to interpret any other scope name. kind (PushRuleKind): The kind of rule. rule_id (str): The identifier of the rule. Must be unique within its scope and kind. For rules of ``room`` kind, this is the room ID to match for. For rules of ``sender`` kind, this is the user ID to match. before (Optional[str]): Position this rule before the one matching the given rule ID. The rule ID cannot belong to a predefined server rule. ``before`` and ``after`` cannot be both specified. after (Optional[str]): Position this rule after the one matching the given rule ID. The rule ID cannot belong to a predefined server rule. ``before`` and ``after`` cannot be both specified. actions (Sequence[PushAction]): Actions to perform when the conditions for this rule are met. The given actions replace the existing ones. conditions (Sequence[PushCondition]): Event conditions that must hold true for the rule to apply to that event. A rule with no conditions always hold true. Only applicable to ``underride`` and ``override`` rules. 
pattern (Optional[str]): Glob-style pattern to match against for the event's content. Only applicable to ``content`` rules. """ path = ["pushrules", scope, kind.value, rule_id] query_parameters = {"access_token": access_token} content: Dict[str, Any] = {"actions": [a.as_value for a in actions]} if before is not None and after is not None: raise TypeError("before and after cannot be both specified") elif before is not None: query_parameters["before"] = before elif after is not None: query_parameters["after"] = after if pattern is not None: if kind != PushRuleKind.content: raise TypeError("pattern can only be set for content rules") content["pattern"] = pattern if conditions is not None: if kind not in (PushRuleKind.override, PushRuleKind.underride): raise TypeError( "conditions can only be set for override/underride rules", ) content["conditions"] = [c.as_value for c in conditions] return ( "PUT", Api._build_path(path, query_parameters), Api.to_json(content), ) @staticmethod def delete_pushrule( access_token: str, scope: str, kind: PushRuleKind, rule_id: str, ) -> Tuple[str, str]: """Delete an existing user-created push rule. Returns the HTTP method and HTTP path for the request. Args: access_token (str): The access token to be used with the request. scope (str): The scope of this rule, e.g. ``"global"``. kind (PushRuleKind): The kind of rule. rule_id (str): The identifier of the rule. Must be unique within its scope and kind. """ path = ["pushrules", scope, kind.value, rule_id] query_parameters = {"access_token": access_token} return ("DELETE", Api._build_path(path, query_parameters)) @staticmethod def enable_pushrule( access_token: str, scope: str, kind: PushRuleKind, rule_id: str, enable: bool, ) -> Tuple[str, str, str]: """Enable or disable an existing built-in or user-created push rule. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. scope (str): The scope of this rule, e.g. 
``"global"``. kind (PushRuleKind): The kind of rule. rule_id (str): The identifier of the rule. Must be unique within its scope and kind. enable (bool): Whether to enable or disable the rule. """ path = ["pushrules", scope, kind.value, rule_id, "enabled"] query_parameters = {"access_token": access_token} content = {"enabled": enable} return ( "PUT", Api._build_path(path, query_parameters), Api.to_json(content), ) @staticmethod def set_pushrule_actions( access_token: str, scope: str, kind: PushRuleKind, rule_id: str, actions: Sequence[PushAction], ) -> Tuple[str, str, str]: """Set the actions for an existing built-in or user-created push rule. Unlike ``set_pushrule``, this method can edit built-in server rules. Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. scope (str): The scope of this rule, e.g. ``"global"``. kind (PushRuleKind): The kind of rule. rule_id (str): The identifier of the rule. Must be unique within its scope and kind. actions (Sequence[PushAction]): Actions to perform when the conditions for this rule are met. The given actions replace the existing ones. """ path = ["pushrules", scope, kind.value, rule_id, "actions"] query_parameters = {"access_token": access_token} content = {"actions": [a.as_value for a in actions]} return ( "PUT", Api._build_path(path, query_parameters), Api.to_json(content), ) @staticmethod def delete_room_alias(access_token: str, alias: str) -> Tuple[str, str]: """Delete an room alias Returns the HTTP method and HTTP path for the request. Args: access_token (str): The access token to be used with the request. 
alias (str): The room alias """ query_parameters = {"access_token": access_token} path = ["directory", "room", alias] return ("DELETE", Api._build_path(path, query_parameters)) @staticmethod def put_room_alias( access_token: str, alias: str, room_id: str ) -> Tuple[str, str, str]: """Add an room alias Returns the HTTP method, HTTP path and data for the request. Args: access_token (str): The access token to be used with the request. alias (str): The room alias room_id (str): The room to point to """ query_parameters = {"access_token": access_token} path = ["directory", "room", alias] content = {} content["room_id"] = room_id return ( "PUT", Api._build_path(path, query_parameters), Api.to_json(content), ) matrix-nio-0.24.0/nio/client/000077500000000000000000000000001455215747700157505ustar00rootroot00000000000000matrix-nio-0.24.0/nio/client/__init__.py000066400000000000000000000003021455215747700200540ustar00rootroot00000000000000import sys from .base_client import Client, ClientConfig from .http_client import HttpClient, RequestInfo, TransportType from .async_client import AsyncClient, AsyncClientConfig, DataProvider matrix-nio-0.24.0/nio/client/async_client.py000066400000000000000000004263061455215747700210100ustar00rootroot00000000000000# Copyright © 2018, 2019 Damir Jelić # Copyright © 2020-2021 Famedly GmbH # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. import asyncio import io import json import logging import os import warnings from asyncio import Event as AsyncioEvent from dataclasses import dataclass from functools import partial, wraps from json.decoder import JSONDecodeError from pathlib import Path from typing import ( Any, Callable, Coroutine, Dict, Iterable, List, Optional, Sequence, Set, Tuple, Type, Union, ) from urllib.parse import urlparse from uuid import UUID, uuid4 import aiofiles from aiofiles.threadpool.binary import AsyncBufferedReader from aiofiles.threadpool.text import AsyncTextIOWrapper from aiohttp import ( ClientResponse, ClientSession, ClientTimeout, ContentTypeError, TraceConfig, ) from aiohttp.client_exceptions import ClientConnectionError from aiohttp.connector import Connection from aiohttp_socks import ProxyConnector from ..api import ( Api, EventFormat, MessageDirection, PushRuleKind, ResizingMethod, RoomPreset, RoomVisibility, _FilterT, ) from ..crypto import ( AsyncDataT, OlmDevice, async_encrypt_attachment, async_generator_from_data, ) from ..event_builders import ToDeviceMessage from ..events import ( BadEventType, Event, MegolmEvent, PushAction, PushCondition, RoomKeyRequest, RoomKeyRequestCancellation, ToDeviceEvent, ) from ..exceptions import ( LocalProtocolError, TransferCancelledError, ) from ..monitors import TransferMonitor from ..responses import ( ContentRepositoryConfigError, ContentRepositoryConfigResponse, DeleteDevicesAuthResponse, DeleteDevicesError, DeleteDevicesResponse, DeletePushRuleError, DeletePushRuleResponse, DevicesError, DevicesResponse, DirectRoomsErrorResponse, DirectRoomsResponse, DiscoveryInfoError, DiscoveryInfoResponse, 
DiskDownloadResponse, DownloadError, EnablePushRuleError, EnablePushRuleResponse, ErrorResponse, FileResponse, GetOpenIDTokenError, GetOpenIDTokenResponse, JoinedMembersError, JoinedMembersResponse, JoinedRoomsError, JoinedRoomsResponse, JoinError, JoinResponse, KeysClaimError, KeysClaimResponse, KeysQueryError, KeysQueryResponse, KeysUploadError, KeysUploadResponse, LoginError, LoginInfoError, LoginInfoResponse, LoginResponse, LogoutError, LogoutResponse, MemoryDownloadResponse, PresenceGetError, PresenceGetResponse, PresenceSetError, PresenceSetResponse, ProfileGetAvatarError, ProfileGetAvatarResponse, ProfileGetDisplayNameError, ProfileGetDisplayNameResponse, ProfileGetError, ProfileGetResponse, ProfileSetAvatarError, ProfileSetAvatarResponse, ProfileSetDisplayNameError, ProfileSetDisplayNameResponse, RegisterErrorResponse, RegisterInteractiveError, RegisterInteractiveResponse, RegisterResponse, Response, RoomBanError, RoomBanResponse, RoomContextError, RoomContextResponse, RoomCreateError, RoomCreateResponse, RoomDeleteAliasError, RoomDeleteAliasResponse, RoomForgetError, RoomForgetResponse, RoomGetEventError, RoomGetEventResponse, RoomGetStateError, RoomGetStateEventError, RoomGetStateEventResponse, RoomGetStateResponse, RoomGetVisibilityError, RoomGetVisibilityResponse, RoomInviteError, RoomInviteResponse, RoomKeyRequestError, RoomKeyRequestResponse, RoomKickError, RoomKickResponse, RoomKnockError, RoomKnockResponse, RoomLeaveError, RoomLeaveResponse, RoomMessagesError, RoomMessagesResponse, RoomPutAliasError, RoomPutAliasResponse, RoomPutStateError, RoomPutStateResponse, RoomReadMarkersResponse, RoomRedactError, RoomRedactResponse, RoomResolveAliasError, RoomResolveAliasResponse, RoomSendError, RoomSendResponse, RoomTypingError, RoomTypingResponse, RoomUnbanResponse, RoomUpdateAliasError, RoomUpdateAliasResponse, RoomUpgradeError, RoomUpgradeResponse, SetPushRuleActionsError, SetPushRuleActionsResponse, SetPushRuleError, SetPushRuleResponse, 
ShareGroupSessionError, ShareGroupSessionResponse, SpaceGetHierarchyError, SpaceGetHierarchyResponse, SyncError, SyncResponse, ThumbnailError, ThumbnailResponse, ToDeviceError, ToDeviceResponse, UpdateDeviceError, UpdateDeviceResponse, UpdateReceiptMarkerResponse, UploadError, UploadFilterError, UploadFilterResponse, UploadResponse, WhoamiError, WhoamiResponse, ) from . import Client, ClientConfig from .base_client import ClientCallback, logged_in_async, store_loaded _ShareGroupSessionT = Union[ShareGroupSessionError, ShareGroupSessionResponse] _ProfileGetDisplayNameT = Union[ ProfileGetDisplayNameResponse, ProfileGetDisplayNameError ] _ProfileSetDisplayNameT = Union[ ProfileSetDisplayNameResponse, ProfileSetDisplayNameError ] DataProvider = Callable[[int, int], AsyncDataT] SynchronousFile = ( io.TextIOBase, io.BufferedReader, io.BufferedRandom, io.BytesIO, io.FileIO, ) SynchronousFileType = Union[ io.TextIOBase, io.BufferedReader, io.BufferedRandom, io.BytesIO, io.FileIO, ] AsyncFile = (AsyncBufferedReader, AsyncTextIOWrapper) AsyncFileType = Union[AsyncBufferedReader, AsyncTextIOWrapper] logger = logging.getLogger(__name__) async def on_request_chunk_sent(session, context, params): """TraceConfig callback to run when a chunk is sent for client uploads.""" context_obj = context.trace_request_ctx if isinstance(context_obj, TransferMonitor): context_obj.transferred += len(params.chunk) async def connect_wrapper(self, *args, **kwargs) -> Connection: connection = await type(self).connect(self, *args, **kwargs) connection.transport.set_write_buffer_limits(16 * 1024) return connection def client_session(func): """Ensure that the Async client has a valid client session.""" @wraps(func) async def wrapper(self, *args, **kwargs): if not self.client_session: trace = TraceConfig() trace.on_request_chunk_sent.append(on_request_chunk_sent) connector = ProxyConnector.from_url(self.proxy) if self.proxy else None self.client_session = ClientSession( 
timeout=ClientTimeout(total=self.config.request_timeout), trace_configs=[trace], connector=connector, ) self.client_session.connector.connect = partial( connect_wrapper, self.client_session.connector, ) return await func(self, *args, **kwargs) return wrapper @dataclass(frozen=True) class AsyncClientConfig(ClientConfig): """Async nio client configuration. Attributes: max_limit_exceeded (int, optional): How many 429 (Too many requests) errors can a request encounter before giving up and returning an ErrorResponse. Default is None for unlimited. max_timeouts (int, optional): How many timeout connection errors can a request encounter before giving up and raising the error: a ClientConnectionError, TimeoutError, or asyncio.TimeoutError. Default is None for unlimited. backoff_factor (float): A backoff factor to apply between retries for timeouts, starting from the second try. nio will sleep for `backoff_factor * (2 ** (total_retries - 1))` seconds. For example, with the default backoff_factor of 0.1, nio will sleep for 0.0, 0.2, 0.4, ... seconds between retries. max_timeout_retry_wait_time (float): The maximum time in seconds to wait between retries for timeouts, by default 60. request_timeout (float): How many seconds a request has to finish, before it is retried or raise an `asycio.TimeoutError` depending on `max_timeouts`. Defaults to 60 seconds, and can be disabled with `0`. `AsyncClient.sync()` overrides this option with its `timeout` argument. The `download()`, `thumbnail()` and `upload()` methods ignore this option and use `0`. io_chunk_size (int): The size (in bytes) of the chunks to read from the IO streams when saving files to disk. Defaults to 64 KiB. """ max_limit_exceeded: Optional[int] = None max_timeouts: Optional[int] = None backoff_factor: float = 0.1 max_timeout_retry_wait_time: float = 60 request_timeout: float = 60 io_chunk_size: int = 64 * 1024 class AsyncClient(Client): """An async IO matrix client. 
    Args:
        homeserver (str): The URL of the homeserver which we want to connect
            to.
        user (str, optional): The user which will be used when we log in to the
            homeserver.
        device_id (str, optional): An unique identifier that distinguishes
            this client instance. If not set the server will provide one after
            log in.
        store_path (str, optional): The directory that should be used for state
            storage.
        config (AsyncClientConfig, optional): Configuration for the client.
        ssl (bool/ssl.SSLContext, optional): SSL validation mode. None for
            default SSL check (ssl.create_default_context() is used), False
            for skip SSL certificate validation connection.
        proxy (str, optional): The proxy that should be used for the HTTP
            connection. Supports SOCKS4(a), SOCKS5, HTTP (tunneling) via an
            URL like e.g. 'socks5://user:password@127.0.0.1:1080'.

    Attributes:
        synced (Event): An asyncio event that is fired every time the client
            successfully syncs with the server. Note, this event will only be
            fired if the `sync_forever()` method is used.

    A simple example can be found below.

    Example:
            >>> client = AsyncClient("https://example.org", "example")
            >>> login_response = loop.run_until_complete(
            >>>     client.login("hunter1")
            >>> )
            >>> asyncio.run(client.sync_forever(30000))

    This example assumes a full sync on every run. If a sync token is provided
    for the `since` parameter of the `sync_forever` method `full_state` should
    be set to `True` as well.

    Example:
            >>> asyncio.run(
            >>>     client.sync_forever(30000, since="token123",
            >>>                         full_state=True)
            >>> )

    The client can also be configured to store and restore the sync token
    automatically. The `full_state` argument should be set to `True` in that
    case as well.

    Example:
            >>> config = ClientConfig(store_sync_tokens=True)
            >>> client = AsyncClient("https://example.org", "example",
            >>>                      store_path="/home/example",
            >>>                      config=config)
            >>> login_response = loop.run_until_complete(
            >>>     client.login("hunter1")
            >>> )
            >>> asyncio.run(client.sync_forever(30000, full_state=True))

    """

    def __init__(
        self,
        homeserver: str,
        user: str = "",
        device_id: Optional[str] = "",
        store_path: Optional[str] = "",
        config: Optional[AsyncClientConfig] = None,
        ssl: Optional[bool] = None,
        proxy: Optional[str] = None,
    ):
        self.homeserver = homeserver
        # Created lazily by the @client_session decorator on first request.
        self.client_session: Optional[ClientSession] = None

        self.ssl = ssl
        self.proxy = proxy

        # Presence state to send along with sync requests, if any.
        self._presence: Optional[str] = None

        self.synced = AsyncioEvent()
        self.response_callbacks: List[ClientCallback] = []

        self.sharing_session: Dict[str, AsyncioEvent] = {}

        is_config = isinstance(config, ClientConfig)
        is_async_config = isinstance(config, AsyncClientConfig)

        # Accept a plain ClientConfig for backwards compatibility, but warn:
        # the async-specific options would otherwise use their defaults.
        if is_config and not is_async_config:
            warnings.warn(
                "Pass an AsyncClientConfig instead of ClientConfig.",
                DeprecationWarning,
            )
            config = AsyncClientConfig(**config.__dict__)

        self.config: AsyncClientConfig = config or AsyncClientConfig()

        super().__init__(user, device_id, store_path, self.config)

    def add_response_callback(
        self,
        func: Coroutine[Any, Any, Response],
        cb_filter: Union[Tuple[Type], Type, None] = None,
    ):
        """Add a coroutine that will be called if a response is received.

        Args:
            func (Coroutine): The coroutine that will be called with the
                response as the argument.

            cb_filter (Type, optional): A type or a tuple of types for which
                the callback should be called.

        Example:

            >>> # A callback that will be called every time our `sync_forever`
            >>> # method successfully syncs with the server.
            >>> async def sync_cb(response):
            ...    print(f"We synced, token: {response.next_batch}")
            ...
            >>> client.add_response_callback(sync_cb, SyncResponse)
            >>> await client.sync_forever(30000)

        """
        cb = ClientCallback(func, cb_filter)
        self.response_callbacks.append(cb)

    async def parse_body(self, transport_response: ClientResponse) -> Dict[Any, Any]:
        """Parse the body of the response.

        Low-level function which is normally only used by other methods of
        this class.

        Args:
            transport_response(ClientResponse): The transport response that
                contains the body of the response.

        Returns a dictionary representing the response.
        """
        try:
            return await transport_response.json()
        except (JSONDecodeError, ContentTypeError):
            try:
                # matrix.org return an incorrect content-type for .well-known
                # API requests, which leads to .text() working but not .json()
                return json.loads(await transport_response.text())
            except (JSONDecodeError, ContentTypeError):
                # Fall through: an empty dict is the error-tolerant result
                # for bodies that are not valid JSON at all.
                pass

        return {}

    async def create_matrix_response(
        self,
        response_class: Type,
        transport_response: ClientResponse,
        data: Optional[Tuple[Any, ...]] = None,
        save_to: Optional[os.PathLike] = None,
    ) -> Response:
        """Transform a transport response into a nio matrix response.

        Low-level function which is normally only used by other methods of
        this class.

        Args:
            response_class (Type): The class that the requests belongs to.

            transport_response (ClientResponse): The underlying transport
                response that contains our response body.

            data (Tuple, optional): Extra data that is required to instantiate
                the response class.

            save_to (PathLike, optional): If set, the ``FileResponse`` body
                will be saved to this file.

        Returns a subclass of `Response` depending on the type of the
        response_class argument.
""" data = data or () content_type = transport_response.content_type is_json = content_type == "application/json" name = None if transport_response.content_disposition: name = transport_response.content_disposition.filename if issubclass(response_class, FileResponse) and is_json: parsed_dict = await self.parse_body(transport_response) resp = response_class.from_data(parsed_dict, content_type, name) elif issubclass(response_class, FileResponse): if not save_to: body = await transport_response.read() else: save_to = Path(save_to) if save_to.is_dir(): save_to = save_to / name async with aiofiles.open(save_to, "wb") as f: async for chunk in transport_response.content.iter_chunked( self.config.io_chunk_size ): await f.write(chunk) body = save_to resp = response_class.from_data(body, content_type, name) elif ( issubclass(response_class, RoomGetStateEventResponse) and transport_response.status == 404 ): parsed_dict = await self.parse_body(transport_response) resp = response_class.create_error(parsed_dict, data[-1]) elif ( transport_response.status == 401 and response_class == DeleteDevicesResponse ): parsed_dict = await self.parse_body(transport_response) resp = DeleteDevicesAuthResponse.from_dict(parsed_dict) else: parsed_dict = await self.parse_body(transport_response) resp = response_class.from_dict(parsed_dict, *data) resp.transport_response = transport_response return resp async def _run_to_device_callbacks(self, event: Union[ToDeviceEvent]): for cb in self.to_device_callbacks: await cb.execute(event) async def _handle_to_device(self, response: SyncResponse): decrypted_to_device = [] for index, to_device_event in enumerate(response.to_device_events): decrypted_event = self._handle_decrypt_to_device(to_device_event) if decrypted_event: decrypted_to_device.append((index, decrypted_event)) to_device_event = decrypted_event # Do not pass room key request events to our user here. 
            # We don't want to notify them about requests that get
            # automatically handled or canceled right away.
            if isinstance(
                to_device_event, (RoomKeyRequest, RoomKeyRequestCancellation)
            ):
                continue

            await self._run_to_device_callbacks(to_device_event)

        self._replace_decrypted_to_device(decrypted_to_device, response)

    async def _handle_invited_rooms(self, response: SyncResponse):
        # Replay the invite-state events of every invited room through the
        # room objects and the registered event callbacks.
        for room_id, info in response.rooms.invite.items():
            room = self._get_invited_room(room_id)

            for event in info.invite_state:
                room.handle_event(event)

                for cb in self.event_callbacks:
                    await cb.execute(event, room)

    async def _handle_joined_rooms(self, response: SyncResponse) -> None:
        encrypted_rooms: Set[str] = set()

        for room_id, join_info in response.rooms.join.items():
            self._handle_joined_state(room_id, join_info, encrypted_rooms)

            room = self.rooms[room_id]
            decrypted_events: List[Tuple[int, Union[Event, BadEventType]]] = []

            for index, event in enumerate(join_info.timeline.events):
                decrypted_event = self._handle_timeline_event(
                    event, room_id, room, encrypted_rooms
                )

                if decrypted_event:
                    event = decrypted_event
                    decrypted_events.append((index, decrypted_event))

                for cb in self.event_callbacks:
                    await cb.execute(event, room)

            # Replace the Megolm events with decrypted ones
            for index, event in decrypted_events:
                join_info.timeline.events[index] = event

            for event in join_info.ephemeral:
                room.handle_ephemeral_event(event)

                for cb in self.ephemeral_callbacks:
                    await cb.execute(event, room)

            for event in join_info.account_data:
                room.handle_account_data(event)

                for cb in self.room_account_data_callbacks:
                    await cb.execute(event, room)

            if room.encrypted and self.olm is not None:
                self.olm.update_tracked_users(room)

        self.encrypted_rooms.update(encrypted_rooms)

        if self.store:
            self.store.save_encrypted_rooms(encrypted_rooms)

    async def _handle_presence_events(self, response: SyncResponse):
        # Apply each presence event to the matching user in every room that
        # contains them.
        for event in response.presence_events:
            for room_id in self.rooms.keys():
                if event.user_id not in self.rooms[room_id].users:
                    continue
                self.rooms[room_id].users[event.user_id].presence = event.presence
                self.rooms[room_id].users[
                    event.user_id
                ].last_active_ago = event.last_active_ago
                self.rooms[room_id].users[
                    event.user_id
                ].currently_active = event.currently_active
                self.rooms[room_id].users[event.user_id].status_msg = event.status_msg

            for cb in self.presence_callbacks:
                await cb.execute(event)

    async def _handle_global_account_data_events(  # type: ignore
        self,
        response: SyncResponse,
    ) -> None:
        # Forward global (non-room) account-data events to their callbacks.
        for event in response.account_data_events:
            for cb in self.global_account_data_callbacks:
                await cb.execute(event)

    async def _handle_expired_verifications(self):
        # Clear timed-out SAS verifications and notify to-device callbacks.
        expired_verifications = self.olm.clear_verifications()

        for event in expired_verifications:
            for cb in self.to_device_callbacks:
                await cb.execute(event)

    async def _handle_sync(self, response: SyncResponse) -> None:
        # We already received such a sync response, do nothing in that case.
        if self.next_batch == response.next_batch:
            return

        self.next_batch = response.next_batch

        if self.config.store_sync_tokens and self.store:
            self.store.save_sync_token(self.next_batch)

        await self._handle_to_device(response)

        await self._handle_invited_rooms(response)

        await self._handle_joined_rooms(response)

        await self._handle_presence_events(response)

        await self._handle_global_account_data_events(response)

        if self.olm:
            await self._handle_expired_verifications()

        self._handle_olm_events(response)

        await self._collect_key_requests()

    async def _collect_key_requests(self):
        # Surface collected room-key requests to the to-device callbacks.
        events = self.olm.collect_key_requests()
        for event in events:
            await self._run_to_device_callbacks(event)

    async def receive_response(self, response: Response) -> None:
        """Receive a Matrix Response and change the client state accordingly.

        Automatically called for all "high-level" methods of this API (each
        function documents calling it).

        Some responses will get edited for the callers convenience e.g. sync
        responses that contain encrypted messages.
        The encrypted messages will be replaced by decrypted ones if
        decryption is possible.

        Args:
            response (Response): the response that we wish the client to
                handle
        """
        if not isinstance(response, Response):
            raise ValueError("Invalid response received")

        if isinstance(response, SyncResponse):
            await self._handle_sync(response)
        else:
            super().receive_response(response)

    async def get_timeout_retry_wait_time(self, got_timeouts: int) -> float:
        """Return the backoff wait (seconds) for the given timeout count.

        No waiting before the second retry; afterwards exponential backoff
        capped at ``max_timeout_retry_wait_time`` (the exponent is clamped to
        avoid computing a huge intermediate power).
        """
        if got_timeouts < 2:
            return 0.0

        return min(
            self.config.backoff_factor * (2 ** (min(got_timeouts, 1000) - 1)),
            self.config.max_timeout_retry_wait_time,
        )

    async def _send(
        self,
        response_class: Type,
        method: str,
        path: str,
        data: Union[None, str, AsyncDataT] = None,
        response_data: Optional[Tuple[Any, ...]] = None,
        content_type: Optional[str] = None,
        trace_context: Optional[Any] = None,
        data_provider: Optional[DataProvider] = None,
        timeout: Optional[float] = None,
        content_length: Optional[int] = None,
        save_to: Optional[os.PathLike] = None,
    ):
        # Send a request, retrying on rate limits (429) and connection
        # timeouts within the limits configured in AsyncClientConfig, then
        # feed the parsed response through receive_response().
        headers = (
            {"Content-Type": content_type}
            if content_type
            else {"Content-Type": "application/json"}
        )

        if content_length is not None:
            headers["Content-Length"] = str(content_length)

        if self.config.custom_headers is not None:
            headers.update(self.config.custom_headers)

        got_429 = 0
        max_429 = self.config.max_limit_exceeded

        got_timeouts = 0
        max_timeouts = self.config.max_timeouts

        while True:
            if data_provider:
                # mypy expects an "Awaitable[Any]" but data_provider is a
                # method generated during runtime that may or may not be
                # Awaitable. The actual type is a union of the types that we
                # can receive from reading files.
                data = await data_provider(got_429, got_timeouts)  # type: ignore

            try:
                transport_resp = await self.send(
                    method,
                    path,
                    data,
                    headers,
                    trace_context,
                    timeout,
                )

                resp = await self.create_matrix_response(
                    response_class=response_class,
                    transport_response=transport_resp,
                    data=response_data,
                    save_to=save_to,
                )

                if transport_resp.status == 429 or (
                    isinstance(resp, ErrorResponse)
                    and resp.status_code in ("M_LIMIT_EXCEEDED", 429)
                ):
                    got_429 += 1

                    if max_429 is not None and got_429 > max_429:
                        # Give up and return the rate-limit error response.
                        break

                    await self.run_response_callbacks([resp])

                    # Honour the server-suggested delay; fall back to 5s.
                    retry_after_ms = getattr(resp, "retry_after_ms", 0) or 5000
                    logger.warning(
                        "Got 429 response (ratelimited), sleeping for %dms",
                        retry_after_ms,
                    )
                    await asyncio.sleep(retry_after_ms / 1000)
                else:
                    break

            except (ClientConnectionError, TimeoutError, asyncio.TimeoutError):
                got_timeouts += 1

                if max_timeouts is not None and got_timeouts > max_timeouts:
                    raise

                wait = await self.get_timeout_retry_wait_time(got_timeouts)
                logger.warning("Timed out, sleeping for %ds", wait)
                await asyncio.sleep(wait)

        await self.receive_response(resp)
        return resp

    @client_session
    async def send(
        self,
        method: str,
        path: str,
        data: Union[None, str, AsyncDataT] = None,
        headers: Optional[Dict[str, str]] = None,
        trace_context: Optional[Any] = None,
        timeout: Optional[float] = None,
    ) -> ClientResponse:
        """Send a request to the homeserver.

        This function does not call receive_response().

        Args:
            method (str): The request method that should be used. One of get,
                post, put, delete.
            path (str): The URL path of the request.
            data (str, optional): Data that will be posted with the request.
            headers (Dict[str,str] , optional): Additional request headers that
                should be used with the request.
            trace_context (Any, optional): An object to use for the
                ClientSession TraceConfig context
            timeout (int, optional): How many seconds the request has before
                raising `asyncio.TimeoutError`.
                Overrides `AsyncClient.config.request_timeout` if not `None`.
""" assert self.client_session return await self.client_session.request( method, self.homeserver + path, data=data, ssl=self.ssl, headers=headers, trace_request_ctx=trace_context, timeout=self.config.request_timeout if timeout is None else timeout, ) async def mxc_to_http( self, mxc: str, homeserver: Optional[str] = None, ) -> Optional[str]: """Convert a matrix content URI to a HTTP URI.""" return Api.mxc_to_http(mxc, homeserver or self.homeserver) async def login_raw( self, auth_dict: Dict[str, Any] ) -> Union[LoginResponse, LoginError]: """Login to the homeserver using a raw dictionary. Calls receive_response() to update the client state if necessary. Args: auth_dict (Dict[str, Any]): The auth dictionary. See the example below and here https://matrix.org/docs/spec/client_server/r0.6.0#authentication-types for detailed documentation Example: >>> auth_dict = { >>> "type": "m.login.password", >>> "identifier": { >>> "type": "m.id.thirdparty", >>> "medium": "email", >>> "address": "testemail@mail.org" >>> }, >>> "password": "PASSWORDABCD", >>> "initial_device_display_name": "Test user" >>> } Returns either a `LoginResponse` if the request was successful or a `LoginError` if there was an error with the request. """ if auth_dict is None or auth_dict == {}: raise ValueError("Auth dictionary shall not be empty") method, path, data = Api.login_raw(auth_dict) return await self._send(LoginResponse, method, path, data) async def register_interactive( self, username: str, password: str, auth_dict: Dict[str, Any], device_name: str = "", ) -> Union[RegisterInteractiveResponse, RegisterInteractiveError]: """Makes a request to the register endpoint using the provided auth dictionary. This is allows for interactive registration flows from the homeserver. Calls receive_response() to update the client state if necessary. Args: username (str): Username to register the new user as. password (str): New password for the user. auth_dict (dict): The auth dictionary. 
            device_name (str): A display name to assign to a newly-created
                device. Ignored if the logged in device corresponds to a
                known device.

        Returns a 'RegisterInteractiveResponse' if successful.
        """
        method, path, data = Api.register(
            user=username,
            password=password,
            device_name=device_name,
            device_id=self.device_id,
            auth_dict=auth_dict,
        )

        return await self._send(RegisterInteractiveResponse, method, path, data)

    async def register_with_token(
        self,
        username: str,
        password: str,
        registration_token: str,
        device_name: str = "",
    ) -> Union[RegisterResponse, RegisterErrorResponse]:
        """Registers a user using a registration token.
        See https://spec.matrix.org/latest/client-server-api/#token-authenticated-registration

        Returns either a `RegisterResponse` if the request was successful or
        a `RegisterErrorResponse` if there was an error with the request.
        """
        # must first register without token to get a session token
        resp = await self.register_interactive(
            username,
            password,
            auth_dict={"initial_device_display_name": self.device_id or "matrix-nio"},
        )
        if isinstance(resp, RegisterInteractiveError):
            return RegisterErrorResponse(
                resp.message, resp.status_code, resp.retry_after_ms, resp.soft_logout
            )

        # use session token to register with token
        session_token = resp.session
        resp = await self.register_interactive(
            username,
            password,
            auth_dict={
                "type": "m.login.registration_token",
                "token": registration_token,
                "session": session_token,
            },
        )
        if isinstance(resp, RegisterInteractiveError):
            return RegisterErrorResponse(
                resp.message, resp.status_code, resp.retry_after_ms, resp.soft_logout
            )

        # finally call register with dummy auth with original session token
        # to complete registration and acquire access token
        return await self.register(
            username, password, device_name=device_name, session_token=session_token
        )

    async def register(
        self,
        username: str,
        password: str,
        device_name: str = "",
        session_token: Optional[str] = None,
    ) -> Union[RegisterResponse, RegisterErrorResponse]:
        """Register with
        homeserver.

        Calls receive_response() to update the client state if necessary.

        Args:
            username (str): Username to register the new user as.
            password (str): New password for the user.
            device_name (str, optional): A display name to assign to a
                newly-created device. Ignored if the logged in device
                corresponds to a known device.
            session_token (str, optional): The session token the server
                provided during interactive registration. If not provided,
                the session token is not added to the request's auth dict.

        Returns a 'RegisterResponse' if successful.
        """
        auth_dict = {"type": "m.login.dummy"}
        if session_token is not None:
            auth_dict["session"] = session_token

        method, path, data = Api.register(
            user=username,
            password=password,
            device_name=device_name,
            device_id=self.device_id,
            auth_dict=auth_dict,
        )

        return await self._send(RegisterResponse, method, path, data)

    async def discovery_info(
        self,
    ) -> Union[DiscoveryInfoResponse, DiscoveryInfoError]:
        """Get discovery information about current `AsyncClient.homeserver`.

        Returns either a `DiscoveryInfoResponse` if the request was successful
        or a `DiscoveryInfoError` if there was an error with the request.

        Some homeservers do not redirect requests to their main domain and
        instead require clients to use a specific URL for communication.

        If the domain specified by the `AsyncClient.homeserver` URL
        implements the
        [.well-known](https://matrix.org/docs/spec/client_server/latest#id178),
        discovery mechanism, this method can be used to retrieve the
        actual homeserver URL from it.
        Example:
            >>> client = AsyncClient(homeserver="https://example.org")
            >>> response = await client.discovery_info()
            >>> if isinstance(response, DiscoveryInfoResponse):
            >>>     client.homeserver = response.homeserver_url
        """
        method, path = Api.discovery_info()
        return await self._send(DiscoveryInfoResponse, method, path)

    async def login_info(self) -> Union[LoginInfoResponse, LoginInfoError]:
        """Get the available login methods from the server

        Returns either a `LoginInfoResponse` if the request was successful or
        a `LoginInfoError` if there was an error with the request.
        """
        method, path = Api.login_info()

        return await self._send(LoginInfoResponse, method, path)

    async def login(
        self,
        password: Optional[str] = None,
        device_name: Optional[str] = "",
        token: Optional[str] = None,
    ) -> Union[LoginResponse, LoginError]:
        """Login to the homeserver.

        Calls receive_response() to update the client state if necessary.

        Args:
            password (str, optional): The user's password.
            device_name (str): A display name to assign to a newly-created
                device. Ignored if the logged in device corresponds to a
                known device.
            token (str, optional): A login token, for example provided by a
                single sign-on service.

        Either a password or a token needs to be provided.

        Returns either a `LoginResponse` if the request was successful or
        a `LoginError` if there was an error with the request.
        """

        if password is None and token is None:
            raise ValueError("Either a password or a token needs to be provided")

        method, path, data = Api.login(
            self.user,
            password=password,
            device_name=device_name,
            device_id=self.device_id,
            token=token,
        )

        return await self._send(LoginResponse, method, path, data)

    @logged_in_async
    async def logout(
        self, all_devices: bool = False
    ) -> Union[LogoutResponse, LogoutError]:
        """Logout from the homeserver.

        Calls receive_response() to update the client state if necessary.

        Returns either 'LogoutResponse' if the request was successful or
        a `LogoutError` if there was an error with the request.
""" method, path, data = Api.logout(self.access_token, all_devices) return await self._send(LogoutResponse, method, path, data) @logged_in_async async def sync( self, timeout: Optional[int] = 0, sync_filter: Optional[_FilterT] = None, since: Optional[str] = None, full_state: Optional[bool] = None, set_presence: Optional[str] = None, ) -> Union[SyncResponse, SyncError]: """Synchronise the client's state with the latest state on the server. In general you should use sync_forever() which handles additional tasks automatically (like sending encryption keys among others). Calls receive_response() to update the client state if necessary. Args: timeout(int, optional): The maximum time that the server should wait for new events before it should return the request anyways, in milliseconds. If ``0``, no timeout is applied. If ``None``, use ``AsyncClient.config.request_timeout``. If a timeout is applied and the server fails to return after 15 seconds of expected timeout, the client will timeout by itself. sync_filter (Union[None, str, Dict[Any, Any]): A filter ID that can be obtained from ``AsyncClient.upload_filter()`` (preferred), or filter dict that should be used for this sync request. full_state (bool, optional): Controls whether to include the full state for all rooms the user is a member of. If this is set to true, then all state events will be returned, even if since is non-empty. The timeline will still be limited by the since parameter. since (str, optional): A token specifying a point in time where to continue the sync from. Defaults to the last sync token we received from the server using this API call. set_presence (str, optional): The presence state. One of: ["online", "offline", "unavailable"] Returns either a `SyncResponse` if the request was successful or a `SyncError` if there was an error with the request. 
""" sync_token = since or self.next_batch presence = set_presence or self._presence method, path = Api.sync( self.access_token, since=sync_token or self.loaded_sync_token, timeout=( int(self.config.request_timeout) * 1000 if timeout is None else timeout or None ), filter=sync_filter, full_state=full_state, set_presence=presence, ) response = await self._send( SyncResponse, method, path, # 0 if full_state: server doesn't respect timeout if full_state # + 15: give server a chance to naturally return before we timeout timeout=0 if full_state else timeout / 1000 + 15 if timeout else timeout, ) return response @logged_in_async async def send_to_device_messages( self, ) -> List[Union[ToDeviceResponse, ToDeviceError]]: """Send out outgoing to-device messages. Automatically called by sync_forever(). """ if not self.outgoing_to_device_messages: return [] tasks = [] for message in self.outgoing_to_device_messages: task = asyncio.ensure_future(self.to_device(message)) tasks.append(task) return await asyncio.gather(*tasks) async def run_response_callbacks( self, responses: List[Union[Response, ErrorResponse]] ): """Run the configured response callbacks for the given responses. Low-level function which is normally only used by other methods of this class. Automatically called by sync_forever() and all functions calling receive_response(). """ for response in responses: for cb in self.response_callbacks: await cb.execute(response) @logged_in_async async def sync_forever( self, timeout: Optional[int] = None, sync_filter: Optional[_FilterT] = None, since: Optional[str] = None, full_state: Optional[bool] = None, loop_sleep_time: Optional[int] = None, first_sync_filter: Optional[_FilterT] = None, set_presence: Optional[str] = None, ): """Continuously sync with the configured homeserver. This method calls the sync method in a loop. To react to events event callbacks should be configured. 
        The loop also makes sure to handle other required requests between
        syncs, including to_device messages and sending encryption keys if
        required. To react to the responses a response callback should be
        added.

        Args:
            timeout (int, optional): The maximum time that the server should
                wait for new events before it should return the request
                anyways, in milliseconds.
                If ``0``, no timeout is applied.
                If ``None``, ``AsyncClient.config.request_timeout`` is used.
                In any case, ``0`` is always used for the first sync.
                If a timeout is applied and the server fails to return after
                15 seconds of expected timeout,
                the client will timeout by itself.

            sync_filter (Union[None, str, Dict[Any, Any]): A filter ID that
                can be obtained from ``AsyncClient.upload_filter()``
                (preferred), or filter dict that should be used for sync
                requests.

            full_state (bool, optional): Controls whether to include the full
                state for all rooms the user is a member of. If this is set to
                true, then all state events will be returned, even if since is
                non-empty. The timeline will still be limited by the since
                parameter. This argument will be used only for the first sync
                request.

            since (str, optional): A token specifying a point in time where to
                continue the sync from. Defaults to the last sync token we
                received from the server using this API call. This argument
                will be used only for the first sync request, the subsequent
                sync requests will use the token from the last sync response.

            loop_sleep_time (int, optional): The sleep time, if any, between
                successful sync loop iterations in milliseconds.

            first_sync_filter (Union[None, str, Dict[Any, Any]): A filter ID
                that can be obtained from ``AsyncClient.upload_filter()``
                (preferred), or filter dict to use for the first sync request
                only. If `None` (default), the `sync_filter` parameter's value
                is used. To have no filtering for the first sync regardless of
                `sync_filter`'s value, pass `{}`.

            set_presence (str, optional): The presence state.
                One of: ["online", "offline", "unavailable"]
        """
        first_sync = True

        while True:
            try:
                use_filter = (
                    first_sync_filter
                    if first_sync and first_sync_filter is not None
                    else sync_filter
                )
                # The first sync always returns immediately (no long-poll).
                use_timeout = 0 if first_sync else timeout

                tasks = []

                # Make sure that if this is our first sync that the sync happens
                # before the other requests, this helps to ensure that after one
                # fired synced event the state is indeed fully synced.
                if first_sync:
                    presence = set_presence or self._presence
                    sync_response = await self.sync(
                        use_timeout, use_filter, since, full_state, presence
                    )
                    await self.run_response_callbacks([sync_response])
                else:
                    presence = set_presence or self._presence
                    tasks = [
                        asyncio.ensure_future(coro)
                        for coro in (
                            self.sync(
                                use_timeout, use_filter, since, full_state, presence
                            ),
                            self.send_to_device_messages(),
                        )
                    ]

                if self.should_upload_keys:
                    tasks.append(asyncio.ensure_future(self.keys_upload()))

                if self.should_query_keys:
                    tasks.append(asyncio.ensure_future(self.keys_query()))

                if self.should_claim_keys:
                    tasks.append(
                        asyncio.ensure_future(
                            self.keys_claim(self.get_users_for_key_claiming()),
                        )
                    )

                for response in asyncio.as_completed(tasks):
                    await self.run_response_callbacks([await response])

                # since/full_state only apply to the first request; later
                # syncs continue from the token of the last sync response.
                first_sync = False
                full_state = None
                since = None

                # Pulse the synced event for anyone awaiting it.
                self.synced.set()
                self.synced.clear()

                if loop_sleep_time:
                    await asyncio.sleep(loop_sleep_time / 1000)

            except asyncio.CancelledError:  # noqa: PERF203
                # Propagate cancellation after cancelling in-flight tasks.
                for task in tasks:
                    task.cancel()

                raise

    @logged_in_async
    @store_loaded
    async def start_key_verification(
        self, device: OlmDevice, tx_id: Optional[str] = None
    ) -> Union[ToDeviceResponse, ToDeviceError]:
        """Start a interactive key verification with the given device.

        Returns either a `ToDeviceResponse` if the request was successful or
        a `ToDeviceError` if there was an error with the request.

        Args:
            device (OlmDevice): An device with which we would like to start
                the interactive key verification process.
""" message = self.create_key_verification(device) return await self.to_device(message, tx_id) @logged_in_async @store_loaded async def cancel_key_verification( self, transaction_id: str, reject: bool = False, tx_id: Optional[str] = None, ) -> Union[ToDeviceResponse, ToDeviceError]: """Cancel a interactive key verification with the given device. Returns either a `ToDeviceResponse` if the request was successful or a `ToDeviceError` if there was an error with the request. Args: transaction_id (str): An transaction id of a valid key verification process. reject (bool): Is the cancelation reason because we're rejecting the short auth string and mark it as mismatching or a normal user cancelation. Raises a LocalProtocolError no verification process with the given transaction ID exists or if reject is True and the short auth string couldn't be shown yet because plublic keys weren't yet exchanged. """ if transaction_id not in self.key_verifications: raise LocalProtocolError( f"Key verification with the transaction id {transaction_id} does not exist." ) sas = self.key_verifications[transaction_id] if reject: sas.reject_sas() else: sas.cancel() message = sas.get_cancellation() return await self.to_device(message, tx_id) @logged_in_async @store_loaded async def accept_key_verification( self, transaction_id: str, tx_id: Optional[str] = None ) -> Union[ToDeviceResponse, ToDeviceError]: """Accept a key verification start event. Returns either a `ToDeviceResponse` if the request was successful or a `ToDeviceError` if there was an error with the request. Args: transaction_id (str): An transaction id of a valid key verification process. """ if transaction_id not in self.key_verifications: raise LocalProtocolError( f"Key verification with the transaction id {transaction_id} does not exist." 
            )

        sas = self.key_verifications[transaction_id]

        message = sas.accept_verification()

        return await self.to_device(message, tx_id)

    @logged_in_async
    @store_loaded
    async def confirm_short_auth_string(
        self, transaction_id: str, tx_id: Optional[str] = None
    ) -> Union[ToDeviceResponse, ToDeviceError]:
        """Confirm a short auth string and mark it as matching.

        Returns either a `ToDeviceResponse` if the request was successful or
        a `ToDeviceError` if there was an error with the request.

        Args:
            transaction_id (str): An transaction id of a valid key
                verification process.
        """
        message = self.confirm_key_verification(transaction_id)
        return await self.to_device(message, tx_id)

    @logged_in_async
    async def to_device(
        self,
        message: ToDeviceMessage,
        tx_id: Optional[str] = None,
    ) -> Union[ToDeviceResponse, ToDeviceError]:
        """Send a to-device message.

        Calls receive_response() to update the client state if necessary.

        Returns either a `ToDeviceResponse` if the request was successful or
        a `ToDeviceError` if there was an error with the request.

        Args:
            message (ToDeviceMessage): The message that should be sent out.
            tx_id (str, optional): The transaction ID for this message. Should
                be unique.
        """
        # Generate a transaction ID if the caller did not supply one.
        uuid = tx_id or uuid4()

        method, path, data = Api.to_device(
            self.access_token, message.type, message.as_dict(), uuid
        )

        return await self._send(
            ToDeviceResponse, method, path, data, response_data=(message,)
        )

    @logged_in_async
    @store_loaded
    async def keys_upload(self) -> Union[KeysUploadResponse, KeysUploadError]:
        """Upload the E2E encryption keys.

        This uploads the long lived session keys as well as the required
        amount of one-time keys.

        Automatically called by sync_forever().

        Calls receive_response() to update the client state if necessary.

        Raises LocalProtocolError if the client isn't logged in, if the session
        store isn't loaded or if no encryption keys need to be uploaded.
""" if not self.should_upload_keys: raise LocalProtocolError("No key upload needed.") assert self.olm keys_dict = self.olm.share_keys() method, path, data = Api.keys_upload(self.access_token, keys_dict) return await self._send(KeysUploadResponse, method, path, data) @logged_in_async @store_loaded async def keys_query(self) -> Union[KeysQueryResponse, KeysQueryError]: """Query the server for user keys. This queries the server for device keys of users with which we share an encrypted room. Automatically called by sync_forever() and room_send(). Calls receive_response() to update the client state if necessary. Raises LocalProtocolError if the client isn't logged in, if the session store isn't loaded or if no key query needs to be performed. """ user_list = self.users_for_key_query if not user_list: raise LocalProtocolError("No key query required.") # TODO pass the sync token here if it's a device update that triggered # our need for a key query. method, path, data = Api.keys_query(self.access_token, user_list) return await self._send(KeysQueryResponse, method, path, data) @logged_in_async async def devices(self) -> Union[DevicesResponse, DevicesError]: """Get the list of devices for the current user. Calls receive_response() to update the client state if necessary. Returns either a `DevicesResponse` if the request was successful or a `DevicesError` if there was an error with the request. """ method, path = Api.devices(self.access_token) return await self._send(DevicesResponse, method, path) @logged_in_async async def update_device( self, device_id: str, content: Dict[str, str] ) -> Union[UpdateDeviceResponse, UpdateDeviceError]: """Update the metadata of the given device. Returns either a `UpdateDeviceResponse` if the request was successful or a `UpdateDeviceError` if there was an error with the request. Args: device_id (str): The device for which the metadata will be updated. 
content (Dict[str, str]): A dictionary of metadata values that will be updated for the device. Example: >>> device_id = "QBUAZIFURK" >>> content = {"display_name": "My new device"} >>> await client.update_device(device_id, content) """ method, path, data = Api.update_device(self.access_token, device_id, content) return await self._send(UpdateDeviceResponse, method, path, data) @logged_in_async async def delete_devices( self, devices: List[str], auth: Optional[Dict[str, str]] = None ) -> Union[DeleteDevicesResponse, DeleteDevicesError]: """Delete a list of devices. This tells the server to delete the given devices and invalidate their associated access tokens. Calls receive_response() to update the client state if necessary. Returns either a `DeleteDevicesResponse` if the request was successful or a `DeleteDevicesError` if there was an error with the request. This endpoint supports user-interactive auth, calling this method without an auth dictionary will return a `DeleteDevicesAuthResponse` which can be used to introspect the valid authentication methods that the server supports. Args: devices (List[str]): A list of devices which will be deleted. auth (Dict): Additional authentication information for the user-interactive authentication API. Example: >>> devices = ["QBUAZIFURK", "AUIECTSRND"] >>> auth = {"type": "m.login.password", ... "user": "example", ... "password": "hunter1"} >>> await client.delete_devices(devices, auth) """ method, path, data = Api.delete_devices( self.access_token, devices, auth_dict=auth ) return await self._send(DeleteDevicesResponse, method, path, data) @logged_in_async async def space_get_hierarchy( self, space_id: str, from_page: Optional[str] = None, limit: Optional[int] = None, max_depth: Optional[int] = None, suggested_only: bool = False, ) -> Union[SpaceGetHierarchyResponse, SpaceGetHierarchyError]: """Gets the space's room hierarchy. Calls receive_response() to update the client state if necessary. 
Returns either a `SpaceGetHierarchyResponse` if the request was successful or a `SpaceGetHierarchyError` if there was an error with the request. Args: space_id (str): The ID of the space to get the hierarchy for. from_page (str, optional): Pagination token from a previous request to this endpoint. limit (int, optional): The maximum number of rooms to return. max_depth (int, optional): The maximum depth of the returned tree. suggested_only (bool, optional): Whether or not to only return rooms that are considered suggested. Defaults to False. """ method, path = Api.space_get_hierarchy( self.access_token, space_id, from_page=from_page, limit=limit, max_depth=max_depth, suggested_only=suggested_only, ) return await self._send(SpaceGetHierarchyResponse, method, path) @logged_in_async async def joined_members( self, room_id: str ) -> Union[JoinedMembersResponse, JoinedMembersError]: """Get the list of joined members for a room. Calls receive_response() to update the client state if necessary. Returns either a `JoinedMembersResponse` if the request was successful or a `JoinedMembersError` if there was an error with the request. Args: room_id(str): The room id of the room for which we wan't to request the joined member list. """ method, path = Api.joined_members(self.access_token, room_id) return await self._send( JoinedMembersResponse, method, path, response_data=(room_id,) ) @logged_in_async async def joined_rooms( self, ) -> Union[JoinedRoomsResponse, JoinedRoomsError]: """Get the list of joined rooms. Calls receive_response() to update the client state if necessary. Returns either a `JoinedRoomsResponse` if the request was successful or a `JoinedRoomsError` if there was an error with the request. 
""" method, path = Api.joined_rooms(self.access_token) return await self._send(JoinedRoomsResponse, method, path) @logged_in_async async def room_send( self, room_id: str, message_type: str, content: Dict[Any, Any], tx_id: Optional[str] = None, ignore_unverified_devices: bool = False, ) -> Union[RoomSendResponse, RoomSendError]: """Send a message to a room. Calls receive_response() to update the client state if necessary. Args: room_id(str): The room id of the room where the message should be sent to. message_type(str): A string identifying the type of the message. content(Dict[Any, Any]): A dictionary containing the content of the message. tx_id(str, optional): The transaction ID of this event used to uniquely identify this message. ignore_unverified_devices(bool): If the room is encrypted and contains unverified devices, the devices can be marked as ignored here. Ignored devices will still receive encryption keys for messages but they won't be marked as verified. If the room where the message should be sent is encrypted the message will be encrypted before sending. This method also makes sure that the room members are fully synced and that keys are queried before sending messages to an encrypted room. If the method can't sync the state fully to send out an encrypted message after a couple of retries it raises `SendRetryError`. Raises `LocalProtocolError` if the client isn't logged in. """ uuid: Union[str, UUID] = tx_id or uuid4() if self.olm: try: room = self.rooms[room_id] except KeyError: raise LocalProtocolError(f"No such room with id {room_id} found.") if room.encrypted: # Check if the members are synced, otherwise users might not get # the megolm seession. if not room.members_synced: responses = [] responses.append(await self.joined_members(room_id)) if self.should_query_keys: responses.append(await self.keys_query()) # Check if we need to share a group session, it might have been # invalidated or expired. 
if self.olm.should_share_group_session(room_id): try: event = self.sharing_session[room_id] await event.wait() except KeyError: await self.share_group_session( room_id, ignore_unverified_devices=ignore_unverified_devices, ) # Reactions as of yet don't support encryption. # Relevant spec proposal https://github.com/matrix-org/matrix-doc/pull/1849 if message_type != "m.reaction": # Encrypt our content and change the message type. message_type, content = self.encrypt(room_id, message_type, content) method, path, data = Api.room_send( self.access_token, room_id, message_type, content, uuid ) return await self._send(RoomSendResponse, method, path, data, (room_id,)) @logged_in_async @client_session async def list_direct_rooms( self, ) -> Union[DirectRoomsResponse, DirectRoomsErrorResponse]: """ Lists all rooms flagged with m.direct that the client is participating in. Returns a DirectRoomListResponse if the request was successful, or DirectRoomListErrorResponse if there was an error, or the current user has never marked any rooms marked with m.direct """ method, path = Api.direct_room_list(self.access_token, self.user_id) return await self._send(DirectRoomsResponse, method, path) @logged_in_async async def room_get_event( self, room_id: str, event_id: str ) -> Union[RoomGetEventResponse, RoomGetEventError]: """Get a single event based on roomId/eventId. Calls receive_response() to update the client state if necessary. Returns either a `RoomGetEventResponse` if the request was successful or a `RoomGetEventError` if there was an error with the request. Args: room_id (str): The room id of the room where the event is in. event_id (str): The event id to get. 
""" method, path = Api.room_get_event(self.access_token, room_id, event_id) return await self._send(RoomGetEventResponse, method, path) @logged_in_async async def room_put_state( self, room_id: str, event_type: str, content: Dict[Any, Any], state_key: str = "", ) -> Union[RoomPutStateResponse, RoomPutStateError]: """Send a state event to a room. Calls receive_response() to update the client state if necessary. Returns either a `RoomPutStateResponse` if the request was successful or a `RoomPutStateError` if there was an error with the request. Args: room_id (str): The room id of the room to send the event to. event_type (str): The type of the state to send. content (Dict[Any, Any]): The content of the event to be sent. state_key (str): The key of the state event to send. """ method, path, data = Api.room_put_state( self.access_token, room_id, event_type, content, state_key=state_key, ) return await self._send( RoomPutStateResponse, method, path, data, response_data=(room_id,), ) @logged_in_async async def room_get_state( self, room_id: str, ) -> Union[RoomGetStateResponse, RoomGetStateError]: """Fetch state for a room. Calls receive_response() to update the client state if necessary. Returns either a `RoomGetStateResponse` if the request was successful or a `RoomGetStateError` if there was an error with the request. Args: room_id (str): The room id of the room to fetch state from. """ method, path = Api.room_get_state( self.access_token, room_id, ) return await self._send( RoomGetStateResponse, method, path, response_data=(room_id,), ) @logged_in_async async def room_get_state_event( self, room_id: str, event_type: str, state_key: str = "" ) -> Union[RoomGetStateEventResponse, RoomGetStateEventError]: """Fetch a state event from a room. Calls receive_response() to update the client state if necessary. Returns either a `RoomGetStateEventResponse` if the request was successful or a `RoomGetStateEventError` if there was an error with the request. 
Args: room_id (str): The room id of the room to fetch the event from. event_type (str): The type of the state to fetch. state_key (str): The key of the state event to fetch. """ method, path = Api.room_get_state_event( self.access_token, room_id, event_type, state_key=state_key ) return await self._send( RoomGetStateEventResponse, method, path, response_data=( event_type, state_key, room_id, ), ) @logged_in_async async def room_redact( self, room_id: str, event_id: str, reason: Optional[str] = None, tx_id: Union[None, str, UUID] = None, ) -> Union[RoomRedactResponse, RoomRedactError]: """Strip information out of an event. Calls receive_response() to update the client state if necessary. Returns either a `RoomRedactResponse` if the request was successful or a `RoomRedactError` if there was an error with the request. Args: room_id (str): The room id of the room that contains the event that will be redacted. event_id (str): The ID of the event that will be redacted. tx_id (str/UUID, optional): A transaction ID for this event. reason(str, optional): A description explaining why the event was redacted. """ method, path, data = Api.room_redact( self.access_token, room_id, event_id, tx_id=tx_id or uuid4(), reason=reason, ) return await self._send( RoomRedactResponse, method, path, data, response_data=(room_id,), ) async def room_resolve_alias( self, room_alias: str, ) -> Union[RoomResolveAliasResponse, RoomResolveAliasError]: """Resolve a room alias to a room ID. Calls receive_response() to update the client state if necessary. Returns either a `RoomResolveAliasResponse` if the request was successful or a `RoomResolveAliasError if there was an error with the request. 
Args: room_alias (str): The alias to resolve """ method, path = Api.room_resolve_alias(room_alias) return await self._send( RoomResolveAliasResponse, method, path, response_data=(room_alias,), ) @logged_in_async async def room_delete_alias( self, room_alias: str, ) -> Union[RoomDeleteAliasResponse, RoomDeleteAliasError]: """Delete a room alias. Calls receive_response() to update the client state if necessary. Returns either a `RoomDeleteAliasResponse` if the request was successful or a `RoomDeleteAliasError if there was an error with the request. Args: room_alias (str): The alias to delete """ method, path = Api.room_delete_alias( self.access_token, room_alias, ) return await self._send( RoomDeleteAliasResponse, method, path, response_data=(room_alias,), ) @logged_in_async async def room_put_alias( self, room_alias: str, room_id: str, ) -> Union[RoomPutAliasResponse, RoomPutAliasError]: """Add a room alias. Calls receive_response() to update the client state if necessary. Returns either a `RoomPutAliasResponse` if the request was successful or a `RoomPutAliasError if there was an error with the request. Args: room_alias (str): The alias to add room_id (str): The room ID to map to """ method, path, data = Api.room_put_alias( self.access_token, room_alias, room_id, ) return await self._send( RoomPutAliasResponse, method, path, data=data, response_data=(room_alias, room_id), ) async def room_get_visibility( self, room_id: str, ) -> Union[RoomGetVisibilityResponse, RoomGetVisibilityError]: """Get visibility for a room. Calls receive_response() to update the client state if necessary. Returns either a `RoomGetVisibilityResponse` if the request was successful or a `RoomGetVisibilityError if there was an error with the request. 
Args: room_id (str): The room ID to get visibility for """ method, path = Api.room_get_visibility(room_id) return await self._send( RoomGetVisibilityResponse, method, path, response_data=(room_id,), ) @logged_in_async @store_loaded async def keys_claim( self, user_set: Dict[str, Iterable[str]] ) -> Union[KeysClaimResponse, KeysClaimError]: """Claim one-time keys for a set of user and device pairs. Automatically called by sync_forever() and room_send(). Calls receive_response() to update the client state if necessary. Args: user_set(Dict[str, Iterator[str]]): A dictionary mapping from a user id to a iterator of device ids. If a user set for a specific room is required it can be obtained using the `get_missing_sessions()` method. Raises LocalProtocolError if the client isn't logged in, if the session store isn't loaded, no room with the given room id exists or the room isn't an encrypted room. """ method, path, data = Api.keys_claim(self.access_token, user_set) return await self._send(KeysClaimResponse, method, path, data) @logged_in_async @store_loaded async def share_group_session( self, room_id: str, ignore_unverified_devices: bool = False, ) -> Union[ShareGroupSessionResponse, ShareGroupSessionError]: """Share a group session with a room. This method sends a group session to members of a room. Automatically called by room_send(). Calls receive_response() to update the client state if necessary. Args: room_id(str): The room id of the room where the message should be sent to. ignore_unverified_devices(bool): Mark unverified devices as ignored. Ignored devices will still receive encryption keys for messages but they won't be marked as verified. Raises LocalProtocolError if the client isn't logged in, if the session store isn't loaded, no room with the given room id exists, the room isn't an encrypted room or a key sharing request is already in flight for this room. 
""" assert self.olm try: room = self.rooms[room_id] except KeyError: raise LocalProtocolError(f"No such room with id {room_id}") if not room.encrypted: raise LocalProtocolError(f"Room with id {room_id} is not encrypted") if room_id in self.sharing_session: raise LocalProtocolError(f"Already sharing a group session for {room_id}") self.sharing_session[room_id] = AsyncioEvent() missing_sessions = self.get_missing_sessions(room_id) if missing_sessions: await self.keys_claim(missing_sessions) shared_with = set() try: requests = [] for sharing_with, to_device_dict in self.olm.share_group_session_parallel( room_id, list(room.users.keys()), ignore_unverified_devices=ignore_unverified_devices, ): method, path, data = Api.to_device( self.access_token, "m.room.encrypted", to_device_dict, uuid4() ) requests.append( self._send( ShareGroupSessionResponse, method, path, data, response_data=(room_id, sharing_with), ) ) for response in await asyncio.gather(*requests, return_exceptions=True): if isinstance(response, ShareGroupSessionResponse): shared_with.update(response.users_shared_with) # Mark the session as shared, usually the olm machine will do this # for us, but if there was no-one to share the session with it we # need to do it ourselves. self.olm.outbound_group_sessions[room_id].shared = True except ClientConnectionError: raise finally: event = self.sharing_session.pop(room_id) event.set() return ShareGroupSessionResponse(room_id, shared_with) @logged_in_async @store_loaded async def request_room_key( self, event: MegolmEvent, tx_id: Optional[str] = None, ) -> Union[RoomKeyRequestResponse, RoomKeyRequestError]: """Request a missing room key. This sends out a message to other devices requesting a room key from them. Calls receive_response() to update the client state if necessary. Returns either a `RoomKeyRequestResponse` if the request was successful or a `RoomKeyRequestError` if there was an error with the request. 
Raises a LocalProtocolError if the room key was already requested. Args: event (MegolmEvent): An undecrypted MegolmEvent for which we would like to request the decryption key. """ uuid = tx_id or uuid4() if event.session_id in self.outgoing_key_requests: raise LocalProtocolError( "A key sharing request is already sent" " out for this session id." ) assert self.user_id assert self.device_id message = event.as_key_request(self.user_id, self.device_id) method, path, data = Api.to_device( self.access_token, message.type, message.as_dict(), uuid ) return await self._send( RoomKeyRequestResponse, method, path, data, ( event.session_id, event.session_id, event.room_id, event.algorithm, ), ) async def close(self): """Close the underlying http session.""" if self.client_session: await self.client_session.close() self.client_session = None @store_loaded async def export_keys(self, outfile: str, passphrase: str, count: int = 10000): """Export all the Megolm decryption keys of this device. The keys will be encrypted using the passphrase. Note that this does not save other information such as the private identity keys of the device. Args: outfile (str): The file to write the keys to. passphrase (str): The encryption passphrase. count (int): Optional. Round count for the underlying key derivation. It is not recommended to specify it unless absolutely sure of the consequences. """ assert self.store assert self.olm loop = asyncio.get_event_loop() inbound_group_store = self.store.load_inbound_group_sessions() export_keys = partial( self.olm.export_keys_static, inbound_group_store, outfile, passphrase, count, ) await loop.run_in_executor(None, export_keys) @store_loaded async def import_keys(self, infile: str, passphrase: str): """Import Megolm decryption keys. The keys will be added to the current instance as well as written to database. Args: infile (str): The file containing the keys. passphrase (str): The decryption passphrase. 
Raises `EncryptionError` if the file is invalid or couldn't be decrypted. Raises the usual file errors if the file couldn't be opened. """ assert self.store assert self.olm loop = asyncio.get_event_loop() import_keys = partial(self.olm.import_keys_static, infile, passphrase) sessions = await loop.run_in_executor(None, import_keys) for session in sessions: # This could be improved by writing everything to db at once at # the end if self.olm.inbound_group_store.add(session): self.store.save_inbound_group_session(session) @logged_in_async async def room_create( self, visibility: RoomVisibility = RoomVisibility.private, alias: Optional[str] = None, name: Optional[str] = None, topic: Optional[str] = None, room_version: Optional[str] = None, room_type: Optional[str] = None, federate: bool = True, is_direct: bool = False, preset: Optional[RoomPreset] = None, invite: Sequence[str] = (), initial_state: Sequence[Dict[str, Any]] = (), power_level_override: Optional[Dict[str, Any]] = None, predecessor: Optional[Dict[str, Any]] = None, space: bool = False, ) -> Union[RoomCreateResponse, RoomCreateError]: """Create a new room. Calls receive_response() to update the client state if necessary. Returns either a `RoomCreateResponse` if the request was successful or a `RoomCreateError` if there was an error with the request. Args: visibility (RoomVisibility): whether to have the room published in the server's room directory or not. Defaults to ``RoomVisibility.private``. alias (str, optional): The desired canonical alias local part. For example, if set to "foo" and the room is created on the "example.com" server, the room alias will be "#foo:example.com". name (str, optional): A name to set for the room. topic (str, optional): A topic to set for the room. room_version (str, optional): The room version to set. If not specified, the homeserver will use its default setting. 
If a version not supported by the homeserver is specified, a 400 ``M_UNSUPPORTED_ROOM_VERSION`` error will be returned. room_type (str, optional): The room type to set. If not specified, the homeserver will use its default setting. In spec v1.2 the following room types are specified: - ``m.space`` Unspecified room types are permitted through the use of Namespaced Identifiers. federate (bool): Whether to allow users from other homeservers from joining the room. Defaults to ``True``. Cannot be changed later. is_direct (bool): If this should be considered a direct messaging room. If ``True``, the server will set the ``is_direct`` flag on ``m.room.member events`` sent to the users in ``invite``. Defaults to ``False``. preset (RoomPreset, optional): The selected preset will set various rules for the room. If unspecified, the server will choose a preset from the ``visibility``: ``RoomVisibility.public`` equates to ``RoomPreset.public_chat``, and ``RoomVisibility.private`` equates to a ``RoomPreset.private_chat``. invite (list): A list of user id to invite to the room. initial_state (list): A list of state event dicts to send when the room is created. For example, a room could be made encrypted immediately by having a ``m.room.encryption`` event dict. power_level_override (dict): A ``m.room.power_levels content`` dict to override the default. The dict will be applied on top of the generated ``m.room.power_levels`` event before it is sent to the room. predecessor (dict): A reference to the room this room replaces, if the previous room was upgraded. Containing the event ID of the last known event in the old room. And the ID of the old room. ``event_id``: ``$something:example.org``, ``room_id``: ``!oldroom:example.org`` space (bool): Create as a Space (defaults to False). 
""" method, path, data = Api.room_create( self.access_token, visibility=visibility, alias=alias, name=name, topic=topic, room_version=room_version, room_type=room_type, federate=federate, is_direct=is_direct, preset=preset, invite=invite, initial_state=initial_state, power_level_override=power_level_override, predecessor=predecessor, space=space, ) return await self._send(RoomCreateResponse, method, path, data) @logged_in_async async def join(self, room_id: str) -> Union[JoinResponse, JoinError]: """Join a room. This tells the server to join the given room. If the room is not public, the user must be invited. Calls receive_response() to update the client state if necessary. Returns either a `JoinResponse` if the request was successful or a `JoinError` if there was an error with the request. Args: room_id: The room id or alias of the room to join. """ method, path, data = Api.join(self.access_token, room_id) return await self._send(JoinResponse, method, path, data) @logged_in_async async def room_knock( self, room_id: str, reason: Optional[str] = None, ) -> Union[RoomKnockResponse, RoomKnockError]: """Knock on a room. Calls receive_response() to update the client state if necessary. Returns either a `RoomKnockResponse` if the request was successful or a `RoomKnockError` if there was an error with the request. Args: room_id (str): The room id of the room that the user is knocking on. reason (str, optional): The reason for the knock. """ method, path, data = Api.room_knock( self.access_token, room_id, reason, ) return await self._send(RoomKnockResponse, method, path, data) @logged_in_async async def room_enable_knocking( self, room_id: str, ) -> Union[RoomPutStateResponse, RoomPutStateError]: """Enables knocking for a room. Returns either a `RoomPutStateResponse` if the request was successful or a `RoomPutStateError` if there was an error with the request. Args: room_id (str): The room id of the room to enable knocking for. 
""" return await self.room_put_state( room_id, event_type="m.room.join_rules", content={"join_rule": "knock"}, ) @logged_in_async async def room_invite( self, room_id: str, user_id: str, ) -> Union[RoomInviteResponse, RoomInviteError]: """Invite a user to a room. Calls receive_response() to update the client state if necessary. Returns either a `RoomInviteResponse` if the request was successful or a `RoomInviteError` if there was an error with the request. Args: room_id (str): The room id of the room that the user will be invited to. user_id (str): The user id of the user that should be invited. """ method, path, data = Api.room_invite( self.access_token, room_id, user_id, ) return await self._send(RoomInviteResponse, method, path, data) @logged_in_async async def room_leave( self, room_id: str ) -> Union[RoomLeaveResponse, RoomLeaveError]: """Leave a room or reject an invite. This tells the server to leave the given room. If the user was only invited, the invite is rejected. Calls receive_response() to update the client state if necessary. Returns either a `RoomLeaveResponse` if the request was successful or a `RoomLeaveError` if there was an error with the request. Args: room_id: The room id of the room to leave. """ method, path, data = Api.room_leave(self.access_token, room_id) return await self._send(RoomLeaveResponse, method, path, data) @logged_in_async async def room_forget( self, room_id: str ) -> Union[RoomForgetResponse, RoomForgetError]: """Forget a room. This tells the server to forget the given room's history for our user. If all users on a homeserver forget the room, the room will be eligible for deletion from that homeserver. Calls receive_response() to update the client state if necessary. Returns either a `RoomForgetResponse` if the request was successful or a `RoomForgetError` if there was an error with the request. Args: room_id (str): The room id of the room to forget. 
""" method, path, data = Api.room_forget(self.access_token, room_id) return await self._send( RoomForgetResponse, method, path, data, response_data=(room_id,) ) @logged_in_async async def room_kick( self, room_id: str, user_id: str, reason: Optional[str] = None, ) -> Union[RoomKickResponse, RoomKickError]: """Kick a user from a room, or withdraw their invitation. Kicking a user adjusts their membership to "leave" with an optional reason. Calls receive_response() to update the client state if necessary. Returns either a `RoomKickResponse` if the request was successful or a `RoomKickError` if there was an error with the request. Args: room_id (str): The room id of the room that the user will be kicked from. user_id (str): The user_id of the user that should be kicked. reason (str, optional): A reason for which the user is kicked. """ method, path, data = Api.room_kick( self.access_token, room_id, user_id, reason, ) return await self._send(RoomKickResponse, method, path, data) @logged_in_async async def room_ban( self, room_id: str, user_id: str, reason: Optional[str] = None, ) -> Union[RoomBanResponse, RoomBanError]: """Ban a user from a room. When a user is banned from a room, they may not join it or be invited to it until they are unbanned. If they are currently in the room, they will be kicked or have their invitation withdrawn first. Calls receive_response() to update the client state if necessary. Returns either a `RoomBanResponse` if the request was successful or a `RoomBanError` if there was an error with the request. Args: room_id (str): The room id of the room that the user will be banned from. user_id (str): The user_id of the user that should be banned. reason (str, optional): A reason for which the user is banned. 
""" method, path, data = Api.room_ban( self.access_token, room_id, user_id, reason, ) return await self._send(RoomBanResponse, method, path, data) @logged_in_async async def room_unban( self, room_id: str, user_id: str, ) -> Union[RoomBanResponse, RoomBanError]: """Unban a user from a room. This allows them to be invited and join the room again. Calls receive_response() to update the client state if necessary. Returns either a `RoomUnbanResponse` if the request was successful or a `RoomUnbanError` if there was an error with the request. Args: room_id (str): The room id of the room that the user will be unbanned from. user_id (str): The user_id of the user that should be unbanned. """ method, path, data = Api.room_unban( self.access_token, room_id, user_id, ) return await self._send(RoomUnbanResponse, method, path, data) @logged_in_async async def room_context( self, room_id: str, event_id: str, limit: Optional[int] = None, ) -> Union[RoomContextResponse, RoomContextError]: """Fetch a number of events that happened before and after an event. This allows clients to get the context surrounding an event. Calls receive_response() to update the client state if necessary. Returns either a `RoomContextResponse` if the request was successful or a `RoomContextError` if there was an error with the request. Args: room_id (str): The room id of the room that contains the event and its context. event_id (str): The event_id of the event that we wish to get the context for. limit(int, optional): The maximum number of events to request. 
""" method, path = Api.room_context(self.access_token, room_id, event_id, limit) return await self._send( RoomContextResponse, method, path, response_data=(room_id,) ) @logged_in_async async def room_messages( self, room_id: str, start: str, end: Optional[str] = None, direction: MessageDirection = MessageDirection.back, limit: int = 10, message_filter: Optional[Dict[Any, Any]] = None, ) -> Union[RoomMessagesResponse, RoomMessagesError]: """Fetch a list of message and state events for a room. It uses pagination query parameters to paginate history in the room. Calls receive_response() to update the client state if necessary. Returns either a `RoomMessagesResponse` if the request was successful or a `RoomMessagesResponse` if there was an error with the request. Args: room_id (str): The room id of the room for which we would like to fetch the messages. start (str): The token to start returning events from. This token can be obtained from a prev_batch token returned for each room by the sync API, or from a start or end token returned by a previous request to this endpoint. end (str, optional): The token to stop returning events at. This token can be obtained from a prev_batch token returned for each room by the sync endpoint, or from a start or end token returned by a previous request to this endpoint. direction (MessageDirection, optional): The direction to return events from. Defaults to MessageDirection.back. limit (int, optional): The maximum number of events to return. Defaults to 10. message_filter (Optional[Dict[Any, Any]]): A filter dict that should be used for this room messages request. Example: >>> response = await client.room_messages(room_id, previous_batch) >>> next_response = await client.room_messages(room_id, ... 
response.end) """ method, path = Api.room_messages( self.access_token, room_id, start, end=end, direction=direction, limit=limit, message_filter=message_filter, ) return await self._send( RoomMessagesResponse, method, path, response_data=(room_id,) ) @logged_in_async async def room_typing( self, room_id: str, typing_state: bool = True, timeout: int = 30000, ) -> Union[RoomTypingResponse, RoomTypingError]: """Send a typing notice to the server. This tells the server that the user is typing for the next N milliseconds or that the user has stopped typing. Calls receive_response() to update the client state if necessary. Returns either a `RoomTypingResponse` if the request was successful or a `RoomTypingError` if there was an error with the request. Args: room_id (str): The room id of the room where the user is typing. typing_state (bool): A flag representing whether the user started or stopped typing. timeout (int): For how long should the new typing notice be valid for in milliseconds. """ method, path, data = Api.room_typing( self.access_token, room_id, self.user_id, typing_state, timeout ) return await self._send( RoomTypingResponse, method, path, data, response_data=(room_id,) ) @logged_in_async async def update_receipt_marker( self, room_id: str, event_id: str, receipt_type: str = "m.read", ) -> None: """Update the marker of given the `receipt_type` to specified `event_id`. Calls receive_response() to update the client state if necessary. Returns either a `UpdateReceiptMarkerResponse` if the request was successful or a `UpdateReceiptMarkerError` if there was an error with the request. Args: room_id (str): Room id of the room where the marker should be updated event_id (str): The event ID the read marker should be located at receipt_type (str): The type of receipt to send. Currently, only `m.read` is supported by the Matrix specification. 
""" method, path = Api.update_receipt_marker( self.access_token, room_id, event_id, receipt_type, ) return await self._send( UpdateReceiptMarkerResponse, method, path, "{}", ) @logged_in_async async def room_read_markers( self, room_id: str, fully_read_event: str, read_event: Optional[str] = None ): """Update the fully read marker (and optionally the read receipt) for a room. Calls receive_response() to update the client state if necessary. Returns either a `RoomReadMarkersResponse` if the request was successful or a `RoomReadMarkersError` if there was an error with the request. This sets the position of the read markers. - `fully_read_event` is the latest event in the set of events that the user has either fully read or indicated they aren't interested in. It permits the implementation of a "jump to first unread message" kind of feature. It is _private_ (not exposed to other room participants). - `read_event` is the most recent message the user has read and is also known as a _read receipt_. A read receipt being set on an event does not imply that all previous events have been seen. This happens in cases such as when a user comes back to a room after hundreds of messages have been sent and _only_ reads the most recent message. The read receipt is _public_ (exposed to other room participants). If you want to set the read receipt, you _must_ set `read_event`. Args: room_id (str): The room ID of the room where the read markers should be updated. fully_read_event (str): The event ID that the user has fully read up to. read_event (Optional[str]): The event ID to set the read receipt location at. 
""" method, path, data = Api.room_read_markers( self.access_token, room_id, fully_read_event, read_event ) return await self._send( RoomReadMarkersResponse, method, path, data, response_data=(room_id,) ) @logged_in_async async def content_repository_config( self, ) -> Union[ContentRepositoryConfigResponse, ContentRepositoryConfigError]: """Get the content repository configuration, such as upload limits. Calls receive_response() to update the client state if necessary. Returns either a `ContentRepositoryConfigResponse` if the request was successful or a `ContentRepositoryConfigError` if there was an error with the request. """ method, path = Api.content_repository_config(self.access_token) return await self._send(ContentRepositoryConfigResponse, method, path) @staticmethod async def _process_data_chunk(chunk, monitor=None): if monitor and monitor.cancel: raise TransferCancelledError while monitor and monitor.pause: await asyncio.sleep(0.1) return chunk async def _plain_data_generator(self, data, monitor=None): """Yield chunks of bytes from data. If a monitor is passed, update its ``transferred`` property and suspend yielding chunks while its ``pause`` attribute is ``True``. Raise ``TransferCancelledError`` if ``monitor.cancel`` is ``True``. """ async for value in async_generator_from_data(data): yield await self._process_data_chunk(value, monitor) async def _encrypted_data_generator( self, data, decryption_dict, monitor=None, ): """Yield encrypted chunks of bytes from data. If a monitor is passed, update its ``transferred`` property and suspend yielding chunks while its ``pause`` attribute is ``True``. The last yielded value will be the decryption dict. Raise ``TransferCancelledError`` if ``monitor.cancel`` is ``True``. 
""" async for value in async_encrypt_attachment(data): if isinstance(value, dict): # last yielded value decryption_dict.update(value) else: yield await self._process_data_chunk(value, monitor) @logged_in_async async def upload( self, data_provider: Union[DataProvider, SynchronousFileType, AsyncFileType], content_type: str = "application/octet-stream", filename: Optional[str] = None, encrypt: bool = False, monitor: Optional[TransferMonitor] = None, filesize: Optional[int] = None, ) -> Tuple[Union[UploadResponse, UploadError], Optional[Dict[str, Any]]]: """Upload a file to the content repository. This method ignores `AsyncClient.config.request_timeout` and uses `0`. Calls receive_response() to update the client state if necessary. Returns a tuple containing: - Either a `UploadResponse` if the request was successful, or a `UploadError` if there was an error with the request - A dict with file decryption info if encrypt is ``True``, else ``None``. Raises a ``TransferCancelledError`` if a monitor is passed and its ``cancelled`` property becomes set to ``True``. Args: data_provider (Callable, SynchronousFile, AsyncFile): A function returning the data to upload or a file object. File objects must be opened in binary mode (``mode="r+b"``). Callables returning a path string, Path, async iterable or aiofiles open binary file object allow the file data to be read in an asynchronous and lazy way (without reading the entire file into memory). Returning a synchronous iterable or standard open binary file object will still allow the data to be read lazily, but not asynchronously. The function will be called again if the upload fails due to a server timeout, in which case it must restart from the beginning. Callables receive two arguments: the total number of 429 "Too many request" errors that occurred, and the total number of server timeout exceptions that occurred, thus cleanup operations can be performed for retries if necessary. 
content_type (str): The content MIME type of the file, e.g. "image/png". Defaults to "application/octet-stream", corresponding to a generic binary file. Custom values are ignored if encrypt is ``True``. filename (str, optional): The file's original name. encrypt (bool): If the file's content should be encrypted, necessary for files that will be sent to encrypted rooms. Defaults to ``False``. monitor (TransferMonitor, optional): If a ``TransferMonitor`` object is passed, it will be updated by this function while uploading. From this object, statistics such as currently transferred bytes or estimated remaining time can be gathered while the upload is running as a task; it also allows for pausing and cancelling. filesize (int, optional): Size in bytes for the file to transfer. If left as ``None``, some servers might refuse the upload. It's common to use this alongside :py:meth:`room_send`. An example of uploading a plain text file follows, but the principle is the same for media, you just need to add an additional "info" key to the content. See `the Matrix client-server spec `_ for more details. Example: >>> file_stat = await aiofiles.os.stat("sample.py") >>> async with aiofiles.open("sample.py", "r+b") as f: >>> resp, maybe_keys = await client.upload( ... f, ... content_type="text/plain", ... filename="hello.py", ... filesize=file_stat.st_size() ... ) >>> await client.room_send( ... room_id="!myfaveroom:example.org", ... message_type="m.room.message", ... content = { ... "msgtype": "m.file", ... "url": resp.content_uri, ... "body": "descriptive title (like the filename)" ... } ... 
) """ http_method, path, _ = Api.upload(self.access_token, filename) decryption_dict: Dict[str, Any] = {} initial_file_pos = 0 async def provider(got_429, got_timeouts): nonlocal initial_file_pos if monitor and (got_429 or got_timeouts): # We have to restart from scratch monitor.transferred = 0 if isinstance(data_provider, Callable): data = data_provider(got_429, got_timeouts) elif isinstance(data_provider, SynchronousFile): if got_429 or got_timeouts: data_provider.seek(initial_file_pos) else: initial_file_pos = data_provider.tell() data = data_provider elif isinstance(data_provider, AsyncFile): if got_429 or got_timeouts: await data_provider.seek(initial_file_pos) else: initial_file_pos = await data_provider.tell() data = data_provider else: raise TypeError( f"data_provider type {type(data_provider)} " "is not of a usable type " f"(Callable, {SynchronousFile}, {AsyncFile})" ) if encrypt: return self._encrypted_data_generator( data, decryption_dict, monitor, ) return self._plain_data_generator(data, monitor) response = await self._send( UploadResponse, http_method, path, data_provider=provider, content_type="application/octet-stream" if encrypt else content_type, trace_context=monitor, timeout=0, content_length=filesize, ) # After the upload finished and we get the response above, if encrypt # is True, decryption_dict will have been updated from inside the # self._encrypted_data_generator(). return (response, decryption_dict if encrypt else None) @client_session async def download( self, mxc: Optional[str] = None, filename: Optional[str] = None, allow_remote: bool = True, server_name: Optional[str] = None, media_id: Optional[str] = None, save_to: Optional[os.PathLike] = None, ) -> Union[DiskDownloadResponse, MemoryDownloadResponse, DownloadError]: """Get the content of a file from the content repository. This method ignores `AsyncClient.config.request_timeout` and uses `0`. Calls receive_response() to update the client state if necessary. 
Returns either a `MemoryDownloadResponse` or `DiskDownloadResponse` if the request was successful or a `DownloadError` if there was an error with the request. The parameters `server_name` and `media_id` are deprecated and will be removed in a future release. Use `mxc` instead. Args: mxc (str, optional): The mxc:// URI. filename (str, optional): A filename to be returned in the response by the server. If None (default), the original name of the file will be returned instead, if there is one. allow_remote (bool): Indicates to the server that it should not attempt to fetch the media if it is deemed remote. This is to prevent routing loops where the server contacts itself. server_name (str, optional): [deprecated] The server name from the mxc:// URI. media_id (str, optional): [deprecated] The media ID from the mxc:// URI. save_to (PathLike, optional): If set, the downloaded file will be saved to this path, instead of being saved in-memory. """ # TODO: support TransferMonitor if mxc is None: if server_name is None or media_id is None: # Too few parameters are passed. raise TypeError( "Either `mxc` or both the `server_name` and `media_id` are required" ) if server_name is not None or media_id is not None: # Deprecated parameters are passed. warnings.warn( "The parameters `server_name` and `media_id` are deprecated " "and will be removed in a future release. Use `mxc` instead", DeprecationWarning, ) else: if server_name is not None or media_id is not None: # Potentially clashing parameters are passed. raise TypeError( "The parameters `server_name` and `media_id` are deprecated " "and will be removed in a future release. 
Use `mxc` instead" ) else: # `mxc` is passed; expected behavior url = urlparse(mxc) server_name = url.netloc media_id = url.path.replace("/", "") http_method, path = Api.download( server_name, media_id, filename, allow_remote, ) response_class = MemoryDownloadResponse if save_to is not None: response_class = DiskDownloadResponse return await self._send( response_class, http_method, path, timeout=0, save_to=save_to, ) @client_session async def thumbnail( self, server_name: str, media_id: str, width: int, height: int, method: ResizingMethod = ResizingMethod.scale, allow_remote: bool = True, ) -> Union[ThumbnailResponse, ThumbnailError]: """Get the thumbnail of a file from the content repository. The actual thumbnail may be larger than the size specified. This method ignores `AsyncClient.config.request_timeout` and uses `0`. Calls receive_response() to update the client state if necessary. Returns either a `ThumbnailResponse` if the request was successful or a `ThumbnailError` if there was an error with the request. Args: server_name (str): The server name from the mxc:// URI. media_id (str): The media ID from the mxc:// URI. width (int): The desired width of the thumbnail. height (int): The desired height of the thumbnail. method (ResizingMethod): The desired resizing method. allow_remote (bool): Indicates to the server that it should not attempt to fetch the media if it is deemed remote. This is to prevent routing loops where the server contacts itself. """ http_method, path = Api.thumbnail( server_name, media_id, width, height, method, allow_remote ) return await self._send( ThumbnailResponse, http_method, path, timeout=0, ) @client_session async def get_profile( self, user_id: Optional[str] = None ) -> Union[ProfileGetResponse, ProfileGetError]: """Get a user's combined profile information. This queries the display name and avatar matrix content URI of a user from the server. Additional profile information may be present. 
        The currently logged in user is queried if no user is specified.

        Calls receive_response() to update the client state if necessary.

        Returns either a `ProfileGetResponse` if the request was successful or
        a `ProfileGetError` if there was an error with the request.

        Args:
            user_id (str): User id of the user to get the profile for.
        """
        method, path = Api.profile_get(
            user_id or self.user_id, access_token=self.access_token or None
        )

        return await self._send(
            ProfileGetResponse,
            method,
            path,
        )

    @client_session
    async def get_presence(
        self, user_id: str
    ) -> Union[PresenceGetResponse, PresenceGetError]:
        """Get a user's presence state.

        This queries the presence state of a user from the server.

        Calls receive_response() to update the client state if necessary.

        Returns either a `PresenceGetResponse` if the request was successful or
        a `PresenceGetError` if there was an error with the request.

        Args:
            user_id (str): User id of the user to get the presence state for.
        """
        method, path = Api.get_presence(self.access_token, user_id)

        return await self._send(
            PresenceGetResponse, method, path, response_data=(user_id,)
        )

    @client_session
    async def set_presence(
        self, presence: str, status_msg: Optional[str] = None
    ) -> Union[PresenceSetResponse, PresenceSetError]:
        """Set our user's presence state.

        This tells the server to set presence state of the currently logged
        in user to the supplied string.

        Calls receive_response() to update the client state if necessary.

        Returns either a `PresenceSetResponse` if the request was successful or
        a `PresenceSetError` if there was an error with the request.

        Args:
            presence (str): The new presence state. One of: ["online",
                "offline", "unavailable"]
            status_msg (str, optional): The status message to attach to this
                state.
""" method, path, data = Api.set_presence( self.access_token, self.user_id, presence, status_msg ) resp = await self._send(PresenceSetResponse, method, path, data) if isinstance(resp, PresenceSetResponse): self._presence = presence return resp @client_session async def get_displayname( self, user_id: Optional[str] = None ) -> _ProfileGetDisplayNameT: """Get a user's display name. This queries the display name of a user from the server. The currently logged in user is queried if no user is specified. Calls receive_response() to update the client state if necessary. Returns either a `ProfileGetDisplayNameResponse` if the request was successful or a `ProfileGetDisplayNameError` if there was an error with the request. Args: user_id (str): User id of the user to get the display name for. """ method, path = Api.profile_get_displayname( user_id or self.user_id, access_token=self.access_token or None ) return await self._send( ProfileGetDisplayNameResponse, method, path, ) @logged_in_async async def set_displayname(self, displayname: str) -> _ProfileSetDisplayNameT: """Set user's display name. This tells the server to set display name of the currently logged in user to the supplied string. Calls receive_response() to update the client state if necessary. Returns either a `ProfileSetDisplayNameResponse` if the request was successful or a `ProfileSetDisplayNameError` if there was an error with the request. Args: displayname (str): Display name to set. """ method, path, data = Api.profile_set_displayname( self.access_token, self.user_id, displayname ) return await self._send( ProfileSetDisplayNameResponse, method, path, data, ) @client_session async def get_avatar( self, user_id: Optional[str] = None ) -> Union[ProfileGetAvatarResponse, ProfileGetAvatarError]: """Get a user's avatar URL. This queries the avatar matrix content URI of a user from the server. The currently logged in user is queried if no user is specified. 
Calls receive_response() to update the client state if necessary. Returns either a `ProfileGetAvatarResponse` if the request was successful or a `ProfileGetAvatarError` if there was an error with the request. Args: user_id (str): User id of the user to get the avatar for. """ method, path = Api.profile_get_avatar( user_id or self.user_id, access_token=self.access_token or None ) return await self._send( ProfileGetAvatarResponse, method, path, ) @logged_in_async async def set_avatar( self, avatar_url: str ) -> Union[ProfileSetAvatarResponse, ProfileSetAvatarError]: """Set the user's avatar URL. This tells the server to set the avatar of the currently logged in user to supplied matrix content URI. Calls receive_response() to update the client state if necessary. Returns either a `ProfileSetAvatarResponse` if the request was successful or a `ProfileSetAvatarError` if there was an error with the request. Args: avatar_url (str): matrix content URI of the avatar to set. """ method, path, data = Api.profile_set_avatar( self.access_token, self.user_id, avatar_url ) return await self._send( ProfileSetAvatarResponse, method, path, data, ) @logged_in_async async def get_openid_token( self, user_id: str ) -> Union[GetOpenIDTokenResponse, GetOpenIDTokenError]: """Gets an OpenID token object that the requester may supply to another service to verify their identity in matrix. Returns either a `GetOpenIDTokenResponse` if the request was successful or a `GetOpenIDTokenError` if there was an error with the request. 
Args: user_id (str): The user who requested the OpenID token """ method, path, data = Api.get_openid_token(self.access_token, user_id) return await self._send(GetOpenIDTokenResponse, method, path, data) @logged_in_async async def upload_filter( self, user_id: Optional[str] = None, event_fields: Optional[List[str]] = None, event_format: EventFormat = EventFormat.client, presence: Optional[Dict[str, Any]] = None, account_data: Optional[Dict[str, Any]] = None, room: Optional[Dict[str, Any]] = None, ) -> Union[UploadFilterResponse, UploadFilterError]: """Upload a new filter definition to the homeserver. Returns either a `UploadFilterResponse` if the request was successful or a `UploadFilterError` if there was an error with the request. The filter ID from the successful responses can be used for the ``AsyncClient.sync()``, ``AsyncClient.sync_forever()`` and ``AsyncClient.room_messages()`` methods. Args: user_id (Optional[str]): ID of the user uploading the filter. If not provider, the current logged in user's ID is used. event_fields (Optional[List[str]]): List of event fields to include. If this list is absent then all fields are included. The entries may include '.' characters to indicate sub-fields. A literal '.' character in a field name may be escaped using a '\'. event_format (EventFormat): The format to use for events. presence (Dict[str, Any]): The presence updates to include. The dict corresponds to the `EventFilter` type described in https://matrix.org/docs/spec/client_server/latest#id240 account_data (Dict[str, Any]): The user account data that isn't associated with rooms to include. The dict corresponds to the `EventFilter` type described in https://matrix.org/docs/spec/client_server/latest#id240 room (Dict[str, Any]): Filters to be applied to room data. 
The dict corresponds to the `RoomFilter` type described in https://matrix.org/docs/spec/client_server/latest#id240 """ method, path, data = Api.upload_filter( self.access_token, user_id or self.user_id, event_fields, event_format, presence, account_data, room, ) return await self._send(UploadFilterResponse, method, path, data) async def whoami(self) -> Union[WhoamiResponse, WhoamiError]: """Get information about the logged-in user from the homeserver. Returns either a `WhoamiResponse` if the request was successful or a `WhoamiError` if there was an error with the request. On a successful response, the client's state will be updated with the user_id and device_id returned, if different from the current state. """ if self.access_token is None: raise ValueError("No access_token is set.") method, path = Api.whoami(self.access_token) return await self._send(WhoamiResponse, method, path) @logged_in_async async def set_pushrule( self, scope: str, kind: PushRuleKind, rule_id: str, before: Optional[str] = None, after: Optional[str] = None, actions: Sequence[PushAction] = (), conditions: Optional[Sequence[PushCondition]] = None, pattern: Optional[str] = None, ) -> Union[SetPushRuleResponse, SetPushRuleError]: """Create or modify an existing push rule. Returns either a `SetPushRuleResponse` if the request was successful or a `SetPushRuleError` if there was an error with the request. Args: scope (str): The scope of this rule, e.g. ``"global"``. Homeservers currently only process ``global`` rules for event matching, while ``device`` rules are a planned feature. It is up to clients to interpret any other scope name. kind (PushRuleKind): The kind of rule. rule_id (str): The identifier of the rule. Must be unique within its scope and kind. For rules of ``room`` kind, this is the room ID to match for. For rules of ``sender`` kind, this is the user ID to match. before (Optional[str]): Position this rule before the one matching the given rule ID. 
The rule ID cannot belong to a predefined server rule. ``before`` and ``after`` cannot be both specified. after (Optional[str]): Position this rule after the one matching the given rule ID. The rule ID cannot belong to a predefined server rule. ``before`` and ``after`` cannot be both specified. actions (Sequence[PushAction]): Actions to perform when the conditions for this rule are met. The given actions replace the existing ones. conditions (Sequence[PushCondition]): Event conditions that must hold true for the rule to apply to that event. A rule with no conditions always hold true. Only applicable to ``underride`` and ``override`` rules. pattern (Optional[str]): Glob-style pattern to match against for the event's content. Only applicable to ``content`` rules. Example: >>> client.set_pushrule( ... scope = "global", ... kind = PushRuleKind.room, ... rule_id = "!foo123:example.org", ... actions = [PushNotify(), PushSetTweak("sound", "default")], ... ) ... ... client.set_pushrule( ... scope = "global", ... kind = PushRuleKind.override, ... rule_id = "silence_large_rooms", ... actions = [], ... conditions = [PushRoomMemberCount(10, ">")], ... ) ... ... client.set_pushrule( ... scope = "global", ... kind = PushRuleKind.content, ... rule_id = "highlight_messages_containing_nio_word", ... actions = [PushNotify(), PushSetTweak("highlight", True)], ... pattern = "nio" ... ) """ method, path, data = Api.set_pushrule( self.access_token, scope, kind, rule_id, before, after, actions, conditions, pattern, ) return await self._send(SetPushRuleResponse, method, path, data) @logged_in_async async def delete_pushrule( self, scope: str, kind: PushRuleKind, rule_id: str, ) -> Union[DeletePushRuleResponse, DeletePushRuleError]: """Delete an existing push rule. Returns either a `DeletePushRuleResponse` if the request was successful or a `DeletePushRuleError` if there was an error with the request. Args: scope (str): The scope of this rule, e.g. ``"global"``. 
Homeservers currently only process ``global`` rules for event matching, while ``device`` rules are a planned feature. It is up to clients to interpret any other scope name. kind (PushRuleKind): The kind of rule. rule_id (str): The identifier of the rule. Must be unique within its scope and kind. """ method, path = Api.delete_pushrule( self.access_token, scope, kind, rule_id, ) return await self._send(DeletePushRuleResponse, method, path) @logged_in_async async def enable_pushrule( self, scope: str, kind: PushRuleKind, rule_id: str, enable: bool, ) -> Union[EnablePushRuleResponse, EnablePushRuleError]: """Enable or disable an existing push rule. Returns either a `EnablePushRuleResponse` if the request was successful or a `EnablePushRuleError` if there was an error with the request. Args: scope (str): The scope of this rule, e.g. ``"global"``. Homeservers currently only process ``global`` rules for event matching, while ``device`` rules are a planned feature. It is up to clients to interpret any other scope name. kind (PushRuleKind): The kind of rule. rule_id (str): The identifier of the rule. Must be unique within its scope and kind. enable (bool): Whether to enable or disable this rule. """ method, path, data = Api.enable_pushrule( self.access_token, scope, kind, rule_id, enable, ) return await self._send(EnablePushRuleResponse, method, path, data) @logged_in_async async def set_pushrule_actions( self, scope: str, kind: PushRuleKind, rule_id: str, actions: Sequence[PushAction], ) -> Union[SetPushRuleActionsResponse, SetPushRuleActionsError]: """Set the actions for an existing built-in or user-created push rule. Unlike ``set_pushrule``, this method can edit built-in server rules. Returns the HTTP method, HTTP path and data for the request. Returns either a `SetPushRuleActionsResponse` if the request was successful or a `SetPushRuleActionsError` if there was an error with the request. Args: scope (str): The scope of this rule, e.g. ``"global"``. 
Homeservers currently only process ``global`` rules for event matching, while ``device`` rules are a planned feature. It is up to clients to interpret any other scope name. kind (PushRuleKind): The kind of rule. rule_id (str): The identifier of the rule. Must be unique within its scope and kind. actions (Sequence[PushAction]): Actions to perform when the conditions for this rule are met. The given actions replace the existing ones. """ method, path, data = Api.set_pushrule_actions( self.access_token, scope, kind, rule_id, actions, ) return await self._send(SetPushRuleActionsResponse, method, path, data) @logged_in_async async def room_update_aliases( self, room_id: str, canonical_alias: Union[str, None] = None, alt_aliases: Optional[List[str]] = None, ): """Update the aliases of an existing room. This method will not transfer aliases from one room to another! Remove the old alias before trying to assign it again Args: room_id (str): Room-ID of the room to assign / remove aliases from canonical_alias (str, None): The main alias of the room alt_aliases (list[str], None): List of alternative aliases for the room If None is passed as canonical_alias or alt_aliases the existing aliases will be removed without assigning new aliases. 
""" alt_aliases = alt_aliases or [] # Concentrate new aliases if canonical_alias is None: new_aliases = [] else: new_aliases = alt_aliases + [canonical_alias] # Get current aliases current_aliases = [] current_alias_event = await self.room_get_state_event( room_id, "m.room.canonical_alias" ) if isinstance(current_alias_event, RoomGetStateEventResponse): current_aliases.append(current_alias_event.content["alias"]) if "alt_aliases" in current_alias_event.content: alt_aliases = current_alias_event.content["alt_aliases"] current_aliases.extend(alt_aliases) # Unregister old aliases for alias in current_aliases: if alias not in new_aliases: if isinstance( await self.room_delete_alias(alias), RoomDeleteAliasError ): return RoomUpdateAliasError(f"Could not delete alias {alias}") # Register new aliases for alias in new_aliases: if isinstance( await self.room_put_alias(alias, room_id), RoomDeleteAliasError ): return RoomUpdateAliasError(f"Could not put alias {alias}") # Send m.room.canonical_alias event put_alias_event = await self.room_put_state( room_id, "m.room.canonical_alias", {"alias": canonical_alias, "alt_aliases": alt_aliases}, ) if isinstance(put_alias_event, RoomPutStateError): return RoomUpdateAliasError("Failed to put m.room.canonical_alias") return RoomUpdateAliasResponse() @logged_in_async async def room_upgrade( self, old_room_id: str, new_room_version: str, copy_events: list = [ "m.room.server_acl", "m.room.encryption", "m.room.name", "m.room.avatar", "m.room.topic", "m.room.guest_access", "m.room.history_visibility", "m.room.join_rules", "m.room.power_levels", ], room_upgrade_message: str = "This room has been replaced", room_power_level_overwrite: Optional[Dict[str, Any]] = None, ) -> Union[RoomUpgradeResponse, RoomUpgradeError]: """Upgrade an existing room. 
Args: old_room_id (str): Room-ID of the old room new_room_version (str): The new room version copy_events (list): List of state-events to copy from the old room Defaults m.room.server_acl, m.room.encryption, m.room.name, m.room.avatar, m.room.topic, m.room.guest_access, m.room.history_visibility, m.room.join_rules, m.room.power_levels room_upgrade_message (str): Message inside the tombstone-event room_power_level_overwrite (dict): A ``m.room.power_levels content`` dict to override the default. The dict will be applied on top of the generated ``m.room.power_levels`` event before it is sent to the room. """ # Check if we are allowed to tombstone a room if not await self.has_event_permission(old_room_id, "m.room.tombstone"): return RoomUpgradeError("Not allowed to upgrade room") # Get state events for the old room old_room_state_events = await self.room_get_state(old_room_id) if isinstance(old_room_state_events, RoomGetStateError): return RoomUpgradeError("Failed to get room events") # Get initial_state and power_level old_room_power_levels = None new_room_initial_state = [] for event in old_room_state_events.events: if ( event["type"] in copy_events and not event["type"] == "m.room.power_levels" ): new_room_initial_state.append(event) if event["type"] == "m.room.power_levels": old_room_power_levels = event["content"] # Get last known event from the old room old_room_event = await self.room_messages( start="", room_id=old_room_id, limit=1 ) if isinstance(old_room_event, RoomMessagesError): return RoomUpgradeError("Failed to get last known event") old_room_last_event = old_room_event.chunk[0] # Overwrite power level if a new power level was passed if room_power_level_overwrite is not None: old_room_power_levels = room_power_level_overwrite # Create new room new_room = await self.room_create( room_version=new_room_version, power_level_override=old_room_power_levels, initial_state=new_room_initial_state, predecessor={ "event_id": old_room_last_event.event_id, "room_id": 
old_room_id, }, ) if isinstance(new_room, RoomCreateError): return RoomUpgradeError("Room creation failed") # Send tombstone event to the old room old_room_tombstone = await self.room_put_state( old_room_id, "m.room.tombstone", {"body": room_upgrade_message, "replacement_room": new_room.room_id}, ) if isinstance(old_room_tombstone, RoomPutStateError): return RoomUpgradeError("Failed to put m.room.tombstone") # Get the old rooms aliases old_room_alias = await self.room_get_state_event( old_room_id, "m.room.canonical_alias" ) if isinstance(old_room_alias, RoomGetStateEventResponse): aliases = [old_room_alias.content["alias"]] if "alt_aliases" in old_room_alias.content: alt_aliases = old_room_alias.content["alt_aliases"] aliases.extend(alt_aliases) else: alt_aliases = [] # Remove the old aliases if isinstance( await self.room_update_aliases(old_room_id), RoomDeleteAliasError ): return RoomUpgradeError("Could update the old rooms aliases") # Assign new aliases if isinstance( await self.room_update_aliases( new_room.room_id, canonical_alias=old_room_alias.content["alias"], alt_aliases=alt_aliases, ), RoomDeleteAliasError, ): return RoomUpgradeError("Could update the new rooms aliases") return RoomUpgradeResponse(new_room.room_id) @logged_in_async async def update_room_topic( self, room_id: str, topic: str, ) -> Union[RoomPutStateResponse, RoomPutStateError]: """Update the room topic Returns either a `RoomPutStateResponse` if the request was successful or a `RoomPutStateError` if there was an error with the request. If you wish to send a `state_key` along with the request, use the `room_put_state` method instead. Args: room_id (str): The room id of the room to be updated. topic (str): The new room topic. 
""" return await self.room_put_state( room_id, event_type="m.room.topic", content={"topic": topic}, ) @logged_in_async async def has_event_permission( self, room_id: str, event_name: str, event_type: str = "event" ) -> Union[bool, ErrorResponse]: who_am_i = await self.whoami() power_levels = await self.room_get_state_event(room_id, "m.room.power_levels") try: user_power_level = power_levels.content["users"][who_am_i.user_id] except KeyError: user_power_level = power_levels.content["users_default"] else: return ErrorResponse("Couldn't get user power levels") try: event_power_level = power_levels.content["events"][event_name] except KeyError: if event_type == "event": event_power_level = power_levels.content["events_default"] elif event_type == "state": event_power_level = power_levels.content["state_default"] else: return ErrorResponse(f"event_type {event_type} unknown") else: return ErrorResponse("Couldn't get event power levels") return user_power_level >= event_power_level async def has_permission( self, room_id: str, permission_type: str ) -> Union[bool, ErrorResponse]: who_am_i = await self.whoami() power_levels = await self.room_get_state_event(room_id, "m.room.power_levels") try: user_power_level = power_levels.content["users"][who_am_i.user_id] except KeyError: user_power_level = power_levels.content["users_default"] else: return ErrorResponse("Couldn't get user power levels") try: permission_power_level = power_levels.content[permission_type] except KeyError: return ErrorResponse(f"permission_type {permission_type} unknown") return user_power_level >= permission_power_level matrix-nio-0.24.0/nio/client/base_client.py000066400000000000000000001464631455215747700206100ustar00rootroot00000000000000# Copyright © 2018, 2019 Damir Jelić # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. 
# # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. from __future__ import annotations import inspect import logging from collections import defaultdict from dataclasses import dataclass, field from functools import wraps from typing import ( TYPE_CHECKING, Any, Awaitable, Callable, Coroutine, Dict, List, Optional, Set, Tuple, Type, Union, ) from ..crypto import ENCRYPTION_ENABLED, DeviceStore, OutgoingKeyRequest from ..events import ( AccountDataEvent, BadEvent, BadEventType, EphemeralEvent, Event, MegolmEvent, PresenceEvent, RoomEncryptionEvent, RoomKeyRequest, RoomKeyRequestCancellation, RoomMemberEvent, ToDeviceEvent, UnknownBadEvent, ) from ..exceptions import EncryptionError, LocalProtocolError, MembersSyncError from ..responses import ( ErrorResponse, JoinedMembersResponse, KeysClaimResponse, KeysQueryResponse, KeysUploadResponse, LoginResponse, LogoutResponse, PresenceGetResponse, RegisterResponse, Response, RoomContextResponse, RoomForgetResponse, RoomGetEventResponse, RoomInfo, RoomKeyRequestResponse, RoomMessagesResponse, ShareGroupSessionResponse, SyncResponse, ToDeviceResponse, WhoamiResponse, ) from ..rooms import MatrixInvitedRoom, MatrixRoom if ENCRYPTION_ENABLED: from ..crypto import Olm from ..store import DefaultStore, MatrixStore, SqliteMemoryStore if TYPE_CHECKING: from ..crypto import OlmDevice, Sas from ..event_builders import ToDeviceMessage logger = logging.getLogger(__name__) def logged_in(func): @wraps(func) def wrapper(self, *args, **kwargs): if not self.logged_in: raise LocalProtocolError("Not 
logged in.") return func(self, *args, **kwargs) return wrapper def logged_in_async(func): @wraps(func) async def wrapper(self, *args, **kwargs): if not self.logged_in: raise LocalProtocolError("Not logged in.") return await func(self, *args, **kwargs) return wrapper def store_loaded(fn): @wraps(fn) def inner(self, *args, **kwargs): if not self.store or not self.olm: raise LocalProtocolError("Matrix store and olm account is not loaded.") return fn(self, *args, **kwargs) return inner @dataclass class ClientCallback: """nio internal callback class.""" func: Union[Callable[..., None], Callable[..., Awaitable[None]]] = field() filter: Union[Tuple[Type, ...], Type, None] = None async def execute(self, event, room: Optional[MatrixRoom] = None) -> None: if self.filter is None or isinstance(event, self.filter): result = self.func(room, event) if room else self.func(event) if inspect.isawaitable(result): await result @dataclass(frozen=True) class ClientConfig: """nio client configuration. Attributes: store (MatrixStore, optional): The store that should be used for state storage. store_name (str, optional): Filename that should be used for the store. encryption_enabled (bool, optional): Should end to end encryption be used. pickle_key (str, optional): A passphrase that will be used to encrypt end to end encryption keys. store_sync_tokens (bool, optional): Should the client store and restore sync tokens. custom_headers (Dict[str, str]): A dictionary of custom http headers. Raises an ImportWarning if encryption_enabled is true but the dependencies for encryption aren't installed. 
""" store: Optional[Type[MatrixStore]] = DefaultStore if ENCRYPTION_ENABLED else None encryption_enabled: bool = ENCRYPTION_ENABLED store_name: str = "" pickle_key: str = "DEFAULT_KEY" store_sync_tokens: bool = False custom_headers: Optional[Dict[str, str]] = None def __post_init__(self): if not ENCRYPTION_ENABLED and self.encryption_enabled: raise ImportWarning( "Encryption is enabled in the client " "configuration but dependencies for E2E " "encryption aren't installed." ) class Client: """Matrix no-IO client. Attributes: access_token (str): Token authorizing the user with the server. Is set after logging in. user_id (str): The full mxid of the current user. This is set after logging in. next_batch (str): The current sync token. rooms (Dict[str, MatrixRoom)): A dictionary containing a mapping of room ids to MatrixRoom objects. All the rooms a user is joined to will be here after a sync. invited_rooms (Dict[str, MatrixInvitedRoom)): A dictionary containing a mapping of room ids to MatrixInvitedRoom objects. All the rooms a user is invited to will be here after a sync. Args: user (str): User that will be used to log in. device_id (str, optional): An unique identifier that distinguishes this client instance. If not set the server will provide one after log in. store_dir (str, optional): The directory that should be used for state storage. config (ClientConfig, optional): Configuration for the client. """ def __init__( self, user: str, device_id: Optional[str] = None, store_path: Optional[str] = "", config: Optional[ClientConfig] = None, ): self.user = user self.device_id = device_id self.store_path = store_path self.olm: Optional[Olm] = None self.store: Optional[MatrixStore] = None self.config = config or ClientConfig() self.user_id = "" # TODO Turn this into a optional string. 
self.access_token: str = "" self.next_batch = "" self.loaded_sync_token = "" self.rooms: Dict[str, MatrixRoom] = {} self.invited_rooms: Dict[str, MatrixInvitedRoom] = {} self.encrypted_rooms: Set[str] = set() self.event_callbacks: List[ClientCallback] = [] self.ephemeral_callbacks: List[ClientCallback] = [] self.to_device_callbacks: List[ClientCallback] = [] self.presence_callbacks: List[ClientCallback] = [] self.global_account_data_callbacks: List[ClientCallback] = [] self.room_account_data_callbacks: List[ClientCallback] = [] @property def logged_in(self) -> bool: """Check if we are logged in. Returns True if the client is logged in to the server, False otherwise. """ return bool(self.access_token) @property # type: ignore @store_loaded def device_store(self) -> DeviceStore: """Store containing known devices. Returns a ``DeviceStore`` holding all known olm devices. """ assert self.olm return self.olm.device_store @property # type: ignore @store_loaded def olm_account_shared(self) -> bool: """Check if the clients Olm account is shared with the server. Returns True if the Olm account is shared, False otherwise. """ assert self.olm return self.olm.account.shared @property def users_for_key_query(self) -> Set[str]: """Users for whom we should make a key query.""" if not self.olm: return set() return self.olm.users_for_key_query @property def should_upload_keys(self) -> bool: """Check if the client should upload encryption keys. Returns True if encryption keys need to be uploaded, false otherwise. """ if not self.olm: return False return self.olm.should_upload_keys @property def should_query_keys(self) -> bool: """Check if the client should make a key query call to the server. Returns True if a key query is necessary, false otherwise. """ if not self.olm: return False return self.olm.should_query_keys @property def should_claim_keys(self) -> bool: """Check if the client should claim one-time keys for some users. 
This should be periodically checked and if true a keys claim request should be made with the return value of a `get_users_for_key_claiming()` call as the payload. Keys need to be claimed for various reasons. Every time we need to send an encrypted message to a device and we don't have a working Olm session with them we need to claim one-time keys to create a new Olm session. Returns True if a key query is necessary, false otherwise. """ if not self.olm: return False return bool(self.olm.wedged_devices or self.olm.key_request_devices_no_session) @property def outgoing_key_requests(self) -> Dict[str, OutgoingKeyRequest]: """Our active key requests that we made.""" return self.olm.outgoing_key_requests if self.olm else {} @property def key_verifications(self) -> Dict[str, Sas]: """Key verifications that the client is participating in.""" return self.olm.key_verifications if self.olm else {} @property def outgoing_to_device_messages(self) -> List[ToDeviceMessage]: """To-device messages that we need to send out.""" return self.olm.outgoing_to_device_messages if self.olm else [] def get_active_sas(self, user_id: str, device_id: str) -> Optional[Sas]: """Find a non-canceled SAS verification object for the provided user. Args: user_id (str): The user for which we should find a SAS verification object. device_id (str): The device_id for which we should find the SAS verification object. Returns the object if it's found, otherwise None. """ if not self.olm: return None return self.olm.get_active_sas(user_id, device_id) def load_store(self): """Load the session store and olm account. If the SqliteMemoryStore is set as the store a store path isn't required, if no store path is provided and a store class that requires a path is used this method will be a no op. This method does nothing if the store is already loaded. Raises LocalProtocolError if a store class, user_id and device_id are not set. 
""" if self.store: return if not self.user_id: raise LocalProtocolError("User id is not set") if not self.device_id: raise LocalProtocolError("Device id is not set") if not self.config.store: raise LocalProtocolError("No store class was provided in the config.") if self.config.encryption_enabled: if self.config.store is SqliteMemoryStore: self.store = self.config.store( self.user_id, self.device_id, self.config.pickle_key, ) else: if not self.store_path: return self.store = self.config.store( self.user_id, self.device_id, self.store_path, self.config.pickle_key, self.config.store_name, ) assert self.store self.olm = Olm(self.user_id, self.device_id, self.store) self.encrypted_rooms = self.store.load_encrypted_rooms() if self.config.store_sync_tokens: self.loaded_sync_token = self.store.load_sync_token() def restore_login( self, user_id: str, device_id: str, access_token: str, ): """Restore a previous login to the homeserver. Args: user_id (str): The full mxid of the current user. device_id (str): An unique identifier that distinguishes this client instance. access_token (str): Token authorizing the user with the server. """ self.user_id = user_id self.device_id = device_id self.access_token = access_token if ENCRYPTION_ENABLED: self.load_store() def room_contains_unverified(self, room_id: str) -> bool: """Check if a room contains unverified devices. Args: room_id (str): Room id of the room that should be checked. Returns True if the room contains unverified devices, false otherwise. Returns False if no Olm session is loaded or if the room isn't encrypted. 
""" try: room = self.rooms[room_id] except KeyError: raise LocalProtocolError(f"No room found with room id {room_id}") if not room.encrypted: return False if not self.olm: return False for user in room.users: if not self.olm.user_fully_verified(user): return True return False def _invalidate_session_for_member_event(self, room_id: str): if not self.olm: return self.invalidate_outbound_session(room_id) @store_loaded def invalidate_outbound_session(self, room_id: str): """Explicitly remove encryption keys for a room. Args: room_id (str): Room id for the room the encryption keys should be removed. """ assert self.olm session = self.olm.outbound_group_sessions.pop(room_id, None) # There is no need to invalidate the session if it was never # shared, put it back where it was. if session and not session.shared: self.olm.outbound_group_sessions[room_id] = session elif session: logger.info(f"Invalidating session for {room_id}") def _invalidate_outbound_sessions(self, device: OlmDevice) -> None: assert self.olm for room in self.rooms.values(): if device.user_id in room.users: self.invalidate_outbound_session(room.room_id) @store_loaded def verify_device(self, device: OlmDevice) -> bool: """Mark a device as verified. A device needs to be either trusted/ignored or blacklisted to either share room encryption keys with it or not. This method adds the device to the trusted devices and enables sharing room encryption keys with it. Args: device (OlmDevice): The device which should be added to the trust list. Returns true if the device was verified, false if it was already verified. """ assert self.olm changed = self.olm.verify_device(device) if changed: self._invalidate_outbound_sessions(device) return changed @store_loaded def unverify_device(self, device: OlmDevice) -> bool: """Unmark a device as verified. This method removes the device from the trusted devices and disables sharing room encryption keys with it. 
It also invalidates any encryption keys for rooms that the device takes part of. Args: device (OlmDevice): The device which should be added to the trust list. Returns true if the device was unverified, false if it was already unverified. """ assert self.olm changed = self.olm.unverify_device(device) if changed: self._invalidate_outbound_sessions(device) return changed @store_loaded def blacklist_device(self, device: OlmDevice) -> bool: """Mark a device as blacklisted. Devices on the blacklist will not receive room encryption keys and therefore won't be able to decrypt messages coming from this client. Args: device (OlmDevice): The device which should be added to the blacklist. Returns true if the device was added, false if it was on the blacklist already. """ assert self.olm changed = self.olm.blacklist_device(device) if changed: self._invalidate_outbound_sessions(device) return changed @store_loaded def unblacklist_device(self, device: OlmDevice) -> bool: """Unmark a device as blacklisted. Args: device (OlmDevice): The device which should be removed from the blacklist. Returns true if the device was removed, false if it wasn't on the blacklist and no removal happened. """ assert self.olm changed = self.olm.unblacklist_device(device) if changed: self._invalidate_outbound_sessions(device) return changed @store_loaded def ignore_device(self, device: OlmDevice) -> bool: """Mark a device as ignored. Ignored devices will still receive room encryption keys, despire not being verified. Args: device (OlmDevice): the device to ignore Returns true if device is ignored, or false if it is already on the list of ignored devices. """ assert self.olm changed = self.olm.ignore_device(device) if changed: self._invalidate_outbound_sessions(device) return changed @store_loaded def unignore_device(self, device: OlmDevice) -> bool: """Unmark a device as ignored. Args: device (OlmDevice): The device which should be removed from the list of ignored devices. 
        Returns true if the device was removed, false if it wasn't on the
        list and no removal happened.
        """
        assert self.olm
        changed = self.olm.unignore_device(device)

        # Keys shared while the device was ignored must be rotated now that
        # the device will no longer receive them.
        if changed:
            self._invalidate_outbound_sessions(device)

        return changed

    def _handle_register(
        self, response: Union[RegisterResponse, ErrorResponse]
    ) -> None:
        # Registering also logs us in: on success, persist the returned
        # credentials (restore_login also loads the crypto store when
        # encryption is enabled). On error, leave client state untouched.
        if isinstance(response, ErrorResponse):
            return

        self.restore_login(response.user_id, response.device_id, response.access_token)

    def _handle_login(self, response: Union[LoginResponse, ErrorResponse]) -> None:
        # Persist the credentials from a successful login; an ErrorResponse
        # is ignored and leaves the client logged out.
        if isinstance(response, ErrorResponse):
            return

        self.restore_login(response.user_id, response.device_id, response.access_token)

    def _handle_logout(self, response: Union[LogoutResponse, ErrorResponse]) -> None:
        # A successful logout invalidates the token server-side, so drop it
        # locally as well; this flips the `logged_in` property to False.
        if not isinstance(response, ErrorResponse):
            self.access_token = ""

    @store_loaded
    def decrypt_event(self, event: MegolmEvent) -> Union[Event, BadEventType]:
        """Try to decrypt an undecrypted megolm event.

        Args:
            event (MegolmEvent): Event that should be decrypted.

        Returns the decrypted event, raises EncryptionError if there was an
        error while decrypting.
""" if not isinstance(event, MegolmEvent): raise ValueError( "Invalid event, this function can only decrypt " "MegolmEvents" ) assert self.olm return self.olm.decrypt_megolm_event(event) def _handle_decrypt_to_device( self, to_device_event: ToDeviceEvent ) -> Optional[ToDeviceEvent]: if self.olm: return self.olm.handle_to_device_event(to_device_event) return None def _replace_decrypted_to_device( self, decrypted_events: List[Tuple[int, ToDeviceEvent]], response: SyncResponse, ): # Replace the encrypted to_device events with decrypted ones for decrypted_event in decrypted_events: index, event = decrypted_event response.to_device_events[index] = event def _run_to_device_callbacks(self, event: ToDeviceEvent): for cb in self.to_device_callbacks: if cb.filter is None or isinstance(event, cb.filter): cb.func(event) def _handle_to_device(self, response: SyncResponse): decrypted_to_device = [] for index, to_device_event in enumerate(response.to_device_events): decrypted_event = self._handle_decrypt_to_device(to_device_event) if decrypted_event: decrypted_to_device.append((index, decrypted_event)) to_device_event = decrypted_event # Do not pass room key request events to our user here. We don't # want to notify them about requests that get automatically handled # or canceled right away. 
if isinstance( to_device_event, (RoomKeyRequest, RoomKeyRequestCancellation) ): continue self._run_to_device_callbacks(to_device_event) self._replace_decrypted_to_device(decrypted_to_device, response) def _get_invited_room(self, room_id: str) -> MatrixInvitedRoom: if room_id not in self.invited_rooms: logger.info(f"New invited room {room_id}") self.invited_rooms[room_id] = MatrixInvitedRoom(room_id, self.user_id) return self.invited_rooms[room_id] def _handle_invited_rooms(self, response: SyncResponse): for room_id, info in response.rooms.invite.items(): room = self._get_invited_room(room_id) for event in info.invite_state: room.handle_event(event) for cb in self.event_callbacks: if cb.filter is None or isinstance(event, cb.filter): cb.func(room, event) def _handle_joined_state( self, room_id: str, join_info: RoomInfo, encrypted_rooms: Set[str] ): if room_id in self.invited_rooms: del self.invited_rooms[room_id] if room_id not in self.rooms: logger.info(f"New joined room {room_id}") self.rooms[room_id] = MatrixRoom( room_id, self.user_id, room_id in self.encrypted_rooms ) room = self.rooms[room_id] for event in join_info.state: if isinstance(event, RoomEncryptionEvent): encrypted_rooms.add(room_id) if isinstance(event, RoomMemberEvent): if room.handle_membership(event): self._invalidate_session_for_member_event(room_id) else: room.handle_event(event) if join_info.summary: room.update_summary(join_info.summary) if join_info.unread_notifications: room.update_unread_notifications(join_info.unread_notifications) def _handle_timeline_event( self, event: Union[Event, BadEventType], room_id: str, room: MatrixRoom, encrypted_rooms: Set[str], ) -> Optional[Union[Event, BadEventType]]: decrypted_event = None if isinstance(event, MegolmEvent) and self.olm: event.room_id = room_id decrypted_event = self.olm._decrypt_megolm_no_error(event) if decrypted_event: event = decrypted_event elif isinstance(event, RoomEncryptionEvent): encrypted_rooms.add(room_id) if isinstance(event, 
RoomMemberEvent): if room.handle_membership(event): self._invalidate_session_for_member_event(room_id) elif isinstance(event, (UnknownBadEvent, BadEvent)): pass else: room.handle_event(event) return decrypted_event def _handle_joined_rooms(self, response: SyncResponse): encrypted_rooms: Set[str] = set() for room_id, join_info in response.rooms.join.items(): self._handle_joined_state(room_id, join_info, encrypted_rooms) room = self.rooms[room_id] decrypted_events: List[Tuple[int, Union[Event, BadEventType]]] = [] for index, event in enumerate(join_info.timeline.events): decrypted_event = self._handle_timeline_event( event, room_id, room, encrypted_rooms ) if decrypted_event: event = decrypted_event decrypted_events.append((index, decrypted_event)) for cb in self.event_callbacks: if cb.filter is None or isinstance(event, cb.filter): cb.func(room, event) # Replace the Megolm events with decrypted ones for index, event in decrypted_events: join_info.timeline.events[index] = event for event in join_info.ephemeral: room.handle_ephemeral_event(event) for cb in self.ephemeral_callbacks: if cb.filter is None or isinstance(event, cb.filter): cb.func(room, event) for event in join_info.account_data: room.handle_account_data(event) for cb in self.room_account_data_callbacks: if cb.filter is None or isinstance(event, cb.filter): cb.func(room, event) if room.encrypted and self.olm is not None: self.olm.update_tracked_users(room) self.encrypted_rooms.update(encrypted_rooms) if self.store: self.store.save_encrypted_rooms(encrypted_rooms) def _handle_presence_events(self, response: SyncResponse): for event in response.presence_events: for room_id in self.rooms.keys(): if event.user_id not in self.rooms[room_id].users: continue self.rooms[room_id].users[event.user_id].presence = event.presence self.rooms[room_id].users[ event.user_id ].last_active_ago = event.last_active_ago self.rooms[room_id].users[ event.user_id ].currently_active = event.currently_active 
self.rooms[room_id].users[event.user_id].status_msg = event.status_msg for cb in self.presence_callbacks: if cb.filter is None or isinstance(event, cb.filter): cb.func(event) def _handle_global_account_data_events( self, response: SyncResponse, ) -> None: for event in response.account_data_events: for cb in self.global_account_data_callbacks: if cb.filter is None or isinstance(event, cb.filter): cb.func(event) def _handle_expired_verifications(self): expired_verifications = self.olm.clear_verifications() for event in expired_verifications: for cb in self.to_device_callbacks: if cb.filter is None or isinstance(event, cb.filter): cb.func(event) def _handle_olm_events(self, response: SyncResponse): assert self.olm changed_users = set() if response.device_key_count.signed_curve25519: self.olm.uploaded_key_count = response.device_key_count.signed_curve25519 for user in response.device_list.changed: for room in self.rooms.values(): if not room.encrypted: continue if user in room.users: changed_users.add(user) for user in response.device_list.left: for room in self.rooms.values(): if not room.encrypted: continue if user in room.users: changed_users.add(user) self.olm.add_changed_users(changed_users) def _handle_sync( self, response: SyncResponse ) -> Union[None, Coroutine[Any, Any, None]]: # We already received such a sync response, do nothing in that case. 
if self.next_batch == response.next_batch: return None self.next_batch = response.next_batch if self.config.store_sync_tokens and self.store: self.store.save_sync_token(self.next_batch) self._handle_to_device(response) self._handle_invited_rooms(response) self._handle_joined_rooms(response) self._handle_presence_events(response) self._handle_global_account_data_events(response) if self.olm: self._handle_expired_verifications() self._handle_olm_events(response) self._collect_key_requests() return None def _collect_key_requests(self): events = self.olm.collect_key_requests() for event in events: self._run_to_device_callbacks(event) def _decrypt_event_array(self, array: List[Union[Event, BadEventType]]): if not self.olm: return decrypted_events = [] for index, event in enumerate(array): if isinstance(event, MegolmEvent): new_event = self.olm._decrypt_megolm_no_error(event) if new_event: decrypted_events.append((index, new_event)) for decrypted_event in decrypted_events: index, event = decrypted_event array[index] = event def _handle_context_response(self, response: RoomContextResponse): if isinstance(response.event, MegolmEvent): if self.olm: decrypted_event = self.olm._decrypt_megolm_no_error(response.event) response.event = decrypted_event self._decrypt_event_array(response.events_after) self._decrypt_event_array(response.events_before) def _handle_messages_response(self, response: RoomMessagesResponse): decrypted_events = [] for index, event in enumerate(response.chunk): if isinstance(event, MegolmEvent) and self.olm: new_event = self.olm._decrypt_megolm_no_error(event) if new_event: decrypted_events.append((index, new_event)) for index, event in decrypted_events: response.chunk[index] = event def _handle_olm_response( self, response: Union[ ShareGroupSessionResponse, KeysClaimResponse, KeysQueryResponse, KeysUploadResponse, RoomKeyRequestResponse, ToDeviceResponse, ], ): if not self.olm: return self.olm.handle_response(response) if isinstance(response, 
ShareGroupSessionResponse): room_id = response.room_id session = self.olm.outbound_group_sessions.get(room_id, None) room = self.rooms.get(room_id, None) if not session or not room: return session.users_shared_with.update(response.users_shared_with) users = room.users for user_id in users: for device in self.device_store.active_user_devices(user_id): user = (user_id, device.id) if ( user not in session.users_shared_with and user not in session.users_ignored ): return logger.info(f"Marking outbound group session for room {room_id} as shared") session.shared = True elif isinstance(response, KeysQueryResponse): for user_id in response.changed: for room in self.rooms.values(): if room.encrypted and user_id in room.users: self.invalidate_outbound_session(room.room_id) def _handle_joined_members(self, response: JoinedMembersResponse): if response.room_id not in self.rooms: return room = self.rooms[response.room_id] joined_user_ids = {m.user_id for m in response.members} for user_id in tuple(room.users): invited = room.users[user_id].invited if not invited and user_id not in joined_user_ids: room.remove_member(user_id) for member in response.members: room.add_member(member.user_id, member.display_name, member.avatar_url) room.members_synced = True if room.encrypted and self.olm is not None: self.olm.update_tracked_users(room) def _handle_room_forget_response(self, response: RoomForgetResponse): self.encrypted_rooms.discard(response.room_id) if response.room_id in self.rooms: room = self.rooms.pop(response.room_id) if room.encrypted and self.store: self.store.delete_encrypted_room(room.room_id) elif response.room_id in self.invited_rooms: del self.invited_rooms[response.room_id] def _handle_presence_response(self, response: PresenceGetResponse): for room_id in self.rooms.keys(): if response.user_id not in self.rooms[room_id].users: continue self.rooms[room_id].users[response.user_id].presence = response.presence self.rooms[room_id].users[ response.user_id ].last_active_ago 
= response.last_active_ago self.rooms[room_id].users[response.user_id].currently_active = ( response.currently_active or False ) self.rooms[room_id].users[response.user_id].status_msg = response.status_msg def _handle_whoami_response(self, response: WhoamiResponse): self.user_id = response.user_id self.device_id = response.device_id or self.device_id # self.is_guest = response.is_guest def receive_response( self, response: Response ) -> Union[None, Coroutine[Any, Any, None]]: """Receive a Matrix Response and change the client state accordingly. Some responses will get edited for the callers convenience e.g. sync responses that contain encrypted messages. The encrypted messages will be replaced by decrypted ones if decryption is possible. Args: response (Response): the response that we wish the client to handle """ if not isinstance(response, Response): raise ValueError("Invalid response received") if isinstance(response, LoginResponse): self._handle_login(response) elif isinstance(response, LogoutResponse): self._handle_logout(response) elif isinstance(response, RegisterResponse): self._handle_register(response) elif isinstance(response, SyncResponse): self._handle_sync(response) elif isinstance(response, RoomMessagesResponse): self._handle_messages_response(response) elif isinstance(response, RoomContextResponse): self._handle_context_response(response) elif isinstance(response, KeysUploadResponse): self._handle_olm_response(response) elif isinstance(response, KeysQueryResponse): self._handle_olm_response(response) elif isinstance(response, KeysClaimResponse): self._handle_olm_response(response) elif isinstance(response, ShareGroupSessionResponse): self._handle_olm_response(response) elif isinstance(response, JoinedMembersResponse): self._handle_joined_members(response) elif isinstance(response, RoomKeyRequestResponse): self._handle_olm_response(response) elif isinstance(response, RoomForgetResponse): self._handle_room_forget_response(response) elif 
isinstance(response, ToDeviceResponse): self._handle_olm_response(response) elif isinstance(response, RoomGetEventResponse): if isinstance(response.event, MegolmEvent) and self.olm is not None: try: response.event = self.decrypt_event(response.event) except EncryptionError: pass elif isinstance(response, PresenceGetResponse): self._handle_presence_response(response) elif isinstance(response, WhoamiResponse): self._handle_whoami_response(response) elif isinstance(response, ErrorResponse): if response.soft_logout: self.access_token = "" return None @store_loaded def export_keys(self, outfile: str, passphrase: str, count: int = 10000): """Export all the Megolm decryption keys of this device. The keys will be encrypted using the passphrase. Note that this does not save other information such as the private identity keys of the device. Args: outfile (str): The file to write the keys to. passphrase (str): The encryption passphrase. count (int): Optional. Round count for the underlying key derivation. It is not recommended to specify it unless absolutely sure of the consequences. """ assert self.olm self.olm.export_keys(outfile, passphrase, count=count) @store_loaded def import_keys(self, infile: str, passphrase: str): """Import Megolm decryption keys. The keys will be added to the current instance as well as written to database. Args: infile (str): The file containing the keys. passphrase (str): The decryption passphrase. Raises `EncryptionError` if the file is invalid or couldn't be decrypted. Raises the usual file errors if the file couldn't be opened. """ assert self.olm self.olm.import_keys(infile, passphrase) @store_loaded def get_missing_sessions(self, room_id: str) -> Dict[str, List[str]]: """Get users and devices for which we don't have active Olm sessions. Args: room_id (str): The room id of the room for which we should get the users with missing Olm sessions. 
Raises `LocalProtocolError` if the room with the provided room id is not found or the room is not encrypted. """ assert self.olm if room_id not in self.rooms: raise LocalProtocolError(f"No room found with room id {room_id}") room = self.rooms[room_id] if not room.encrypted: raise LocalProtocolError(f"Room with id {room_id} is not encrypted") return self.olm.get_missing_sessions(list(room.users)) @store_loaded def get_users_for_key_claiming(self) -> Dict[str, List[str]]: """Get the content for a key claim request that needs to be made. Returns a dictionary containing users as the keys and a list of devices for which we will claim one-time keys. Raises a LocalProtocolError if no key claim request needs to be made. """ assert self.olm return self.olm.get_users_for_key_claiming() @store_loaded def encrypt( self, room_id: str, message_type: str, content: Dict[Any, Any] ) -> Tuple[str, Dict[str, str]]: """Encrypt a message to be sent to the provided room. Args: room_id (str): The room id of the room where the message will be sent. message_type (str): The type of the message. content (str): The dictionary containing the content of the message. Raises `GroupEncryptionError` if the group session for the provided room isn't shared yet. Raises `MembersSyncError` if the room is encrypted but the room members aren't fully loaded due to member lazy loading. Returns a tuple containing the new message type and the new encrypted content. """ assert self.olm try: room = self.rooms[room_id] except KeyError: raise LocalProtocolError(f"No such room with id {room_id} found.") if not room.encrypted: raise LocalProtocolError(f"Room {room_id} is not encrypted") if not room.members_synced: raise MembersSyncError( "The room is encrypted and the members " "aren't fully synced." ) encrypted_content = self.olm.group_encrypt( room_id, {"content": content, "type": message_type}, ) # The relationship needs to be sent unencrypted, so put it in the # unencrypted content. 
if "m.relates_to" in content: encrypted_content["m.relates_to"] = content["m.relates_to"] message_type = "m.room.encrypted" return message_type, encrypted_content def add_event_callback( self, callback: Callable[[MatrixRoom, Event], Optional[Awaitable[None]]], filter: Union[Type[Event], Tuple[Type[Event], None]], ) -> None: """Add a callback that will be executed on room events. The callback can be used on joined rooms as well as on invited rooms. The room parameter for the callback will have a different type depending on if the room is joined or invited. Args: callback (Callable[[MatrixRoom, Event], Optional[Awaitable[None]]]): A function that will be called if the event type in the filter argument is found in a room timeline. filter (Union[Type[Event], Tuple[Type[Event], ...]]): The event type or a tuple containing multiple types for which the function will be called. """ cb = ClientCallback(callback, filter) self.event_callbacks.append(cb) def add_ephemeral_callback( self, callback: Callable[[MatrixRoom, EphemeralEvent], None], filter: Union[Type[EphemeralEvent], Tuple[Type[EphemeralEvent], ...]], ) -> None: """Add a callback that will be executed on ephemeral room events. Args: callback (Callable[MatrixRoom, EphemeralEvent]): A function that will be called if the event type in the filter argument is found in the ephemeral room event list. filter (Union[Type[EphemeralEvent], Tuple[Type[EphemeralEvent], ...]]): The event type or a tuple containing multiple types for which the function will be called. """ cb = ClientCallback(callback, filter) self.ephemeral_callbacks.append(cb) def add_global_account_data_callback( self, callback: Callable[[AccountDataEvent], None], filter: Union[ Type[AccountDataEvent], Tuple[Type[AccountDataEvent], ...], ], ) -> None: """Add a callback that will be executed on global account data events. 
Args: callback (Callable[[AccountDataEvent], None]): A function that will be called if the event type in the filter argument is found in the account data event list. filter (Union[Type[AccountDataEvent], Tuple[Type[AccountDataEvent, ...]]): The event type or a tuple containing multiple types for which the function will be called. """ cb = ClientCallback(callback, filter) self.global_account_data_callbacks.append(cb) def add_room_account_data_callback( self, callback: Callable[[MatrixRoom, AccountDataEvent], None], filter: Union[ Type[AccountDataEvent], Tuple[Type[AccountDataEvent], ...], ], ) -> None: """Add a callback that will be executed on room account data events. Args: callback (Callable[[MatrixRoom, AccountDataEvent], None]): A function that will be called if the event type in the filter argument is found in the room account data event list. filter (Union[Type[AccountDataEvent], Tuple[Type[AccountDataEvent, ...]]): The event type or a tuple containing multiple types for which the function will be called. """ cb = ClientCallback(callback, filter) self.room_account_data_callbacks.append(cb) def add_to_device_callback( self, callback: Callable[[ToDeviceEvent], None], filter: Union[Type[ToDeviceEvent], Tuple[Type[ToDeviceEvent], ...]], ) -> None: """Add a callback that will be executed on to-device events. Args: callback (Callable[[ToDeviceEvent], None]): A function that will be called if the event type in the filter argument is found in the to-device part of the sync response. filter (Union[Type[ToDeviceEvent], Tuple[Type[ToDeviceEvent], ...]]): The event type or a tuple containing multiple types for which the function will be called. """ cb = ClientCallback(callback, filter) self.to_device_callbacks.append(cb) def add_presence_callback( self, callback: Callable[[PresenceEvent], None], filter: Union[Type, Tuple[Type]], ): """Add a callback that will be executed on presence events. 
Args: callback (Callable[[PresenceEvent], None]): A function that will be called if the event type in the filter argument is found in the presence part of the sync response. filter (Union[Type, Tuple[Type]]): The event type or a tuple containing multiple types for which the function will be called. """ cb = ClientCallback(callback, filter) self.presence_callbacks.append(cb) @store_loaded def create_key_verification(self, device: OlmDevice) -> ToDeviceMessage: """Start a new key verification process with the given device. Args: device (OlmDevice): The device which we would like to verify Returns a ``ToDeviceMessage`` that should be sent to to the homeserver. """ assert self.olm return self.olm.create_sas(device) @store_loaded def confirm_key_verification(self, transaction_id: str) -> ToDeviceMessage: """Confirm that the short auth string of a key verification matches. Args: transaction_id (str): The transaction id of the interactive key verification. Returns a ``ToDeviceMessage`` that should be sent to the homeserver. If the other user already confirmed the short auth string on their side this function will also verify the device that is partaking in the verification process. """ if transaction_id not in self.key_verifications: raise LocalProtocolError( f"Key verification with the transaction id {transaction_id} does not exist." ) sas = self.key_verifications[transaction_id] sas.accept_sas() message = sas.get_mac() if sas.verified: self.verify_device(sas.other_olm_device) return message def room_devices(self, room_id: str) -> Dict[str, Dict[str, OlmDevice]]: """Get all Olm devices participating in a room. Args: room_id (str): The id of the room for which we would like to collect all the devices. Returns a dictionary holding the user as the key and a dictionary of the device id as the key and OlmDevice as the value. Raises LocalProtocolError if no room is found with the given room_id. 
""" devices: Dict[str, Dict[str, OlmDevice]] = defaultdict(dict) if not self.olm: return devices try: room = self.rooms[room_id] except KeyError: raise LocalProtocolError(f"No room found with room id {room_id}") if not room.encrypted: return devices users = room.users.keys() for user in users: user_devices = self.device_store.active_user_devices(user) devices[user] = {d.id: d for d in user_devices} return devices @store_loaded def get_active_key_requests( self, user_id: str, device_id: str ) -> List[RoomKeyRequest]: """Get key requests from a device that are waiting for verification. Args: user_id (str): The id of the user for which we would like to find the active key requests. device_id (str): The id of the device for which we would like to find the active key requests. Example: >>> # A to-device callback that verifies devices that >>> # request room keys and continues the room key sharing process. >>> # Note that a single user/device can have multiple key requests >>> # queued up. >>> def key_share_cb(event): ... user_id = event.sender ... device_id = event.requesting_device_id ... device = client.device_store[user_id][device_id] ... client.verify_device(device) ... for request in client.get_active_key_requests( ... user_id, device_id): ... client.continue_key_share(request) >>> client.add_to_device_callback(key_share_cb) Returns: list: A list of actively waiting key requests from the given user. """ assert self.olm return self.olm.get_active_key_requests(user_id, device_id) @store_loaded def continue_key_share(self, event: RoomKeyRequest) -> bool: """Continue a previously interrupted key share event. To handle room key requests properly client users need to add a callback for RoomKeyRequest: >>> client.add_to_device_callback(callback, RoomKeyRequest) This callback will be run only if a room key request needs user interaction, that is if a room key request is coming from an untrusted device. 
After a user has verified the requesting device the key sharing can be continued using this method: >>> client.continue_key_share(room_key_request) Args: event (RoomKeyRequest): The event which we would like to continue. If the key share event is continued successfully a to-device message will be queued up in the `client.outgoing_to_device_messages` list waiting to be sent out Returns: bool: True if the request was continued, False otherwise. """ assert self.olm return self.olm.continue_key_share(event) @store_loaded def cancel_key_share(self, event: RoomKeyRequest) -> bool: """Cancel a previously interrupted key share event. This method is the counterpart to the `continue_key_share()` method. If a user choses not to verify a device and does not want to share room keys with such a device it should cancel the request with this method. >>> client.cancel_key_share(room_key_request) Args: event (RoomKeyRequest): The event which we would like to cancel. Returns: bool: True if the request was cancelled, False otherwise. """ assert self.olm return self.olm.cancel_key_share(event) matrix-nio-0.24.0/nio/client/http_client.py000066400000000000000000001201611455215747700206400ustar00rootroot00000000000000# Copyright © 2018, 2019 Damir Jelić # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
from __future__ import annotations import json import logging import pprint from collections import deque from collections.abc import Sequence from dataclasses import dataclass, field from email.message import EmailMessage from functools import wraps from typing import Any, Deque, Dict, List, Optional, Tuple, Type, Union from urllib.parse import urlparse from uuid import UUID, uuid4 import h2 import h11 from ..api import Api, MessageDirection, ResizingMethod, RoomPreset, RoomVisibility from ..crypto import OlmDevice from ..event_builders import ToDeviceMessage from ..events import MegolmEvent from ..exceptions import LocalProtocolError, RemoteTransportError from ..http import ( Http2Connection, Http2Request, HttpConnection, HttpRequest, TransportRequest, TransportResponse, TransportType, ) from ..responses import ( DeleteDevicesAuthResponse, DeleteDevicesResponse, DevicesResponse, DownloadResponse, FileResponse, JoinedMembersResponse, JoinResponse, KeysClaimResponse, KeysQueryResponse, KeysUploadError, KeysUploadResponse, LoginInfoResponse, LoginResponse, LogoutResponse, ProfileGetAvatarResponse, ProfileGetDisplayNameResponse, ProfileGetResponse, ProfileSetAvatarResponse, ProfileSetDisplayNameResponse, Response, RoomCreateResponse, RoomForgetResponse, RoomInviteResponse, RoomKeyRequestResponse, RoomKickResponse, RoomLeaveResponse, RoomMessagesResponse, RoomPutStateResponse, RoomReadMarkersResponse, RoomRedactResponse, RoomSendResponse, RoomTypingResponse, ShareGroupSessionResponse, SyncResponse, ThumbnailResponse, ToDeviceResponse, UpdateDeviceResponse, ) from . 
import Client, ClientConfig from .base_client import logged_in, store_loaded try: from json.decoder import JSONDecodeError except ImportError: JSONDecodeError = ValueError # type: ignore logger = logging.getLogger(__name__) def connected(func): @wraps(func) def wrapper(self, *args, **kwargs): if not self.connection: raise LocalProtocolError("Not connected.") return func(self, *args, **kwargs) return wrapper @dataclass class RequestInfo: request_class: Type[Response] = field() extra_data: Tuple = () class HttpClient(Client): def __init__( self, homeserver: str, user: str = "", device_id: Optional[str] = "", store_path: Optional[str] = "", config: Optional[ClientConfig] = None, ) -> None: self.host, self.extra_path = HttpClient._parse_homeserver(homeserver) self.requests_made: Dict[UUID, RequestInfo] = {} self.parse_queue: Deque[Tuple[RequestInfo, TransportResponse]] = deque() self.connection: Optional[Union[HttpConnection, Http2Connection]] = None super().__init__(user, device_id, store_path, config) @staticmethod def _parse_homeserver(homeserver): if not homeserver.startswith("http"): homeserver = f"https://{homeserver}" homeserver = urlparse(homeserver) if homeserver.port: port = homeserver.port else: if homeserver.scheme == "https": port = 443 elif homeserver.scheme == "http": port = 80 else: raise ValueError("Invalid URI scheme for Homeserver") host = f"{homeserver.hostname}:{port}" extra_path = homeserver.path.strip("/") return host, extra_path @connected def _send( self, request: TransportRequest, request_info: RequestInfo, uuid: Optional[UUID] = None, ) -> Tuple[UUID, bytes]: assert self.connection ret_uuid, data = self.connection.send(request, uuid) self.requests_made[ret_uuid] = request_info return ret_uuid, data def _add_extra_path(self, path): if self.extra_path: return f"/{self.extra_path}{path}" return path def _build_request(self, api_response, timeout=0): def unpack_api_call(method, *rest): return method, rest method, api_data = 
unpack_api_call(*api_response) if isinstance(self.connection, HttpConnection): if method == "GET": path = self._add_extra_path(api_data[0]) return HttpRequest.get(self.host, path, timeout) elif method == "POST": path, data = api_data path = self._add_extra_path(path) return HttpRequest.post(self.host, path, data, timeout) elif method == "PUT": path, data = api_data path = self._add_extra_path(path) return HttpRequest.put(self.host, path, data, timeout) elif isinstance(self.connection, Http2Connection): if method == "GET": path = api_data[0] path = self._add_extra_path(path) return Http2Request.get(self.host, path, timeout) elif method == "POST": path, data = api_data path = self._add_extra_path(path) return Http2Request.post(self.host, path, data, timeout) elif method == "PUT": path, data = api_data path = self._add_extra_path(path) return Http2Request.put(self.host, path, data, timeout) assert "Invalid connection type" @property def lag(self) -> float: if not self.connection: return 0 return self.connection.elapsed def connect( self, transport_type: Optional[TransportType] = TransportType.HTTP ) -> bytes: if transport_type == TransportType.HTTP: self.connection = HttpConnection() elif transport_type == TransportType.HTTP2: self.connection = Http2Connection() else: raise NotImplementedError return self.connection.connect() def _clear_queues(self): self.requests_made.clear() self.parse_queue.clear() @connected def disconnect(self) -> bytes: assert self.connection data = self.connection.disconnect() self._clear_queues() self.connection = None return data @connected def data_to_send(self) -> bytes: assert self.connection return self.connection.data_to_send() @connected def login_info(self) -> Tuple[UUID, bytes]: """Get the available login methods from the server Returns a unique uuid that identifies the request and the bytes that should be sent to the socket. 
""" request = self._build_request(Api.login_info()) return self._send(request, RequestInfo(LoginInfoResponse)) @connected def login( self, password: Optional[str] = None, device_name: Optional[str] = "", token: Optional[str] = None, ) -> Tuple[UUID, bytes]: if password is None and token is None: raise ValueError("Either a password or a token needs to be " "provided") request = self._build_request( Api.login( self.user, password=password, device_name=device_name, device_id=self.device_id, token=token, ) ) return self._send(request, RequestInfo(LoginResponse)) @connected def login_raw(self, auth_dict: Dict[str, Any]) -> Tuple[UUID, bytes]: if auth_dict is None or auth_dict == {}: raise ValueError("Auth dictionary shall not be empty") request = self._build_request(Api.login_raw(auth_dict)) return self._send(request, RequestInfo(LoginResponse)) @connected @logged_in def logout(self, all_devices=False): request = self._build_request(Api.logout(self.access_token, all_devices)) return self.send(request, RequestInfo(LogoutResponse)) @connected @logged_in def room_send(self, room_id, message_type, content, tx_id=None): if self.olm: try: room = self.rooms[room_id] except KeyError: raise LocalProtocolError(f"No such room with id {room_id} found.") if room.encrypted: message_type, content = self.encrypt( room_id, message_type, content, ) uuid = tx_id or uuid4() request = self._build_request( Api.room_send(self.access_token, room_id, message_type, content, uuid) ) return self._send(request, RequestInfo(RoomSendResponse, (room_id,)), uuid) @connected @logged_in def room_put_state(self, room_id, event_type, body): request = self._build_request( Api.room_put_state(self.access_token, room_id, event_type, body) ) return self._send(request, RequestInfo(RoomPutStateResponse, (room_id,))) @connected @logged_in def room_redact(self, room_id, event_id, reason=None, tx_id=None): """Strip information out of an event. 
Returns a unique uuid that identifies the request and the bytes that should be sent to the socket. Args: room_id (str): The room id of the room that contains the event that will be redacted. event_id (str): The ID of the event that will be redacted. tx_id (str/UUID, optional): A transaction ID for this event. reason(str, optional): A description explaining why the event was redacted. """ uuid = tx_id or uuid4() request = self._build_request( Api.room_redact( self.access_token, room_id, event_id, tx_id, reason=reason, ) ) return self._send(request, RequestInfo(RoomRedactResponse, (room_id,)), uuid) @connected @logged_in def room_kick(self, room_id, user_id, reason=None): request = self._build_request( Api.room_kick(self.access_token, room_id, user_id, reason=reason) ) return self._send(request, RequestInfo(RoomKickResponse)) @connected @logged_in def room_invite(self, room_id, user_id): request = self._build_request( Api.room_invite(self.access_token, room_id, user_id) ) return self._send(request, RequestInfo(RoomInviteResponse)) @connected @logged_in def room_create( self, visibility: RoomVisibility = RoomVisibility.private, alias: Optional[str] = None, name: Optional[str] = None, topic: Optional[str] = None, room_version: Optional[str] = None, room_type: Optional[str] = None, federate: bool = True, is_direct: bool = False, preset: Optional[RoomPreset] = None, invite: Sequence[str] = (), initial_state: Sequence[Dict[str, Any]] = (), power_level_override: Optional[Dict[str, Any]] = None, ) -> Tuple[UUID, bytes]: """Create a new room. Returns a unique uuid that identifies the request and the bytes that should be sent to the socket. Args: visibility (RoomVisibility): whether to have the room published in the server's room directory or not. Defaults to ``RoomVisibility.private``. alias (str, optional): The desired canonical alias local part. For example, if set to "foo" and the room is created on the "example.com" server, the room alias will be "#foo:example.com". 
name (str, optional): A name to set for the room. topic (str, optional): A topic to set for the room. room_version (str, optional): The room version to set. If not specified, the homeserver will use its default setting. If a version not supported by the homeserver is specified, a 400 ``M_UNSUPPORTED_ROOM_VERSION`` error will be returned. room_type (str, optional): The room type to set. If not specified, the homeserver will use its default setting. In spec v1.2 the following room types are specified: - ``m.space`` Unspecified room types are permitted through the use of Namespaced Identifiers. federate (bool): Whether to allow users from other homeservers from joining the room. Defaults to ``True``. Cannot be changed later. is_direct (bool): If this should be considered a direct messaging room. If ``True``, the server will set the ``is_direct`` flag on ``m.room.member events`` sent to the users in ``invite``. Defaults to ``False``. preset (RoomPreset, optional): The selected preset will set various rules for the room. If unspecified, the server will choose a preset from the ``visibility``: ``RoomVisibility.public`` equates to ``RoomPreset.public_chat``, and ``RoomVisibility.private`` equates to a ``RoomPreset.private_chat``. invite (list): A list of user id to invite to the room. initial_state (list): A list of state event dicts to send when the room is created. For example, a room could be made encrypted immediately by having a ``m.room.encryption`` event dict. power_level_override (dict): A ``m.room.power_levels content`` dict to override the default. The dict will be applied on top of the generated ``m.room.power_levels`` event before it is sent to the room. 
""" request = self._build_request( Api.room_create( self.access_token, visibility=visibility, alias=alias, name=name, topic=topic, room_version=room_version, room_type=room_type, federate=federate, is_direct=is_direct, preset=preset, invite=invite, initial_state=initial_state, power_level_override=power_level_override, ) ) return self._send(request, RequestInfo(RoomCreateResponse)) @connected @logged_in def join(self, room_id: str) -> Tuple[UUID, bytes]: """Join a room. This tells the server to join the given room. If the room is not public, the user must be invited. Returns a unique uuid that identifies the request and the bytes that should be sent to the socket. Args: room_id: The room id or alias of the room to join. """ request = self._build_request(Api.join(self.access_token, room_id)) return self._send(request, RequestInfo(JoinResponse)) @connected @logged_in def room_leave(self, room_id: str) -> Tuple[UUID, bytes]: """Leave a room or reject an invite. This tells the server to leave the given room. If the user was only invited, the invite is rejected. Returns a unique uuid that identifies the request and the bytes that should be sent to the socket. Args: room_id: The room id of the room to leave. """ request = self._build_request(Api.room_leave(self.access_token, room_id)) return self._send(request, RequestInfo(RoomLeaveResponse)) @connected @logged_in def room_forget(self, room_id: str) -> Tuple[UUID, bytes]: """Forget a room. This tells the server to forget the given room's history for our user. If all users on a homeserver forget the room, the room will be eligible for deletion from that homeserver. Returns a unique uuid that identifies the request and the bytes that should be sent to the socket. Args: room_id (str): The room id of the room to forget. 
""" request = self._build_request(Api.room_forget(self.access_token, room_id)) return self._send(request, RequestInfo(RoomForgetResponse, (room_id,))) @connected @logged_in def room_messages( self, room_id, start, end=None, direction=MessageDirection.back, limit=10 ): request = self._build_request( Api.room_messages( self.access_token, room_id, start, end=end, direction=direction, limit=limit, ) ) return self._send(request, RequestInfo(RoomMessagesResponse, (room_id,))) @connected @logged_in def room_typing( self, room_id: str, typing_state: bool = True, timeout: int = 30000, ) -> Tuple[UUID, bytes]: """Send a typing notice to the server. This tells the server that the user is typing for the next N milliseconds or that the user has stopped typing. Returns a unique uuid that identifies the request and the bytes that should be sent to the socket. Args: room_id (str): Room id of the room where the user is typing. typing_state (bool): A flag representing whether the user started or stopped typing timeout (int): For how long should the new typing notice be valid for in milliseconds. """ request = self._build_request( Api.room_typing( self.access_token, room_id, self.user_id, typing_state, timeout ) ) return self._send(request, RequestInfo(RoomTypingResponse, (room_id,))) @connected @logged_in def room_read_markers( self, room_id: str, fully_read_event: str, read_event: Optional[str] = None, ) -> Tuple[UUID, bytes]: """Update the fully read marker (and optionally the read receipt) for a room. Calls receive_response() to update the client state if necessary. Returns either a `RoomReadMarkersResponse` if the request was successful or a `RoomReadMarkersError` if there was an error with the request. This sets the position of the read markers. - `fully_read_event` is the latest event in the set of events that the user has either fully read or indicated they aren't interested in. It permits the implementation of a "jump to first unread message" kind of feature. 
It is _private_ (not exposed to other room participants). - `read_event` is the most recent message the user has read and is also known as a _read receipt_. A read receipt being set on an event does not imply that all previous events have been seen. This happens in cases such as when a user comes back to a room after hundreds of messages have been sent and _only_ reads the most recent message. The read receipt is _public_ (exposed to other room participants). If you want to set the read receipt, you _must_ set `read_event`. Args: room_id (str): The room ID of the room where the read markers should be updated. fully_read_event (str): The event ID that the user has fully read up to. read_event (Optional[str]): The event ID to set the read receipt location at. """ request = self._build_request( Api.room_read_markers( self.access_token, room_id, fully_read_event, read_event ) ) return self._send(request, RequestInfo(RoomReadMarkersResponse, (room_id,))) @connected def download( self, server_name: str, media_id: str, filename: Optional[str] = None, allow_remote: bool = True, ) -> Tuple[UUID, bytes]: """Get the content of a file from the content repository. Returns a unique uuid that identifies the request and the bytes that should be sent to the socket. Args: server_name (str): The server name from the mxc:// URI. media_id (str): The media ID from the mxc:// URI. filename (str, optional): A filename to be returned in the response by the server. If None (default), the original name of the file will be returned instead, if there is one. allow_remote (bool): Indicates to the server that it should not attempt to fetch the media if it is deemed remote. This is to prevent routing loops where the server contacts itself. 
""" request = self._build_request( Api.download(server_name, media_id, filename, allow_remote) ) return self._send(request, RequestInfo(DownloadResponse)) @connected def thumbnail( self, server_name: str, media_id: str, width: int, height: int, method=ResizingMethod.scale, # ŧype: ResizingMethod allow_remote: bool = True, ) -> Tuple[UUID, bytes]: """Get the thumbnail of a file from the content repository. Note: The actual thumbnail may be larger than the size specified. Returns a unique uuid that identifies the request and the bytes that should be sent to the socket. Args: server_name (str): The server name from the mxc:// URI. media_id (str): The media ID from the mxc:// URI. width (int): The desired width of the thumbnail. height (int): The desired height of the thumbnail. method (ResizingMethod): The desired resizing method. allow_remote (bool): Indicates to the server that it should not attempt to fetch the media if it is deemed remote. This is to prevent routing loops where the server contacts itself. """ request = self._build_request( Api.thumbnail(server_name, media_id, width, height, method, allow_remote) ) return self._send(request, RequestInfo(ThumbnailResponse)) @connected @logged_in @store_loaded def keys_upload(self): keys_dict = self.olm.share_keys() logger.debug(pprint.pformat(keys_dict)) request = self._build_request(Api.keys_upload(self.access_token, keys_dict)) return self._send(request, RequestInfo(KeysUploadResponse)) @connected @logged_in @store_loaded def keys_query(self): """Query the server for user keys. This queries the server for device keys of users with which we share an encrypted room. Returns a unique uuid that identifies the request and the bytes that should be sent to the socket. 
""" user_list = self.users_for_key_query if not user_list: raise LocalProtocolError("No key query required.") request = self._build_request(Api.keys_query(self.access_token, user_list)) return self._send(request, RequestInfo(KeysQueryResponse)) @connected @logged_in @store_loaded def keys_claim(self, room_id): user_list = self.get_missing_sessions(room_id) request = self._build_request(Api.keys_claim(self.access_token, user_list)) return self._send(request, RequestInfo(KeysClaimResponse, (room_id,))) @connected @logged_in @store_loaded def share_group_session( self, room_id: str, ignore_missing_sessions: bool = False, tx_id: Optional[str] = None, ignore_unverified_devices: bool = False, ) -> Tuple[UUID, bytes]: """Share a group session with a room. This method sends a group session to members of a room. Args: room_id(str): The room id of the room where the message should be sent to. tx_id(str, optional): The transaction ID of this event used to uniquely identify this message. ignore_unverified_devices(bool): Mark unverified devices as ignored. Ignored devices will still receive encryption keys for messages but they won't be marked as verified. Raises LocalProtocolError if the client isn't logged in, if the session store isn't loaded, no room with the given room id exists or the room isn't an encrypted room. 
""" assert self.olm try: room = self.rooms[room_id] except KeyError: raise LocalProtocolError(f"No such room with id {room_id}") if not room.encrypted: raise LocalProtocolError(f"Room with id {room_id} is not encrypted") user_map, to_device_dict = self.olm.share_group_session( room_id, list(room.users.keys()), ignore_missing_sessions, ignore_unverified_devices, ) uuid = tx_id or uuid4() request = self._build_request( Api.to_device(self.access_token, "m.room.encrypted", to_device_dict, uuid) ) return self._send( request, RequestInfo(ShareGroupSessionResponse, (room_id, user_map)) ) @connected @logged_in def devices(self) -> Tuple[UUID, bytes]: request = self._build_request(Api.devices(self.access_token)) return self._send(request, RequestInfo(DevicesResponse)) @connected @logged_in def update_device( self, device_id: str, content: Dict[str, str] ) -> Tuple[UUID, bytes]: request = self._build_request( Api.update_device(self.access_token, device_id, content) ) return self._send(request, RequestInfo(UpdateDeviceResponse)) @connected @logged_in def delete_devices( self, devices: List[str], auth: Optional[Dict[str, str]] = None ) -> Tuple[UUID, bytes]: request = self._build_request( Api.delete_devices(self.access_token, devices, auth_dict=auth) ) return self._send(request, RequestInfo(DeleteDevicesResponse)) @connected @logged_in def joined_members(self, room_id: str) -> Tuple[UUID, bytes]: request = self._build_request(Api.joined_members(self.access_token, room_id)) return self._send(request, RequestInfo(JoinedMembersResponse, (room_id,))) @connected def get_profile(self, user_id: Optional[str] = None) -> Tuple[UUID, bytes]: """Get a user's combined profile information. This queries the display name and avatar matrix content URI of a user from the server. Additional profile information may be present. The currently logged in user is queried if no user is specified. Returns a unique uuid that identifies the request and the bytes that should be sent to the socket. 
Args: user_id (str): User id of the user to get the profile for. """ request = self._build_request( Api.profile_get( user_id or self.user_id, access_token=self.access_token or None ) ) return self._send(request, RequestInfo(ProfileGetResponse)) @connected def get_displayname(self, user_id: Optional[str] = None) -> Tuple[UUID, bytes]: """Get a user's display name. This queries the display name of a user from the server. The currently logged in user is queried if no user is specified. Returns a unique uuid that identifies the request and the bytes that should be sent to the socket. Args: user_id (str): User id of the user to get the display name for. """ request = self._build_request( Api.profile_get_displayname( user_id or self.user_id, access_token=self.access_token or None ) ) return self._send(request, RequestInfo(ProfileGetDisplayNameResponse)) @connected @logged_in def set_displayname(self, displayname: str) -> Tuple[UUID, bytes]: """Set the user's display name. This tells the server to set the display name of the currently logged in user to supplied string. Returns a unique uuid that identifies the request and the bytes that should be sent to the socket. Args: displayname (str): Display name to set. """ request = self._build_request( Api.profile_set_displayname(self.access_token, self.user_id, displayname) ) return self._send(request, RequestInfo(ProfileSetDisplayNameResponse)) @connected def get_avatar(self, user_id: Optional[str] = None) -> Tuple[UUID, bytes]: """Get a user's avatar URL. This queries the avatar matrix content URI of a user from the server. The currently logged in user is queried if no user is specified. Returns a unique uuid that identifies the request and the bytes that should be sent to the socket. Args: user_id (str): User id of the user to get the avatar for. 
""" request = self._build_request( Api.profile_get_avatar( user_id or self.user_id, access_token=self.access_token or None ) ) return self._send(request, RequestInfo(ProfileGetAvatarResponse)) @connected @logged_in def set_avatar(self, avatar_url: str) -> Tuple[UUID, bytes]: """Set the user's avatar URL. This tells the server to set avatar of the currently logged in user to supplied matrix content URI. Returns a unique uuid that identifies the request and the bytes that should be sent to the socket. Args: avatar_url (str): matrix content URI of the avatar to set. """ request = self._build_request( Api.profile_set_avatar(self.access_token, self.user_id, avatar_url) ) return self._send(request, RequestInfo(ProfileSetAvatarResponse)) @connected @logged_in @store_loaded def request_room_key( self, event: MegolmEvent, tx_id: Optional[str] = None ) -> Tuple[UUID, bytes]: """Request a missing room key. This sends out a message to other devices requesting a room key from them. Returns a unique uuid that identifies the request and the bytes that should be sent to the socket. Args: event (str): An undecrypted MegolmEvent for which we would like to request the decryption key. """ uuid = tx_id or uuid4() if event.session_id in self.outgoing_key_requests: raise LocalProtocolError( "A key sharing request is already sent" " out for this session id." ) assert self.user_id assert self.device_id message = event.as_key_request(self.user_id, self.device_id) request = self._build_request( Api.to_device(self.access_token, message.type, message.as_dict(), uuid) ) return self._send( request, RequestInfo( RoomKeyRequestResponse, (event.session_id, event.session_id, event.room_id, event.algorithm), ), ) @connected @logged_in @store_loaded def confirm_short_auth_string( self, transaction_id: str, tx_id: Optional[str] = None ) -> Tuple[UUID, bytes]: """Confirm a short auth string and mark it as matching. 
Returns a unique uuid that identifies the request and the bytes that should be sent to the socket. Args: transaction_id (str): An transaction id of a valid key verification process. """ message = self.confirm_key_verification(transaction_id) return self.to_device(message) @connected @logged_in @store_loaded def start_key_verification( self, device: OlmDevice, tx_id: Optional[str] = None ) -> Tuple[UUID, bytes]: """Start a interactive key verification with the given device. Returns a unique uuid that identifies the request and the bytes that should be sent to the socket. Args: device (OlmDevice): An device with which we would like to start the interactive key verification process. """ message = self.create_key_verification(device) return self.to_device(message, tx_id) @connected @logged_in @store_loaded def accept_key_verification( self, transaction_id: str, tx_id: Optional[str] = None ) -> Tuple[UUID, bytes]: """Accept a key verification start event. Returns a unique uuid that identifies the request and the bytes that should be sent to the socket. Args: transaction_id (str): An transaction id of a valid key verification process. """ if transaction_id not in self.key_verifications: raise LocalProtocolError( f"Key verification with the transaction id {transaction_id} does not exist." ) sas = self.key_verifications[transaction_id] message = sas.accept_verification() return self.to_device(message, tx_id) @connected @logged_in @store_loaded def cancel_key_verification( self, transaction_id: str, tx_id: Optional[str] = None ) -> Tuple[UUID, bytes]: """Abort an interactive key verification. Returns a unique uuid that identifies the request and the bytes that should be sent to the socket. Args: transaction_id (str): An transaction id of a valid key verification process. """ if transaction_id not in self.key_verifications: raise LocalProtocolError( f"Key verification with the transaction id {transaction_id} does not exist." 
) sas = self.key_verifications[transaction_id] sas.cancel() message = sas.get_cancellation() return self.to_device(message, tx_id) @logged_in @store_loaded def to_device( self, message: ToDeviceMessage, tx_id: Optional[str] = None ) -> Tuple[UUID, bytes]: """Send a message to a specific device. Returns a unique uuid that identifies the request and the bytes that should be sent to the socket. Args: message (ToDeviceMessage): The message that should be sent out. tx_id (str, optional): The transaction ID for this message. Should be unique. """ uuid = tx_id or uuid4() request = self._build_request( Api.to_device(self.access_token, message.type, message.as_dict(), uuid) ) return self._send(request, RequestInfo(ToDeviceResponse, (message,))) @connected @logged_in def sync( self, timeout: Optional[int] = None, filter: Optional[Dict[Any, Any]] = None, full_state: bool = False, ) -> Tuple[UUID, bytes]: request = self._build_request( Api.sync( self.access_token, since=self.next_batch or self.loaded_sync_token, timeout=timeout, filter=filter, full_state=full_state, ), timeout, ) return self._send(request, RequestInfo(SyncResponse)) def parse_body(self, transport_response: TransportResponse) -> Dict[Any, Any]: """Parse the body of the response. Args: transport_response(TransportResponse): The transport response that contains the body of the response. Returns a dictionary representing the response. 
""" try: return json.loads(transport_response.text) except JSONDecodeError: return {} def _create_response(self, request_info, transport_response, max_events=0): request_class = request_info.request_class extra_data = request_info.extra_data or () try: content_type = str(transport_response.headers[b"content-type"], "utf-8") except KeyError: content_type = None try: disposition = str( transport_response.headers[b"content-disposition"], "utf-8" ) message = EmailMessage() message["Content-Disposition"] = disposition filename = message.get_filename() except KeyError: filename = None is_json = content_type == "application/json" if issubclass(request_class, FileResponse) and is_json: parsed_dict = self.parse_body(transport_response) response = request_class.from_data( parsed_dict, content_type, filename, *extra_data ) elif issubclass(request_class, FileResponse): body = transport_response.content response = request_class.from_data( body, content_type, filename, *extra_data ) else: parsed_dict = self.parse_body(transport_response) if ( transport_response.status_code == 401 and request_class == DeleteDevicesResponse ): response = DeleteDevicesAuthResponse.from_dict(parsed_dict) response = request_class.from_dict(parsed_dict, *extra_data) assert response logger.info(f"Received new response of type {response.__class__.__name__}") response.start_time = transport_response.send_time response.end_time = transport_response.receive_time response.timeout = transport_response.timeout response.status_code = transport_response.status_code response.uuid = transport_response.uuid return response def handle_key_upload_error(self, response): if not self.olm: return if response.status_code in [400, 500]: self.olm.mark_keys_as_published() self.olm.save_account() @connected def receive(self, data: bytes) -> None: """Pass received data to the client""" assert self.connection try: response = self.connection.receive(data) except (h11.RemoteProtocolError, h2.exceptions.ProtocolError) as e: raise 
RemoteTransportError(e) if response: try: request_info = self.requests_made.pop(response.uuid) except KeyError: logger.error(f"{pprint.pformat(self.requests_made)}") raise if response.is_ok: logger.info(f"Received response of type: {request_info.request_class}") else: logger.info( "Error with response of type type: {}, error code {}".format( request_info.request_class, response.status_code ) ) self.parse_queue.append((request_info, response)) return def next_response( self, max_events: int = 0 ) -> Optional[Union[TransportResponse, Response]]: if not self.parse_queue: return None request_info, transport_response = self.parse_queue.popleft() response = self._create_response(request_info, transport_response, max_events) if isinstance(response, KeysUploadError): self.handle_key_upload_error(response) self.receive_response(response) return response matrix-nio-0.24.0/nio/crypto/000077500000000000000000000000001455215747700160125ustar00rootroot00000000000000matrix-nio-0.24.0/nio/crypto/__init__.py000066400000000000000000000023051455215747700201230ustar00rootroot00000000000000"""nio encryption module. Encryption is handled mostly transparently to the user. The main thing users need to worry about is device verification. While device verification is handled in the Client classes of nio the classes that are used to introspect OlmDevices or device authentication sessions are documented here. 
""" import sys from .._compat import package_installed from .attachments import decrypt_attachment, encrypt_attachment from .async_attachments import ( AsyncDataT, async_encrypt_attachment, async_generator_from_data, ) if package_installed("olm"): from .sessions import ( # isort:skip InboundGroupSession, InboundSession, OlmAccount, OutboundGroupSession, OutboundSession, Session, ) from .device import DeviceStore, OlmDevice, TrustState from .key_request import OutgoingKeyRequest from .log import logger from .memorystores import GroupSessionStore, SessionStore from .olm_machine import Olm from .sas import Sas, SasState ENCRYPTION_ENABLED = True else: ENCRYPTION_ENABLED = False from .device import DeviceStore, OlmDevice, TrustState from .key_request import OutgoingKeyRequest matrix-nio-0.24.0/nio/crypto/async_attachments.py000066400000000000000000000111571455215747700221010ustar00rootroot00000000000000# Copyright © 2018, 2019 Damir Jelić # Copyright © 2019 miruka # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
"""Matrix async encryption/decryption functions for file uploads.""" import asyncio import io from functools import partial from pathlib import Path from typing import Any, AsyncGenerator, AsyncIterable, Dict, Iterable, Union import aiofiles from aiofiles.threadpool.binary import AsyncBufferedReader from Crypto import Random # nosec from Crypto.Cipher import AES # nosec from Crypto.Hash import SHA256 # nosec from Crypto.Util import Counter # nosec from .attachments import _get_decryption_info_dict AsyncDataT = Union[ str, Path, bytes, Iterable[bytes], AsyncIterable[bytes], io.BufferedIOBase, AsyncBufferedReader, ] _EncryptedReturnT = AsyncGenerator[Union[bytes, Dict[str, Any]], None] async def async_encrypt_attachment(data: AsyncDataT) -> _EncryptedReturnT: """Async generator to encrypt data in order to send it as an encrypted attachment. This function lazily encrypts and yields data, thus it can be used to encrypt large files without fully loading them into memory if an iterable or async iterable of bytes is passed as data. Args: data (str/Path/bytes/Iterable[bytes]/AsyncIterable[bytes]/ io.BufferedIOBase/AsyncBufferedReader): The data to encrypt. Passing a path string, Path, async iterable or aiofiles open binary file object allows the file data to be read in an asynchronous and lazy (without reading the entire file into memory) way. Passing a non-async iterable or standard open binary file object will still allow the data to be read lazily, but not asynchronously. Yields: The encrypted bytes for each chunk of data. The last yielded value will be a dict containing the info needed to decrypt data. The keys are: | key: AES-CTR JWK key object. | iv: Base64 encoded 16 byte AES-CTR IV. | hashes.sha256: Base64 encoded SHA-256 hash of the ciphertext. 
""" key = Random.new().read(32) # 8 bytes IV iv = Random.new().read(8) # 8 bytes counter, prefixed by the IV ctr = Counter.new(64, prefix=iv, initial_value=0) cipher = AES.new(key, AES.MODE_CTR, counter=ctr) sha256 = SHA256.new() loop = asyncio.get_event_loop() async for chunk in async_generator_from_data(data): update_crypt = partial(cipher.encrypt, chunk) crypt_chunk = await loop.run_in_executor(None, update_crypt) update_hash = partial(sha256.update, crypt_chunk) await loop.run_in_executor(None, update_hash) yield crypt_chunk yield _get_decryption_info_dict(key, iv, sha256) async def async_generator_from_data( data: AsyncDataT, chunk_size: int = 4 * 1024, ) -> AsyncGenerator[bytes, None]: aio_opened = False if isinstance(data, (str, Path)): data = await aiofiles.open(data, "rb") aio_opened = True ### if isinstance(data, bytes): chunks = (data[i : i + chunk_size] for i in range(0, len(data), chunk_size)) for chunk in chunks: yield chunk # Test if data is a file obj first, since it's considered Iterable too elif isinstance(data, io.BufferedIOBase): while True: chunk = data.read(chunk_size) if not chunk: return yield chunk elif isinstance(data, AsyncBufferedReader): while True: chunk = await data.read(chunk_size) if not chunk: break yield chunk if aio_opened: await data.close() elif isinstance(data, Iterable): for chunk in data: # type: ignore yield chunk elif isinstance(data, AsyncIterable): async for chunk in data: yield chunk else: raise TypeError(f"Unknown type for data: {data!r}") matrix-nio-0.24.0/nio/crypto/attachments.py000066400000000000000000000115271455215747700207050ustar00rootroot00000000000000# Copyright 2018 Zil0 # Copyright © 2019 Damir Jelić # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # This function is part of the matrix-python-sdk and is distributed # under the APACHE 2.0 licence. """Matrix encryption algorithms for file uploads.""" from binascii import Error as BinAsciiError from typing import Any, Dict, Generator, Iterable, Tuple, Union import unpaddedbase64 from Crypto import Random from Crypto.Cipher import AES from Crypto.Hash import SHA256 from Crypto.Util import Counter from ..exceptions import EncryptionError DataT = Union[bytes, Iterable[bytes]] def decrypt_attachment(ciphertext: bytes, key: str, hash: str, iv: str): """Decrypt an encrypted attachment. Args: ciphertext (bytes): The data to decrypt. key (str): AES_CTR JWK key object. hash (str): Base64 encoded SHA-256 hash of the ciphertext. iv (str): Base64 encoded 16 byte AES-CTR IV. Returns: The plaintext bytes. Raises: EncryptionError if the integrity check fails. 
""" expected_hash = unpaddedbase64.decode_base64(hash) h = SHA256.new() h.update(ciphertext) if h.digest() != expected_hash: raise EncryptionError("Mismatched SHA-256 digest.") try: byte_key: bytes = unpaddedbase64.decode_base64(key) except (BinAsciiError, TypeError): raise EncryptionError("Error decoding key.") try: byte_iv: bytes = unpaddedbase64.decode_base64(iv) except (BinAsciiError, TypeError): raise EncryptionError("Error decoding initial values.") prefix: bytes = byte_iv[:8] cnt: int = int.from_bytes(byte_iv[8:], "big") ctr = Counter.new(64, prefix=prefix, initial_value=cnt) try: cipher = AES.new(byte_key, AES.MODE_CTR, counter=ctr) except ValueError as e: raise EncryptionError(e) return cipher.decrypt(ciphertext) def encrypt_attachment(plaintext: bytes) -> Tuple[bytes, Dict[str, Any]]: """Encrypt data in order to send it as an encrypted attachment. Args: data (bytes): The data to encrypt. Returns: A tuple with the encrypted bytes and a dict containing the info needed to decrypt data. See ``encrypted_attachment_generator()`` for the keys. """ values = list(encrypted_attachment_generator(plaintext)) encrypted_bytes: bytes = b"".join(values[:-1]) # type: ignore keys: Dict[str, Any] = values[-1] # type: ignore return (encrypted_bytes, keys) def encrypted_attachment_generator( data: DataT, ) -> Generator[Union[bytes, Dict[str, Any]], None, None]: """Generator to encrypt data in order to send it as an encrypted attachment. Unlike ``encrypt_attachment()``, this function lazily encrypts and yields data, thus it can be used to encrypt large files without fully loading them into memory if an iterable of bytes is passed as data. Args: data (bytes/Iterable[bytes]): The data to encrypt. Yields: The encrypted bytes for each chunk of data. The last yielded value will be a dict containing the info needed to decrypt data. The keys are: | key: AES-CTR JWK key object. | iv: Base64 encoded 16 byte AES-CTR IV. | hashes.sha256: Base64 encoded SHA-256 hash of the ciphertext. 
""" key = Random.new().read(32) # 8 bytes IV iv = Random.new().read(8) # 8 bytes counter, prefixed by the IV ctr = Counter.new(64, prefix=iv, initial_value=0) cipher = AES.new(key, AES.MODE_CTR, counter=ctr) sha256 = SHA256.new() if isinstance(data, bytes): data = [data] for chunk in data: encrypted_chunk = cipher.encrypt(chunk) # in executor sha256.update(encrypted_chunk) # in executor yield encrypted_chunk yield _get_decryption_info_dict(key, iv, sha256) def _get_decryption_info_dict( key: bytes, iv: bytes, sha256: SHA256.SHA256Hash ) -> Dict[str, Any]: json_web_key = { "kty": "oct", "alg": "A256CTR", "ext": True, "k": unpaddedbase64.encode_base64(key, urlsafe=True), "key_ops": ["encrypt", "decrypt"], } return { "v": "v2", "key": json_web_key, # Send IV concatenated with counter "iv": unpaddedbase64.encode_base64(iv + b"\x00" * 8), "hashes": { "sha256": unpaddedbase64.encode_base64(sha256.digest()), }, } matrix-nio-0.24.0/nio/crypto/device.py000066400000000000000000000146611455215747700176330ustar00rootroot00000000000000# Copyright © 2020 Damir Jelić # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. from collections import defaultdict from dataclasses import asdict, dataclass, field from enum import Enum from typing import DefaultDict, Dict, Iterator, KeysView, Optional # TODO document the values better. 
class TrustState(Enum): """The device trust state. An Enum holding differing values that a device trust state can be in. """ unset = 0 verified = 1 blacklisted = 2 ignored = 3 @dataclass class OlmDevice: """Class holding info about users Olm devices. OlmDevices represent user devices with which we can communicate in an encrypted manner. To do so an OlmDevice needs to have its trust state set. The trust state can be set to one of "verified", "ignored", or "blacklisted". Note that the trust state should never be moddified directly on an OlmDevice, all the attributes here are read only. The trust state can be changed by passing the OlmDevice to a nio Client or a MatrixStore class. Attributes: user_id (str): The id of the user that the device belongs to. device_id (str): The device id that combined with the user id uniquely identifies the device. keys (Dict): A dictionary containing the type and the public part of this devices encryption keys. display_name (str): The human readable name of this device. deleted (bool): A boolean signaling if this device has been deleted by its owner. trust_state (TrustState): The trust state of this device. """ user_id: str = field() device_id: str = field() keys: Dict[str, str] = field() display_name: str = "" deleted: bool = False trust_state: TrustState = TrustState.unset @property def id(self) -> str: """The device id. Same as the device_id attribute. 
""" return self.device_id @property def ed25519(self) -> str: """The ed25519 fingerprint key of the device.""" return self.keys["ed25519"] @ed25519.setter def ed25519(self, new_value): self.keys["ed25519"] = new_value @property def curve25519(self) -> str: """The curve25519 key of the device.""" return self.keys["curve25519"] @curve25519.setter def curve25519(self, new_value): self.keys["curve25519"] = new_value def as_dict(self): """Convert the OlmDevice into a dictionary.""" device = asdict(self) device["trust_state"] = self.trust_state.name return device @property def verified(self) -> bool: """Is the device verified.""" return self.trust_state == TrustState.verified @property def ignored(self) -> bool: """Is the device ignored.""" return self.trust_state == TrustState.ignored @property def blacklisted(self) -> bool: """Is the device blacklisted.""" return self.trust_state == TrustState.blacklisted class DeviceStore: """A store that holds olm devices in memory. The DeviceStore class implements the iter method, devices can be iterated over normally using: >>> for device in device_store: ... print(device.user_id, device.device_id) To get only non-deleted devices of a user the active_user_devices method can be used: >>> for device in device_store.active_user_devices("@bob:example.org"): ... 
print(device.user_id, device.device_id) """ def __init__(self): self._entries: DefaultDict[str, Dict[str, OlmDevice]] = defaultdict(dict) def __iter__(self) -> Iterator[OlmDevice]: for user_devices in self._entries.values(): yield from user_devices.values() def __getitem__(self, user_id: str) -> Dict[str, OlmDevice]: return self._entries[user_id] def items(self): """List of tuples in the form (user id, dict(device_id, OlmDevice).""" return self._entries.items() def values(self): """List of devices in the form of a dict(device_id, OlmDevice).""" return self._entries.values() def active_user_devices(self, user_id: str) -> Iterator[OlmDevice]: """Get all the non-deleted devices of a user. Args: user_id (str): The user for which we would like to get the devices for. This returns an iterator over all the non-deleted devices of the given user. """ for device in self._entries[user_id].values(): if not device.deleted: yield device def device_from_sender_key( self, user_id: str, sender_key: str ) -> Optional[OlmDevice]: """Get a non-deleted device of a user with the matching sender key. Args: user_id (str): The user id of the device owner. sender_key (str): The encryption key that is owned by the device, usually a curve25519 public key. """ for device in self.active_user_devices(user_id): if device.curve25519 == sender_key: return device return None @property def users(self) -> KeysView[str]: """Get the list of users that the device store knows about.""" return self._entries.keys() def devices(self, user_id: str) -> KeysView[str]: return self._entries[user_id].keys() def add(self, device: OlmDevice) -> bool: """Add the given device to the store. Args: device (OlmDevice): The device that should be added to the store. Returns True if the device was added to the store, False if it already was in the store. 
""" if device in self: return False self._entries[device.user_id][device.id] = device return True matrix-nio-0.24.0/nio/crypto/key_export.py000066400000000000000000000106511455215747700205600ustar00rootroot00000000000000# Copyright 2018 Zil0 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from atomicwrites import atomic_write from Crypto import Random from Crypto.Cipher import AES from Crypto.Hash import HMAC, SHA256, SHA512 from Crypto.Protocol.KDF import PBKDF2 from Crypto.Util import Counter from unpaddedbase64 import decode_base64, encode_base64 HEADER = "-----BEGIN MEGOLM SESSION DATA-----" FOOTER = "-----END MEGOLM SESSION DATA-----" def encrypt_and_save(data: bytes, outfile: str, passphrase: str, count: int = 100000): """Encrypt keys data and write it to file. Args: data (bytes): The data to encrypt. outfile (str): The file the encrypted data will be written to. passphrase (str): The encryption passphrase. count (int): The round count used when deriving a key from the passphrase. Raises: FileNotFoundError if the path to the file did not exist. """ encrypted_data = encrypt(data, passphrase, count=count) with atomic_write(outfile) as f: f.write(HEADER) f.write("\n") f.write(encrypted_data) f.write("\n") f.write(FOOTER) def decrypt_and_read(infile: str, passphrase: str) -> bytes: """Decrypt keys data from file. Args: infile (str): The file the encrypted data will be written to. passphrase (str): The encryption passphrase. Returns: The decrypted data, as bytes. 
Raises: ValueError if something went wrong during decryption. FileNotFoundError if the file was not found. """ with open(infile) as f: encrypted_data = f.read() encrypted_data = encrypted_data.replace("\n", "") if not encrypted_data.startswith(HEADER) or not encrypted_data.endswith(FOOTER): raise ValueError("Wrong file format.") return decrypt(encrypted_data[len(HEADER) : -len(FOOTER)], passphrase) def prf(passphrase, salt): """HMAC-SHA-512 pseudorandom function.""" return HMAC.new(passphrase, salt, SHA512).digest() def encrypt(data: bytes, passphrase: str, count: int = 100000): # 128 bits salt salt = Random.new().read(16) # 512 bits derived key derived_key = PBKDF2(passphrase, salt, 64, count, prf) # type: ignore aes_key = derived_key[:32] hmac_key = derived_key[32:64] # 128 bits IV, which will be the initial value initial iv = int.from_bytes(Random.new().read(16), byteorder="big") # Set bit 63 to 0, as specified iv &= ~(1 << 63) ctr = Counter.new(128, initial_value=iv) cipher = AES.new(aes_key, AES.MODE_CTR, counter=ctr) encrypted_data = cipher.encrypt(data) payload = b"".join( ( bytes([1]), # Version salt, int.to_bytes(iv, length=16, byteorder="big"), # 32 bits big-endian round count int.to_bytes(count, length=4, byteorder="big"), encrypted_data, ) ) hmac = HMAC.new(hmac_key, payload, SHA256).digest() return encode_base64(payload + hmac) def decrypt(encrypted_payload: str, passphrase: str): decoded_payload = decode_base64(encrypted_payload) version = decoded_payload[0] if isinstance(version, str): version = ord(version) if version != 1: raise ValueError("Unsupported export format version.") salt = decoded_payload[1:17] iv = int.from_bytes(decoded_payload[17:33], byteorder="big") count = int.from_bytes(decoded_payload[33:37], byteorder="big") encrypted_data = decoded_payload[37:-32] expected_hmac = decoded_payload[-32:] derived_key = PBKDF2(passphrase, salt, 64, count, prf) # type: ignore aes_key = derived_key[:32] hmac_key = derived_key[32:64] hmac = 
HMAC.new(hmac_key, decoded_payload[:-32], SHA256).digest() if hmac != expected_hmac: raise ValueError("HMAC check failed for encrypted payload.") ctr = Counter.new(128, initial_value=iv) cipher = AES.new(aes_key, AES.MODE_CTR, counter=ctr) return cipher.decrypt(encrypted_data) matrix-nio-0.24.0/nio/crypto/key_request.py000066400000000000000000000045001455215747700207230ustar00rootroot00000000000000# Copyright © 2020 Damir Jelić # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
from __future__ import annotations from dataclasses import dataclass, field from ..event_builders import RoomKeyRequestMessage, ToDeviceMessage from ..responses import RoomKeyRequestResponse @dataclass class OutgoingKeyRequest: """Key request that we sent out.""" request_id: str = field() session_id: str = field() room_id: str = field() algorithm: str = field() @classmethod def from_response(cls, response: RoomKeyRequestResponse) -> OutgoingKeyRequest: """Create a key request object from a RoomKeyRequestResponse.""" return cls( response.request_id, response.session_id, response.room_id, response.algorithm, ) @classmethod def from_message(cls, message: RoomKeyRequestMessage) -> OutgoingKeyRequest: """Create a key request object from a RoomKeyRequestMessage.""" return cls( message.request_id, message.session_id, message.room_id, message.algorithm, ) @classmethod def from_database(cls, row): """Create a key request object from a database row.""" return cls.from_response(row) def as_cancellation(self, user_id, requesting_device_id): """Turn the key request into a cancellation to-device message.""" content = { "action": "request_cancellation", "request_id": self.request_id, "requesting_device_id": requesting_device_id, } return ToDeviceMessage("m.room_key_request", user_id, "*", content) matrix-nio-0.24.0/nio/crypto/log.py000066400000000000000000000014771455215747700171560ustar00rootroot00000000000000# Copyright © 2019 Damir Jelić # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. import logging logger = logging.getLogger(__name__) matrix-nio-0.24.0/nio/crypto/memorystores.py000066400000000000000000000055421455215747700211420ustar00rootroot00000000000000# Copyright © 2018 Damir Jelić # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
from collections import defaultdict from typing import DefaultDict, Dict, Iterator, List, Optional from .sessions import InboundGroupSession, Session class SessionStore: def __init__(self): self._entries: DefaultDict[str, List[Session]] = defaultdict(list) def add(self, sender_key: str, session: Session) -> bool: if session in self._entries[sender_key]: return False self._entries[sender_key].append(session) self._entries[sender_key].sort(key=lambda x: x.use_time, reverse=True) return True def __iter__(self) -> Iterator[Session]: for session_list in self._entries.values(): yield from session_list def values(self): return self._entries.values() def items(self): return self._entries.items() def get(self, sender_key: str) -> Optional[Session]: if self._entries[sender_key]: return self._entries[sender_key][0] return None def __getitem__(self, sender_key: str) -> List[Session]: return self._entries[sender_key] class GroupSessionStore: def __init__(self): self._entries = defaultdict(lambda: defaultdict(dict)) def __iter__(self) -> Iterator[InboundGroupSession]: for room_sessions in self._entries.values(): for sender_sessions in room_sessions.values(): yield from sender_sessions.values() def add(self, session: InboundGroupSession) -> bool: room_id = session.room_id sender_key = session.sender_key if session in self._entries[room_id][sender_key].values(): return False self._entries[room_id][sender_key][session.id] = session return True def get( self, room_id: str, sender_key: str, session_id: str ) -> Optional[InboundGroupSession]: if session_id in self._entries[room_id][sender_key]: return self._entries[room_id][sender_key][session_id] return None def __getitem__( self, room_id: str ) -> DefaultDict[str, Dict[str, InboundGroupSession]]: return self._entries[room_id] matrix-nio-0.24.0/nio/crypto/olm_machine.py000066400000000000000000002431711455215747700206470ustar00rootroot00000000000000# Copyright © 2018, 2019 Damir Jelić # # Permission to use, copy, modify, and/or 
distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. from __future__ import annotations import json from collections import defaultdict from datetime import datetime, timedelta from json.decoder import JSONDecodeError from typing import Any, DefaultDict, Dict, Iterator, List, Optional, Set, Tuple, Union import olm from cachetools import LRUCache from jsonschema import SchemaError, ValidationError from olm import OlmGroupSessionError, OlmMessage, OlmPreKeyMessage, OlmSessionError from ..api import Api from ..crypto.sessions import Session from ..event_builders import DummyMessage, RoomKeyRequestMessage, ToDeviceMessage from ..events import ( BadEvent, BadEventType, DummyEvent, EncryptedToDeviceEvent, Event, ForwardedRoomKeyEvent, KeyVerificationAccept, KeyVerificationCancel, KeyVerificationEvent, KeyVerificationKey, KeyVerificationMac, KeyVerificationStart, MegolmEvent, OlmEvent, RoomKeyEvent, RoomKeyRequest, RoomKeyRequestCancellation, UnknownBadEvent, validate_or_badevent, ) from ..exceptions import ( EncryptionError, GroupEncryptionError, LocalProtocolError, OlmTrustError, OlmUnverifiedDeviceError, VerificationError, ) from ..responses import ( KeysClaimResponse, KeysQueryResponse, KeysUploadResponse, RoomKeyRequestResponse, ToDeviceResponse, ) from ..schemas import Schemas, validate_json from ..store import MatrixStore from . 
import ( DeviceStore, GroupSessionStore, InboundGroupSession, InboundSession, OlmAccount, OlmDevice, OutboundGroupSession, OutboundSession, OutgoingKeyRequest, SessionStore, logger, ) from .key_export import decrypt_and_read, encrypt_and_save from .sas import Sas DecryptedOlmT = Union[RoomKeyEvent, BadEvent, UnknownBadEvent, None] def chunks(lst, n): """Yield successive n-sized chunks from lst.""" for i in range(0, len(lst), n): yield lst[i : i + n] class KeyShareError(Exception): pass class Olm: _olm_algorithm = "m.olm.v1.curve25519-aes-sha2" _megolm_algorithm = "m.megolm.v1.aes-sha2" _algorithms = [_olm_algorithm, _megolm_algorithm] _maxToDeviceMessagesPerRequest = 20 _max_sas_life = timedelta(minutes=20) _unwedging_interval = timedelta(minutes=60) # To protect against replay attacks we store a bunch of data, as the dict # keys we store: # - sender key: a curve25519 public key, 43 bytes # - session id: this is the id of the megolm group session that was # used to encrypt the message, 43 bytes # - message index: an integer representing the current ratchet state, 8 # bytes # The values of the dict hold: # - event id: for v4/v5 rooms this is a sha256 hash encoded as # base64 + a $ sign as the prefix, 44 bytes total # - server timestamp: the origin server timestamp of the message, an # integer, 8 bytes # # This totals in 146 bytes per message. The cache has a limit of 100000 # which results in around 14 MiB of memory in total. _message_index_store_size = 100000 def __init__( self, user_id: str, device_id: str, store: MatrixStore, ) -> None: # Our own user id and device id. A tuple of user_id/device_id is # guaranteed to be unique. self.user_id = user_id self.device_id = device_id # The number of one-time keys we have uploaded on the server. If this # is None no action will be taken. After a sync request the client will # set this for us and depending on the count we will suggest the client # to upload new keys. 
self.uploaded_key_count: Optional[int] = None # A set of users for which we need to query their device keys. self.users_for_key_query: Set[str] = set() # A store holding all the Olm devices of differing users we know about. self.device_store = DeviceStore() # A store holding all our 1on1 Olm sessions. These sessions are used to # exchange encrypted messages between two devices (e.g. encryption keys # for room message encryption are shared this way). self.session_store = SessionStore() # This store holds all the encryption keys that are used to decrypt # room messages. An encryption key gets added to the store either if we # add our own locally or if it gets shared using 1on1 Olm sessions with # a to-device message with the m.room.encrypted type. self.inbound_group_store = GroupSessionStore() # This dictionary holds the current encryption key that will be used to # encrypt messages for a room. When such a key is created it will be # transformed to a InboundGroupSession and stored in the # inbound_group_store as well (it will be used to decrypt the messages # there). These keys will not be stored permanently, they get rotated # relatively frequently. These keys need to be shared with all the # users/devices in a room before they can be used to encrypt a room # message. # Dict of outbound Megolm sessions Dict[room_id] self.outbound_group_sessions: Dict[str, OutboundGroupSession] = {} self.tracked_users: Set[str] = set() # A dictionary holding key requests that we sent out ourselves. Those # will be stored in the database and restored. self.outgoing_key_requests: Dict[str, OutgoingKeyRequest] = {} # This dictionary holds key requests that we received during a sync # response. We don't handle them right away since they might be # cancelled in the same sync response. 
self.received_key_requests: Dict[str, RoomKeyRequest] = {} # If a received key request comes from a device for which we don't have # an Olm session the event will end up in this dictionary and the # device will end up in the key_request_devices_no_session list. # After the user claims one-time keys for the device with the missing # Olm session the event will be put back into the received_key_requests # dictionary. self.key_requests_waiting_for_session: Dict[ Tuple[str, str], Dict[str, RoomKeyRequest] ] = defaultdict(dict) self.key_request_devices_no_session: List[OlmDevice] = [] # This dictionary holds key requests that we received but the device # that sent us the key request is not verified/trusted. Such key # requests will be forwarded to users using a callback. # Users will need to verify the device and tell us to continue the key # sharing process using the continue_key_share method. self.key_request_from_untrusted: Dict[str, RoomKeyRequest] = {} # A list of devices for which we need to start a new Olm session. # Matrix clients need to do a one-time key claiming request for the # devices in this list. After a new session is created with the device # it will be removed from this list and a dummy encrypted message will # be queued to be sent as a to-device message. self.wedged_devices: List[OlmDevice] = [] # A cache of megolm events that failed to decrypt because the Olm # session was wedged and thus the decryption key was missed. # We need to unwedge the session and only then send out key re-requests, # otherwise we might again fail to decrypt the Olm message. self.key_re_requests_events: DefaultDict[ Tuple[str, str], List[MegolmEvent] ] = defaultdict(list) # A mapping from a transaction id to a Sas key verification object. The # transaction id uniquely identifies the key verification session. self.key_verifications: Dict[str, Sas] = {} # A list of to-device messages that need to be sent to the homeserver # by the client. 
This will get populated by common to-device messages # for key-requests, interactive device verification and Olm session # unwedging. self.outgoing_to_device_messages: List[ToDeviceMessage] = [] # A least recently used cache for replay attack protection for Megolm # encrypted messages. This is a dict holding a tuple of the # sender_key, the session id and message index as the key and a tuple # of the event_id and origin server timestamp as the dict values. self.message_index_store = LRUCache(self._message_index_store_size) self.store = store # Try to load an account for this user_id/device id tuple from the # store. account = self.store.load_account() # type: ignore # If no account was found for this user/device create a new one. # Otherwise load all the Olm/Megolm sessions and other relevant account # data from the store as well. if not account: logger.info( f"Creating new Olm account for {self.user_id} on device {self.device_id}" ) account = OlmAccount() self.save_account(account) self.load() self.account: OlmAccount = account def update_tracked_users(self, room): already_tracked = self.tracked_users room_users = set(room.users.keys()) missing = room_users - already_tracked if missing: self.users_for_key_query.update(missing) def add_changed_users(self, users: Set[str]) -> None: """Add users that have changed keys to the query set.""" self.users_for_key_query.update(users) @property def should_query_keys(self): if self.users_for_key_query: return True return False @property def should_upload_keys(self): if not self.account.shared: return True if self.uploaded_key_count is None: return False max_keys = self.account.max_one_time_keys key_count = (max_keys // 2) - self.uploaded_key_count return key_count > 0 def user_fully_verified(self, user_id: str) -> bool: devices = self.device_store.active_user_devices(user_id) for device in devices: if not self.is_device_verified(device) and not self.is_device_blacklisted( device ): return False return True def 
share_keys(self) -> Dict[str, Any]: def generate_one_time_keys(current_key_count: int) -> None: max_keys = self.account.max_one_time_keys key_count = (max_keys // 2) - current_key_count if key_count <= 0: raise ValueError( "Can't share any keys, too many keys already " "shared" ) self.account.generate_one_time_keys(key_count) def device_keys(): device_keys = { "algorithms": self._algorithms, "device_id": self.device_id, "user_id": self.user_id, "keys": { "curve25519:" + self.device_id: self.account.identity_keys["curve25519"], "ed25519:" + self.device_id: self.account.identity_keys["ed25519"], }, } signature = self.sign_json(device_keys) device_keys["signatures"] = { self.user_id: {"ed25519:" + self.device_id: signature} } return device_keys def one_time_keys(): one_time_key_dict = {} keys = self.account.one_time_keys["curve25519"] for key_id, key in keys.items(): key_dict = {"key": key} signature = self.sign_json(key_dict) one_time_key_dict["signed_curve25519:" + key_id] = { "key": key_dict.pop("key"), "signatures": { self.user_id: {"ed25519:" + self.device_id: signature} }, } return one_time_key_dict content: Dict[Any, Any] = {} # We're sharing our account for the first time, upload the identity # keys and one-time keys as well. if not self.account.shared: content["device_keys"] = device_keys() generate_one_time_keys(0) content["one_time_keys"] = one_time_keys() # Just upload one-time keys. 
else: if self.uploaded_key_count is None: raise EncryptionError("The uploaded key count is not known") generate_one_time_keys(self.uploaded_key_count) content["one_time_keys"] = one_time_keys() return content def _olm_encrypt(self, session, recipient_device, message_type, content): payload = { "sender": self.user_id, "sender_device": self.device_id, "keys": {"ed25519": self.account.identity_keys["ed25519"]}, "recipient": recipient_device.user_id, "recipient_keys": { "ed25519": recipient_device.ed25519, }, "type": message_type, "content": content, } olm_message = session.encrypt(Api.to_json(payload)) self.store.save_session(recipient_device.curve25519, session) return { "algorithm": self._olm_algorithm, "sender_key": self.account.identity_keys["curve25519"], "ciphertext": { recipient_device.curve25519: { "type": olm_message.message_type, "body": olm_message.ciphertext, } }, } def _queue_dummy_message(self, session, device): olm_dict = self._olm_encrypt(session, device, "m.dummy", {}) logger.info( f"Queuing a dummy Olm message for device {device.device_id} of user {device.user_id}" ) self.outgoing_to_device_messages.append( DummyMessage("m.room.encrypted", device.user_id, device.device_id, olm_dict) ) def handle_to_device_event(self, event): """Consume to-device events decrypting them if necessary. Args: event (ToDeviceEvent): The to-device event that should be handled. Returns a new event if the event was encrypted and successfully decrypted, otherwise None. """ decrypted_event = None if isinstance(event, EncryptedToDeviceEvent): decrypted_event = self.decrypt_event(event) elif isinstance(event, KeyVerificationEvent): self.handle_key_verification(event) elif isinstance(event, (RoomKeyRequest, RoomKeyRequestCancellation)): self._handle_key_requests(event) return decrypted_event def _handle_key_requests(self, event): # We first queue up all the requests here. This avoids handling of # requests that were canceled in the same sync. 
if isinstance(event, RoomKeyRequest): # TODO handle differing algorithms better. To achieve this the # sessions should know which algorithm they speak. if event.algorithm == Olm._megolm_algorithm: self.received_key_requests[event.request_id] = event else: logger.warning( f"Received key request from {event.sender} via {event.requesting_device_id} " f"with an unknown algorithm: {event.algorithm}" ) elif isinstance(event, RoomKeyRequestCancellation): # Let us first remove key requests that just arrived. Those don't # need anything special. self.received_key_requests.pop(event.request_id, None) # Now come the key requests that are waiting for an Olm session. user_key = (event.sender, event.requesting_device_id) self.key_requests_waiting_for_session[user_key].pop(event.request_id, None) # If there are no key requests that are waiting for this device to # get an Olm session, cancel getting an Olm session as well. if not self.key_requests_waiting_for_session[user_key]: try: device = self.device_store[event.sender][event.requesting_device_id] self.key_request_devices_no_session.remove(device) except (KeyError, ValueError): pass # Finally key requests that are waiting for device # verification. if event.request_id in self.key_request_from_untrusted: # First remove the event from our untrusted queue. self.key_request_from_untrusted.pop(event.request_id) # Since events in the untrusted queue were forwarded to users # we need to forward the cancellation as well. 
self.received_key_requests[event.request_id] = event def _encrypt_forwarding_key( self, room_id: str, group_session: InboundGroupSession, session: Session, device: OlmDevice, ) -> ToDeviceMessage: """Encrypt a group session to be forwarded as a to-device message.""" key_content = { "algorithm": self._megolm_algorithm, "forwarding_curve25519_key_chain": group_session.forwarding_chain, "room_id": room_id, "sender_claimed_ed25519_key": group_session.ed25519, "sender_key": group_session.sender_key, "session_id": group_session.id, "session_key": group_session.export_session( group_session.first_known_index ), } olm_dict = self._olm_encrypt( session, device, "m.forwarded_room_key", key_content ) return ToDeviceMessage( "m.room.encrypted", device.user_id, device.device_id, olm_dict ) def share_with_ourselves(self, event: RoomKeyRequest) -> None: """Share a room key with some other device owned by our own user. Args: event (RoomKeyRequest): The event of the key request. If the key share request is valid this will queue up a to-device message that holds the room key. Raises EncryptionError if no Olm session was found to encrypt the key. Raises OlmTrustError if the device that requested the key is not verified. Raises a KeyShareError if the request is invalid and can't be handled. """ logger.debug( f"Trying to share key {event.session_id} with {event.sender}:{event.requesting_device_id}" ) group_session = self.inbound_group_store.get( event.room_id, event.sender_key, event.session_id ) if not group_session: raise KeyShareError( f"Failed to re-share key {event.session_id} with {event.sender}: No session found" ) try: device = self.device_store[event.sender][event.requesting_device_id] except KeyError: raise KeyShareError( f"Failed to re-share key {event.session_id} with {event.sender}: " f"Unknown requesting device {event.requesting_device_id}." ) session = self.session_store.get(device.curve25519) if not session: # We need a session for this device first. 
Put it in a queue for a # key claiming request. if device not in self.key_request_devices_no_session: self.key_request_devices_no_session.append(device) # Put our key forward event in a separate queue, key sharing will # be retried once a key claim request with the device has been # done. self.key_requests_waiting_for_session[(device.user_id, device.device_id)][ event.request_id ] = event raise EncryptionError( f"No Olm session found for {device.user_id} and device {device.id}" ) if not device.verified: raise OlmUnverifiedDeviceError( device, f"Failed to re-share key {event.session_id} with {event.sender}: " f"Device {event.requesting_device_id} is not verified", ) logger.debug( f"Successfully shared a key {event.session_id} with {event.sender}:{event.requesting_device_id}" ) self.outgoing_to_device_messages.append( self._encrypt_forwarding_key(event.room_id, group_session, session, device) ) def get_active_key_requests( self, user_id: str, device_id: str ) -> List[RoomKeyRequest]: """Get key requests from a device that are waiting for verification. Args: user_id (str): The id of the user for which we would like to find the active key requests. device_id (str): The id of the device for which we would like to find the active key requests. """ return [ event for event in self.key_request_from_untrusted.values() if event.sender == user_id and event.requesting_device_id == device_id ] def continue_key_share(self, event: RoomKeyRequest) -> bool: """Continue a previously interrupted key share event. Args: event (RoomKeyRequest): The event which we would like to continue. 
""" if event not in self.key_request_from_untrusted.values(): raise LocalProtocolError("No such pending key share request found") event = self.key_request_from_untrusted[event.request_id] if not self._collect_single_key_share(event): return False self.key_request_from_untrusted.pop(event.request_id) return True def cancel_key_share(self, event: RoomKeyRequest) -> bool: """Cancel a previously interrupted key share event. Args: event (RoomKeyRequest): The event which we would like to cancel. """ return bool(self.key_request_from_untrusted.pop(event.request_id, None)) def _collect_single_key_share(self, event: RoomKeyRequest) -> bool: # The sender is ourself but on a different device. We share all # keys with ourselves. if event.sender == self.user_id: try: self.share_with_ourselves(event) except KeyShareError as error: logger.warning(error) except EncryptionError as error: # We can safely ignore this, the share_with_ourselves # method will queue up the device for a key claiming # request when that is done the event will be put back # in the received_key_requests queue. logger.warning(error) except OlmTrustError: return False return True def collect_key_requests(self): """Turn queued up key requests into to-device messages for key sharing. Returns RoomKeyRequest events that couldn't be sent out because the requesting device isn't verified or ignored. """ events_for_users = [] for event in self.received_key_requests.values(): # A key request cancellation turning up here means that the # cancellation cancelled a key request from an untrusted device. # Such a request was presented to the user to do the verification # dance before continuing so we need to show the user that the # request was cancelled. if isinstance(event, RoomKeyRequestCancellation): events_for_users.append(event) continue # The collect_single_key_share method tries to produce to-device # messages for the key share request. 
It will return False if it # wasn't able to produce such a to-device message if the requesting # device isn't trusted. # Forward such requests from untrusted devices to the user so they # can verify the device and continue with the key share request or # reject the request. if not self._collect_single_key_share(event): self.key_request_from_untrusted[event.request_id] = event events_for_users.append(event) self.received_key_requests = {} return events_for_users def _handle_key_claiming(self, response): keys = response.one_time_keys for user_id, user_devices in keys.items(): for device_id, one_time_key in user_devices.items(): # We need to find the device curve key for the wanted # user and his device. try: device = self.device_store[user_id][device_id] except KeyError: logger.warning( f"Curve key for user {user_id} and device {device_id} not found, failed to start Olm session" ) continue logger.info( f"Found curve key for user {user_id} and device {device_id}" ) key_object = next(iter(one_time_key.values())) verified = self.verify_json( key_object, device.ed25519, user_id, device_id ) if verified: logger.info( f"Successfully verified signature for one-time key of device {device_id} of user {user_id}." ) logger.info( f"Creating Outbound Session for device {device_id} of user {user_id}" ) session = self.create_session(key_object["key"], device.curve25519) if device in self.wedged_devices: self.wedged_devices.remove(device) self._queue_dummy_message(session, device) if device in self.key_request_devices_no_session: self.key_request_devices_no_session.remove(device) events = self.key_requests_waiting_for_session.pop( (device.user_id, device.device_id), {} ) self.received_key_requests.update(events) else: logger.warning( "Signature verification for one-time key of " f"device {device_id} of user {user_id} failed, could not start " "Olm session." 
) # This function is copyrighted under the Apache 2.0 license Zil0 def _handle_key_query(self, response: KeysQueryResponse) -> None: changed: DefaultDict[str, Dict[str, OlmDevice]] = defaultdict(dict) for user_id, device_dict in response.device_keys.items(): try: self.users_for_key_query.remove(user_id) except KeyError: pass self.tracked_users.add(user_id) for device_id, payload in device_dict.items(): if user_id == self.user_id and device_id == self.device_id: continue if payload["user_id"] != user_id or payload["device_id"] != device_id: logger.warning( "Mismatch in keys payload of device " f"{payload['device_id']} " f"({device_id}) of user {payload['user_id']} " f"({user_id}).", ) continue try: key_dict = payload["keys"] signing_key = key_dict[f"ed25519:{device_id}"] curve_key = key_dict[f"curve25519:{device_id}"] if "unsigned" in payload: display_name = payload["unsigned"].get( "device_display_name", "" ) else: display_name = "" except KeyError as e: logger.warning( f"Invalid identity keys payload from device {device_id} of" f" user {user_id}: {e}." ) continue verified = self.verify_json(payload, signing_key, user_id, device_id) if not verified: logger.warning( f"Signature verification failed for device {device_id} of " f"user {user_id}." ) continue user_devices = self.device_store[user_id] try: device = user_devices[device_id] except KeyError: logger.info( "Adding new device to the device store for " f"user {user_id} with device id {device_id}" ) self.device_store.add( OlmDevice( user_id, device_id, {"ed25519": signing_key, "curve25519": curve_key}, display_name=display_name, ) ) else: if device.ed25519 != signing_key: logger.warning( f"Ed25519 key has changed for device {device_id} " f"of user {user_id}." 
) continue if ( device.curve25519 == curve_key and device.display_name == display_name ): continue if device.curve25519 != curve_key: device.curve25519 = curve_key logger.info( "Updating curve key in the device store " f"for user {user_id} with device id {device_id}" ) elif device.display_name != display_name: device.display_name = display_name logger.info( "Updating display name in the device " f"store for user {user_id} with device id {device_id}" ) changed[user_id][device_id] = user_devices[device_id] current_devices = set(device_dict.keys()) stored_devices = { device.id for device in self.device_store.active_user_devices(user_id) } deleted_devices = stored_devices - current_devices for device_id in deleted_devices: device = self.device_store[user_id][device_id] device.deleted = True logger.info(f"Marking device {user_id} of user {device_id} as deleted") changed[user_id][device_id] = device self.store.save_device_keys(changed) response.changed = changed def _mark_to_device_message_as_sent(self, message): """Mark a to-device message as sent. This removes the to-device message from our outgoing to-device list. """ try: self.outgoing_to_device_messages.remove(message) if isinstance(message, DummyMessage): # Queue up key requests to be sent out that happened because of # this wedged session. events = self.key_re_requests_events.pop( (message.recipient, message.recipient_device), [] ) requested_sessions = [] for event in events: # Don't send out key re-requests for the same session twice. # TODO filter this when putting the events in. if event.session_id in requested_sessions: continue message = event.as_key_request( event.sender, self.device_id, event.session_id, event.device_id ) logger.info( f"Queuing a room key re-request for a unwedged " f"Olm session: {event.sender} {event.sender} " f"{event.session_id}." 
) self.outgoing_to_device_messages.append(message) requested_sessions.append(event.session_id) elif isinstance(message, RoomKeyRequestMessage): key_request = OutgoingKeyRequest.from_message(message) self.outgoing_key_requests[message.request_id] = key_request self.store.add_outgoing_key_request(key_request) except ValueError: pass def handle_response(self, response): if isinstance(response, KeysUploadResponse): self.account.shared = True self.uploaded_key_count = response.signed_curve25519_count self.mark_keys_as_published() self.save_account() elif isinstance(response, KeysQueryResponse): self._handle_key_query(response) elif isinstance(response, KeysClaimResponse): self._handle_key_claiming(response) elif isinstance(response, RoomKeyRequestResponse): key_request = OutgoingKeyRequest.from_response(response) self.outgoing_key_requests[response.request_id] = key_request self.store.add_outgoing_key_request(key_request) elif isinstance(response, ToDeviceResponse): self._mark_to_device_message_as_sent(response.to_device_message) def _create_inbound_session( self, sender: str, sender_key: str, message: Union[OlmPreKeyMessage, OlmMessage], ) -> InboundSession: logger.info(f"Creating Inbound session for {sender}") # Let's create a new inbound session. session = InboundSession(self.account, message, sender_key) logger.info(f"Created Inbound session for {sender}") # Remove the one time keys the session used so it can't be reused # anymore. self.account.remove_one_time_keys(session) # Save the account now that we removed the one time key. 
self.save_account() return session def blacklist_device(self, device: OlmDevice) -> bool: return self.store.blacklist_device(device) def unblacklist_device(self, device: OlmDevice) -> bool: return self.store.unblacklist_device(device) def verify_device(self, device: OlmDevice) -> bool: return self.store.verify_device(device) def is_device_verified(self, device: OlmDevice) -> bool: return self.store.is_device_verified(device) def is_device_blacklisted(self, device: OlmDevice) -> bool: return self.store.is_device_blacklisted(device) def unverify_device(self, device: OlmDevice) -> bool: return self.store.unverify_device(device) def ignore_device(self, device: OlmDevice) -> bool: return self.store.ignore_device(device) def unignore_device(self, device: OlmDevice) -> bool: return self.store.unignore_device(device) def is_device_ignored(self, device: OlmDevice) -> bool: return self.store.is_device_ignored(device) def create_session(self, one_time_key: str, curve_key: str) -> OutboundSession: # TODO this can fail session = OutboundSession(self.account, curve_key, one_time_key) # Save the account, add the session to the store and save it to the # database. 
self.save_account() self.session_store.add(curve_key, session) self.save_session(curve_key, session) return session def create_group_session( self, sender_key: str, sender_fp_key: str, room_id: str, session_id: str, session_key: str, ) -> None: logger.info(f"Creating inbound group session for {room_id} from {sender_key}") try: session = InboundGroupSession( session_key, sender_fp_key, sender_key, room_id ) if session.id != session_id: raise OlmSessionError( "Mismatched session id while creating " "inbound group session" ) except OlmSessionError as e: logger.warning(e) return self.inbound_group_store.add(session) self.save_inbound_group_session(session) def create_outbound_group_session(self, room_id: str) -> None: logger.info(f"Creating outbound group session for {room_id}") session = OutboundGroupSession() self.outbound_group_sessions[room_id] = session id_key = self.account.identity_keys["curve25519"] fp_key = self.account.identity_keys["ed25519"] self.create_group_session( id_key, fp_key, room_id, session.id, session.session_key ) logger.info(f"Created outbound group session for {room_id}") def get_missing_sessions(self, users: List[str]) -> Dict[str, List[str]]: missing: DefaultDict[str, List[str]] = defaultdict(list) for user_id in users: for device in self.device_store.active_user_devices(user_id): # we don't need a session for our own device, skip it if device.id == self.device_id: continue if not self.session_store.get(device.curve25519): logger.warning(f"Missing session for device {device.id}") missing[user_id].append(device.id) return missing def get_users_for_key_claiming(self) -> Dict[str, List[str]]: """Get the content for a key claim request that needs to be made. Returns a dictionary containing users as the keys and a list of devices for which we will claim one-time keys. Raises a LocalProtocolError if no key claim request needs to be made. 
""" if not self.wedged_devices and not self.key_request_devices_no_session: raise LocalProtocolError("No wedged sessions found.") wedged: DefaultDict[str, List[str]] = defaultdict(list) for device in self.wedged_devices: wedged[device.user_id].append(device.device_id) for device in self.key_request_devices_no_session: if device in wedged[device.user_id]: continue wedged[device.user_id].append(device.device_id) return wedged def _mark_device_for_unwedging(self, sender, sender_key): device = self.device_store.device_from_sender_key(sender, sender_key) if not device: # TODO we should probably mark this user for a key query. logger.warning( "Attempted to mark a device for Olm session " f"unwedging, but no device was found for user {sender} with " f"sender key {sender_key}" ) return session = self.session_store.get(device.curve25519) # Don't mark the device to be unwedged if our newest session is less # than an hour old. if session: session_age = datetime.now() - session.creation_time if session_age < self._unwedging_interval: logger.warning( f"Attempted to mark device {device.device_id} of user " f"{device.user_id} for Olm session unwedging, but a new " "session was created recently." ) return if device not in self.wedged_devices: logger.info( f"Marking device {device.device_id} of user {device.user_id} as wedged" ) self.wedged_devices.append(device) def _try_decrypt( self, sender: str, sender_key: str, message: Union[OlmPreKeyMessage, OlmMessage], ) -> Optional[str]: plaintext = None # Let's try to decrypt with each known session for the sender. # for a specific device? for session in self.session_store[sender_key]: matches = False try: if isinstance(message, OlmPreKeyMessage): # It's a prekey message, check if the session matches # if it doesn't no need to try to decrypt. 
matches = session.matches(message) if not matches: continue logger.info( "Trying to decrypt olm message using existing " f"session for {sender} and sender_key {sender_key}" ) plaintext = session.decrypt(message) self.save_session(sender_key, session) logger.info( "Successfully decrypted olm message " "using existing session" ) return plaintext except OlmSessionError as e: # Decryption failed using a matching session, we don't want # to create a new session using this prekey message so # raise an exception and log the error. if matches: logger.error( "Found matching session yet decryption " f"failed for sender {sender} and " f"sender key {sender_key}" ) raise EncryptionError("Decryption failed for matching session") # Decryption failed, we'll try another session in the next # iteration. logger.info( f"Error decrypting olm message from {sender} " f"and sender key {sender_key}: {e}" ) return None def _verify_olm_payload(self, sender: str, payload: Dict[Any, Any]) -> bool: # Verify that the sender in the payload matches the sender of the event if sender != payload["sender"]: raise VerificationError("Mismatched sender in Olm payload") # Verify that we're the recipient of the payload. 
if self.user_id != payload["recipient"]: raise VerificationError("Mismatched recipient in Olm " "payload") # Verify that the recipient fingerprint key matches our own if ( self.account.identity_keys["ed25519"] != payload["recipient_keys"]["ed25519"] ): raise VerificationError("Mismatched recipient key in " "Olm payload") return True def _handle_room_key_event( self, sender: str, sender_key: str, payload: Dict[Any, Any], ) -> Union[RoomKeyEvent, BadEventType, None]: event = RoomKeyEvent.from_dict(payload, sender, sender_key) if isinstance(event, (BadEvent, UnknownBadEvent)): return event content = payload["content"] if event.algorithm != "m.megolm.v1.aes-sha2": logger.error(f"Error: unsupported room key of type {event.algorithm}") return event logger.info( f"Received new group session key for room {event.room_id} from {sender}" ) sender_fp_key = payload["keys"].get("ed25519", None) # TODO handle this better if not sender_fp_key: return None self.create_group_session( sender_key, sender_fp_key, content["room_id"], content["session_id"], content["session_key"], ) return event def _should_accept_forward( self, sender: str, sender_key: str, event: ForwardedRoomKeyEvent, ) -> bool: if event.algorithm != "m.megolm.v1.aes-sha2": logger.error( f"Error: unsupported forwarded room key of type {event.algorithm}" ) return False elif event.session_id not in self.outgoing_key_requests: logger.info( "Ignoring session key we have not requested from device {}.", sender_key ) return False key_request = self.outgoing_key_requests[event.session_id] if ( event.algorithm != key_request.algorithm or event.room_id != key_request.room_id or event.session_id != key_request.session_id ): logger.info( "Ignoring session key with mismatched algorithm, room_id, or " "session id." 
) return False device = self.device_store.device_from_sender_key(event.sender, sender_key) # Only accept forwarded room keys from our own trusted devices if not device or not device.verified or not device.user_id == self.user_id: logger.warning( "Received a forwarded room key from a untrusted device " f"{event.sender}, {sender_key}" ) return False return True # This function is copyrighted under the Apache 2.0 license Zil0 def _handle_forwarded_room_key_event( self, sender: str, sender_key: str, payload: Dict[Any, Any], ) -> Union[ForwardedRoomKeyEvent, BadEventType, None]: event = ForwardedRoomKeyEvent.from_dict(payload, sender, sender_key) if isinstance(event, (BadEvent, UnknownBadEvent)): return event if not self._should_accept_forward(sender, sender_key, event): return None content = payload["content"] session_sender_key = content["sender_key"] signing_key = content["sender_claimed_ed25519_key"] chain = content["forwarding_curve25519_key_chain"] chain.append(session_sender_key) session = Olm._import_group_session( content["session_key"], signing_key, session_sender_key, event.room_id, chain, ) if not session: return None if self.inbound_group_store.add(session): self.save_inbound_group_session(session) key_request = self.outgoing_key_requests.pop(event.session_id) self.store.remove_outgoing_key_request(key_request) self.outgoing_to_device_messages.append( key_request.as_cancellation(self.user_id, self.device_id) ) return event def _handle_olm_event( self, sender: str, sender_key: str, payload: Dict[Any, Any], ) -> DecryptedOlmT: logger.info( f"Received Olm event of type: {payload['type']} from {sender} {sender_key}" ) if payload["type"] == "m.room_key": event = self._handle_room_key_event(sender, sender_key, payload) return event # type: ignore elif payload["type"] == "m.forwarded_room_key": return self._handle_forwarded_room_key_event(sender, sender_key, payload) elif payload["type"] == "m.dummy": return DummyEvent.from_dict(payload, sender, sender_key) else: 
logger.warning(f"Received unsupported Olm event of type {payload['type']}") return None def message_index_ok(self, message_index: int, event: MegolmEvent) -> bool: """Check that the message index corresponds to a known message. If we know about the index already we will do some sanity checking to prevent replay attacks, otherwise we store some info for a later check. Args: message_index (int): The message index of the decrypted message. event (MegolmEvent): The encrypted event that was decrypted and the message index belongs to. Returns True if the message is ok, False if we found conflicting event info indicating a replay attack. """ store_key = (event.sender_key, event.session_id, message_index) try: event_id, timestamp = self.message_index_store[store_key] except KeyError: self.message_index_store[store_key] = ( event.event_id, event.server_timestamp, ) return True if event_id != event.event_id or timestamp != event.server_timestamp: return False return True def check_if_wedged(self, event: MegolmEvent): """Check if a Megolm event failed decryption because they keys got lost because of a wedged Olm session. """ try: device = self.device_store[event.sender][event.device_id] except KeyError: logger.warning( f"Received a undecryptable Megolm event from a unknown " f"device: {event.sender} {event.device_id}" ) self.users_for_key_query.add(event.sender) return session = self.session_store.get(device.curve25519) if not session: logger.warning( f"Received a undecryptable Megolm event from a device " f"with no Olm sessions: {event.sender} {event.device_id}" ) return session_age = datetime.now() - session.creation_time # We received a undecryptable Megolm event from a device that is # currently wedged or has been recently unwedged. If it's recently # unwedged send out a key request, otherwise queue up a key request to # be sent out after we send the dummy message. 
if ( session_age < self._unwedging_interval and event.session_id not in self.outgoing_key_requests ): logger.info( f"Received a undecryptable Megolm event from a device " f"that we recently established an Olm session with: " f"{event.sender} {event.device_id}." ) message = event.as_key_request( event.sender, self.device_id, event.session_id, event.device_id ) self.outgoing_to_device_messages.append(message) if device in self.wedged_devices: logger.info( f"Received a undecryptable Megolm event from a device " f"that has a wedged Olm session: " f"{event.sender} {event.device_id}." ) self.key_re_requests_events[(device.user_id, device.device_id)].append( event ) def _decrypt_megolm_no_error( self, event: MegolmEvent, room_id: Optional[str] = None ) -> Optional[Union[Event, BadEvent]]: try: return self.decrypt_megolm_event(event, room_id) except EncryptionError: return None def decrypt_megolm_event( self, event: MegolmEvent, room_id: Optional[str] = None ) -> Union[Event, BadEvent]: room_id = room_id or event.room_id if not room_id: raise EncryptionError("Event doesn't contain a room id") verified = False session = self.inbound_group_store.get( room_id, event.sender_key, event.session_id ) if not session: message = ( "Error decrypting megolm event, no session found " f"with session id {event.session_id} for room {room_id}" ) self.check_if_wedged(event) logger.warning(message) raise EncryptionError(message) try: plaintext, message_index = session.decrypt(event.ciphertext) except OlmGroupSessionError as e: message = f"Error decrypting megolm event: {str(e)}" logger.warning(message) raise EncryptionError(message) if not self.message_index_ok(message_index, event): raise EncryptionError( f"Duplicate message index, possible replay attack from " f"{event.sender} {event.sender_key} {event.session_id}" ) # If the message is from our own session mark it as verified if ( event.sender == self.user_id and event.device_id == self.device_id and session.ed25519 == 
self.account.identity_keys["ed25519"] and event.sender_key == self.account.identity_keys["curve25519"] ): verified = True # Else check that the message is from a verified device else: try: device = self.device_store[event.sender][event.device_id] except KeyError: # We don't have the device keys for this device, add them # to our query set so the client fetches the keys in the next # key query. self.users_for_key_query.add(event.sender) else: # Do not mark events decrypted using a forwarded key as # verified if self.is_device_verified(device) and not session.forwarding_chain: if ( device.ed25519 != session.ed25519 or device.curve25519 != event.sender_key ): message = ( f"Device keys mismatch in event sent by device {device.id}." ) logger.warning(message) raise EncryptionError(message) logger.info(f"Event {event.event_id} successfully verified") verified = True try: parsed_dict: Dict[Any, Any] = json.loads(plaintext) except JSONDecodeError as e: raise EncryptionError(f"Error parsing payload: {str(e)}") bad = validate_or_badevent(parsed_dict, Schemas.room_megolm_decrypted) if bad: return bad parsed_dict["event_id"] = event.event_id if "m.relates_to" not in parsed_dict["content"]: try: parsed_dict["content"]["m.relates_to"] = event.source["content"][ "m.relates_to" ] except KeyError: pass parsed_dict["sender"] = event.sender parsed_dict["origin_server_ts"] = event.server_timestamp if event.transaction_id: parsed_dict["unsigned"] = {"transaction_id": event.transaction_id} new_event = Event.parse_decrypted_event(parsed_dict) if isinstance(new_event, UnknownBadEvent): return new_event new_event.decrypted = True new_event.verified = verified new_event.sender_key = event.sender_key new_event.session_id = event.session_id new_event.room_id = room_id return new_event def decrypt_event( self, event: Union[EncryptedToDeviceEvent, MegolmEvent], room_id: Optional[str] = None, ) -> Union[Event, RoomKeyEvent, BadEventType, None]: logger.debug(f"Decrypting event of type 
{type(event).__name__}") if isinstance(event, OlmEvent): try: own_key = self.account.identity_keys["curve25519"] own_ciphertext = event.ciphertext[own_key] except KeyError: logger.warning("Olm event doesn't contain ciphertext for our key") return None if own_ciphertext["type"] == 0: message = OlmPreKeyMessage(own_ciphertext["body"]) elif own_ciphertext["type"] == 1: message = OlmMessage(own_ciphertext["body"]) else: logger.warning( f"Unsupported olm message type: {own_ciphertext['type']}" ) return None return self.decrypt(event.sender, event.sender_key, message) elif isinstance(event, MegolmEvent): try: return self.decrypt_megolm_event(event, room_id) except EncryptionError: return None return None def decrypt( self, sender: str, sender_key: str, message: Union[OlmPreKeyMessage, OlmMessage], ) -> DecryptedOlmT: try: # First try to decrypt using an existing session. plaintext = self._try_decrypt(sender, sender_key, message) except EncryptionError: # We found a matching session for a prekey message but decryption # failed, don't try to decrypt any further. # Mark the device for unwedging instead. self._mark_device_for_unwedging(sender, sender_key) return None # Decryption failed with every known session or no known sessions, # let's try to create a new session. if plaintext is None: # New sessions can only be created if it's a prekey message, we # can't decrypt the message if it isn't one at this point in time # anymore, so return early if not isinstance(message, OlmPreKeyMessage): self._mark_device_for_unwedging(sender, sender_key) return None try: # Let's create a new session. s = self._create_inbound_session(sender, sender_key, message) # Now let's decrypt the message using the new session. 
plaintext = s.decrypt(message) # Store the new session self.session_store.add(sender_key, s) self.save_session(sender_key, s) except OlmSessionError as e: logger.error( f"Failed to create new session from prekeymessage: {str(e)}" ) self._mark_device_for_unwedging(sender, sender_key) return None # Mypy complains that the plaintext can still be empty here, # realistically this can't happen but let's make mypy happy if plaintext is None: logger.error("Failed to decrypt Olm message: unknown error") return None # The plaintext should be valid json, let's parse it and verify it. try: parsed_payload = json.loads(plaintext) except JSONDecodeError as e: # Failed parsing the payload, return early. logger.error(f"Failed to parse Olm message payload: {str(e)}") return None # Validate the payload, check that it contains all required keys as # well that the types of the values are the one we expect. # Note: The keys of the content object aren't checked here, the caller # should check the content depending on the type of the event try: validate_json(parsed_payload, Schemas.olm_event) except (ValidationError, SchemaError) as e: # Something is wrong with the payload log an error and return # early. 
logger.error( f"Error validating decrypted Olm event from {sender}: {str(e.message)}" ) return None # Verify that the payload properties contain correct values: # sender/recipient/keys/recipient_keys and check if the sender device # is already verified by us try: self._verify_olm_payload(sender, parsed_payload) except VerificationError as e: # We found a mismatched property don't process the event any # further logger.error(e) return None else: # Verification succeeded, handle the event return self._handle_olm_event(sender, sender_key, parsed_payload) def rotate_outbound_group_session(self, room_id): logger.info(f"Rotating outbound group session for room {room_id}") self.create_outbound_group_session(room_id) def should_share_group_session(self, room_id: str) -> bool: """Should the client share a group session. Returns True if no session was shared or the session expired, False otherwise. """ try: session = self.outbound_group_sessions[room_id] except KeyError: return True return session.expired or not session.shared def group_encrypt( self, room_id: str, plaintext_dict: Dict[Any, Any], ) -> Dict[str, str]: if room_id not in self.outbound_group_sessions: self.create_outbound_group_session(room_id) session = self.outbound_group_sessions[room_id] if session.expired: self.rotate_outbound_group_session(room_id) session = self.outbound_group_sessions[room_id] if not session.shared: raise GroupEncryptionError(f"Group session for room {room_id} not shared.") plaintext_dict["room_id"] = room_id ciphertext = session.encrypt(Api.to_json(plaintext_dict)) payload_dict = { "algorithm": self._megolm_algorithm, "sender_key": self.account.identity_keys["curve25519"], "ciphertext": ciphertext, "session_id": session.id, "device_id": self.device_id, } return payload_dict def share_group_session_parallel( self, room_id: str, users: List[str], ignore_unverified_devices: bool = False ) -> Iterator[Tuple[Set[Tuple[str, str]], Dict[str, Any]]]: logger.info(f"Sharing group session for room 
{room_id}") if room_id not in self.outbound_group_sessions: self.create_outbound_group_session(room_id) group_session = self.outbound_group_sessions[room_id] if group_session.shared: self.create_outbound_group_session(room_id) group_session = self.outbound_group_sessions[room_id] key_content = { "algorithm": self._megolm_algorithm, "room_id": room_id, "session_id": group_session.id, "session_key": group_session.session_key, } already_shared_set = group_session.users_shared_with ignored_set = group_session.users_ignored user_map = [] mark_as_ignored = [] for user_id in users: for device in self.device_store.active_user_devices(user_id): # No need to share the session with our own device if device.id == self.device_id: ignored_set.add((user_id, device.id)) continue if self.is_device_blacklisted(device): ignored_set.add((user_id, device.id)) continue if (user_id, device.id) in already_shared_set or ( user_id, device.id, ) in ignored_set: continue session = self.session_store.get(device.curve25519) if not session: logger.warning( f"Missing Olm session for user {user_id} and device " f"{device.id}, skipping" ) continue if not self.is_device_verified(device): if self.is_device_ignored(device): pass elif ignore_unverified_devices: mark_as_ignored.append(device) else: raise OlmUnverifiedDeviceError( device, f"Device {device.id} for user {device.user_id} is not " f"verified or blacklisted.", ) user_map.append((user_id, device, session)) if mark_as_ignored: self.store.ignore_devices(mark_as_ignored) for user_map_chunk in chunks(user_map, self._maxToDeviceMessagesPerRequest): to_device_dict: Dict[str, Any] = {"messages": {}} sharing_with = set() for user_id, device, session in user_map_chunk: olm_dict = self._olm_encrypt(session, device, "m.room_key", key_content) sharing_with.add((user_id, device.id)) if user_id not in to_device_dict["messages"]: to_device_dict["messages"][user_id] = {} to_device_dict["messages"][user_id][device.id] = olm_dict yield (sharing_with, 
to_device_dict) def share_group_session( self, room_id: str, users: List[str], ignore_missing_sessions: bool = False, ignore_unverified_devices: bool = False, ) -> Tuple[Set[Tuple[str, str]], Dict[str, Any]]: logger.info(f"Sharing group session for room {room_id}") if room_id not in self.outbound_group_sessions: self.create_outbound_group_session(room_id) group_session = self.outbound_group_sessions[room_id] if group_session.shared: raise LocalProtocolError("Group session already shared") key_content = { "algorithm": self._megolm_algorithm, "room_id": room_id, "session_id": group_session.id, "session_key": group_session.session_key, } to_device_dict: Dict[str, Any] = {"messages": {}} already_shared_set = group_session.users_shared_with ignored_set = group_session.users_ignored user_map = [] mark_as_ignored = [] for user_id in users: for device in self.device_store.active_user_devices(user_id): # No need to share the session with our own device if device.id == self.device_id: ignored_set.add((user_id, device.id)) continue if self.is_device_blacklisted(device): ignored_set.add((user_id, device.id)) continue if (user_id, device.id) in already_shared_set or ( user_id, device.id, ) in ignored_set: continue session = self.session_store.get(device.curve25519) if not session: if ignore_missing_sessions: ignored_set.add((user_id, device.id)) continue else: raise EncryptionError( f"Missing Olm session for user {user_id} and device {device.id}" ) if not self.is_device_verified(device): if self.is_device_ignored(device): pass elif ignore_unverified_devices: mark_as_ignored.append(device) else: raise OlmUnverifiedDeviceError( device, f"Device {device.id} for user {device.user_id} is not verified or blacklisted.", ) user_map.append((user_id, device, session)) if len(user_map) >= self._maxToDeviceMessagesPerRequest: break if len(user_map) >= self._maxToDeviceMessagesPerRequest: break sharing_with = set() if mark_as_ignored: self.store.ignore_devices(mark_as_ignored) for user_id, 
device, session in user_map: olm_dict = self._olm_encrypt(session, device, "m.room_key", key_content) sharing_with.add((user_id, device.id)) if user_id not in to_device_dict["messages"]: to_device_dict["messages"][user_id] = {} to_device_dict["messages"][user_id][device.id] = olm_dict return sharing_with, to_device_dict def load(self) -> None: self.session_store = self.store.load_sessions() self.inbound_group_store = self.store.load_inbound_group_sessions() self.device_store = self.store.load_device_keys() self.outgoing_key_requests = self.store.load_outgoing_key_requests() def save_session(self, curve_key: str, session: Session) -> None: self.store.save_session(curve_key, session) def save_inbound_group_session(self, session: InboundGroupSession) -> None: self.store.save_inbound_group_session(session) def save_account(self, account: Optional[OlmAccount] = None) -> None: if account: self.store.save_account(account) else: self.store.save_account(self.account) logger.debug("Saving account") def sign_json(self, json_dict: Dict[Any, Any]) -> str: signature = self.account.sign(Api.to_canonical_json(json_dict)) return signature # This function is copyrighted under the Apache 2.0 license Zil0 def verify_json(self, json, user_key, user_id, device_id): """Verifies a signed key object's signature. The object must have a 'signatures' key associated with an object of the form `user_id: {key_id: signature}`. Args: json (dict): The JSON object to verify. user_key (str): The public ed25519 key which was used to sign the object. user_id (str): The user who owns the device. device_id (str): The device who owns the key. Returns: True if the verification was successful, False if not. 
""" try: signatures = json.pop("signatures") except (KeyError, ValueError): return False key_id = f"ed25519:{device_id}" try: signature_base64 = signatures[user_id][key_id] except KeyError: json["signatures"] = signatures return False unsigned = json.pop("unsigned", None) try: olm.ed25519_verify(user_key, Api.to_canonical_json(json), signature_base64) success = True except olm.utility.OlmVerifyError: success = False json["signatures"] = signatures if unsigned: json["unsigned"] = unsigned return success def mark_keys_as_published(self) -> None: self.account.mark_keys_as_published() @staticmethod def export_keys_static(sessions, outfile, passphrase, count=10000): session_list = [] for session in sessions: payload = { "algorithm": Olm._megolm_algorithm, "sender_key": session.sender_key, "sender_claimed_keys": {"ed25519": session.ed25519}, "forwarding_curve25519_key_chain": session.forwarding_chain, "room_id": session.room_id, "session_id": session.id, "session_key": session.export_session(session.first_known_index), } session_list.append(payload) data = json.dumps(session_list).encode() encrypt_and_save(data, outfile, passphrase, count=count) # This function is copyrighted under the Apache 2.0 license Zil0 def export_keys(self, outfile, passphrase, count=10000): """Export all the Megolm decryption keys of this device. The keys will be encrypted using the passphrase. NOTE: This does not save other information such as the private identity keys of the device. Args: outfile (str): The file to write the keys to. passphrase (str): The encryption passphrase. count (int): Optional. Round count for the underlying key derivation. It is not recommended to specify it unless absolutely sure of the consequences. 
""" inbound_group_store = self.store.load_inbound_group_sessions() Olm.export_keys_static(inbound_group_store, outfile, passphrase, count) logger.info(f"Successfully exported encryption keys to {outfile}") @staticmethod def _import_group_session( session_key, sender_fp_key, sender_key, room_id, forwarding_chain ): try: return InboundGroupSession.import_session( session_key, sender_fp_key, sender_key, room_id, forwarding_chain, ) except OlmSessionError as e: logger.warning(f"Error importing inbound group session: {e}") return None @staticmethod def import_keys_static(infile: str, passphrase: str) -> List[InboundGroupSession]: sessions = [] try: data = decrypt_and_read(infile, passphrase) except ValueError as e: raise EncryptionError(e) try: session_list_all = json.loads(data) except JSONDecodeError as e: raise EncryptionError(f"Error parsing key file: {str(e)}") session_list = [] missing = False for session in session_list_all: if "sender_claimed_keys" in session: session_list.append(session) else: missing = True try: validate_json(session_list, Schemas.megolm_key_import) except (ValidationError, SchemaError) as e: logger.warning(e) raise EncryptionError(f"Error parsing key file: {str(e)}") for session_dict in session_list: if session_dict["algorithm"] != Olm._megolm_algorithm: logger.warning("Ignoring session with unsupported algorithm.") continue session = Olm._import_group_session( session_dict["session_key"], session_dict["sender_claimed_keys"]["ed25519"], session_dict["sender_key"], session_dict["room_id"], session_dict["forwarding_curve25519_key_chain"], ) if not session: missing = False continue sessions.append(session) if missing: total = len(session_list_all) imported = len(session_list_all) - len(sessions) logger.warning(f"Warning! Could only import {imported} out of {total} keys") return sessions # This function is copyrighted under the Apache 2.0 license Zil0 def import_keys(self, infile, passphrase): """Import Megolm decryption keys. 
The keys will be added to the current instance as well as written to database. Args: infile (str): The file containing the keys. passphrase (str): The decryption passphrase. """ sessions = Olm.import_keys_static(infile, passphrase) for session in sessions: # This could be improved by writing everything to db at once at # the end if self.inbound_group_store.add(session): self.save_inbound_group_session(session) logger.info(f"Successfully imported encryption keys from {infile}") def clear_verifications(self): """Remove canceled or done key verifications from our cache. Returns a list of events that need to be added to the to-device event stream of our caller. """ active_sas = {} events = [] now = datetime.now() for transaction_id, sas in self.key_verifications.items(): if sas.timed_out: message = sas.get_cancellation() self.outgoing_to_device_messages.append(message) cancel_event = {"sender": self.user_id, "content": message.content} events.append(KeyVerificationCancel.from_dict(cancel_event)) continue elif sas.canceled or sas.verified: if now - sas.creation_time > self._max_sas_life: continue active_sas[transaction_id] = sas else: active_sas[transaction_id] = sas self.key_verifications = active_sas return events def create_sas(self, olm_device): sas = Sas( self.user_id, self.device_id, self.account.identity_keys["ed25519"], olm_device, ) self.key_verifications[sas.transaction_id] = sas return sas.start_verification() def get_active_sas(self, user_id: str, device_id: str) -> Optional[Sas]: """Find a non-canceled SAS verification object for the provided user. Args: user_id (str): The user for which we should find a SAS verification object. device_id (str): The device_id for which we should find the SAS verification object. Returns the object if it's found, otherwise None. 
""" verifications = [x for x in self.key_verifications.values() if not x.canceled] for sas in sorted(verifications, key=lambda x: x.creation_time, reverse=True): device = sas.other_olm_device if device.user_id == user_id and device.id == device_id: return sas return None def handle_key_verification(self, event: KeyVerificationEvent) -> None: """Receive key verification events.""" if isinstance(event, KeyVerificationStart): logger.info( f"Received key verification start event from {event.sender} {event.from_device} {event.transaction_id}" ) try: device = self.device_store[event.sender][event.from_device] except KeyError: logger.warning( f"Received key verification event from unknown device: {event.sender} {event.from_device}" ) self.users_for_key_query.add(event.sender) return new_sas = Sas.from_key_verification_start( self.user_id, self.device_id, self.account.identity_keys["ed25519"], device, event, ) if new_sas.canceled: logger.warning( f"Received malformed key verification event from {event.sender} {event.from_device}" ) message = new_sas.get_cancellation() self.outgoing_to_device_messages.append(message) else: old_sas = self.get_active_sas(event.sender, event.from_device) if old_sas: logger.info( "Found an active verification process for the " "same user/device combination, " "canceling the old one. 
" f"Old Sas: {event.sender} {event.from_device} {old_sas.transaction_id}" ) old_sas.cancel() cancel_message = old_sas.get_cancellation() self.outgoing_to_device_messages.append(cancel_message) logger.info( f"Successfully started key verification with " f"{event.sender} {event.from_device} {new_sas.transaction_id}" ) self.key_verifications[event.transaction_id] = new_sas else: sas = self.key_verifications.get(event.transaction_id, None) if not sas: logger.warning( "Received key verification event with an unknown " f"transaction id from {event.sender}" ) return if isinstance(event, KeyVerificationAccept): sas.receive_accept_event(event) if sas.canceled: message = sas.get_cancellation() else: logger.info( f"Received a key verification accept event from {event.sender} " f"{sas.other_olm_device.id}, sharing keys {sas.transaction_id}" ) message = sas.share_key() self.outgoing_to_device_messages.append(message) elif isinstance(event, KeyVerificationCancel): logger.info( f"Received a key verification cancellation from {event.sender} " f"{sas.other_olm_device.id}. Canceling verification {sas.transaction_id}." ) sas = self.key_verifications.pop(event.transaction_id, None) if sas: sas.cancel() elif isinstance(event, KeyVerificationKey): sas.receive_key_event(event) to_device_message: Optional[ToDeviceMessage] = None if sas.canceled: to_device_message = sas.get_cancellation() else: logger.info( f"Received a key verification pubkey from {event.sender} " f"{sas.other_olm_device.id} {sas.transaction_id}." ) if not sas.we_started_it and not sas.canceled: to_device_message = sas.share_key() if to_device_message: self.outgoing_to_device_messages.append(to_device_message) elif isinstance(event, KeyVerificationMac): sas.receive_mac_event(event) if sas.canceled: self.outgoing_to_device_messages.append(sas.get_cancellation()) return logger.info( f"Received a valid key verification MAC from {event.sender} " f"{sas.other_olm_device.id} {event.transaction_id}." 
) if sas.verified: logger.info( "Interactive key verification successful, verifying device " f"{sas.other_olm_device.id} of user {event.sender} {event.transaction_id}." ) device = sas.other_olm_device self.verify_device(device) matrix-nio-0.24.0/nio/crypto/sas.py000066400000000000000000000605021455215747700171550ustar00rootroot00000000000000# Copyright © 2019 Damir Jelić # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. from __future__ import annotations from datetime import datetime, timedelta from enum import Enum from itertools import zip_longest from typing import List, Optional, Tuple from uuid import uuid4 import olm from ..api import Api from ..event_builders import ToDeviceMessage from ..events import KeyVerificationEvent, KeyVerificationStart from ..exceptions import LocalProtocolError from .device import OlmDevice class SasState(Enum): """Short Authentication String enum. This enum tracks the current state of our verification process. """ created = 0 started = 1 accepted = 2 key_received = 3 mac_received = 4 canceled = 5 class Sas(olm.Sas): """Matrix Short Authentication String class. This class implements a state machine to handle device verification using short authentication strings. Attributes: we_started_it (bool): Is true if the verification process was started by us, otherwise false. 
sas_accepted (bool): Is true if we accepted that the short authentication string matches on both devices. verified_devices(List[str]): The list of device ids that were verified during the verification process. Args: own_user (str): The user id of our own user. own_device (str): The device id of our own user. own_fp_key (str): The fingerprint key of our own device that will be verified by the other client. other_olm_device (OlmDevice): The OlmDevice which we would like to verify. transaction_id (str, optional): A string that will uniquely identify this verification process. A random and unique string will be generated if one isn't provided. short_auth_string (List[str], optional): A list of valid short authentication methods that the client would like to allow for this authentication session. By default the 'emoji' and 'decimal' methods are allowed. """ _sas_method_v1 = "m.sas.v1" _key_agreement_v1 = "curve25519" _key_agreement_v2 = "curve25519-hkdf-sha256" _key_agreeemnt_protocols = [_key_agreement_v1, _key_agreement_v2] _hash_v1 = "sha256" _mac_normal = "hkdf-hmac-sha256" _mac_old = "hmac-sha256" _mac_v1 = [_mac_normal, _mac_old] _strings_v1 = ["emoji", "decimal"] _user_cancel_error = ("m.user", "Canceled by user") _timeout_error = ("m.timeout", "Timed out") _txid_error = ("m.unknown_transaction", "Unknown transaction") _unknown_method_error = ("m.unknown_method", "Unknown method") _unexpected_message_error = ("m.unexpected_message", "Unexpected message") _key_mismatch_error = ("m.key_mismatch", "Key mismatch") _user_mismatch_error = ("m.user_error", "User mismatch") _invalid_message_error = ("m.invalid_message", "Invalid message") _commitment_mismatch_error = ( "m.mismatched_commitment", "Mismatched commitment", ) _sas_mismatch_error = ( "m.mismatched_sas", "Mismatched short authentication string", ) _max_age = timedelta(minutes=5) _max_event_timeout = timedelta(minutes=1) emoji = [ ("🐶", "Dog"), ("🐱", "Cat"), ("🦁", "Lion"), ("🐎", "Horse"), ("🦄", "Unicorn"), 
("🐷", "Pig"), ("🐘", "Elephant"), ("🐰", "Rabbit"), ("🐼", "Panda"), ("🐓", "Rooster"), ("🐧", "Penguin"), ("🐢", "Turtle"), ("🐟", "Fish"), ("🐙", "Octopus"), ("🦋", "Butterfly"), ("🌷", "Flower"), ("🌳", "Tree"), ("🌵", "Cactus"), ("🍄", "Mushroom"), ("🌏", "Globe"), ("🌙", "Moon"), ("☁️", "Cloud"), ("🔥", "Fire"), ("🍌", "Banana"), ("🍎", "Apple"), ("🍓", "Strawberry"), ("🌽", "Corn"), ("🍕", "Pizza"), ("🎂", "Cake"), ("❤️", "Heart"), ("😀", "Smiley"), ("🤖", "Robot"), ("🎩", "Hat"), ("👓", "Glasses"), ("🔧", "Wrench"), ("🎅", "Santa"), ("👍", "Thumbs up"), ("☂️", "Umbrella"), ("⌛", "Hourglass"), ("⏰", "Clock"), ("🎁", "Gift"), ("💡", "Light Bulb"), ("📕", "Book"), ("✏️", "Pencil"), ("📎", "Paperclip"), ("✂️", "Scissors"), ("🔒", "Lock"), ("🔑", "Key"), ("🔨", "Hammer"), ("☎️", "Telephone"), ("🏁", "Flag"), ("🚂", "Train"), ("🚲", "Bicycle"), ("✈️", "Airplane"), ("🚀", "Rocket"), ("🏆", "Trophy"), ("⚽", "Ball"), ("🎸", "Guitar"), ("🎺", "Trumpet"), ("🔔", "Bell"), ("⚓", "Anchor"), ("🎧", "Headphones"), ("📁", "Folder"), ("📌", "Pin"), ] def __init__( self, own_user: str, own_device: str, own_fp_key: str, other_olm_device: OlmDevice, transaction_id: Optional[str] = None, short_auth_string: Optional[List[str]] = None, mac_methods: Optional[List[str]] = None, ): self.own_user = own_user self.own_device = own_device self.own_fp_key = own_fp_key self.other_olm_device = other_olm_device self.transaction_id = transaction_id or str(uuid4()) self.short_auth_string = short_auth_string or ["emoji", "decimal"] self.mac_methods = mac_methods or Sas._mac_v1 self.chosen_mac_method = "" self.key_agreement_protocols = Sas._key_agreeemnt_protocols self.chosen_key_agreement: Optional[str] = None self.state = SasState.created self.we_started_it = True self.sas_accepted = False self.commitment = None self.cancel_reason = "" self.cancel_code = "" self.their_sas_key: Optional[str] = None self.verified_devices: List[str] = [] self.creation_time = datetime.now() self._last_event_time = self.creation_time super().__init__() 
@classmethod def from_key_verification_start( cls, own_user: str, own_device: str, own_fp_key: str, other_olm_device: OlmDevice, event: KeyVerificationStart, ) -> Sas: """Create a SAS object from a KeyVerificationStart event. Args: own_user (str): The user id of our own user. own_device (str): The device id of our own user. own_fp_key (str): The fingerprint key of our own device that will be verified by the other client. other_olm_device (OlmDevice): The Olm device of the other user that should be verified. event (KeyVerificationStart): The event that we received from the other device to start the key verification process. """ obj = cls( own_user, own_device, own_fp_key, other_olm_device, event.transaction_id, event.short_authentication_string, event.message_authentication_codes, ) obj.we_started_it = False obj.state = SasState.started string_content = Api.to_canonical_json(event.source["content"]) obj.commitment = olm.sha256(obj.pubkey + string_content) obj.key_agreement_protocols = event.key_agreement_protocols if ( Sas._sas_method_v1 != event.method or ( Sas._key_agreement_v1 not in event.key_agreement_protocols and Sas._key_agreement_v2 not in event.key_agreement_protocols ) or Sas._hash_v1 not in event.hashes or ( Sas._mac_normal not in event.message_authentication_codes and Sas._mac_old not in event.message_authentication_codes ) or ( "emoji" not in event.short_authentication_string and "decimal" not in event.short_authentication_string ) ): obj.state = SasState.canceled obj.cancel_code, obj.cancel_reason = obj._unknown_method_error return obj @property def canceled(self) -> bool: """Is the verification request canceled.""" return self.state == SasState.canceled @property def timed_out(self) -> bool: """Did the verification process time out.""" if self.verified or self.canceled: return False now = datetime.now() if ( now - self.creation_time >= self._max_age or now - self._last_event_time >= self._max_event_timeout ): self.state = SasState.canceled 
self.cancel_code, self.cancel_reason = self._timeout_error return True return False @property def verified(self) -> bool: """Is the device verified and the request done.""" return self.state == SasState.mac_received and self.sas_accepted def set_their_pubkey(self, pubkey: str): self.their_sas_key = pubkey super().set_their_pubkey(pubkey) def accept_sas(self): """Accept the short authentication string.""" if self.state == SasState.canceled: raise LocalProtocolError( "Key verification process was canceled " "can't accept short authentication " "string" ) if not self.other_key_set: raise LocalProtocolError( "Other public key isn't set yet, can't " "generate nor accept a short " "authentication string." ) self.sas_accepted = True def reject_sas(self): """Reject the authentication string.""" if not self.other_key_set: raise LocalProtocolError( "Other public key isn't set yet, can't " "generate nor reject a short " "authentication string." ) self.state = SasState.canceled self.cancel_code, self.cancel_reason = self._sas_mismatch_error def cancel(self): """Cancel the authentication process.""" self.state = SasState.canceled self.cancel_code, self.cancel_reason = self._user_cancel_error def _check_commitment(self, key: str): assert self.commitment calculated_commitment = olm.sha256( key + Api.to_canonical_json(self.start_verification().content) ) return self.commitment == calculated_commitment def _grouper(self, iterable, n, fillvalue=None): """Collect data into fixed-length chunks or blocks.""" # grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx" args = [iter(iterable)] * n return zip_longest(*args, fillvalue=fillvalue) @property def _extra_info_v1(self) -> str: device = self.other_olm_device tx_id = self.transaction_id our_info = f"{self.own_user}{self.own_device}" their_info = f"{device.user_id}{device.device_id}" if self.we_started_it: return f"MATRIX_KEY_VERIFICATION_SAS{our_info}{their_info}{tx_id}" else: return f"MATRIX_KEY_VERIFICATION_SAS{their_info}{our_info}{tx_id}" 
@property def _extra_info_v2(self) -> str: device = self.other_olm_device tx_id = self.transaction_id assert self.their_sas_key our_info = f"{self.own_user}|{self.own_device}|{self.pubkey}" their_info = f"{device.user_id}|{device.device_id}|{self.their_sas_key}" if self.we_started_it: return f"MATRIX_KEY_VERIFICATION_SAS|{our_info}|{their_info}|{tx_id}" else: return f"MATRIX_KEY_VERIFICATION_SAS|{their_info}|{our_info}|{tx_id}" @property def _extra_info(self) -> str: if self.chosen_key_agreement == Sas._key_agreement_v1: return self._extra_info_v1 elif self.chosen_key_agreement == Sas._key_agreement_v2: return self._extra_info_v2 raise ValueError(f"Unknown key agreement protocol {self.chosen_key_agreement}") def get_emoji(self) -> List[Tuple[str, str]]: """Get the emoji short authentication string. Returns a list of tuples that contain the emoji and the description of the emoji of the short authentication string. """ return self._generate_emoji(self._extra_info) def get_decimals(self) -> Tuple[int, ...]: """Get the decimal short authentication string. Returns a tuple that contains three 4 digit integer numbers that represent the short authentication string. 
""" return self._generate_decimals(self._extra_info) def _generate_emoji(self, extra_info: str) -> List[Tuple[str, str]]: """Create a list of emojies from our shared secret.""" generated_bytes = self.generate_bytes(extra_info, 6) number = "".join([format(x, "08b") for x in bytes(generated_bytes)]) return [ self.emoji[int(x, 2)] for x in map("".join, list(self._grouper(number[:42], 6))) ] def _generate_decimals(self, extra_info: str) -> Tuple[int, ...]: """Create a decimal number from our shared secret.""" generated_bytes = self.generate_bytes(extra_info, 5) number = "".join([format(x, "08b") for x in bytes(generated_bytes)]) return tuple( int(x, 2) + 1000 for x in map("".join, list(self._grouper(number[:-1], 13))) ) def start_verification(self) -> ToDeviceMessage: """Create a content dictionary to start the verification.""" if not self.we_started_it: raise LocalProtocolError( "Verification was not started by us, " "can't send start verification message." ) if self.state == SasState.canceled: raise LocalProtocolError( "SAS verification was canceled, " "can't send start verification message." ) content = { "from_device": self.own_device, "method": self._sas_method_v1, "transaction_id": self.transaction_id, "key_agreement_protocols": Sas._key_agreeemnt_protocols, "hashes": [self._hash_v1], "message_authentication_codes": self._mac_v1, "short_authentication_string": self._strings_v1, } message = ToDeviceMessage( "m.key.verification.start", self.other_olm_device.user_id, self.other_olm_device.id, content, ) return message def accept_verification(self) -> ToDeviceMessage: """Create a content dictionary to accept the verification offer.""" if self.we_started_it: raise LocalProtocolError( "Verification was started by us, can't " "accept offer." ) if self.state == SasState.canceled: raise LocalProtocolError( "SAS verification was canceled, can't " "accept offer." 
) sas_methods = [] if "emoji" in self.short_auth_string: sas_methods.append("emoji") if "decimal" in self.short_auth_string: sas_methods.append("decimal") if self._mac_normal in self.mac_methods: self.chosen_mac_method = self._mac_normal else: self.chosen_mac_method = self._mac_old if Sas._key_agreement_v2 in self.key_agreement_protocols: self.chosen_key_agreement = Sas._key_agreement_v2 else: self.chosen_key_agreement = Sas._key_agreement_v1 content = { "transaction_id": self.transaction_id, "key_agreement_protocol": self.chosen_key_agreement, "hash": self._hash_v1, "message_authentication_code": self.chosen_mac_method, "short_authentication_string": sas_methods, "commitment": self.commitment, } message = ToDeviceMessage( "m.key.verification.accept", self.other_olm_device.user_id, self.other_olm_device.id, content, ) return message def share_key(self) -> ToDeviceMessage: """Create a dictionary containing our public key.""" if self.state == SasState.canceled: raise LocalProtocolError( "SAS verification was canceled, can't " "share our public key." ) content = {"transaction_id": self.transaction_id, "key": self.pubkey} message = ToDeviceMessage( "m.key.verification.key", self.other_olm_device.user_id, self.other_olm_device.id, content, ) return message def get_mac(self) -> ToDeviceMessage: """Create a dictionary containing our MAC.""" if not self.sas_accepted: raise LocalProtocolError("SAS string wasn't yet accepted") if self.state == SasState.canceled: raise LocalProtocolError( "SAS verification was canceled, can't " "generate MAC." 
) key_id = f"ed25519:{self.own_device}" assert self.chosen_mac_method if self.chosen_mac_method == self._mac_normal: calculate_mac = self.calculate_mac elif self.chosen_mac_method == self._mac_old: calculate_mac = self.calculate_mac_long_kdf info = ( "MATRIX_KEY_VERIFICATION_MAC" f"{self.own_user}{self.own_device}" f"{self.other_olm_device.user_id}{self.other_olm_device.id}{self.transaction_id}" ) mac = {key_id: calculate_mac(self.own_fp_key, info + key_id)} content = { "mac": mac, "keys": calculate_mac(key_id, info + "KEY_IDS"), "transaction_id": self.transaction_id, } message = ToDeviceMessage( "m.key.verification.mac", self.other_olm_device.user_id, self.other_olm_device.id, content, ) return message def get_cancellation(self) -> ToDeviceMessage: """Create a dictionary containing our verification cancellation.""" if self.state != SasState.canceled: raise LocalProtocolError("Sas process isn't canceled.") assert self.cancel_code assert self.cancel_reason content = { "code": self.cancel_code, "reason": self.cancel_reason, "transaction_id": self.transaction_id, } message = ToDeviceMessage( "m.key.verification.cancel", self.other_olm_device.user_id, self.other_olm_device.id, content, ) return message def _event_ok(self, event: KeyVerificationEvent): if self.state == SasState.canceled: return False if event.transaction_id != self.transaction_id: self.state = SasState.canceled self.cancel_code, self.cancel_reason = self._txid_error return False if self.other_olm_device.user_id != event.sender: self.state = SasState.canceled self.cancel_code, self.cancel_reason = self._user_mismatch_error return False return True def receive_accept_event(self, event): """Receive a KeyVerificationAccept event.""" if not self._event_ok(event): return if self.state != SasState.created: self.state = SasState.canceled ( self.cancel_code, self.cancel_reason, ) = Sas._unexpected_message_error return if ( event.key_agreement_protocol not in Sas._key_agreeemnt_protocols or event.hash != 
Sas._hash_v1 or event.message_authentication_code not in Sas._mac_v1 or ( "emoji" not in event.short_authentication_string and "decimal" not in event.short_authentication_string ) ): self.state = SasState.canceled self.cancel_code, self.cancel_reason = Sas._unknown_method_error return self.commitment = event.commitment self.chosen_mac_method = event.message_authentication_code self.chosen_key_agreement = event.key_agreement_protocol self.short_auth_string = event.short_authentication_string self.state = SasState.accepted def receive_key_event(self, event): """Receive a KeyVerificationKey event.""" if self.other_key_set or ( (self.state != SasState.started) and (self.state != SasState.accepted) ): self.state = SasState.canceled ( self.cancel_code, self.cancel_reason, ) = self._unexpected_message_error return if not self._event_ok(event): return if self.we_started_it: if not self._check_commitment(event.key): self.state = SasState.canceled ( self.cancel_code, self.cancel_reason, ) = self._commitment_mismatch_error return self.set_their_pubkey(event.key) self.state = SasState.key_received def receive_mac_event(self, event): """Receive a KeyVerificationMac event. Args: event (KeyVerificationMac): The MAC event that was received for this SAS session. 
""" if self.verified: return if not self._event_ok(event): return if self.state != SasState.key_received: self.state = SasState.canceled ( self.cancel_code, self.cancel_reason, ) = Sas._unexpected_message_error return info = ( f"MATRIX_KEY_VERIFICATION_MAC{self.other_olm_device.user_id}{self.other_olm_device.id}" f"{self.own_user}{self.own_device}{self.transaction_id}" ) key_ids = ",".join(sorted(event.mac.keys())) assert self.chosen_mac_method if self.chosen_mac_method == self._mac_normal: calculate_mac = self.calculate_mac elif self.chosen_mac_method == self._mac_old: calculate_mac = self.calculate_mac_long_kdf if event.keys != calculate_mac(key_ids, info + "KEY_IDS"): self.state = SasState.canceled self.cancel_code, self.cancel_reason = self._key_mismatch_error return for key_id, key_mac in event.mac.items(): try: key_type, device_id = key_id.split(":", 2) except ValueError: self.state = SasState.canceled ( self.cancel_code, self.cancel_reason, ) = self._invalid_message_error return if key_type != "ed25519": self.state = SasState.canceled self.cancel_code, self.cancel_reason = self._key_mismatch_error return if device_id != self.other_olm_device.id: continue other_fp_key = self.other_olm_device.ed25519 if key_mac != calculate_mac(other_fp_key, info + key_id): self.state = SasState.canceled self.cancel_code, self.cancel_reason = self._key_mismatch_error return self.verified_devices.append(device_id) if not self.verified_devices: self.state = SasState.canceled self.cancel_code, self.cancel_reason = self._key_mismatch_error self.state = SasState.mac_received matrix-nio-0.24.0/nio/crypto/sessions.py000066400000000000000000000151771455215747700202450ustar00rootroot00000000000000# Copyright © 2018 Damir Jelić # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. 
# # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. from __future__ import annotations from datetime import datetime, timedelta from typing import List, Optional, Set, Tuple import olm from ..exceptions import EncryptionError class OlmAccount(olm.Account): def __init__(self) -> None: self.shared = False super().__init__() def __new__(cls, *args): return super().__new__(cls) @classmethod def from_pickle( cls, pickle: bytes, passphrase: str = "", shared: bool = False, ) -> OlmAccount: account = super().from_pickle(pickle, passphrase) account.shared = shared return account class _SessionExpirationMixin: @property def expired(self): return False class Session(olm.Session, _SessionExpirationMixin): def __init__(self): super().__init__() self.creation_time = datetime.now() self.use_time = datetime.now() def __new__(cls, *args): return super().__new__(cls, *args) @classmethod def from_pickle( cls, pickle: str, creation_time: datetime, passphrase: str = "", use_time: Optional[datetime] = None, ) -> Session: session = super().from_pickle(pickle, passphrase) session.creation_time = creation_time session.use_time = use_time or creation_time return session def decrypt(self, ciphertext, unicode_errors="replace"): self.use_time = datetime.now() return super().decrypt(ciphertext, unicode_errors) def encrypt(self, plaintext): self.use_time = datetime.now() return super().encrypt(plaintext) class InboundSession(olm.InboundSession, _SessionExpirationMixin): def __new__(cls, *args): return super().__new__(cls, *args) def __init__(self, 
account, message, identity_key=None): super().__init__(account, message, identity_key) self.creation_time = datetime.now() self.use_time = datetime.now() def decrypt(self, ciphertext, unicode_errors="replace"): self.use_time = datetime.now() return super().decrypt(ciphertext, unicode_errors) def encrypt(self, plaintext): self.use_time = datetime.now() return super().encrypt(plaintext) class OutboundSession(olm.OutboundSession, _SessionExpirationMixin): def __new__(cls, *args): return super().__new__(cls, *args) def __init__(self, account, identity_key, one_time_key): super().__init__(account, identity_key, one_time_key) self.creation_time = datetime.now() self.use_time = datetime.now() def decrypt(self, ciphertext, unicode_errors="replace"): self.use_time = datetime.now() return super().decrypt(ciphertext, unicode_errors) def encrypt(self, plaintext): self.use_time = datetime.now() return super().encrypt(plaintext) class InboundGroupSession(olm.InboundGroupSession): def __init__( self, session_key: str, signing_key: str, sender_key: str, room_id: str, forwarding_chains: Optional[List[str]] = None, ) -> None: self.ed25519 = signing_key self.sender_key = sender_key self.room_id = room_id self.forwarding_chain: List[str] = forwarding_chains or [] super().__init__(session_key) def __new__(cls, *args): return super().__new__(cls) @classmethod def from_pickle( cls, pickle: bytes, signing_key: str, sender_key: str, room_id: str, passphrase: str = "", forwarding_chain: Optional[List[str]] = None, ) -> InboundGroupSession: session = super().from_pickle(pickle, passphrase) session.ed25519 = signing_key session.sender_key = sender_key session.room_id = room_id session.forwarding_chain = forwarding_chain or [] return session @classmethod def import_session( cls, session_key: str, signing_key: str, sender_key: str, room_id: str, forwarding_chain: Optional[List[str]] = None, ): session = super().import_session(session_key) session.ed25519 = signing_key session.sender_key = 
sender_key session.room_id = room_id session.forwarding_chain = forwarding_chain or [] return session class OutboundGroupSession(olm.OutboundGroupSession): """Outbound group session aware of the users it is shared with. Also remembers the time it was created and the number of messages it has encrypted, in order to know if it needs to be rotated. Attributes: creation_time (datetime.datetime): Creation time of the session. message_count (int): Number of messages encrypted using the session. """ def __init__(self): self.max_age = timedelta(days=7) self.max_messages = 100 self.creation_time = datetime.now() self.message_count = 0 self.users_shared_with: Set[Tuple[str, str]] = set() self.users_ignored: Set[Tuple[str, str]] = set() self.shared = False super().__init__() def __new__(cls, **kwargs): return super().__new__(cls) def mark_as_shared(self): self.shared = True @property def expired(self): return self.should_rotate() def should_rotate(self): """Should the session be rotated? Returns: True if it should, False if not. """ if ( self.message_count >= self.max_messages or datetime.now() - self.creation_time >= self.max_age ): return True return False def encrypt(self, plaintext): if not self.shared: raise EncryptionError("Error, session is not shared") if self.expired: raise EncryptionError("Error, session is has expired") self.message_count += 1 return super().encrypt(plaintext) matrix-nio-0.24.0/nio/event_builders/000077500000000000000000000000001455215747700175045ustar00rootroot00000000000000matrix-nio-0.24.0/nio/event_builders/__init__.py000066400000000000000000000006031455215747700216140ustar00rootroot00000000000000"""Nio Event Builders Module. This module provides classes to easily create event dictionaries that can be used with the clients's ``room_send()`` method, or ``room_create()``'s ``initial_state`` argument. It also provides classes for some direct events such as to-device messages. 
""" from .direct_messages import * from .event_builder import EventBuilder from .state_events import * matrix-nio-0.24.0/nio/event_builders/direct_messages.py000066400000000000000000000045401455215747700232220ustar00rootroot00000000000000# Copyright © 2018, 2019 Damir Jelić # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. """Matrix direct messages module. This module contains classes that can be used to send direct events to a Matrix homeserver. """ from dataclasses import dataclass, field from typing import Dict from .event_builder import EventBuilder @dataclass class ToDeviceMessage(EventBuilder): """A to-device message that can be sent to the homeserver. Attributes: type (str): The type of the message. recipient (str): The user to whom we should sent this message. recipient_device (str): The device id of the device that the message should be sent to. content (Dict[Any, Any]): The content that should be sent to the user. 
""" type: str = field() recipient: str = field() recipient_device: str = field() content: Dict = field() def as_dict(self): return {"messages": {self.recipient: {self.recipient_device: self.content}}} @dataclass class DummyMessage(ToDeviceMessage): """A dummy to-device mssage that is sent to restart a Olm session.""" pass @dataclass class RoomKeyRequestMessage(ToDeviceMessage): """A to-device message that requests room keys from other devices. Attributes: request_id (str): The unique request id that identifies this key request. session_id (str): The session id that uniquely identifies the room key. room_id (str): The room id of the room that the key belongs to. algorithm (str): The algorithm of the room key. """ request_id: str = field() session_id: str = field() room_id: str = field() algorithm: str = field() matrix-nio-0.24.0/nio/event_builders/event_builder.py000066400000000000000000000021071455215747700227050ustar00rootroot00000000000000# Copyright © 2018, 2019 Damir Jelić # Copyright © 2019 miruka # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
# TODO: use abc.ABC when we drop py2 class EventBuilder: """The base class for event builders, should not be instantiated.""" def as_dict(self): """Format the event as a dictionary, to be sent to the server.""" raise NotImplementedError matrix-nio-0.24.0/nio/event_builders/state_events.py000066400000000000000000000126411455215747700225660ustar00rootroot00000000000000# Copyright © 2018, 2019 Damir Jelić # Copyright © 2019 miruka # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. """Matrix state events module. This module contains classes that can be used to easily create room state event dicts. For example, to turn on encryption in a room with the ``HttpClient`` or ``AsyncClient``, the ``EnableEncryptionBuilder`` class can be used: >>> event_dict = EnableEncryptionBuilder().as_dict() >>> client.room_send( ... room_id = "!test:example.com", ... message_type = event_dict["type"], ... content = event_dict["content"], ... ) """ from dataclasses import dataclass, field from . import EventBuilder @dataclass class EnableEncryptionBuilder(EventBuilder): """A state event sent to enable encryption in a room. Attributes: algorithm (str): The algorithm to use for encrypting messages. The default ``m.megolm.v1.aes-sha2`` should not be changed. 
rotation_ms (int): How long in milliseconds an encrypted session should be used before changing it. The default ``604800000`` (a week) is recommended. rotation_msgs (int): How many messages can be received in a room before changing the encrypted session. The default ``100`` is recommended. """ algorithm: str = "m.megolm.v1.aes-sha2" rotation_ms: int = 604800000 rotation_msgs: int = 100 def as_dict(self): return { "type": "m.room.encryption", "state_key": "", "content": { "algorithm": self.algorithm, "rotation_period_ms": self.rotation_ms, "rotation_period_msgs": self.rotation_msgs, }, } @dataclass class ChangeNameBuilder(EventBuilder): """A state event sent to change a room's name. Attributes: name (str): The name to set. Must not exceed 255 characters. Can be empty to remove the room's name. """ name: str = field() def __post_init__(self): if len(self.name) > 255: raise ValueError( f"Room name exceeds 255 characters: {self.name}", ) def as_dict(self): return { "type": "m.room.name", "state_key": "", "content": {"name": self.name}, } @dataclass class ChangeTopicBuilder(EventBuilder): """A state event sent to change a room's topic. Attributes: topic (str): The topic to set. Can be empty to remove the room's topic. """ topic: str = field() def as_dict(self): return { "type": "m.room.topic", "state_key": "", "content": {"topic": self.topic}, } @dataclass class ChangeJoinRulesBuilder(EventBuilder): """A state event sent to change who can join a room. Attributes: rule (str): Can be ``public``, meaning any user can join; or ``invite``, meaning users must be invited to join the room. The matrix specification also reserves ``knock`` and ``private`` rules, which are currently not implemented. """ rule: str = field() def as_dict(self): return { "type": "m.room.join_rules", "state_key": "", "content": {"join_rule": self.rule}, } @dataclass class ChangeGuestAccessBuilder(EventBuilder): """A state event sent to allow or forbid guest accounts in a room. 
Attributes: access (str): Whether guests can join the room. Can be ``can_join`` or ``forbidden``. """ access: str = field() def as_dict(self): return { "type": "m.room.guest_access", "state_key": "", "content": {"guest_access": self.access}, } @dataclass class ChangeHistoryVisibilityBuilder(EventBuilder): """A state event sent to set what can users see from the room history. Attributes: visibility (str): Can be: - ``invited``: users can't see events that happened before they were invited to the room - ``joined``: users can't see events that happened before they joined or accepted an invitation to the room. - ``shared``: users that joined the room can see the entire room's history - ``world_readable``: anyone can see the entire room's history, including users that aren't part of the room. """ visibility: str = field() def as_dict(self): return { "type": "m.room.history_visibility", "state_key": "", "content": {"history_visibility": self.visibility}, } # TODO: power_levels, canonical_alias, avatar, pinned_events matrix-nio-0.24.0/nio/events/000077500000000000000000000000001455215747700157765ustar00rootroot00000000000000matrix-nio-0.24.0/nio/events/__init__.py000066400000000000000000000007451455215747700201150ustar00rootroot00000000000000"""Nio Events Module. The model of conversation history exposed by a Matrix server can be considered as a list of events. The server 'linearises' the eventually-consistent event graph of events into an 'event stream' at any given point in time: Nio contains clases for most known Matrix Event types. 
""" from .account_data import * from .ephemeral import * from .invite_events import * from .misc import * from .presence import * from .room_events import * from .to_device import * matrix-nio-0.24.0/nio/events/account_data.py000066400000000000000000000446361455215747700210120ustar00rootroot00000000000000# Copyright © 2018-2019 Damir Jelić # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. """nio Account data events. Clients can store custom config data for their account on their homeserver. This account data will be synced between different devices and can persist across installations on a particular device. 
""" from __future__ import annotations import re from dataclasses import dataclass, field from fnmatch import fnmatchcase from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union from ..api import PushRuleKind from ..schemas import Schemas from .misc import verify, verify_or_none from .room_events import Event if TYPE_CHECKING: from ..rooms import MatrixRoom @dataclass class AccountDataEvent: """Abstract class for account data events.""" @classmethod @verify(Schemas.account_data) def parse_event( cls, event_dict: Dict[Any, Any], ): if event_dict["type"] == "m.fully_read": return FullyReadEvent.from_dict(event_dict) elif event_dict["type"] == "m.tag": return TagEvent.from_dict(event_dict) elif event_dict["type"] == "m.push_rules": return PushRulesEvent.from_dict(event_dict) return UnknownAccountDataEvent.from_dict(event_dict) @dataclass class FullyReadEvent(AccountDataEvent): """Read marker location event. The current location of the user's read marker in a room. This event appears in the user's room account data for the room the marker is applicable for. Attributes: event_id (str): The event id the user's read marker is located at in the room. """ event_id: str = field() @classmethod @verify(Schemas.fully_read) def from_dict(cls, event_dict): """Construct a FullyReadEvent from a dictionary.""" content = event_dict.pop("content") return cls( content["event_id"], ) @dataclass class TagEvent(AccountDataEvent): """Event representing the tags of a room. Room tags may include: - m.favourite for favourite rooms - m.lowpriority for low priority room A tag may have an order between 0 and 1, indicating the room's position towards other rooms with the same tag. Attributes: tags (Dict[str, Optional[Dict[str, float]]]): The tags of the room and their contents. 
""" tags: Dict[str, Optional[Dict[str, float]]] = field() @classmethod @verify(Schemas.tags) def from_dict(cls, event_dict): """Construct a TagEvent from a dictionary.""" content = event_dict.pop("content") return cls(content["tags"]) @dataclass class PushCondition: """A condition for a push rule to match an event.""" @classmethod def from_dict(cls, condition: Dict[str, Any]) -> PushCondition: cnd = condition if cnd["kind"] == "event_match" and "key" in cnd and "pattern" in cnd: return PushEventMatch(cnd["key"], cnd["pattern"]) if cnd["kind"] == "contains_display_name": return PushContainsDisplayName() if cnd["kind"] == "room_member_count": return PushRoomMemberCount.from_dict(cnd) if cnd["kind"] == "sender_notification_permission" and "key" in cnd: return PushSenderNotificationPermission(cnd["key"]) return PushUnknownCondition(cnd) @property def as_value(self) -> Dict[str, Any]: raise NotImplementedError def matches( self, event: Event, room: MatrixRoom, display_name: str, ) -> bool: """Return whether this condition holds true for a room event. Args: event (Event): The room event to check the condition for. room (MatrixRoom): The room that this event is part of. display_name (str): The display name of our own user in the room. """ return False @dataclass class PushEventMatch(PushCondition): """Require a field of the event to match a glob-style pattern. Attributes: key (str): The dot-separated field of the event to match, e.g. ``"type"`` or ``"content.body"``. pattern (str): Glob-style pattern to match the field's value against. Patterns with no special glob characters should be treated as starting and ending with an asterisk. 
""" key: str = field() pattern: str = field() @property def as_value(self) -> Dict[str, Any]: return { "kind": "event_match", "key": self.key, "pattern": self.pattern, } def matches( self, event: Event, room: MatrixRoom, display_name: str, ) -> bool: if self.key == "room_id": return fnmatchcase(room.room_id, self.pattern) value = event.flattened().get(self.key) if not isinstance(value, str): return False if self.key == "content.body": pattern = f"*[!a-z0-9]{self.pattern.lower()}[!a-z0-9]*" return fnmatchcase(f" {value.lower()} ", pattern) return fnmatchcase(value.lower(), self.pattern.lower()) @dataclass class PushContainsDisplayName(PushCondition): """Require a message's ``content.body`` to contain our display name. This rule can only match unencrypted messages. """ @property def as_value(self) -> Dict[str, Any]: return {"kind": "contains_display_name"} def matches( self, event: Event, room: MatrixRoom, display_name: str, ) -> bool: body = event.source.get("content", {}).get("body") if not isinstance(body, str): return False pattern = rf"(^|\W){re.escape(display_name)}(\W|$)" return bool(re.match(pattern, body, re.IGNORECASE)) @dataclass class PushRoomMemberCount(PushCondition): """Require a certain member count for the room the event is posted in. Attributes: count (int): A number of members operator (str): Whether the room's member count should be equal (``"=="``) to ``count``, inferior (``"<"``), superior (``">"``), inferior or equal (``"<="``), or superior or equal (``">="``). 
""" count: int = field() operator: str = "==" @classmethod def from_dict(cls, condition: Dict[str, Any]) -> PushRoomMemberCount: op, num = re.findall(r"(==|<|>|<=|>=)?([0-9.-]+)", condition["is"])[0] return cls(int(num), op or "==") @property def as_value(self) -> Dict[str, Any]: operator = "" if self.operator == "==" else self.operator return {"kind": "room_member_count", "is": f"{operator}{self.count}"} def matches( self, event: Event, room: MatrixRoom, display_name: str, ) -> bool: if self.operator == "==": return room.joined_count == self.count elif self.operator == "<": return room.joined_count < self.count elif self.operator == ">": return room.joined_count > self.count elif self.operator == "<=": return room.joined_count <= self.count else: return room.joined_count >= self.count @dataclass class PushSenderNotificationPermission(PushCondition): """Require the event's sender to have a high enough power level. Attributes: key (str): Which key from the ``notifications`` dict in power levels event (https://matrix.org/docs/spec/client_server/latest#m-room-power-levels) should be referred to as the required level for the event's sender, e.g. ``room``. """ key: str = field() @property def as_value(self) -> Dict[str, Any]: return { "kind": "sender_notification_permission", "key": self.key, } def matches( self, event: Event, room: MatrixRoom, display_name: str, ) -> bool: return room.power_levels.can_user_notify(event.sender, self.key) @dataclass class PushUnknownCondition(PushCondition): """An unknown kind of push rule condition. Attributes: condition (Dict[str, Any]): The condition as a dict from the source event. 
""" condition: Dict[str, Any] = field() @property def as_value(self) -> Dict[str, Any]: return self.condition @dataclass class PushAction: """An action to apply for a push rule when matching.""" @classmethod def from_dict(cls, action: Union[str, Dict[str, Any]]) -> PushAction: # isinstance() to make mypy happy if isinstance(action, str) and action == "notify": return PushNotify() if isinstance(action, str) and action == "dont_notify": return PushDontNotify() if isinstance(action, str) and action == "coalesce": return PushCoalesce() if isinstance(action, dict) and "set_tweak" in action: value = action.get("value") if action["set_tweak"] == "sound" and value is None: value = "default" if action["set_tweak"] == "highlight" and value is None: value = True return PushSetTweak(action["set_tweak"], value) return PushUnknownAction(action) @property def as_value(self) -> Union[str, Dict[str, Any]]: raise NotImplementedError @dataclass class PushNotify(PushAction): """Cause the matching event to generate a notification.""" @property def as_value(self) -> str: return "notify" @dataclass class PushDontNotify(PushAction): """Prevents the matching event from generating a notification.""" @property def as_value(self) -> str: return "dont_notify" @dataclass class PushCoalesce(PushAction): """Causes multiple matching events to be joined into a single notification. The behavior is homeserver-dependent. Homeservers not supporting this action should treat it as a ``PushNotify`` action. """ @property def as_value(self) -> str: return "coalesce" @dataclass class PushSetTweak(PushAction): """Set a particular tweak for the notification. These tweaks are defined by the Matrix specification: - ``sound``: The sound to be played when the notification arrives, e.g. a file path. A ``value`` of ``"default"`` means to play the client's default sound. A device may choose to alert the user by some other means if appropriate, e.g. vibration. 
- ``highlight``: Whether this message should be highlighted in the UI. This typically takes the form of presenting the message with a different color or style. The UI might also be adjusted to draw particular attention to the room in which the event occurred. Attributes: tweak (str): The name of the tweak to set value (Any): The tweak's value. """ tweak: str = field() value: Any = None @property def as_value(self) -> Dict[str, Any]: return {"set_tweak": self.tweak, "value": self.value} @dataclass class PushUnknownAction(PushAction): """An unknown kind of push rule action. Attributes: action (Union[str, Dict[str, Any]]): The action as a string or dict from the source event. """ action: Union[str, Dict[str, Any]] = field() @property def as_value(self) -> Union[str, Dict[str, Any]]: return self.action @dataclass class PushRule: """Rule stating how to notify the user for events matching some conditions. Attributes: kind (PushRuleKind): The kind of rule this is. id (str): A unique (within its ruleset) string identifying this rule. The ``id`` for default rules set by the server starts with a ``.``. For rules of ``room`` kind, this will be the room ID to match for. For rules of ``sender`` kind, this will be the user ID to match. default (bool): Whether this is a default rule set by the server, or one that the user created explicitly. enabled (bool): Whether this rule is currently enabled, or disabled and to be ignored. pattern (str): Only applies to ``content`` rules. The glob-style pattern to match message text against. conditions (List[PushCondition]): Only applies to ``override`` and ``underride`` rules. The conditions that must be true for an event in order for this rule to be applied to it. A rule with no condition always matches. actions (List[PushAction]): The actions to perform when this rule matches. 
""" kind: PushRuleKind = field() id: str = field() default: bool = field() enabled: bool = True pattern: str = "" conditions: List[PushCondition] = field(default_factory=list) actions: List[PushAction] = field(default_factory=list) def matches( self, event: Event, room: MatrixRoom, display_name: str, ) -> bool: """Return whether this push rule matches a room event. Args: event (Event): The room event to match. room (MatrixRoom): The room that this event is part of. display_name (str): The display name of our own user in the room. """ if not self.enabled: return False conditions = self.conditions if self.kind == PushRuleKind.content: conditions = [PushEventMatch("content.body", self.pattern)] elif self.kind == PushRuleKind.room: conditions = [PushEventMatch("room_id", self.id)] elif self.kind == PushRuleKind.sender: conditions = [PushEventMatch("sender", self.id)] return all(c.matches(event, room, display_name) for c in conditions) @classmethod @verify_or_none(Schemas.push_rule) def from_dict(cls, rule: Dict[str, Any], kind: PushRuleKind) -> PushRule: return cls( kind, rule["rule_id"], rule["default"], rule["enabled"], rule.get("pattern", ""), [PushCondition.from_dict(c) for c in rule.get("conditions", [])], [PushAction.from_dict(a) for a in rule.get("actions", [])], ) @dataclass class PushRuleset: """A set of different kinds of push rules under a same scope. Attributes: override (List[PushRule]): Highest priority rules content (List[PushRule]): Rules that configure behaviors for messages with text matching certain patterns. room (List[PushRule]): Rules that configure behaviors for all messages in a certain room. Their ``id`` is the room's ID. sender (List[PushRule]): Rules that configure behaviors for all messages sent by a specific user. Their ``id`` is the user's ID. underride (List[PushRule]): Identical the ``override`` rules, but have a lower priority than ``content``, ``room`` and ``sender`` rules. 
""" override: List[PushRule] = field(default_factory=list) content: List[PushRule] = field(default_factory=list) room: List[PushRule] = field(default_factory=list) sender: List[PushRule] = field(default_factory=list) underride: List[PushRule] = field(default_factory=list) def matching_rule( self, event: Event, room: MatrixRoom, display_name: str, ) -> Optional[PushRule]: """Return the push rule in this set that matches a room event, if any. Args: event (Event): The room event to match. room (MatrixRoom): The room that this event is part of. display_name (str): The display name of our own user in the room. """ for kind in PushRuleKind: for rule in getattr(self, kind.value): if rule.matches(event, room, display_name): return rule return None @classmethod @verify_or_none(Schemas.push_ruleset) def from_dict(cls, ruleset: Dict[str, Any]) -> PushRuleset: kwargs = {} for kind in PushRuleKind: rules = [ PushRule.from_dict(rule_dict, kind) for rule_dict in ruleset.get(kind.value, []) ] # PushRule.from_dict returns None if the schema verification fails kwargs[kind.value] = [r for r in rules if r] return cls(**kwargs) def __bool__(self) -> bool: return bool( self.override or self.content or self.room or self.sender or self.underride, ) @dataclass class PushRulesEvent(AccountDataEvent): """Configured push rule sets for an account. Each set belongs to a scope. 
Attributes: global_rules (PushRuleset): Rulesets applying to all devices device_rules (PushRuleset): Rulesets applying to current device only """ global_rules: PushRuleset = field(default_factory=PushRuleset) device_rules: PushRuleset = field(default_factory=PushRuleset) @classmethod @verify(Schemas.push_rules) def from_dict(cls, event: Dict[str, Any]) -> PushRulesEvent: content = event["content"] return cls( PushRuleset.from_dict(content.get("global", {})) or PushRuleset(), PushRuleset.from_dict(content.get("device", {})) or PushRuleset(), ) def __bool__(self) -> bool: return bool(self.global_rules or self.device_rules) @dataclass class UnknownAccountDataEvent(AccountDataEvent): """Account data event of an unknown type. Attributes: type (str): The type of the event. content (Dict): The content of the event. """ type: str = field() content: Dict[str, Any] = field() @classmethod def from_dict(cls, event_dict): """Construct an UnknownAccountDataEvent from a dictionary.""" content = event_dict.pop("content") return cls(event_dict["type"], content) matrix-nio-0.24.0/nio/events/common.py000066400000000000000000000112061455215747700176400ustar00rootroot00000000000000# Copyright © 2020 Damir Jelić # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
"""nio common event type mixins This module contains mixin classes for events that can be found in the to-device part of a sync response or in a room timeline of a sync response. """ from dataclasses import dataclass, field from typing import Dict, List @dataclass class KeyVerificationEventMixin: """Base class for key verification events. Attributes: transaction_id (str): An opaque identifier for the verification process. Must be unique with respect to the devices involved. """ transaction_id: str = field() @dataclass class KeyVerificationStartMixin: """Event signaling the start of a SAS key verification process. Attributes: from_device (str): The device ID which is initiating the process. method (str): The verification method to use. key_agreement_protocols (list): A list of strings specifying the key agreement protocols the sending device understands. hashes (list): A list of strings specifying the hash methods the sending device understands. message_authentication_codes (list): A list of strings specifying the message authentication codes that the sending device understands. short_authentication_string (list): A list of strings specifying the SAS methods the sending device (and the sending device's user) understands. """ from_device: str = field() method: str = field() key_agreement_protocols: List[str] = field() hashes: List[str] = field() message_authentication_codes: List[str] = field() short_authentication_string: List[str] = field() @dataclass class KeyVerificationAcceptMixin: """Event signaling that the SAS verification start has been accepted. Attributes: commitment (str): The commitment value of the verification process. key_agreement_protocol (str): The key agreement protocol the device is choosing to use hash (str): A list of strings specifying the hash methods the sending device understands. message_authentication_code (str): The message authentication code the device is choosing to use. 
short_authentication_string (list): A list of strings specifying the SAS methods that can be used in the verification process. """ commitment: str = field() key_agreement_protocol: str = field() hash: str = field() message_authentication_code: str = field() short_authentication_string: List[str] = field() @dataclass class KeyVerificationKeyMixin: """Event carrying a key verification key. After this event is received the short authentication string can be shown to the user. Attributes: key (str): The device's ephemeral public key, encoded as unpadded base64. """ key: str = field() @dataclass class KeyVerificationMacMixin: """Event holding a message authentication code of the verification process. After this event is received the device that we are verifying will be marked as verified given that we have accepted the short authentication string as well. Attributes: mac (dict): A map of the key ID to the MAC of the key, using the algorithm in the verification process. The MAC is encoded as unpadded base64. keys (str): The MAC of the comma-separated, sorted, list of key IDs given in the mac property, encoded as unpadded base64. """ mac: Dict[str, str] = field() keys: str = field() @dataclass class KeyVerificationCancelMixin: """Event signaling that a key verification process has been canceled. Attributes: code (str): The error code for why the process/request was canceled by the user. reason (str): A human readable description of the cancellation code. """ code: str = field() reason: str = field() matrix-nio-0.24.0/nio/events/ephemeral.py000066400000000000000000000110221455215747700203060ustar00rootroot00000000000000# Copyright © 2019 Damir Jelić # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. 
# # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. """nio Ephemeral events. Ephemeral events are a special type of events that are not recorded in the room history. Ephemeral events are used for typing notifications and read receipts. """ from __future__ import annotations from dataclasses import dataclass, field from typing import List from ..schemas import Schemas from .misc import verify_or_none @dataclass class EphemeralEvent: """Base class for ephemeral events.""" @classmethod @verify_or_none(Schemas.ephemeral_event) def parse_event(cls, event_dict): """Parse an ephemeral event and create a higher level event object. This function parses the type of the ephemeral event and produces a higher level event object representing the parsed event. The event structure is checked for correctness and the event fields are type-checked. If this validation process fails for an event None will be returned. If the event has an unknown type None is returned as well. Args: event_dict (dict): The dictionary representation of the event. """ if event_dict["type"] == "m.typing": return TypingNoticeEvent.from_dict(event_dict) if event_dict["type"] == "m.receipt": return ReceiptEvent.from_dict(event_dict) return None @classmethod def from_dict(cls, parsed_dict): """Create an Ephemeral event from a dictionary. Args: parsed_dict (dict): The dictionary representation of the event. """ raise NotImplementedError @dataclass class TypingNoticeEvent(EphemeralEvent): """Informs the client of the list of users currently typing in a room. 
Attributes: users (List): The list of user IDs typing in this room, if any. """ users: List = field() @classmethod @verify_or_none(Schemas.m_typing) def from_dict(cls, parsed_dict): return cls(parsed_dict["content"]["user_ids"]) @dataclass class Receipt: """Receipt of a user acknowledging an event. If `receipt_type` is "m.read", then it is a read receipt and shows the last event that a user has read. Attributes: event_id (str): the ID of the event being acknowledged receipt_type (str): the type of receipt being received; this is commonly "m.read" for read receipts. user_id (str): the ID of the user who is acknowledging the event. timestamp (int): The timestamp the receipt was sent at. """ event_id: str = field() receipt_type: str = field() user_id: str = field() timestamp: int = field() @dataclass class ReceiptEvent(EphemeralEvent): """Informs the client of changes in the newest events seen by users. A ReceiptEvent can contain multiple event_ids seen by many different users. At the time of writing, all Receipts have a `receipt_type` of "m.read" and are read receipts, but this may change in the future. Attributes: receipts (List[Receipt]): The list of `Receipt`s in this event. """ receipts: List[Receipt] = field() @classmethod @verify_or_none(Schemas.m_receipt) def from_dict(cls, parsed_dict) -> ReceiptEvent: event_receipts: List[Receipt] = [] for event_id, event in parsed_dict["content"].items(): for receipt_type, receipt in event.items(): for user_id, user in receipt.items(): # Synapse pre-0.99.3 has a bug where it sends invalid # ts values. 
https://github.com/matrix-org/synapse/issues/4898 if isinstance(user, dict) and "ts" in user: event_receipts.append( Receipt(event_id, receipt_type, user_id, user["ts"]) ) return cls(event_receipts) matrix-nio-0.24.0/nio/events/invite_events.py000066400000000000000000000147351455215747700212440ustar00rootroot00000000000000# Copyright © 2018-2019 Damir Jelić # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. """Matrix Invite Events. Events for invited rooms will have a stripped down version of their counterparts for joined rooms. Such events will be missing the event id and origin server timestamp. Since all of the events in an invited room will be state events they will never be encrypted. These events help set up the state of an invited room so more information can be displayed to users if they are invited to a room. """ from __future__ import annotations from dataclasses import dataclass, field from typing import Any, Dict, Optional, Union from ..schemas import Schemas from .misc import BadEventType, verify, verify_or_none @dataclass class InviteEvent: """Matrix Event class for events in invited rooms. Events for invited rooms will have a stripped down version of their counterparts for joined rooms. Such events will be missing the event id and origin server timestamp. 
Since all of the events in an invited room will be state events they will never be encrypted. Attributes: source (dict): The source dictionary of the event. This allows access to all the event fields in a non-secure way. sender (str): The fully-qualified ID of the user who sent this event. """ source: Dict = field() sender: str = field() @classmethod @verify_or_none(Schemas.invite_event) def parse_event( cls, event_dict: Dict[Any, Any] ) -> Optional[Union[InviteEvent, BadEventType]]: """Parse a Matrix invite event and create a higher level event object. This function parses the type of the Matrix event and produces a higher level event object representing the parsed event. The event structure is checked for correctness and the event fields are type-checked. If this validation process fails for an event None will be returned. Args: event_dict (dict): The dictionary representation of the event. """ if "unsigned" in event_dict: if "redacted_because" in event_dict["unsigned"]: return None if event_dict["type"] == "m.room.member": return InviteMemberEvent.from_dict(event_dict) elif event_dict["type"] == "m.room.canonical_alias": return InviteAliasEvent.from_dict(event_dict) elif event_dict["type"] == "m.room.name": return InviteNameEvent.from_dict(event_dict) return None @classmethod def from_dict(cls, parsed_dict): """Create an InviteEvent from a dictionary. Args: parsed_dict (dict): The dictionary representation of the event. """ raise NotImplementedError @dataclass class InviteMemberEvent(InviteEvent): """Class representing to an m.room.member event in an invited room. Attributes: state_key (str): The user_id this membership event relates to. In all cases except for when membership is join, the user ID in the sender attribute does not need to match the user ID in the state_key. membership (str): The membership state of the user. One of "invite", "join", "leave", "ban". prev_membership (str, optional): The previous membership state that this one is overwriting. 
Can be None in which case the membership state is assumed to have been "leave". content (dict): The content of the of the membership event. prev_content(dict, optional): The content of a previous membership event that this one is overwriting. """ state_key: str = field() membership: str = field() prev_membership: str = field() content: dict = field() prev_content: dict = field(default_factory=dict) @classmethod @verify(Schemas.room_membership) def from_dict( cls, parsed_dict: Dict[Any, Any] ) -> Union[InviteMemberEvent, BadEventType]: content = parsed_dict.pop("content") unsigned = parsed_dict.get("unsigned", {}) prev_content = unsigned.get("prev_content", None) membership = content["membership"] prev_membership = prev_content.get("membership") if prev_content else None return cls( parsed_dict, parsed_dict["sender"], parsed_dict["state_key"], membership, prev_membership, content, prev_content, ) @dataclass class InviteAliasEvent(InviteEvent): """An event informing us about which alias should be preferred. This is the RoomAliasEvent equivalent for invited rooms. Attributes: canonical_alias (str): The alias that is considered canonical. """ canonical_alias: str = field() @classmethod @verify(Schemas.room_canonical_alias) def from_dict( cls, parsed_dict: Dict[Any, Any] ) -> Union[InviteAliasEvent, BadEventType]: sender = parsed_dict["sender"] canonical_alias = parsed_dict["content"].get("alias") return cls(parsed_dict, sender, canonical_alias) @dataclass class InviteNameEvent(InviteEvent): """Event holding the name of the invited room. This is the RoomNameEvent equivalent for invited rooms. The room name is a human-friendly string designed to be displayed to the end-user. The room name is not unique, as multiple rooms can have the same room name set. Attributes: name (str): The name of the room. 
""" name: str = field() @classmethod @verify(Schemas.room_name) def from_dict( cls, parsed_dict: Dict[Any, Any] ) -> Union[InviteNameEvent, BadEventType]: sender = parsed_dict["sender"] canonical_alias = parsed_dict["content"]["name"] return cls(parsed_dict, sender, canonical_alias) matrix-nio-0.24.0/nio/events/misc.py000066400000000000000000000143561455215747700173140ustar00rootroot00000000000000from __future__ import annotations import logging from dataclasses import dataclass, field from functools import wraps from typing import Any, Dict, Optional, Union from jsonschema.exceptions import SchemaError, ValidationError from ..schemas import validate_json # Copyright © 2018-2019 Damir Jelić # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
logger = logging.getLogger(__name__) def validate_or_badevent( parsed_dict: Dict[Any, Any], schema: Dict[Any, Any], ) -> Optional[Union[BadEvent, UnknownBadEvent]]: try: validate_json(parsed_dict, schema) except (ValidationError, SchemaError) as e: logger.warning(f"Error validating event: {str(e)}") try: return BadEvent.from_dict(parsed_dict) except KeyError: return UnknownBadEvent(parsed_dict) return None def verify(schema): def decorator(f): @wraps(f) def wrapper(*args, **kwargs): event_dict = args[1] bad = validate_or_badevent(event_dict, schema) if bad: return bad return f(*args, **kwargs) return wrapper return decorator def verify_or_none(schema): def decorator(f): @wraps(f) def wrapper(*args, **kwargs): event_dict = args[1] try: validate_json(event_dict, schema) except (ValidationError, SchemaError) as e: logger.error(f"Error validating event: {str(e)}") return None return f(*args, **kwargs) return wrapper return decorator @dataclass class UnknownBadEvent: """An event that doesn't have the minimal necessary structure. This type of event will be created if we can't find the event_id, sender, origin server timestamp or event type. The event can still be inspected with the source attribute. Attributes: source (dict): The source dictionary of the event. This allows access to all the event fields in a non-secure way. decrypted (bool): A flag signaling if the event was decrypted. verified (bool): A flag signaling if the event is verified, is True if the event was sent from a verified device. sender_key (str, optional): The public key of the sender that was used to establish the encrypted session. Is only set if decrypted is True, otherwise None. session_id (str, optional): The unique identifier of the session that was used to decrypt the message. Is only set if decrypted is True, otherwise None. transaction_id (str, optional): The unique identifier that was used when the message was sent. Is only set if the message was sent from our own device, otherwise None. 
""" source: Dict[str, Any] = field() transaction_id: Optional[str] = None decrypted: bool = field(default=False, init=False) verified: bool = field(default=False, init=False) sender_key: Optional[str] = field(default=None, init=False) session_id: Optional[str] = field(default=None, init=False) @dataclass class BadEvent: """An event that failed event schema and type validation. This type of event will be created if the event has a valid core structure but failed validation for the given event type. The event can still be inspected with the source attribute. Attributes: source (dict): The source dictionary of the event. This allows access to all the event fields in a non-secure way. event_id (str): A globally unique event identifier. sender (str): The fully-qualified ID of the user who sent this event. server_timestamp (int): Timestamp in milliseconds on originating homeserver when this event was sent. type (str): The claimed type of the event. decrypted (bool): A flag signaling if the event was decrypted. verified (bool): A flag signaling if the event is verified, is True if the event was sent from a verified device. sender_key (str, optional): The public key of the sender that was used to establish the encrypted session. Is only set if decrypted is True, otherwise None. session_id (str, optional): The unique identifier of the session that was used to decrypt the message. Is only set if decrypted is True, otherwise None. transaction_id (str, optional): The unique identifier that was used when the message was sent. Is only set if the message was sent from our own device, otherwise None. 
""" source: Dict[str, Any] = field() event_id: str = field() sender: str = field() server_timestamp: int = field() type: str = field() decrypted: bool = field(default=False, init=False) verified: bool = field(default=False, init=False) sender_key: Optional[str] = field(default=None, init=False) session_id: Optional[str] = field(default=None, init=False) transaction_id: Optional[str] = field(default=None, init=False) def __str__(self): return f"Bad event of type {self.type}, from {self.sender}." @classmethod def from_dict(cls, parsed_dict: Dict[Any, Any]) -> BadEvent: timestamp = parsed_dict["origin_server_ts"] timestamp = timestamp if timestamp > 0 else 0 return cls( parsed_dict, parsed_dict["event_id"], parsed_dict["sender"], timestamp, parsed_dict["type"], ) BadEventType = Union[BadEvent, UnknownBadEvent] matrix-nio-0.24.0/nio/events/presence.py000066400000000000000000000037711455215747700201640ustar00rootroot00000000000000import logging from dataclasses import dataclass, field from typing import Optional from ..schemas import Schemas from .misc import verify # Copyright © 2018-2019 Damir Jelić # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
logger = logging.getLogger(__name__) @dataclass class PresenceEvent: """Informs the client of a user's presence state change.""" user_id: str = field() presence: str = field() last_active_ago: Optional[int] = None currently_active: Optional[bool] = None status_msg: Optional[str] = None @classmethod @verify(Schemas.presence) def from_dict(cls, parsed_dict): """Create an Presence event from a dictionary. Args: parsed_dict (dict): The dictionary representation of the event. """ args = { "user_id": parsed_dict["sender"], "presence": parsed_dict["content"]["presence"], } if "last_active_ago" in parsed_dict["content"]: args["last_active_ago"] = parsed_dict["content"]["last_active_ago"] if "currently_active" in parsed_dict["content"]: args["currently_active"] = parsed_dict["content"]["currently_active"] if "status_msg" in parsed_dict["content"]: args["status_msg"] = parsed_dict["content"]["status_msg"] return cls(**args) matrix-nio-0.24.0/nio/events/room_events.py000066400000000000000000001503421455215747700207150ustar00rootroot00000000000000# Copyright © 2018-2019 Damir Jelić # Copyright © 2021 Famedly GmbH # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
from __future__ import annotations import time from dataclasses import dataclass, field from typing import Any, Dict, List, Optional, Union from ..event_builders import RoomKeyRequestMessage from ..schemas import Schemas from .misc import BadEvent, BadEventType, UnknownBadEvent, validate_or_badevent, verify @dataclass class Event: """Matrix Event class. This is the base event class, most events inherit from this class. Attributes: source (dict): The source dictionary of the event. This allows access to all the event fields in a non-secure way. event_id (str): A globally unique event identifier. sender (str): The fully-qualified ID of the user who sent this event. server_timestamp (int): Timestamp in milliseconds on originating homeserver when this event was sent. decrypted (bool): A flag signaling if the event was decrypted. verified (bool): A flag signaling if the event is verified, is True if the event was sent from a verified device. sender_key (str, optional): The public key of the sender that was used to establish the encrypted session. Is only set if decrypted is True, otherwise None. session_id (str, optional): The unique identifier of the session that was used to decrypt the message. Is only set if decrypted is True, otherwise None. transaction_id (str, optional): The unique identifier that was used when the message was sent. Is only set if the message was sent from our own device, otherwise None. 
""" source: Dict[str, Any] = field() event_id: str = field(init=False) sender: str = field(init=False) server_timestamp: int = field(init=False) decrypted: bool = field(default=False, init=False) verified: bool = field(default=False, init=False) sender_key: Optional[str] = field(default=None, init=False) session_id: Optional[str] = field(default=None, init=False) transaction_id: Optional[str] = field(default=None, init=False) def __post_init__(self): self.event_id = self.source["event_id"] self.sender = self.source["sender"] self.server_timestamp = self.source["origin_server_ts"] def flattened( self, _prefix: str = "", _source: Optional[Dict[str, Any]] = None, _flat: Optional[Dict[str, Any]] = None, ) -> Dict[str, Any]: """Return a flattened version of the ``source`` dict with dotted keys. Example: >>> event.source {"content": {"body": "foo"}, "m.test": {"key": "bar"}} >>> event.source.flattened() {"content.body": "foo", "m.test.key": "bar"} """ source = self.source if _source is None else _source flat = {} if _flat is None else _flat for key, value in source.items(): if isinstance(value, dict): self.flattened(f"{_prefix}{key}.", value, flat) else: flat[f"{_prefix}{key}"] = value return flat @classmethod def from_dict(cls, parsed_dict: Dict[Any, Any]) -> Union[Event, BadEventType]: """Create an Event from a dictionary. Args: parsed_dict (dict): The dictionary representation of the event. """ return cls(parsed_dict) @classmethod @verify(Schemas.room_event) def parse_event(cls, event_dict: Dict[Any, Any]) -> Union[Event, BadEventType]: """Parse a Matrix event and create a higher level event object. This function parses the type of the Matrix event and produces a higher level event object representing the parsed event. The event structure is checked for correctness and the event fields are type-checked. If this validation process fails for an event an BadEvent will be produced. If the type of the event is now known an UnknownEvent will be produced. 
Args: event_dict (dict): The dictionary representation of the event. """ if "unsigned" in event_dict: if "redacted_because" in event_dict["unsigned"]: return RedactedEvent.from_dict(event_dict) if event_dict["type"] == "m.room.message": return RoomMessage.parse_event(event_dict) elif event_dict["type"] == "m.room.create": return RoomCreateEvent.from_dict(event_dict) elif event_dict["type"] == "m.room.guest_access": return RoomGuestAccessEvent.from_dict(event_dict) elif event_dict["type"] == "m.room.join_rules": return RoomJoinRulesEvent.from_dict(event_dict) elif event_dict["type"] == "m.room.history_visibility": return RoomHistoryVisibilityEvent.from_dict(event_dict) elif event_dict["type"] == "m.room.member": return RoomMemberEvent.from_dict(event_dict) elif event_dict["type"] == "m.room.canonical_alias": return RoomAliasEvent.from_dict(event_dict) elif event_dict["type"] == "m.room.name": return RoomNameEvent.from_dict(event_dict) elif event_dict["type"] == "m.room.topic": return RoomTopicEvent.from_dict(event_dict) elif event_dict["type"] == "m.room.avatar": return RoomAvatarEvent.from_dict(event_dict) elif event_dict["type"] == "m.room.power_levels": return PowerLevelsEvent.from_dict(event_dict) elif event_dict["type"] == "m.room.encryption": return RoomEncryptionEvent.from_dict(event_dict) elif event_dict["type"] == "m.room.redaction": return RedactionEvent.from_dict(event_dict) elif event_dict["type"] == "m.room.tombstone": return RoomUpgradeEvent.from_dict(event_dict) elif event_dict["type"] == "m.space.parent": return RoomSpaceParentEvent.from_dict(event_dict) elif event_dict["type"] == "m.space.child": return RoomSpaceChildEvent.from_dict(event_dict) elif event_dict["type"] == "m.room.encrypted": return Event.parse_encrypted_event(event_dict) elif event_dict["type"] == "m.sticker": return StickerEvent.from_dict(event_dict) elif event_dict["type"] == "m.reaction": return ReactionEvent.from_dict(event_dict) elif event_dict["type"].startswith("m.call"): 
return CallEvent.parse_event(event_dict) return UnknownEvent.from_dict(event_dict) @classmethod @verify(Schemas.room_encrypted) def parse_encrypted_event(cls, event_dict): """Parse an encrypted event. Encrypted events may have different fields depending on the algorithm that was used to encrypt them. This function checks the algorithm of the event and produces a higher level event from the provided dictionary. Args: event_dict (dict): The dictionary representation of the encrypted event. Returns None if the algorithm of the event is unknown. """ content = event_dict["content"] if content["algorithm"] == "m.megolm.v1.aes-sha2": return MegolmEvent.from_dict(event_dict) return UnknownEncryptedEvent.from_dict(event_dict) @classmethod def parse_decrypted_event( cls, event_dict: Dict[Any, Any] ) -> Union[Event, BadEventType]: """Parse a decrypted event and create a higher level event object. Args: event_dict (dict): The dictionary representation of the event. """ if "unsigned" in event_dict: if "redacted_because" in event_dict["unsigned"]: return RedactedEvent.from_dict(event_dict) # Events shouldn't be encrypted twice, this would lead to a loop in the # parser path. if event_dict["type"] == "m.room.encrypted": try: return BadEvent.from_dict(event_dict) except KeyError: return UnknownBadEvent(event_dict) if event_dict["type"] == "m.room.message": return RoomMessage.parse_decrypted_event(event_dict) return Event.parse_event(event_dict) @dataclass class UnknownEvent(Event): """An Event which we do not understand. This event is created every time nio tries to parse an event of an unknown type. Since custom and extensible events are a feature of Matrix this allows clients to use custom events but care should be taken that the clients will be responsible to validate and type check the event. Attributes: type (str): The type of the event. 
""" type: str = field() @classmethod def from_dict(cls, event_dict): return cls( event_dict, event_dict["type"], ) @dataclass class UnknownEncryptedEvent(Event): """An encrypted event which we don't know how to decrypt. This event is created every time nio tries to parse an event encrypted event that was encrypted using an unknown algorithm. Attributes: type (str): The type of the event. algorithm (str): The algorithm of the event. """ type: str = field() algorithm: str = field() @classmethod def from_dict(cls, event_dict): return cls( event_dict, event_dict["type"], event_dict["content"]["algorithm"], ) @dataclass class MegolmEvent(Event): """An undecrypted Megolm event. MegolmEvents are presented to library users only if the library fails to decrypt the event because of a missing session key. MegolmEvents can be stored for later use. If a RoomKeyEvent is later on received with a session id that matches the session_id of this event decryption can be retried. Attributes: event_id (str): A globally unique event identifier. sender (str): The fully-qualified ID of the user who sent this event. server_timestamp (int): Timestamp in milliseconds on originating homeserver when this event was sent. sender_key (str): The public key of the sender that was used to establish the encrypted session. Is only set if decrypted is True, otherwise None. device_id (str): The unique identifier of the device that was used to encrypt the event. session_id (str): The unique identifier of the session that was used to encrypt the message. ciphertext (str): The undecrypted ciphertext of the event. algorithm (str): The encryption algorithm that was used to encrypt the message. room_id (str): The unique identifier of the room in which the message was sent. transaction_id (str, optional): The unique identifier that was used when the message was sent. Is only set if the message was sent from our own device, otherwise None. 
""" device_id: str = field() ciphertext: str = field() algorithm: str = field() room_id: str = "" @classmethod @verify(Schemas.room_megolm_encrypted) def from_dict(cls, event_dict): """Create a MegolmEvent from a dictionary. Args: event_dict (Dict): Dictionary containing the event. Returns a MegolmEvent if the event_dict contains a valid event or a BadEvent if it's invalid. """ content = event_dict["content"] ciphertext = content["ciphertext"] sender_key = content["sender_key"] session_id = content["session_id"] device_id = content["device_id"] algorithm = content["algorithm"] room_id = event_dict.get("room_id", None) tx_id = ( event_dict["unsigned"].get("transaction_id", None) if "unsigned" in event_dict else None ) event = cls( event_dict, device_id, ciphertext, algorithm, room_id, ) event.sender_key = sender_key event.session_id = session_id event.transaction_id = tx_id return event def as_key_request( self, user_id: str, requesting_device_id: str, request_id: Optional[str] = None, device_id: Optional[str] = None, ) -> RoomKeyRequestMessage: """Make a to-device message for a room key request. MegolmEvents are presented to library users only if the library fails to decrypt the event because of a missing session key. A missing key can be requested later on by sending a key request, this method creates a ToDeviceMessage that can be sent out if such a request should be made. Args: user_id (str): The user id of the user that should receive the key request. requesting_device_id (str): The device id of the user that is requesting the key. request_id (str, optional): A unique string identifying the request. Defaults to the session id of the missing megolm session. device_id (str, optional): The device id of the device that should receive the request. Defaults to all the users devices. 
""" assert self.session_id request_id = request_id or self.session_id content = { "action": "request", "body": { "algorithm": self.algorithm, "session_id": self.session_id, "room_id": self.room_id, "sender_key": self.sender_key, }, "request_id": request_id, "requesting_device_id": requesting_device_id, } return RoomKeyRequestMessage( "m.room_key_request", user_id, device_id or "*", content, request_id, self.session_id, self.room_id, self.algorithm, ) @dataclass class CallEvent(Event): """Base Class for Matrix call signalling events. Attributes: call_id (str): The unique identifier of the call. version (int): The version of the VoIP specification this message adheres to. """ call_id: str = field() version: int = field() @staticmethod def parse_event(event_dict): """Parse a Matrix event and create a higher level event object. This function parses the type of the Matrix event and produces a higher level CallEvent object representing the parsed event. The event structure is checked for correctness and the event fields are type checked. If this validation process fails for an event an BadEvent will be produced. If the type of the event is now known an UnknownEvent will be produced. Args: event_dict (dict): The raw matrix event dictionary. """ if event_dict["type"] == "m.call.candidates": event = CallCandidatesEvent.from_dict(event_dict) elif event_dict["type"] == "m.call.invite": event = CallInviteEvent.from_dict(event_dict) elif event_dict["type"] == "m.call.answer": event = CallAnswerEvent.from_dict(event_dict) elif event_dict["type"] == "m.call.hangup": event = CallHangupEvent.from_dict(event_dict) else: event = UnknownEvent.from_dict(event_dict) return event @dataclass class CallCandidatesEvent(CallEvent): """Call event holding additional VoIP ICE candidates. This event is sent by callers after sending an invite and by the callee after answering. Its purpose is to give the other party additional ICE candidates to try using to communicate. 
Args: candidates (list): A list of dictionaries describing the candidates. """ candidates: List[Dict[str, Any]] = field() @classmethod @verify(Schemas.call_candidates) def from_dict(cls, event_dict): content = event_dict.get("content", {}) return cls( event_dict, content["call_id"], content["version"], content["candidates"], ) @dataclass class CallInviteEvent(CallEvent): """Event representing an invitation to a VoIP call. This event is sent by a caller when they wish to establish a call. Attributes: lifetime (integer): The time in milliseconds that the invite is valid for. offer (dict): The session description object. A dictionary containing the keys "type" which must be "offer" for this event and "sdp" which contains the SDP text of the session description. """ lifetime: int = field() offer: Dict[str, Any] = field() @property def expired(self): """Property marking if the invite event expired.""" now = time.time() return now - (self.server_timestamp / 1000) > (self.lifetime / 1000) @classmethod @verify(Schemas.call_invite) def from_dict(cls, event_dict): content = event_dict.get("content", {}) return cls( event_dict, content["call_id"], content["version"], content["lifetime"], content["offer"], ) @dataclass class CallAnswerEvent(CallEvent): """Event representing the answer to a VoIP call. This event is sent by the callee when they wish to answer the call. Attributes: answer (dict): The session description object. A dictionary containing the keys "type" which must be "answer" for this event and "sdp" which contains the SDP text of the session description. """ answer: Dict[str, Any] = field() @classmethod @verify(Schemas.call_answer) def from_dict(cls, event_dict): content = event_dict.get("content", {}) return cls( event_dict, content["call_id"], content["version"], content["answer"], ) @dataclass class CallHangupEvent(CallEvent): """An event representing the end of a VoIP call. Sent by either party to signal their termination of the call. 
This can be sent either once the call has has been established or before to abort the call. """ @classmethod @verify(Schemas.call_hangup) def from_dict(cls, event_dict): content = event_dict.get("content", {}) return cls( event_dict, content["call_id"], content["version"], ) @dataclass class RedactedEvent(Event): """An event that has been redacted. Attributes: type (str): The type of the event that has been redacted. redacter (str): The fully-qualified ID of the user who redacted the event. reason (str, optional): A string describing why the event was redacted, can be None. """ type: str = field() redacter: str = field() reason: Optional[str] = field() def __str__(self): reason = f", reason: {self.reason}" if self.reason else "" return f"Redacted event of type {self.type}, by {self.redacter}{reason}." @property def event_type(self): """Type of the event.""" return self.type @classmethod @verify(Schemas.redacted_event) def from_dict( cls, parsed_dict: Dict[Any, Any] ) -> Union[RedactedEvent, BadEventType]: redacter = parsed_dict["unsigned"]["redacted_because"]["sender"] content_dict = parsed_dict["unsigned"]["redacted_because"]["content"] reason = content_dict.get("reason", None) return cls( parsed_dict, parsed_dict["type"], redacter, reason, ) @dataclass class RoomEncryptionEvent(Event): """An event signaling that encryption has been enabled in a room.""" @classmethod @verify(Schemas.room_encryption) def from_dict(cls, parsed_dict): return cls(parsed_dict) @dataclass class RoomCreateEvent(Event): """The first event in a room, signaling that the room was created. Attributes: creator (str): The fully-qualified ID of the user who created the room. federate (bool): A boolean flag telling us whether users on other homeservers are able to join this room. room_version (str): The version of the room. Different room versions will have different event formats. Clients shouldn't worry about this too much unless they want to perform room upgrades. 
room_type (str): The type of the room. In spec v1.2 the following room types are specified: - `m.space` Unspecified room types are permitted through the use of Namespaced Identifiers. """ creator: str = field() federate: bool = True room_version: str = "1" room_type: str = "" @classmethod @verify(Schemas.room_create) def from_dict( cls, parsed_dict: Dict[Any, Any] ) -> Union[RoomCreateEvent, BadEventType]: creator = parsed_dict["content"]["creator"] federate = parsed_dict["content"]["m.federate"] version = parsed_dict["content"]["room_version"] if "type" in parsed_dict["content"]: room_type = parsed_dict["content"]["type"] return cls(parsed_dict, creator, federate, version, room_type) @dataclass class RoomGuestAccessEvent(Event): """Event signaling whether guest users are allowed to join rooms. Attributes: guest_access (str): A string describing the guest access policy of the room. Can be one of "can_join" or "forbidden". """ guest_access: str = "forbidden" @classmethod @verify(Schemas.room_guest_access) def from_dict( cls, parsed_dict: Dict[Any, Any] ) -> Union[RoomGuestAccessEvent, BadEventType]: guest_access = parsed_dict["content"]["guest_access"] return cls(parsed_dict, guest_access) @dataclass class RoomJoinRulesEvent(Event): """An event telling us how users can join the room. Attributes: join_rule (str): A string telling us how users may join the room, can be one of "public" meaning anyone can join the room without any restrictions or "invite" meaning users can only join if they have been previously invited. """ join_rule: str = "invite" @classmethod @verify(Schemas.room_join_rules) def from_dict( cls, parsed_dict: Dict[Any, Any] ) -> Union[RoomJoinRulesEvent, BadEventType]: join_rule = parsed_dict["content"]["join_rule"] return cls(parsed_dict, join_rule) @dataclass class RoomHistoryVisibilityEvent(Event): """An event telling whether users can read the room history. 
Room history visibility can be set up in multiple ways in Matrix: * world_readable All events value may be shared by any participating homeserver with anyone, regardless of whether they have ever joined the room. * shared Previous events are always accessible to newly joined members. All events in the room are accessible, even those sent when the member was not a part of the room. * invited Events are accessible to newly joined members from the point they were invited onwards. Events stop being accessible when the member's state changes to something other than invite or join. * joined Events are only accessible to members from the point on they joined to the room and stop being accessible when they aren't joined anymore. Attributes: history_visibility (str): A string describing who can read the room history. One of "invited", "joined", "shared", "world_readable". """ history_visibility: str = "shared" @classmethod @verify(Schemas.room_history_visibility) def from_dict( cls, parsed_dict: Dict[Any, Any], ) -> Union[RoomHistoryVisibilityEvent, BadEventType]: history_visibility = parsed_dict["content"]["history_visibility"] return cls(parsed_dict, history_visibility) @dataclass class RoomAliasEvent(Event): """An event informing us about which alias should be preferred. Attributes: canonical_alias (str): The alias that is considered canonical. """ canonical_alias: Optional[str] = field() @classmethod @verify(Schemas.room_canonical_alias) def from_dict( cls, parsed_dict: Dict[Any, Any] ) -> Union[RoomAliasEvent, BadEventType]: canonical_alias = parsed_dict["content"].get("alias") return cls(parsed_dict, canonical_alias) @dataclass class RoomNameEvent(Event): """Event holding the name of the room. The room name is a human-friendly string designed to be displayed to the end-user. The room name is not unique, as multiple rooms can have the same room name set. Attributes: name (str): The name of the room. 
""" name: str = field() @classmethod @verify(Schemas.room_name) def from_dict( cls, parsed_dict: Dict[Any, Any] ) -> Union[RoomNameEvent, BadEventType]: room_name = parsed_dict["content"]["name"] return cls(parsed_dict, room_name) @dataclass class RoomTopicEvent(Event): """Event holding the topic of a room. A topic is a short message detailing what is currently being discussed in the room. It can also be used as a way to display extra information about the room, which may not be suitable for the room name. Attributes: topic (str): The topic of the room. """ topic: str = field() @classmethod @verify(Schemas.room_topic) def from_dict( cls, parsed_dict: Dict[Any, Any] ) -> Union[RoomTopicEvent, BadEventType]: canonical_alias = parsed_dict["content"]["topic"] return cls(parsed_dict, canonical_alias) @dataclass class RoomAvatarEvent(Event): """Event holding a picture that is associated with the room. Attributes: avatar_url (str): The URL to the picture. """ avatar_url: Optional[str] = field() @classmethod @verify(Schemas.room_avatar) def from_dict( cls, parsed_dict: Dict[Any, Any] ) -> Union[RoomAvatarEvent, BadEventType]: room_avatar_url = parsed_dict["content"].get("url") return cls(parsed_dict, room_avatar_url) @dataclass class RoomSpaceParentEvent(Event): """Event holding the parent space of a room. Attributes: state_key (str): The parent space's room """ state_key: str = field() canonical: bool = False @classmethod @verify(Schemas.room_space_parent) def from_dict(cls, parsed_dict): content_dict = parsed_dict["content"] return cls( parsed_dict, parsed_dict["state_key"], content_dict.get("canonical", False) ) @dataclass class RoomSpaceChildEvent(Event): """Event holding the child rooms of a space. 
Attributes: state_key (str): The child room of a space """ state_key: str = field() suggested: bool = False @classmethod @verify(Schemas.room_space_child) def from_dict(cls, parsed_dict): content_dict = parsed_dict["content"] return cls( parsed_dict, parsed_dict["state_key"], content_dict.get("suggested", False) ) @dataclass class RoomMessage(Event): """Abstract room message class. This class corespondents to a Matrix event of the m.room.message type. It is used when messages are sent to the room. The class has one child class per msgtype. """ @classmethod @verify(Schemas.room_message) def parse_event( cls, parsed_dict: Dict[Any, Any] ) -> Union[RoomMessage, BadEventType]: content_dict = parsed_dict["content"] if content_dict["msgtype"] == "m.text": event = RoomMessageText.from_dict(parsed_dict) elif content_dict["msgtype"] == "m.emote": event = RoomMessageEmote.from_dict(parsed_dict) elif content_dict["msgtype"] == "m.notice": event = RoomMessageNotice.from_dict(parsed_dict) elif content_dict["msgtype"] == "m.image": event = RoomMessageImage.from_dict(parsed_dict) elif content_dict["msgtype"] == "m.audio": event = RoomMessageAudio.from_dict(parsed_dict) elif content_dict["msgtype"] == "m.video": event = RoomMessageVideo.from_dict(parsed_dict) elif content_dict["msgtype"] == "m.file": event = RoomMessageFile.from_dict(parsed_dict) else: event = RoomMessageUnknown.from_dict(parsed_dict) if "unsigned" in parsed_dict: txn_id = parsed_dict["unsigned"].get("transaction_id", None) event.transaction_id = txn_id return event @classmethod @verify(Schemas.room_message) def parse_decrypted_event( cls, parsed_dict: Dict[Any, Any] ) -> Union[RoomMessage, BadEventType]: msgtype = parsed_dict["content"]["msgtype"] if msgtype == "m.image": event = RoomEncryptedImage.from_dict(parsed_dict) elif msgtype == "m.audio": event = RoomEncryptedAudio.from_dict(parsed_dict) elif msgtype == "m.video": event = RoomEncryptedVideo.from_dict(parsed_dict) elif msgtype == "m.file": event = 
RoomEncryptedFile.from_dict(parsed_dict) else: event = RoomMessage.parse_event(parsed_dict) if "unsigned" in parsed_dict: txn_id = parsed_dict["unsigned"].get("transaction_id", None) event.transaction_id = txn_id return event @dataclass class RoomMessageMedia(RoomMessage): """Base class for room messages containing a URI. Attributes: url (str): The URL of the file. body (str): The description of the message. """ url: str = field() body: str = field() @classmethod @verify(Schemas.room_message_media) def from_dict(cls, parsed_dict): return cls( parsed_dict, parsed_dict["content"]["url"], parsed_dict["content"]["body"], ) @dataclass class RoomEncryptedMedia(RoomMessage): """Base class for encrypted room messages containing an URI. Attributes: url (str): The URL of the file. body (str): The description of the message. key (dict): The key that can be used to decrypt the file. hashes (dict): A mapping from an algorithm name to a hash of the ciphertext encoded as base64. iv (str): The initialisation vector that was used to encrypt the file. mimetype (str, optional): The mimetype of the message. thumbnail_url (str, optional): The URL of the thumbnail file. thumbnail_key (dict, optional): The key that can be used to decrypt the thumbnail file. thumbnail_hashes (dict, optional): A mapping from an algorithm name to a hash of the thumbnail ciphertext encoded as base64. thumbnail_iv (str, optional): The initialisation vector that was used to encrypt the thumbnail file. 
""" url: str = field() body: str = field() key: Dict[str, Any] = field() hashes: Dict[str, Any] = field() iv: str = field() mimetype: str = field() thumbnail_url: Optional[str] = None thumbnail_key: Optional[Dict] = None thumbnail_hashes: Optional[Dict] = None thumbnail_iv: Optional[str] = None @classmethod @verify(Schemas.room_encrypted_media) def from_dict(cls, parsed_dict): info = parsed_dict["content"].get("info", {}) thumbnail_file = info.get("thumbnail_file", {}) thumbnail_url = thumbnail_file.get("url") thumbnail_key = thumbnail_file.get("key") thumbnail_hashes = thumbnail_file.get("hashes") thumbnail_iv = thumbnail_file.get("iv") mimetype = info.get("mimetype") or parsed_dict["content"]["file"].get( "mimetype" ) return cls( parsed_dict, parsed_dict["content"]["file"]["url"], parsed_dict["content"]["body"], parsed_dict["content"]["file"]["key"], parsed_dict["content"]["file"]["hashes"], parsed_dict["content"]["file"]["iv"], mimetype, thumbnail_url, thumbnail_key, thumbnail_hashes, thumbnail_iv, ) @dataclass class RoomEncryptedImage(RoomEncryptedMedia): """A room message containing an image where the file is encrypted.""" @dataclass class RoomEncryptedAudio(RoomEncryptedMedia): """A room message containing an audio clip where the file is encrypted.""" @dataclass class RoomEncryptedVideo(RoomEncryptedMedia): """A room message containing a video clip where the file is encrypted.""" @dataclass class RoomEncryptedFile(RoomEncryptedMedia): """A room message containing a generic encrypted file.""" @dataclass class RoomMessageImage(RoomMessageMedia): """A room message containing an image.""" @dataclass class RoomMessageAudio(RoomMessageMedia): """A room message containing an audio clip.""" @dataclass class RoomMessageVideo(RoomMessageMedia): """A room message containing a video clip.""" @dataclass class RoomMessageFile(RoomMessageMedia): """A room message containing a generic file.""" @dataclass class RoomMessageUnknown(RoomMessage): """A m.room.message which we do 
not understand. This event is created every time nio tries to parse a room message of an unknown msgtype. Since custom and extensible events are a feature of Matrix this allows clients to use custom messages but care should be taken that the clients will be responsible to validate and type check the content of the message. Attributes: msgtype (str): The msgtype of the room message. content (dict): The dictionary holding the content of the room message. The keys and values of this dictionary will differ depending on the msgtype. """ msgtype: str = field() content: Dict[str, Any] = field() @classmethod def from_dict(cls, parsed_dict: Dict[Any, Any]) -> RoomMessage: return cls( parsed_dict, parsed_dict["content"]["msgtype"], parsed_dict.get("content", {}), ) @property def type(self): """Get the msgtype of the room message.""" return self.msgtype @dataclass class RoomMessageFormatted(RoomMessage): """Base abstract class for room messages that can have formatted bodies. Attributes: body (str): The textual body of the message. formatted_body (str, optional): The formatted version of the body. Can be None if the message doesn't contain a formatted version of the body. format (str, optional): The format used in the formatted_body. This specifies how the formatted_body should be interpreted. """ body: str = field() formatted_body: Optional[str] = field() format: Optional[str] = field() def __str__(self) -> str: return f"{self.sender}: {self.body}" @staticmethod def _validate(parsed_dict): raise NotImplementedError @classmethod def from_dict(cls, parsed_dict: Dict[Any, Any]) -> Union[RoomMessage, BadEventType]: bad = cls._validate(parsed_dict) if bad: return bad body = parsed_dict["content"]["body"] body_format = parsed_dict["content"].get("format") # Only try to find the formatted body if the format is specified. It is # required by the spec to have both or none specified. 
if body_format: formatted_body = parsed_dict["content"].get("formatted_body") else: formatted_body = None return cls( parsed_dict, body, formatted_body, body_format, ) @dataclass class RoomMessageText(RoomMessageFormatted): """A room message corresponding to the m.text msgtype. This message is the most basic message and is used to represent text. Attributes: body (str): The textual body of the message. formatted_body (str, optional): The formatted version of the body. Can be None if the message doesn't contain a formatted version of the body. format (str, optional): The format used in the formatted_body. This specifies how the formatted_body should be interpreted. """ @staticmethod def _validate(parsed_dict: Dict[Any, Any]) -> Optional[BadEventType]: return validate_or_badevent(parsed_dict, Schemas.room_message_text) @dataclass class RoomMessageEmote(RoomMessageFormatted): """A room message corresponding to the m.emote msgtype. This message is similar to m.text except that the sender is 'performing' the action contained in the body key, similar to /me in IRC. Attributes: body (str): The textual body of the message. formatted_body (str, optional): The formatted version of the body. Can be None if the message doesn't contain a formatted version of the body. format (str, optional): The format used in the formatted_body. This specifies how the formatted_body should be interpreted. """ @staticmethod def _validate(parsed_dict: Dict[Any, Any]) -> Optional[BadEventType]: return validate_or_badevent(parsed_dict, Schemas.room_message_emote) @dataclass class RoomMessageNotice(RoomMessageFormatted): """A room message corresponding to the m.notice msgtype. Room notices are primarily intended for responses from automated clients. Attributes: body (str): The textual body of the notice. formatted_body (str, optional): The formatted version of the notice body. Can be None if the message doesn't contain a formatted version of the body. 
format (str, optional): The format used in the formatted_body. This specifies how the formatted_body should be interpreted. """ @staticmethod def _validate(parsed_dict: Dict[Any, Any]) -> Optional[BadEventType]: return validate_or_badevent(parsed_dict, Schemas.room_message_notice) @dataclass class DefaultLevels: """Class holding information about default power levels of a room. Attributes: ban (int): The level required to ban a user. invite (int): The level required to invite a user. kick (int): The level required to kick a user. redact (int): The level required to redact events. state_default (int): The level required to send state events. This can be overridden by the events power level mapping. events_default (int): The level required to send message events. This can be overridden by the events power level mapping. users_default (int): The default power level for every user in the room. This can be overridden by the users power level mapping. notifications (Dict[str, int]): The level required to send different kinds of notifications. Used for ``sender_notification_permission`` conditions in push rules. """ ban: int = 50 invite: int = 50 kick: int = 50 redact: int = 50 state_default: int = 0 events_default: int = 0 users_default: int = 0 notifications: Dict[str, int] = field(default_factory=lambda: {"room": 50}) @classmethod def from_dict(cls, parsed_dict): """Create a DefaultLevels object from a dictionary. This creates the DefaultLevels object from a dictionary containing a m.room.power_levels event. The event structure isn't checked in this method. This shouldn't be used directly, the `PowerLevelsEvent` method will call this method to construct the DefaultLevels object. 
""" content = parsed_dict["content"] return cls( content["ban"], content["invite"], content["kick"], content["redact"], content["state_default"], content["events_default"], content["users_default"], content["notifications"], ) @dataclass class PowerLevels: """Class holding information of room power levels. Attributes: defaults (DefaultLevels): The default power levels of the room. users (dict): The power levels for specific users. This is a mapping from user_id to power level for that user. events (dict): The level required to send specific event types. This is a mapping from event type to power level required. """ defaults: DefaultLevels = field(default_factory=DefaultLevels) users: Dict[str, int] = field(default_factory=dict) events: Dict[str, int] = field(default_factory=dict) def get_state_event_required_level(self, event_type: str) -> int: """Get required power level to send a certain type of state event. Returns an integer representing the required power level. Args: event_type (str): The type of matrix state event we want the required level for, e.g. `m.room.name` or `m.room.topic`. """ return self.events.get(event_type, self.defaults.state_default) def get_message_event_required_level(self, event_type: str) -> int: """Get required power level to send a certain type of message event. Returns an integer representing the required power level. Args: event_type (str): The type of matrix message event we want the required level for, e.g. `m.room.message`. """ return self.events.get(event_type, self.defaults.events_default) def get_notification_required_level(self, notification_type: str) -> int: """Get required power level to send a certain type of notification. Returns an integer representing the required power level. Args: notification_type (str): The type of notification to get the required level for, e.g. ``"room"``. 
""" return self.defaults.notifications.get(notification_type, 50) def get_user_level(self, user_id: str) -> int: """Get the power level of a user. Returns an integer representing the user's power level. Args: user_id (str): The fully-qualified ID of the user for whom we would like to get the power level. """ return self.users.get(user_id, self.defaults.users_default) def can_user_send_state(self, user_id: str, event_type: str) -> bool: """Return whether a user has enough power to send certain state events. Args: user_id (str): The user to check the power of. event_type (str): The type of matrix state event to check the required power of, e.g. `m.room.encryption`. """ required_level = self.get_state_event_required_level(event_type) return self.get_user_level(user_id) >= required_level def can_user_send_message( self, user_id: str, event_type: str = "m.room.message" ) -> bool: """ Return whether a user has enough power to send certain message events. Args: user_id (str): The user to check the power of. event_type (str): The type of matrix message event to check the required power of, `m.room.message` by default. """ required_level = self.get_message_event_required_level(event_type) return self.get_user_level(user_id) >= required_level def can_user_invite(self, user_id: str) -> bool: """Return whether a user has enough power to invite others.""" return self.get_user_level(user_id) >= self.defaults.invite def can_user_kick( self, user_id: str, target_user_id: Optional[str] = None, ) -> bool: """Return whether a user has enough power to kick another. If ``target_user_id`` is ``None``, returns whether ``user_id`` has enough power to kick anyone with a lower power level than that user. 
""" level = self.get_user_level(user_id) can_kick_lower = level >= self.defaults.kick if target_user_id is None: return can_kick_lower return can_kick_lower and level > self.get_user_level(target_user_id) def can_user_ban( self, user_id: str, target_user_id: Optional[str] = None, ) -> bool: """Return whether a user has enough power to ban another. If ``target_user_id`` is ``None``, returns whether ``user_id`` has enough power to ban anyone with a lower power level than that user. """ level = self.get_user_level(user_id) can_ban_lower = level >= self.defaults.ban if target_user_id is None: return can_ban_lower return can_ban_lower and level > self.get_user_level(target_user_id) def can_user_redact(self, user_id: str): """Return whether a user has enough power to redact other user's events.""" return self.get_user_level(user_id) >= self.defaults.redact def can_user_notify(self, user_id: str, notification_type: str): """Return whether user has enough power to send a type of notification.""" required = self.get_notification_required_level(notification_type) return self.get_user_level(user_id) >= required def update(self, new_levels): """Update the power levels object with new levels. Args: new_levels (PowerLevels): A new PowerLevels object that we received from a newer PowerLevelsEvent. """ if not isinstance(new_levels, PowerLevels): return self.defaults = new_levels.defaults self.events.update(new_levels.events) self.users.update(new_levels.users) @dataclass class PowerLevelsEvent(Event): """Class representing a m.room.power_levels event. This event specifies the minimum level a user must have in order to perform a certain action. It also specifies the levels of each user in the room. Attributes: power_levels (PowerLevels): The PowerLevels object holding information of the power levels of the room. 
""" power_levels: PowerLevels = field() @classmethod @verify(Schemas.room_power_levels) def from_dict(cls, parsed_dict): default_levels = DefaultLevels.from_dict(parsed_dict) users = parsed_dict["content"].get("users", {}) events = parsed_dict["content"].get("events", {}) levels = PowerLevels(default_levels, users, events) return cls( parsed_dict, levels, ) @dataclass class RedactionEvent(Event): """An event signaling that another event has been redacted. Events can be redacted by either room or server administrators. Redacting an event means that all keys not required by the protocol are stripped off. Attributes: redacts (str): The event id of the event that has been redacted. reason (str, optional): A string describing why the event was redacted, can be None. """ redacts: str = field() reason: Optional[str] = None @classmethod @verify(Schemas.room_redaction) def from_dict( cls, parsed_dict: Dict[Any, Any] ) -> Union[RedactionEvent, BadEventType]: content = parsed_dict.get("content", {}) reason = content.get("reason", None) return cls( parsed_dict, parsed_dict["redacts"], reason, ) @dataclass class RoomMemberEvent(Event): """Class representing to an m.room.member event. Attributes: state_key (str): The user_id this membership event relates to. In all cases except for when membership is join, the user ID in the sender attribute does not need to match the user ID in the state_key. membership (str): The membership state of the user. One of "invite", "join", "leave", "ban", "knock". prev_membership (str, optional): The previous membership state that this one is overwriting. Can be None in which case the membership state is assumed to have been "leave". content (dict): The content of the of the membership event. prev_content(dict, optional): The content of a previous membership event that this one is overwriting. 
""" state_key: str = field() membership: str = field() prev_membership: Optional[str] = field() content: Dict[str, Any] = field() prev_content: Optional[Dict[str, Any]] = None @classmethod @verify(Schemas.room_membership) def from_dict( cls, parsed_dict: Dict[Any, Any] ) -> Union[RoomMemberEvent, BadEventType]: content = parsed_dict.get("content", {}) unsigned = parsed_dict.get("unsigned", {}) prev_content = unsigned.get("prev_content", None) membership = content["membership"] prev_membership = prev_content.get("membership") if prev_content else None return cls( parsed_dict, parsed_dict["state_key"], membership, prev_membership, content, prev_content, ) @dataclass class StickerEvent(Event): """An event indicating the use of a sticker Sticker messages are specialised image messages that are displayed without controls. Sticker messages are intended to provide simple "reaction" events in the message timeline. Attributes: body (str): A textual representation or associated description of the sticker image. This could be the alt text of the original image, or a message to accompany and further describe the sticker. url (str): The URL to the sticker image. content (dict): The content of the of the redaction event. """ body: str = field() url: str = field() content: Dict[str, Any] = field() @classmethod @verify(Schemas.sticker) def from_dict( cls, parsed_dict: Dict[Any, Any] ) -> Union[StickerEvent, BadEventType]: content = parsed_dict.get("content", {}) body = content["body"] url = content["url"] return cls( parsed_dict, body, url, content, ) @dataclass class ReactionEvent(Event): """An event representing an m.reaction event. Users sometimes wish to respond to a message using emojis. When such responses are grouped visually below the message being reacted to, this provides a (visually) lightweight way for users to react to messages. Attributes: reacts_to (str): The event_id of the message the reaction relates to. key (str): The actual reaction/emoji. 
""" reacts_to: str = field() key: str = field() @classmethod @verify(Schemas.reaction) def from_dict(cls, parsed_dict): content = parsed_dict["content"]["m.relates_to"] return cls( parsed_dict, content["event_id"], content["key"], ) @dataclass class RoomUpgradeEvent(Event): """Class representing to an m.room.tombstone event. A state event signifying that a room has been upgraded to a different room version, and that clients should go there. Attributes: body (str): A server-defined message. replacement_room (str): The new room the client should be visiting. """ body: str = field() replacement_room: str = field() @classmethod @verify(Schemas.room_tombstone) def from_dict(cls, parsed_dict): content = parsed_dict.get("content", {}) body = content.get("body", "") replacement_room = content.get("replacement_room", "") return cls( parsed_dict, body, replacement_room, ) matrix-nio-0.24.0/nio/events/to_device.py000066400000000000000000000420301455215747700203100ustar00rootroot00000000000000# Copyright © 2018-2019 Damir Jelić # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. """nio to-device events. To-device events are events that are sent directly between two devices instead of normally sending events in a room. To-device events can be sent to a specific device of a user or to all devices of a user. 
""" from __future__ import annotations from copy import deepcopy from dataclasses import dataclass, field from typing import Any, Dict, Optional, Union from ..schemas import Schemas from .common import ( KeyVerificationAcceptMixin, KeyVerificationCancelMixin, KeyVerificationEventMixin, KeyVerificationKeyMixin, KeyVerificationMacMixin, KeyVerificationStartMixin, ) from .misc import BadEventType, logger, verify @dataclass class ToDeviceEvent: """Base Event class for events that are sent using the to-device endpoint. Attributes: source (dict): The source dictionary of the event. This allows access to all the event fields in a non-secure way. sender (str): The fully-qualified ID of the user who sent this event. """ source: Dict[str, Any] = field() sender: str = field() @classmethod @verify(Schemas.to_device) def parse_event( cls, event_dict: Dict ) -> Optional[Union[ToDeviceEvent, BadEventType]]: """Parse a to-device event and create a higher level event object. This function parses the type of the to-device event and produces a higher level event object representing the parsed event. The event structure is checked for correctness and the event fields are type-checked. If this validation process fails for an event None will be returned. Args: event_dict (dict): The dictionary representation of the event. """ # A redacted event will have an empty content. 
if not event_dict["content"]: return None if event_dict["type"] == "m.room.encrypted": return ToDeviceEvent.parse_encrypted_event(event_dict) elif event_dict["type"] == "m.key.verification.start": return KeyVerificationStart.from_dict(event_dict) elif event_dict["type"] == "m.key.verification.accept": return KeyVerificationAccept.from_dict(event_dict) elif event_dict["type"] == "m.key.verification.key": return KeyVerificationKey.from_dict(event_dict) elif event_dict["type"] == "m.key.verification.mac": return KeyVerificationMac.from_dict(event_dict) elif event_dict["type"] == "m.key.verification.cancel": return KeyVerificationCancel.from_dict(event_dict) elif event_dict["type"] == "m.room_key_request": return BaseRoomKeyRequest.parse_event(event_dict) return UnknownToDeviceEvent.from_dict(event_dict) @classmethod @verify(Schemas.room_encrypted) def parse_encrypted_event(cls, event_dict): """Parse an encrypted to-device event. Encrypted events may have different fields depending on the algorithm that was used to encrypt them. This function checks the algorithm of the event and produces a higher level event from the provided dictionary. Args: event_dict (dict): The dictionary representation of the encrypted event. Returns None if the algorithm of the event is unknown. """ content = event_dict["content"] if content["algorithm"] == "m.olm.v1.curve25519-aes-sha2": return OlmEvent.from_dict(event_dict) logger.warning( f"Received an encrypted event with an unknown algorithm {content['algorithm']}." ) return None @classmethod def from_dict(cls, parsed_dict): """Create an Event from a dictionary. Args: parsed_dict (dict): The dictionary representation of the event. """ raise NotImplementedError @dataclass class BaseRoomKeyRequest(ToDeviceEvent): """Base class for room key requests. requesting_device_id (str): The id of the device that is requesting the key. request_id (str): A unique identifier for the request. 
""" requesting_device_id: str = field() request_id: str = field() @classmethod @verify(Schemas.room_key_request_cancel) def parse_event(cls, event_dict): if event_dict["content"]["action"] == "request": return RoomKeyRequest.from_dict(event_dict) return RoomKeyRequestCancellation.from_dict(event_dict) @dataclass class RoomKeyRequest(BaseRoomKeyRequest): """Event signaling that a room key was requested from us. Attributes: algorithm (str, optional): The encryption algorithm the requested key in this event is to be used with. Will be set only if the action is 'request'. room_id (str, optional): The id of the room that the key is used in. Will be set only if the action is 'request'. sender_key (str, optional): The key of the device that initiated the session. Will be set only if the action is 'request'. session_id (str, optional): The id of the session the key is for. Will be set only if the action is 'request'. """ algorithm: str = field() room_id: str = field() sender_key: str = field() session_id: str = field() @classmethod @verify(Schemas.room_key_request) def from_dict(cls, parsed_dict): content = parsed_dict["content"] body = content["body"] return cls( parsed_dict, parsed_dict["sender"], content["requesting_device_id"], content["request_id"], body["algorithm"], body["room_id"], body["sender_key"], body["session_id"], ) @dataclass class RoomKeyRequestCancellation(BaseRoomKeyRequest): """Event signaling that a previous room key request was canceled.""" @classmethod @verify(Schemas.room_key_request_cancel) def from_dict(cls, parsed_dict): content = parsed_dict["content"] return cls( parsed_dict, parsed_dict["sender"], content["requesting_device_id"], content["request_id"], ) @dataclass class KeyVerificationEvent(KeyVerificationEventMixin, ToDeviceEvent): """Base class for key verification events. Attributes: transaction_id (str): An opaque identifier for the verification process. Must be unique with respect to the devices involved. 
""" @dataclass class KeyVerificationStart(KeyVerificationStartMixin, KeyVerificationEvent): """Event signaling the start of a SAS key verification process. Attributes: from_device (str): The device ID which is initiating the process. method (str): The verification method to use. key_agreement_protocols (list): A list of strings specifying the key agreement protocols the sending device understands. hashes (list): A list of strings specifying the hash methods the sending device understands. message_authentication_codes (list): A list of strings specifying the message authentication codes that the sending device understands. short_authentication_string (list): A list of strings specifying the SAS methods the sending device (and the sending device's user) understands. """ @classmethod @verify(Schemas.key_verification_start) def from_dict(cls, parsed_dict): content = parsed_dict["content"] return cls( parsed_dict, parsed_dict["sender"], content["transaction_id"], content["from_device"], content["method"], content["key_agreement_protocols"], content["hashes"], content["message_authentication_codes"], content["short_authentication_string"], ) @dataclass class KeyVerificationAccept(KeyVerificationAcceptMixin, KeyVerificationEvent): """Event signaling that the SAS verification start has been accepted. Attributes: commitment (str): The commitment value of the verification process. key_agreement_protocol (str): The key agreement protocol the device is choosing to use hash (str): A list of strings specifying the hash methods the sending device understands. message_authentication_code (str): The message authentication code the device is choosing to use. short_authentication_string (list): A list of strings specifying the SAS methods that can be used in the verification process. 
""" @classmethod @verify(Schemas.key_verification_accept) def from_dict(cls, parsed_dict): content = parsed_dict["content"] return cls( parsed_dict, parsed_dict["sender"], content["transaction_id"], content["commitment"], content["key_agreement_protocol"], content["hash"], content["message_authentication_code"], content["short_authentication_string"], ) @dataclass class KeyVerificationKey(KeyVerificationKeyMixin, KeyVerificationEvent): """Event carrying a key verification key. After this event is received the short authentication string can be shown to the user. Attributes: key (str): The device's ephemeral public key, encoded as unpadded base64. """ @classmethod @verify(Schemas.key_verification_key) def from_dict(cls, parsed_dict): content = parsed_dict["content"] return cls( parsed_dict, parsed_dict["sender"], content["transaction_id"], content["key"], ) @dataclass class KeyVerificationMac(KeyVerificationMacMixin, KeyVerificationEvent): """Event holding a message authentication code of the verification process. After this event is received the device that we are verifying will be marked as verified given that we have accepted the short authentication string as well. Attributes: mac (dict): A map of the key ID to the MAC of the key, using the algorithm in the verification process. The MAC is encoded as unpadded base64. keys (str): The MAC of the comma-separated, sorted, list of key IDs given in the mac property, encoded as unpadded base64. """ @classmethod @verify(Schemas.key_verification_mac) def from_dict(cls, parsed_dict): content = parsed_dict["content"] return cls( parsed_dict, parsed_dict["sender"], content["transaction_id"], content["mac"], content["keys"], ) @dataclass class KeyVerificationCancel(KeyVerificationCancelMixin, KeyVerificationEvent): """Event signaling that a key verification process has been canceled. Attributes: code (str): The error code for why the process/request was canceled by the user. 
reason (str): A human readable description of the cancellation code. """ @classmethod @verify(Schemas.key_verification_cancel) def from_dict(cls, parsed_dict): content = parsed_dict["content"] return cls( parsed_dict, parsed_dict["sender"], content["transaction_id"], content["code"], content["reason"], ) @dataclass class EncryptedToDeviceEvent(ToDeviceEvent): pass @dataclass class OlmEvent(EncryptedToDeviceEvent): """An Olm encrypted event. Olm events are used to exchange end to end encrypted messages between two devices. They will mostly contain encryption keys to establish a Megolm session for a room. nio users will never see such an event under normal circumstances since decrypting this event will produce an event of another type. Attributes: sender (str): The fully-qualified ID of the user who sent this event. sender_key (str, optional): The public key of the sender that was used to establish the encrypted session. ciphertext (Dict[str, Any]): The undecrypted ciphertext of the event. transaction_id (str, optional): The unique identifier that was used when the message was sent. Is only set if the message was sent from our own device, otherwise None. """ sender_key: str = field() ciphertext: Dict[str, Any] = field() transaction_id: Optional[str] = None @classmethod @verify(Schemas.room_olm_encrypted) def from_dict(cls, event_dict): content = event_dict["content"] ciphertext = content["ciphertext"] sender_key = content["sender_key"] tx_id = ( event_dict["unsigned"].get("transaction_id", None) if "unsigned" in event_dict else None ) return cls(event_dict, event_dict["sender"], sender_key, ciphertext, tx_id) @dataclass class DummyEvent(ToDeviceEvent): """Event containing a dummy message. This event type is used start a new Olm session with a device. The event has no content. Attributes: sender (str): The sender of the event. sender_key (str): The key of the sender that sent the event. 
""" sender_key: str = field() sender_device: str = field() @classmethod @verify(Schemas.dummy_event) def from_dict(cls, event_dict, sender, sender_key): return cls(event_dict, sender, sender_key, event_dict["sender_device"]) @dataclass class RoomKeyEvent(ToDeviceEvent): """Event containing a megolm room key that got sent to us. Attributes: sender (str): The sender of the event. sender_key (str): The key of the sender that sent the event. room_id (str): The room ID of the room to which the session key belongs to. session_id (str): The session id of the session key. algorithm (str): The algorithm of the session key. """ sender_key: str = field() room_id: str = field() session_id: str = field() algorithm: str = field() @classmethod @verify(Schemas.room_key_event) def from_dict(cls, event_dict, sender, sender_key): event_dict = deepcopy(event_dict) event_dict.pop("keys") content = event_dict["content"] content.pop("session_key") return cls( event_dict, sender, sender_key, content["room_id"], content["session_id"], content["algorithm"], ) @dataclass class ForwardedRoomKeyEvent(RoomKeyEvent): """Event containing a room key that got forwarded to us. Attributes: sender (str): The sender of the event. sender_key (str): The key of the sender that sent the event. room_id (str): The room ID of the room to which the session key belongs to. session_id (str): The session id of the session key. algorithm (str): The algorithm of the session key. """ @classmethod @verify(Schemas.forwarded_room_key_event) def from_dict(cls, event_dict, sender, sender_key): """Create a ForwardedRoomKeyEvent from a event dictionary. Args: event_dict (Dict): The dictionary containing the event. sender (str): The sender of the event. sender_key (str): The key of the sender that sent the event. 
""" event_dict = deepcopy(event_dict) content = event_dict["content"] content.pop("session_key") return cls( event_dict, sender, sender_key, content["room_id"], content["session_id"], content["algorithm"], ) @dataclass class UnknownToDeviceEvent(ToDeviceEvent): """A ToDeviceEvent which we do not understand. This event is created every time nio tries to parse an event of an unknown type. Since custom and extensible events are a feature of Matrix this allows clients to use custom events but care should be taken that the clients will be responsible to validate and type check the event. Attributes: type (str): The type of the event. """ type: str = field() @classmethod def from_dict(cls, event_dict): return cls( event_dict, event_dict["sender"], event_dict["type"], ) matrix-nio-0.24.0/nio/exceptions.py000066400000000000000000000030141455215747700172230ustar00rootroot00000000000000# Copyright © 2018 Damir Jelić # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
class ProtocolError(Exception): pass class LocalProtocolError(ProtocolError): pass class MembersSyncError(LocalProtocolError): pass class SendRetryError(LocalProtocolError): pass class RemoteProtocolError(ProtocolError): pass class LocalTransportError(ProtocolError): pass class RemoteTransportError(ProtocolError): pass class OlmTrustError(Exception): pass class OlmUnverifiedDeviceError(OlmTrustError): def __init__(self, unverified_device, *args): super().__init__(*args) self.device = unverified_device class VerificationError(Exception): pass class EncryptionError(Exception): pass class GroupEncryptionError(Exception): pass class TransferCancelledError(Exception): pass matrix-nio-0.24.0/nio/http.py000066400000000000000000000406501455215747700160300ustar00rootroot00000000000000# Copyright © 2018 Damir Jelić # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
from __future__ import annotations import json import logging import pprint import time from collections import OrderedDict, deque from enum import Enum, unique from typing import Any, Deque, List, Optional, Tuple from uuid import UUID, uuid4 import h2.connection import h2.events import h11 logger = logging.getLogger(__name__) USER_AGENT = "nio" @unique class TransportType(Enum): HTTP = 0 HTTP2 = 1 WEBSOCKETS = 2 class TransportRequest: def __init__(self, request, data=b"", timeout=0): self._request = request self._data = data self.response = None # Optional[TransportResponse] self.timeout = timeout @classmethod def get(cls, host, target, timeout=0): raise NotImplementedError @classmethod def post(cls, host, target, data, timeout=0): raise NotImplementedError @classmethod def put(cls, host, target, data, timeout=0): raise NotImplementedError class HttpRequest(TransportRequest): def __init__(self, request, data=b"", timeout=0): super().__init__(request, data, timeout) self._end_of_message = h11.EndOfMessage() @classmethod def get(cls, host, target, timeout=0): request = h11.Request( method="GET", target=target, headers=HttpRequest._headers(host) ) return cls(request, timeout=timeout) @staticmethod def _headers(host: str, data: Optional[bytes] = None) -> List[Tuple[str, str]]: headers = [ ("User-Agent", f"{USER_AGENT}"), ("Host", f"{host}"), ("Connection", "keep-alive"), ("Accept", "*/*"), ] if data: headers.append(("Content-Type", "application/json")) headers.append(("Content-length", f"{len(data)}")) return headers @classmethod def _post_or_put(cls, method, host, target, data, timeout=0): request_data = ( json.dumps(data, separators=(",", ":")) if isinstance(data, dict) else data ) request_data = bytes(request_data, "utf-8") request = h11.Request( method=method, target=target, headers=HttpRequest._headers(host, request_data), ) d = h11.Data(data=request_data) return cls(request, d, timeout) @classmethod def post(cls, host, target, data, timeout=0): return 
cls._post_or_put("POST", host, target, data, timeout) @classmethod def put(cls, host, target, data, timeout=0): return cls._post_or_put("PUT", host, target, data, timeout) class Http2Request(TransportRequest): @staticmethod def _request(method, target, headers): h = [(":method", method), (":path", target)] h = h + headers return h @staticmethod def _headers(host: str, data: Optional[bytes] = None) -> List[Tuple[str, str]]: headers = [ (":authority", f"{host}"), (":scheme", "https"), ("user-agent", f"{USER_AGENT}"), ] headers.append(("accept", "application/json")) if data: headers.append(("content-type", "application/json")) headers.append(("content-length", f"{len(data)}")) return headers @classmethod def _post_or_put(cls, method, host, target, data, timeout): request_data = ( json.dumps(data, separators=(",", ":")) if isinstance(data, dict) else data ) request_data = bytes(request_data, "utf-8") request = Http2Request._request( method=method, target=target, headers=Http2Request._headers(host, request_data), ) return cls(request, request_data, timeout) @classmethod def put(cls, host, target, data, timeout=0): return cls._post_or_put("PUT", host, target, data, timeout) @classmethod def post(cls, host, target, data, timeout=0): return cls._post_or_put("POST", host, target, data, timeout) @classmethod def get(cls, host, target, timeout=0): request = Http2Request._request( method="GET", target=target, headers=Http2Request._headers(host), ) return cls(request, timeout=timeout) class HeaderDict(dict): def __setitem__(self, key, value): super().__setitem__(key.lower(), value) def __getitem__(self, key): return super().__getitem__(key.lower()) class TransportResponse: def __init__(self, uuid: Optional[UUID] = None, timeout: float = 0) -> None: self.headers: HeaderDict = HeaderDict() self.content: bytes = b"" self.status_code: Optional[int] = None self.uuid = uuid or uuid4() self.creation_time = time.time() self.timeout: float = timeout self.send_time: Optional[float] = 
None self.receive_time: Optional[float] = None self.request_info: Optional[Any] = None def add_response(self, response): raise NotImplementedError def add_data(self, content: bytes) -> None: self.content = self.content + content def mark_as_sent(self): self.send_time = time.time() def mark_as_received(self): self.receive_time = time.time() @property def elapsed(self) -> float: if (self.receive_time is not None) and (self.send_time is not None): elapsed = self.receive_time - self.send_time elif self.send_time is not None: elapsed = time.time() - self.send_time else: elapsed = 0.0 return max(0, elapsed - (self.timeout / 1000)) @property def text(self): return self.content.decode("utf-8") @property def is_ok(self): if self.status_code == 200: return True return False class HttpResponse(TransportResponse): def add_response(self, response: h11.Response) -> None: self.status_code = response.status_code for header in response.headers: _name, _value = header name = _name.decode("utf-8") value = _value.decode("utf-8") logger.debug(f"Got http header {name}: {value}") self.headers[name] = value class Http2Response(TransportResponse): def __init__(self, uuid=None, timeout=0): super().__init__(uuid, timeout) self.was_reset = False self.error_code: Optional[h2.errors.ErrorCodes] = None def add_response(self, headers: h2.events.ResponseReceived) -> None: for header in headers: name, value = header logger.debug(f"Got http2 header {name}: {value}") if name == b":status" or name == ":status": self.status_code = int(value) else: self.headers[name] = value @property def is_ok(self): if self.was_reset: return False if self.status_code == 200: return True return False class Connection: def connect(self) -> bytes: return b"" def disconnect(self) -> bytes: return b"" class HttpConnection(Connection): def __init__(self) -> None: self._connection = h11.Connection(our_role=h11.CLIENT) self._message_queue: Deque[HttpRequest] = deque() self._current_response: Optional[HttpResponse] = None def 
data_to_send(self) -> bytes: if self._current_response: return b"" if not self._message_queue: return b"" if not self._connection.our_state == h11.IDLE: return b"" request = self._message_queue.popleft() _, data = self.send(request) return data @property def elapsed(self) -> float: if not self._current_response: return 0 response = self._current_response return response.elapsed def send( self, request: TransportRequest, uuid: Optional[UUID] = None ) -> Tuple[UUID, bytes]: data = b"" if not isinstance(request, HttpRequest): raise TypeError("Invalid request type for HttpConnection") if self._connection.our_state == h11.IDLE and not self._current_response: data = data + self._connection.send(request._request) if request._data: data = data + self._connection.send(request._data) data = data + self._connection.send(request._end_of_message) if request.response: self._current_response = request.response else: self._current_response = HttpResponse(uuid, request.timeout) # Make mypy happy assert self._current_response self._current_response.mark_as_sent() return self._current_response.uuid, data else: request.response = HttpResponse(uuid, request.timeout) self._message_queue.append(request) return request.response.uuid, b"" def _get_response(self) -> Optional[HttpResponse]: ret = self._connection.next_event() if not self._current_response: self._current_response = HttpResponse() while ret != h11.NEED_DATA: if ret == h11.PAUSED or isinstance(ret, h11.EndOfMessage): try: self._connection.start_next_cycle() except h11.ProtocolError: self._connection = h11.Connection(our_role=h11.CLIENT) response = self._current_response self._current_response = None response.mark_as_received() return response elif isinstance(ret, h11.InformationalResponse): pass elif isinstance(ret, h11.Response): self._current_response.add_response(ret) elif isinstance(ret, h11.Data): self._current_response.add_data(ret.data) ret = self._connection.next_event() return None def receive(self, data): 
self._connection.receive_data(data) return self._get_response() class Http2Connection(Connection): def __init__(self) -> None: config = h2.config.H2Configuration( client_side=True, validate_inbound_headers=False ) self._connection = h2.connection.H2Connection(config=config) self._connection.max_inbound_frame_size = 64 * 1024 self._responses: OrderedDict[int, Http2Response] = OrderedDict() self._data_to_send: OrderedDict[int, bytes] = OrderedDict() @property def elapsed(self) -> float: if not self._responses: return 0 return max(response.elapsed for response in self._responses.values()) def _handle_window_update(self, event): # We don't have any data to send, it doesn't matter that the window got # updated. if not self._data_to_send: return # The window changed for a single stream and the stream contains some # data to send, send it out now. if event.stream_id in self._data_to_send: self._send_data(event.stream_id, self._data_to_send[event.stream_id]) return # The window changed for the whole connection, try to send out data for # every stream we have some data buffered. 
if event.stream_id == 0: for stream_id, data in self._data_to_send.items(): self._send_data(stream_id, data) def _send_data(self, stream_id, data): window_size = self._connection.local_flow_control_window(stream_id) max_frame_size = self._connection.max_outbound_frame_size request_size = len(data) bytes_to_send = min(window_size, request_size) logger.debug( f"Sending data: stream id: {stream_id}; request size: {request_size}; " f"window size: {window_size}; max frame size {max_frame_size}" ) while bytes_to_send > 0: chunk_size = min(bytes_to_send, max_frame_size) if chunk_size >= len(data): chunk, data = data, "" else: chunk, data = (data[0:chunk_size], data[chunk_size:]) bytes_to_send -= chunk_size self._connection.send_data(stream_id, chunk) if not data: self._connection.end_stream(stream_id) self._data_to_send.pop(stream_id, None) else: self._data_to_send[stream_id] = data def send( self, request: TransportRequest, uuid: Optional[UUID] = None ) -> Tuple[UUID, bytes]: if not isinstance(request, Http2Request): raise TypeError("Invalid request type for HttpConnection") logger.debug( f"Making Http2 request {pprint.pformat(request._request)} {pprint.pformat(request._data)}." 
) stream_id = self._connection.get_next_available_stream_id() logger.debug(f"New stream id {stream_id}") self._connection.send_headers(stream_id, request._request) self._send_data(stream_id, request._data) ret = self._connection.data_to_send() response = Http2Response(uuid, request.timeout) response.mark_as_sent() self._responses[stream_id] = response return response.uuid, ret def data_to_send(self): return self._connection.data_to_send() def connect(self) -> bytes: self._connection.initiate_connection() return self._connection.data_to_send() def disconnect(self) -> bytes: self._connection.close_connection() self._responses.clear() self._data_to_send = OrderedDict() return self._connection.data_to_send() def _handle_response(self, event: h2.events.Event) -> None: stream_id = event.stream_id headers = event.headers response = self._responses[stream_id] response.add_response(headers) def _handle_data(self, event: h2.events.Event) -> None: stream_id = event.stream_id data = event.data self._connection.acknowledge_received_data( event.flow_controlled_length, event.stream_id ) response = self._responses[stream_id] response.add_data(data) def _handle_reset(self, event: h2.events.StreamReset) -> Optional[Http2Response]: response = self._responses.pop(event.stream_id, None) if not response: return None response.was_reset = True response.error_code = event.error_code return response def _handle_events(self, events: h2.events.Event) -> Optional[Http2Response]: for event in events: logger.info(f"Handling Http2 event: {repr(event)}") if isinstance(event, h2.events.ResponseReceived): self._handle_response(event) elif isinstance(event, h2.events.DataReceived): self._handle_data(event) elif isinstance(event, h2.events.StreamEnded): response = self._responses.pop(event.stream_id, None) if not response: return None response.mark_as_received() return response elif isinstance(event, h2.events.SettingsAcknowledged): pass elif isinstance(event, h2.events.WindowUpdated): 
self._handle_window_update(event) elif isinstance(event, h2.events.StreamReset): logger.error("Http2 stream reset") return self._handle_reset(event) elif isinstance(events, h2.events.ConnectionTerminated): logger.error("Http2 connection terminated") # TODO reset the client pass return None def receive(self, data: bytes) -> Optional[Http2Response]: events = self._connection.receive_data(data) return self._handle_events(events) matrix-nio-0.24.0/nio/monitors.py000066400000000000000000000151461455215747700167250ustar00rootroot00000000000000# Copyright © 2018, 2019 Damir Jelić # Copyright © 2019 miruka # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. import time from dataclasses import dataclass, field from datetime import datetime, timedelta from threading import Thread from typing import Callable, List, Optional @dataclass class TransferMonitor: """Get statistics, pause or cancel a running upload. A ``TransferMonitor`` object can be passed to the ``AsyncClient.upload()`` methods; the methods will then update the object's statistics while the transfer is running. The transfer can also be paused or cancelled using the object. Args: total_size (int): Size in bytes of the data to transfer. on_transferred (Callable[[int], None], optional): A callback to call with the new value of ``transferred`` when it changes. 
on_speed_changed (Callable[[float], None], optional): A callback to call with the new value of ``average_speed`` when it changes. speed_period (float, optional): How many previous seconds are considered to calculate ``average_speed``. Defaults to ``10``. Lower values makes ``average_speed`` more accurate, but less smooth and more susceptible to speed fluctuations. Attributes: average_speed (float): An average number of how many bytes are being transferred per second. start_time (datetime): The date when the ``TransferMonitor` object was created. end_time (datetime, optional): The date when the transfer was completed, or ``None`` if it is still running. pause (bool): Indicates to methods using this object if the transfer should be paused. ``False`` by default. At this time, servers don't handle pausing uploads well and will end up dropping the connection after some time. cancel (bool): When set to True, stop updating statistics and indicate to methods using this object that they should raise a ``TransferCancelledError``. """ # TODO: tell that this can be used for downloads too once implemented. 
total_size: int = field() on_transferred: Optional[Callable[[int], None]] = None on_speed_changed: Optional[Callable[[float], None]] = None speed_period: float = 10 average_speed: float = field(init=False, default=0.0) start_time: datetime = field(init=False) end_time: Optional[datetime] = field(init=False, default=None) pause: bool = field(init=False, default=False) cancel: bool = field(init=False, default=False) _transferred: int = field(init=False, default=0) _updater: Thread = field(init=False) _last_transferred_sizes: List[int] = field(init=False) _update_loop_sleep_time: float = field(default=1) def __post_init__(self) -> None: self.start_time = datetime.now() self._last_transferred_sizes = [] self._start_update_loop() def _start_update_loop(self) -> None: """Start a Thread running ``self._update_loop()``.""" self._updater = Thread(target=self._update_loop, daemon=True) self._updater.start() def _update_loop(self) -> None: """Calculate and update the average transfer speed every second.""" times_we_got_data = 0 while not self.done and not self.cancel: if self.pause: time.sleep(self._update_loop_sleep_time / 10) continue bytes_transferred_this_second = sum(self._last_transferred_sizes) self._last_transferred_sizes.clear() previous_speed = self.average_speed consider_past_secs = min(times_we_got_data, self.speed_period) or 1 self.average_speed = max( 0, self.average_speed * (consider_past_secs - 1) / consider_past_secs + bytes_transferred_this_second / consider_past_secs, ) if self.average_speed != previous_speed and self.on_speed_changed: self.on_speed_changed(self.average_speed) if bytes_transferred_this_second: times_we_got_data += 1 time.sleep(self._update_loop_sleep_time) if self.done and not self.average_speed: # Transfer was fast enough to end before we had time to calculate self.average_speed = self.total_size @property def transferred(self) -> int: """Number of currently transferred bytes.""" return self._transferred @transferred.setter def 
transferred(self, size: int) -> None: old_value = self._transferred self._transferred = size self._last_transferred_sizes.append(size - old_value) if size >= self.total_size: self.end_time = datetime.now() if size != old_value and self.on_transferred: self.on_transferred(size) @property def percent_done(self) -> float: """Percentage of completion for the transfer.""" return self.transferred / self.total_size * 100 @property def remaining(self) -> int: """Number of remaining bytes to transfer.""" return self.total_size - self.transferred @property def spent_time(self) -> timedelta: """Time elapsed since the transfer started.""" return (self.end_time or datetime.now()) - self.start_time @property def remaining_time(self) -> Optional[timedelta]: """Estimated remaining time to complete the transfer. Returns None (for infinity) if the current transfer speed is 0 bytes/s, or the remaining time is so long it would cause an OverflowError. """ try: return timedelta(seconds=self.remaining / self.average_speed) except (ZeroDivisionError, OverflowError): return None @property def done(self) -> bool: """Whether the transfer is finished.""" return bool(self.end_time) matrix-nio-0.24.0/nio/responses.py000066400000000000000000001626461455215747700171040ustar00rootroot00000000000000# Copyright © 2018 Damir Jelić # Copyright © 2020-2021 Famedly GmbH # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. from __future__ import annotations import logging import os from dataclasses import dataclass, field from datetime import datetime from functools import wraps from typing import Any, Dict, Generator, List, Optional, Set, Tuple, Union from jsonschema.exceptions import SchemaError, ValidationError from .event_builders import ToDeviceMessage from .events import ( AccountDataEvent, BadEventType, EphemeralEvent, Event, InviteEvent, ToDeviceEvent, ) from .events.presence import PresenceEvent from .http import TransportResponse from .schemas import Schemas, validate_json logger = logging.getLogger(__name__) __all__ = [ "ContentRepositoryConfigResponse", "ContentRepositoryConfigError", "FileResponse", "DeleteDevicesAuthResponse", "DeleteDevicesResponse", "DeleteDevicesError", "DeletePushRuleError", "DeletePushRuleResponse", "Device", "DeviceList", "DevicesResponse", "DevicesError", "DeviceOneTimeKeyCount", "DiscoveryInfoError", "DiscoveryInfoResponse", "DiskDownloadResponse", "DownloadResponse", "DownloadError", "EnablePushRuleResponse", "EnablePushRuleError", "ErrorResponse", "GetOpenIDTokenError", "GetOpenIDTokenResponse", "InviteInfo", "JoinResponse", "JoinError", "JoinedMembersResponse", "JoinedMembersError", "JoinedRoomsResponse", "JoinedRoomsError", "KeysClaimResponse", "KeysClaimError", "KeysQueryResponse", "KeysQueryError", "KeysUploadResponse", "KeysUploadError", "RegisterResponse", "LoginResponse", "LoginError", "LoginInfoResponse", "LoginInfoError", "LogoutResponse", "LogoutError", "MemoryDownloadResponse", "Response", "RoomBanResponse", "RoomBanError", "RoomCreateResponse", "RoomCreateError", "RoomDeleteAliasError", 
"RoomDeleteAliasResponse", "RoomInfo", "RoomInviteResponse", "RoomInviteError", "RoomKickResponse", "RoomKickError", "RoomKnockResponse", "RoomKnockError", "RoomLeaveResponse", "RoomLeaveError", "RoomForgetResponse", "RoomForgetError", "RoomMember", "RoomMessagesResponse", "RoomMessagesError", "RoomGetStateResponse", "RoomGetStateError", "RoomGetStateEventResponse", "RoomGetStateEventError", "RoomGetEventResponse", "RoomGetEventError", "RoomGetVisibilityResponse", "RoomGetVisibilityError", "RoomPutAliasResponse", "RoomPutStateResponse", "RoomPutStateError", "RoomRedactResponse", "RoomRedactError", "RoomResolveAliasResponse", "RoomResolveAliasError", "RoomSendResponse", "RoomSendError", "RoomSummary", "RoomUnbanResponse", "RoomUnbanError", "Rooms", "SetPushRuleError", "SetPushRuleResponse", "SetPushRuleActionsError", "SetPushRuleActionsResponse", "ShareGroupSessionResponse", "ShareGroupSessionError", "SyncResponse", "SyncError", "Timeline", "UpdateDeviceResponse", "UpdateDeviceError", "RoomTypingResponse", "RoomTypingError", "RoomReadMarkersResponse", "RoomReadMarkersError", "UploadResponse", "UploadError", "ProfileGetResponse", "ProfileGetError", "ProfileGetDisplayNameResponse", "ProfileGetDisplayNameError", "ProfileSetDisplayNameResponse", "ProfileSetDisplayNameError", "ProfileGetAvatarResponse", "ProfileGetAvatarError", "ProfileSetAvatarResponse", "ProfileSetAvatarError", "PresenceGetResponse", "PresenceGetError", "PresenceSetResponse", "PresenceSetError", "RoomKeyRequestResponse", "RoomKeyRequestError", "ThumbnailResponse", "ThumbnailError", "ToDeviceResponse", "ToDeviceError", "RoomContextResponse", "RoomContextError", "UploadFilterError", "UploadFilterResponse", "UpdateReceiptMarkerError", "UpdateReceiptMarkerResponse", "WhoamiError", "WhoamiResponse", "SpaceGetHierarchyResponse", "SpaceGetHierarchyError", "DirectRoomsResponse", "DirectRoomsErrorResponse", ] def verify(schema, error_class, pass_arguments=True): def decorator(f): @wraps(f) def wrapper(cls, 
parsed_dict, *args, **kwargs): try: logger.debug("Validating response schema %r: %s", schema, parsed_dict) validate_json(parsed_dict, schema) except (SchemaError, ValidationError) as e: logger.warning("Error validating response: " + str(e.message)) if pass_arguments: return error_class.from_dict(parsed_dict, *args, **kwargs) else: return error_class.from_dict(parsed_dict) return f(cls, parsed_dict, *args, **kwargs) return wrapper return decorator @dataclass class Rooms: invite: Dict[str, InviteInfo] = field() join: Dict[str, RoomInfo] = field() leave: Dict[str, RoomInfo] = field() @dataclass class DeviceOneTimeKeyCount: curve25519: Optional[int] = field() signed_curve25519: Optional[int] = field() @dataclass class DeviceList: changed: List[str] = field() left: List[str] = field() @dataclass class Timeline: events: List = field() limited: bool = field() prev_batch: Optional[str] = field() @dataclass class InviteInfo: invite_state: List = field() @dataclass class RoomSummary: invited_member_count: Optional[int] = None joined_member_count: Optional[int] = None heroes: Optional[List[str]] = None @dataclass class UnreadNotifications: notification_count: Optional[int] = None highlight_count: Optional[int] = None @dataclass class RoomInfo: timeline: Timeline = field() state: List = field() ephemeral: List = field() account_data: List = field() summary: Optional[RoomSummary] = None unread_notifications: Optional[UnreadNotifications] = None @staticmethod def parse_account_data(event_dict): """Parse the account data dictionary and produce a list of events.""" return [AccountDataEvent.parse_event(event) for event in event_dict] @dataclass class RoomMember: user_id: str = field() display_name: str = field() avatar_url: str = field() @dataclass class Device: id: str = field() display_name: str = field() last_seen_ip: str = field() last_seen_date: datetime = field() @classmethod def from_dict(cls, parsed_dict): date = None if parsed_dict["last_seen_ts"] is not None: date = 
datetime.fromtimestamp(parsed_dict["last_seen_ts"] / 1000) return cls( parsed_dict["device_id"], parsed_dict["display_name"], parsed_dict["last_seen_ip"], date, ) @dataclass class Response: uuid: str = field(default="", init=False) start_time: Optional[float] = field(default=None, init=False) end_time: Optional[float] = field(default=None, init=False) timeout: int = field(default=0, init=False) transport_response: Optional[TransportResponse] = field( init=False, default=None, ) @property def elapsed(self): if not self.start_time or not self.end_time: return 0 elapsed = self.end_time - self.start_time return max(0, elapsed - (self.timeout / 1000)) @dataclass class FileResponse(Response): """A response representing a successful file content request. Attributes: body (bytes, os.PathLike): The file's content in bytes, or location on disk if provided. content_type (str): The content MIME type of the file, e.g. "image/png". filename (str, optional): The file's name returned by the server. """ body: Union[bytes, os.PathLike] = field() content_type: str = field() filename: Optional[str] = field() def __str__(self): return f"{len(self.body)} bytes, content type: {self.content_type}, filename: {self.filename}" @classmethod def from_data( cls, data: Union[bytes, os.PathLike, dict], content_type, filename=None ): """Create a FileResponse from file content returned by the server. Args: data (bytes, os.PathLike): The file's content in bytes. content_type (str): The content MIME type of the file, e.g. "image/png". filename (str, optional): The file's name returned by the server. """ raise NotImplementedError @dataclass class MemoryFileResponse(FileResponse): """ A response representing a successful file content request with the file content stored in memory. Attributes: body (bytes): The file's content in bytes. 
""" body: bytes = field() @dataclass class DiskFileResponse(FileResponse): """A response representing a successful file content request with the file content stored on disk. This class is exactly the same as ``FileResponse`` but with the following difference Attributes: body (os.PathLike): The path to the file on disk. """ body: os.PathLike = field() @dataclass class ErrorResponse(Response): message: str = field() status_code: Optional[str] = None retry_after_ms: Optional[int] = None soft_logout: bool = False def __str__(self) -> str: if self.status_code and self.message: e = f"{self.status_code} {self.message}" elif self.message: e = self.message elif self.status_code: e = f"{self.status_code} unknown error" else: e = "unknown error" if self.retry_after_ms: e = f"{e} - retry after {self.retry_after_ms}ms" return f"{self.__class__.__name__}: {e}" @classmethod def from_dict(cls, parsed_dict: Dict[Any, Any]) -> ErrorResponse: try: validate_json(parsed_dict, Schemas.error) except (SchemaError, ValidationError): return cls("unknown error") return cls( parsed_dict["error"], parsed_dict["errcode"], parsed_dict.get("retry_after_ms"), parsed_dict.get("soft_logout", False), ) @dataclass class _ErrorWithRoomId(ErrorResponse): room_id: str = "" @classmethod def from_dict(cls, parsed_dict, room_id): try: validate_json(parsed_dict, Schemas.error) except (SchemaError, ValidationError): return cls("unknown error") return cls( parsed_dict["error"], parsed_dict["errcode"], parsed_dict.get("retry_after_ms"), parsed_dict.get("soft_logout", False), room_id, ) class LoginError(ErrorResponse): pass class LogoutError(ErrorResponse): pass class SyncError(ErrorResponse): pass class RoomSendError(_ErrorWithRoomId): pass class RoomGetStateError(_ErrorWithRoomId): """A response representing an unsuccessful room state query.""" pass class RoomGetStateEventError(_ErrorWithRoomId): """A response representing an unsuccessful room state query.""" pass class RoomGetEventError(ErrorResponse): """A 
response representing an unsuccessful room get event request.""" pass class RoomPutStateError(_ErrorWithRoomId): """A response representing an unsuccessful room state sending request.""" pass class RoomRedactError(_ErrorWithRoomId): pass class RoomResolveAliasError(ErrorResponse): """A response representing an unsuccessful room alias query.""" pass class RoomDeleteAliasError(ErrorResponse): """A response representing an unsuccessful room alias delete request.""" pass class RoomPutAliasError(ErrorResponse): """A response representing an unsuccessful room alias put request.""" pass class RoomGetVisibilityError(ErrorResponse): """A response representing an unsuccessful room get visibility request.""" pass class RoomTypingError(_ErrorWithRoomId): """A response representing a unsuccessful room typing request.""" pass class UpdateReceiptMarkerError(ErrorResponse): pass class RoomReadMarkersError(_ErrorWithRoomId): """A response representing a unsuccessful room read markers request.""" pass class RoomKickError(ErrorResponse): pass class RoomBanError(ErrorResponse): pass class RoomUnbanError(ErrorResponse): pass class RoomInviteError(ErrorResponse): pass class RoomCreateError(ErrorResponse): """A response representing a unsuccessful create room request.""" pass class JoinError(ErrorResponse): pass class RoomKnockError(ErrorResponse): """A response representing a unsuccessful room knock request.""" pass class RoomLeaveError(ErrorResponse): pass class RoomForgetError(_ErrorWithRoomId): pass class RoomMessagesError(_ErrorWithRoomId): pass class SpaceGetHierarchyError(ErrorResponse): pass class GetOpenIDTokenError(ErrorResponse): pass class KeysUploadError(ErrorResponse): pass class KeysQueryError(ErrorResponse): pass class KeysClaimError(_ErrorWithRoomId): pass class ContentRepositoryConfigError(ErrorResponse): """A response for a unsuccessful content repository config request.""" class UploadError(ErrorResponse): """A response representing a unsuccessful upload request.""" 
class DownloadError(ErrorResponse): """A response representing a unsuccessful download request.""" class ThumbnailError(ErrorResponse): """A response representing a unsuccessful thumbnail request.""" @dataclass class ShareGroupSessionError(_ErrorWithRoomId): """Response representing unsuccessful group sessions sharing request.""" users_shared_with: Set[Tuple[str, str]] = field(default_factory=set) @classmethod def from_dict(cls, parsed_dict, room_id, users_shared_with): try: validate_json(parsed_dict, Schemas.error) except (SchemaError, ValidationError): return cls("unknown error") return cls( parsed_dict["error"], parsed_dict["errcode"], room_id, users_shared_with ) class DevicesError(ErrorResponse): pass class DeleteDevicesError(ErrorResponse): pass class UpdateDeviceError(ErrorResponse): pass class JoinedMembersError(_ErrorWithRoomId): pass class JoinedRoomsError(ErrorResponse): """A response representing an unsuccessful joined rooms query.""" pass class ProfileGetError(ErrorResponse): pass class ProfileGetDisplayNameError(ErrorResponse): pass class ProfileSetDisplayNameError(ErrorResponse): pass class ProfileGetAvatarError(ErrorResponse): pass class PresenceGetError(ErrorResponse): """Response representing a unsuccessful get presence request.""" pass class PresenceSetError(ErrorResponse): """Response representing a unsuccessful set presence request.""" pass class ProfileSetAvatarError(ErrorResponse): pass @dataclass class DiscoveryInfoError(ErrorResponse): pass @dataclass class DiscoveryInfoResponse(Response): """A response for a successful discovery info request. Attributes: homeserver_url (str): The base URL of the homeserver corresponding to the requested domain. identity_server_url (str, optional): The base URL of the identity server corresponding to the requested domain, if any. 
""" homeserver_url: str = field() identity_server_url: Optional[str] = None @classmethod @verify(Schemas.discovery_info, DiscoveryInfoError) def from_dict( cls, parsed_dict: Dict[str, Any], ) -> Union[DiscoveryInfoResponse, DiscoveryInfoError]: homeserver_url = parsed_dict["m.homeserver"]["base_url"].rstrip("/") identity_server_url = ( parsed_dict.get( "m.identity_server", {}, ) .get("base_url", "") .rstrip("/") or None ) return cls(homeserver_url, identity_server_url) @dataclass class RegisterErrorResponse(ErrorResponse): pass @dataclass class RegisterResponse(Response): user_id: str = field() device_id: str = field() access_token: str = field() def __str__(self) -> str: return f"Registered {self.user_id}, device id {self.device_id}." @classmethod @verify(Schemas.register, RegisterErrorResponse) def from_dict(cls, parsed_dict): return cls( parsed_dict["user_id"], parsed_dict["device_id"], parsed_dict["access_token"], ) @dataclass class RegisterInteractiveError(ErrorResponse): pass @dataclass class RegisterInteractiveResponse(Response): stages: List[str] = field() params: Dict[str, Any] = field() session: str = field() completed: List[str] = field() user_id: str = field() device_id: str = field() access_token: str = field() @classmethod @verify(Schemas.register_flows, RegisterInteractiveError) def from_dict( cls, parsed_dict: Dict[Any, Any] ) -> Union[RegisterInteractiveResponse, RegisterInteractiveError]: for flow in parsed_dict["flows"]: stages = list(flow["stages"]) return cls( stages, parsed_dict["params"], parsed_dict["session"], parsed_dict.get("completed"), parsed_dict.get("user_id"), parsed_dict.get("device_id"), parsed_dict.get("access_token"), ) @dataclass class LoginInfoError(ErrorResponse): pass @dataclass class LoginInfoResponse(Response): flows: List[str] = field() @classmethod @verify(Schemas.login_info, LoginInfoError) def from_dict( cls, parsed_dict: Dict[Any, Any] ) -> Union[LoginInfoResponse, ErrorResponse]: flow_types = [flow["type"] for flow in 
parsed_dict["flows"]] return cls(flow_types) @dataclass class LoginResponse(Response): user_id: str = field() device_id: str = field() access_token: str = field() def __str__(self) -> str: return f"Logged in as {self.user_id}, device id: {self.device_id}." @classmethod @verify(Schemas.login, LoginError) def from_dict( cls, parsed_dict: Dict[Any, Any] ) -> Union[LoginResponse, ErrorResponse]: return cls( parsed_dict["user_id"], parsed_dict["device_id"], parsed_dict["access_token"], ) @dataclass class LogoutResponse(Response): def __str__(self) -> str: return "Logged out" @classmethod @verify(Schemas.empty, LogoutError) def from_dict( cls, parsed_dict: Dict[Any, Any] ) -> Union[LogoutResponse, ErrorResponse]: """Create a response for logout response from server.""" return cls() @dataclass class JoinedMembersResponse(Response): members: List[RoomMember] = field() room_id: str = field() @classmethod @verify(Schemas.joined_members, JoinedMembersError) def from_dict( cls, parsed_dict: Dict[Any, Any], room_id: str, ) -> Union[JoinedMembersResponse, ErrorResponse]: members = [] for user_id, user_info in parsed_dict["joined"].items(): user = RoomMember( user_id, user_info.get("display_name", None), user_info.get("avatar_url", None), ) members.append(user) return cls(members, room_id) @dataclass class JoinedRoomsResponse(Response): """A response containing a list of joined rooms. Attributes: rooms (List[str]): The rooms joined by the account. """ rooms: List[str] = field() @classmethod @verify(Schemas.joined_rooms, JoinedRoomsError) def from_dict( cls, parsed_dict: Dict[Any, Any], ) -> Union[JoinedRoomsResponse, ErrorResponse]: return cls(parsed_dict["joined_rooms"]) @dataclass class ContentRepositoryConfigResponse(Response): """A response for a successful content repository config request. Attributes: upload_size (Optional[int]): The maximum file size in bytes for an upload. If `None`, the limit is unknown. 
""" upload_size: Optional[int] = None @classmethod @verify(Schemas.content_repository_config, ContentRepositoryConfigError) def from_dict( cls, parsed_dict: dict, ) -> Union[ContentRepositoryConfigResponse, ErrorResponse]: return cls(parsed_dict.get("m.upload.size")) @dataclass class UploadResponse(Response): """A response representing a successful upload request.""" content_uri: str = field() @classmethod @verify(Schemas.upload, UploadError) def from_dict( cls, parsed_dict: Dict[Any, Any] ) -> Union[UploadResponse, ErrorResponse]: return cls( parsed_dict["content_uri"], ) @dataclass class DownloadResponse(FileResponse): """A response representing a successful download request.""" @classmethod def from_data( cls, data: Union[os.PathLike, bytes], content_type: str, filename: Optional[str] = None, ) -> Union[DownloadResponse, DownloadError]: if isinstance(data, (bytes, os.PathLike)): return cls(body=data, content_type=content_type, filename=filename) if isinstance(data, dict): return DownloadError.from_dict(data) return DownloadError("invalid data") @dataclass class MemoryDownloadResponse(DownloadResponse, MemoryFileResponse): """A response representing a successful download request with the download content stored in-memory. Attributes: body (bytes): The content of the download. content_type (str): The content type of the download. filename (Optional[str]): The filename of the download. """ @dataclass class DiskDownloadResponse(DownloadResponse, DiskFileResponse): """A response representing a successful download request with the download content stored on disk. Attributes: body (os.PathLike): The path to the downloaded file. content_type (str): The content type of the download. filename (Optional[str]): The filename of the download. 
""" body: os.PathLike = field() @dataclass class ThumbnailResponse(FileResponse): """A response representing a successful thumbnail request.""" @classmethod def from_data( cls, data: bytes, content_type: str, filename: Optional[str] = None ) -> Union[ThumbnailResponse, ThumbnailError]: if not content_type.startswith("image/"): return ThumbnailError(f"invalid content type: {content_type}") if isinstance(data, bytes): return cls(body=data, content_type=content_type, filename=filename) if isinstance(data, dict): return ThumbnailError.from_dict(data) return ThumbnailError("invalid data") @dataclass class RoomEventIdResponse(Response): event_id: str = field() room_id: str = field() @staticmethod def create_error(parsed_dict, _room_id): return ErrorResponse.from_dict(parsed_dict) @classmethod def from_dict( cls, parsed_dict: Dict[Any, Any], room_id: str, ) -> Union[RoomEventIdResponse, ErrorResponse]: try: validate_json(parsed_dict, Schemas.room_event_id) except (SchemaError, ValidationError): return cls.create_error(parsed_dict, room_id) return cls(parsed_dict["event_id"], room_id) class RoomSendResponse(RoomEventIdResponse): @staticmethod def create_error(parsed_dict, room_id): return RoomSendError.from_dict(parsed_dict, room_id) class DirectRoomsErrorResponse(ErrorResponse): pass @dataclass class DirectRoomsResponse(Response): """A response containing a list of direct rooms. Attributes: rooms (List[str]): The rooms joined by the account. """ rooms: Dict[str, List[str]] = field() @classmethod def from_dict( cls, parsed_dict: Dict[Any, Any], ) -> Union[DirectRoomsResponse, DirectRoomsErrorResponse]: if parsed_dict.get("errcode") is not None: # This user has no DM rooms that have been marked with m.direct. return DirectRoomsErrorResponse.from_dict(parsed_dict) return cls(parsed_dict) @dataclass class RoomGetStateResponse(Response): """A response containing the state of a room. Attributes: events (List): The events making up the room state. 
room_id (str): The ID of the room. """ events: List = field() room_id: str = field() @staticmethod def create_error(parsed_dict, room_id): return RoomGetStateError.from_dict(parsed_dict, room_id) @classmethod def from_dict( cls, parsed_dict: List[Dict[Any, Any]], room_id: str, ) -> Union[RoomGetStateResponse, RoomGetStateError]: try: validate_json(parsed_dict, Schemas.room_state) except (SchemaError, ValidationError): return cls.create_error(parsed_dict, room_id) return cls(parsed_dict, room_id) @dataclass class RoomGetStateEventResponse(Response): """A response containing the content of a specific bit of room state. Attributes: content (Dict): The content of the state event. event_type (str): The type of the state event. state_key (str): The key of the state event. room_id (str): The ID of the room that the state event comes from. """ content: Dict = field() event_type: str = field() state_key: str = field() room_id: str = field() @staticmethod def create_error(parsed_dict, room_id): return RoomGetStateEventError.from_dict(parsed_dict, room_id) @classmethod def from_dict( cls, parsed_dict: Dict[str, Any], event_type: str, state_key: str, room_id: str, ) -> Union[RoomGetStateEventResponse, RoomGetStateEventError]: return cls(parsed_dict, event_type, state_key, room_id) class RoomGetEventResponse(Response): """A response indicating successful room get event request. Attributes: event (Event): The requested event. 
""" event: Event = field() @classmethod @verify( Schemas.room_event, RoomGetEventError, pass_arguments=False, ) def from_dict( cls, parsed_dict: Dict[str, Any] ) -> Union[RoomGetEventResponse, RoomGetEventError]: event = Event.parse_event(parsed_dict) resp = cls() resp.event = event return resp class RoomPutStateResponse(RoomEventIdResponse): """A response indicating successful sending of room state.""" @staticmethod def create_error(parsed_dict, room_id): return RoomPutStateError.from_dict(parsed_dict, room_id) class RoomRedactResponse(RoomEventIdResponse): @staticmethod def create_error(parsed_dict, room_id): return RoomRedactError.from_dict(parsed_dict, room_id) @dataclass class RoomResolveAliasResponse(Response): """A response containing the result of resolving an alias. Attributes: room_alias (str): The alias of the room. room_id (str): The resolved id of the room. servers (List[str]): Servers participating in the room. """ room_alias: str = field() room_id: str = field() servers: List[str] = field() @classmethod @verify( Schemas.room_resolve_alias, RoomResolveAliasError, pass_arguments=False, ) def from_dict( cls, parsed_dict: Dict[Any, Any], room_alias: str, ) -> Union[RoomResolveAliasResponse, ErrorResponse]: room_id = parsed_dict["room_id"] servers = parsed_dict["servers"] return cls(room_alias, room_id, servers) @dataclass class RoomDeleteAliasResponse(Response): """A response containing the result of deleting an alias.""" room_alias: str = field() @classmethod def from_dict( cls, parsed_dict: Dict[Any, Any], room_alias: str ) -> Union[RoomDeleteAliasResponse, ErrorResponse]: return cls(room_alias) @dataclass class RoomPutAliasResponse(Response): """A response containing the result of adding an alias.""" room_alias: str = field() room_id: str = field() @classmethod def from_dict( cls, parsed_dict: Dict[Any, Any], room_alias: str, room_id: str ) -> Union[RoomPutAliasResponse, ErrorResponse]: return cls(room_alias, room_id) @dataclass class 
RoomGetVisibilityResponse(Response): """A response containing the result of a get visibility request.""" room_id: str = field() visibility: str = field() @classmethod @verify( Schemas.room_get_visibility, RoomGetVisibilityError, pass_arguments=False, ) def from_dict( cls, parsed_dict: Dict[Any, Any], room_id: str ) -> Union[RoomGetVisibilityResponse, ErrorResponse]: visibility = parsed_dict["visibility"] return cls(room_id, visibility) @dataclass class SpaceGetHierarchyResponse(Response): """A response indicating successful space get hierarchy request. Attributes: next_batch: The token to supply in the from parameter of the next call. rooms: The rooms in the space. """ next_batch: str = field() rooms: List = field() @classmethod @verify( Schemas.space_hierarchy, SpaceGetHierarchyError, pass_arguments=False, ) def from_dict( cls, parsed_dict: Dict[str, Any] ) -> Union[SpaceGetHierarchyResponse, SpaceGetHierarchyError]: next_batch = parsed_dict.get("next_batch") rooms = parsed_dict["rooms"] resp = cls(next_batch, rooms) return resp class EmptyResponse(Response): @staticmethod def create_error(parsed_dict): return ErrorResponse.from_dict(parsed_dict) @classmethod def from_dict(cls, parsed_dict: Dict[Any, Any]) -> Union[Any, ErrorResponse]: try: validate_json(parsed_dict, Schemas.empty) except (SchemaError, ValidationError): return cls.create_error(parsed_dict) return cls() @dataclass class _EmptyResponseWithRoomId(Response): room_id: str = field() @staticmethod def create_error(parsed_dict, room_id): return _ErrorWithRoomId.from_dict(parsed_dict, room_id) @classmethod def from_dict( cls, parsed_dict: Dict[Any, Any], room_id: str ) -> Union[Any, ErrorResponse]: try: validate_json(parsed_dict, Schemas.empty) except (SchemaError, ValidationError): return cls.create_error(parsed_dict, room_id) return cls(room_id) class RoomKickResponse(EmptyResponse): @staticmethod def create_error(parsed_dict): return RoomKickError.from_dict(parsed_dict) class 
RoomBanResponse(EmptyResponse): @staticmethod def create_error(parsed_dict): return RoomBanError.from_dict(parsed_dict) class RoomUnbanResponse(EmptyResponse): @staticmethod def create_error(parsed_dict): return RoomUnbanError.from_dict(parsed_dict) class RoomInviteResponse(EmptyResponse): @staticmethod def create_error(parsed_dict): return RoomInviteError.from_dict(parsed_dict) @dataclass class ShareGroupSessionResponse(Response): """Response representing a successful group sessions sharing request. Attributes: room_id (str): The room id of the group session. users_shared_with (Set[Tuple[str, str]]): A set containing a tuple of user id device id pairs with whom we shared the group session in this request. """ room_id: str = field() users_shared_with: set = field() @classmethod @verify(Schemas.empty, ShareGroupSessionError) def from_dict( cls, _: Dict[Any, Any], room_id: str, users_shared_with: Set[Tuple[str, str]], ) -> Union[ShareGroupSessionResponse, ErrorResponse]: """Create a response from the json dict the server returns. Args: parsed_dict (Dict): The dict containing the raw json response. room_id (str): The room id of the room to which the group session belongs to. users_shared_with (Set[Tuple[str, str]]): A set containing a tuple of user id device id pairs with whom we shared the group session in this request. 
""" return cls(room_id, users_shared_with) class RoomTypingResponse(_EmptyResponseWithRoomId): """A response representing a successful room typing request.""" @staticmethod def create_error(parsed_dict, room_id): return RoomTypingError.from_dict(parsed_dict, room_id) class UpdateReceiptMarkerResponse(EmptyResponse): @staticmethod def create_error(parsed_dict): return UpdateReceiptMarkerError.from_dict(parsed_dict) class RoomReadMarkersResponse(_EmptyResponseWithRoomId): """A response representing a successful room read markers request.""" @staticmethod def create_error(parsed_dict, room_id): return RoomTypingError.from_dict(parsed_dict, room_id) @dataclass class DeleteDevicesAuthResponse(Response): session: str = field() flows: Dict = field() params: Dict = field() @classmethod @verify(Schemas.delete_devices, DeleteDevicesError) def from_dict( cls, parsed_dict: Dict[Any, Any], ) -> Union[DeleteDevicesAuthResponse, ErrorResponse]: return cls(parsed_dict["session"], parsed_dict["flows"], parsed_dict["params"]) class DeleteDevicesResponse(EmptyResponse): @staticmethod def create_error(parsed_dict): return DeleteDevicesError.from_dict(parsed_dict) @dataclass class RoomMessagesResponse(Response): room_id: str = field() chunk: List[Union[Event, BadEventType]] = field() start: str = field() end: str = field(default=None) @classmethod @verify(Schemas.room_messages, RoomMessagesError) def from_dict( cls, parsed_dict: Dict[Any, Any], room_id: str, ) -> Union[RoomMessagesResponse, ErrorResponse]: chunk: List[Union[Event, BadEventType]] = [] chunk = SyncResponse._get_room_events(parsed_dict["chunk"]) return cls(room_id, chunk, parsed_dict["start"], parsed_dict.get("end")) @dataclass class RoomIdResponse(Response): room_id: str = field() @staticmethod def create_error(parsed_dict): return ErrorResponse.from_dict(parsed_dict) @classmethod def from_dict( cls, parsed_dict: Dict[Any, Any] ) -> Union[RoomIdResponse, ErrorResponse]: try: validate_json(parsed_dict, Schemas.room_id) 
except (SchemaError, ValidationError): return cls.create_error(parsed_dict) return cls(parsed_dict["room_id"]) @dataclass class RoomCreateResponse(Response): """Response representing a successful create room request.""" room_id: str = field() @classmethod @verify( Schemas.room_create_response, RoomCreateError, pass_arguments=False, ) def from_dict( cls, parsed_dict: Dict[Any, Any], ) -> Union[RoomCreateResponse, RoomCreateError]: return cls(parsed_dict["room_id"]) class JoinResponse(RoomIdResponse): @staticmethod def create_error(parsed_dict): return JoinError.from_dict(parsed_dict) class RoomKnockResponse(RoomIdResponse): @staticmethod def create_error(parsed_dict): return RoomKnockError.from_dict(parsed_dict) class RoomLeaveResponse(EmptyResponse): @staticmethod def create_error(parsed_dict): return RoomLeaveError.from_dict(parsed_dict) class RoomForgetResponse(_EmptyResponseWithRoomId): """Response representing a successful forget room request.""" @staticmethod def create_error(parsed_dict, room_id): return RoomForgetError.from_dict(parsed_dict, room_id) @dataclass class GetOpenIDTokenResponse(Response): access_token: str = field() expires_in: int = field() matrix_server_name: str = field() token_type: str = field() @classmethod @verify(Schemas.get_openid_token, GetOpenIDTokenError) def from_dict( cls, parsed_dict: Dict[Any, Any] ) -> Union[GetOpenIDTokenResponse, ErrorResponse]: access_token = parsed_dict["access_token"] expires_in = parsed_dict["expires_in"] matrix_server_name = parsed_dict["matrix_server_name"] token_type = parsed_dict["token_type"] return cls(access_token, expires_in, matrix_server_name, token_type) @dataclass class KeysUploadResponse(Response): curve25519_count: int = field() signed_curve25519_count: int = field() @classmethod @verify(Schemas.keys_upload, KeysUploadError) def from_dict( cls, parsed_dict: Dict[Any, Any] ) -> Union[KeysUploadResponse, ErrorResponse]: counts = parsed_dict["one_time_key_counts"] return cls(counts["curve25519"], 
counts["signed_curve25519"]) @dataclass class KeysQueryResponse(Response): device_keys: Dict = field() failures: Dict = field() changed: Dict[str, Dict[str, Any]] = field( init=False, default_factory=dict, ) @classmethod @verify(Schemas.keys_query, KeysQueryError) def from_dict( cls, parsed_dict: Dict[Any, Any] ) -> Union[KeysQueryResponse, ErrorResponse]: device_keys = parsed_dict["device_keys"] failures = parsed_dict["failures"] return cls(device_keys, failures) @dataclass class KeysClaimResponse(Response): one_time_keys: Dict[Any, Any] = field() failures: Dict[Any, Any] = field() room_id: str = "" @classmethod @verify(Schemas.keys_claim, KeysClaimError) def from_dict( cls, parsed_dict: Dict[Any, Any], room_id: str = "", ) -> Union[KeysClaimResponse, ErrorResponse]: one_time_keys = parsed_dict["one_time_keys"] failures = parsed_dict["failures"] return cls(one_time_keys, failures, room_id) @dataclass class DevicesResponse(Response): devices: List[Device] = field() @classmethod @verify(Schemas.devices, DevicesError) def from_dict( cls, parsed_dict: Dict[Any, Any] ) -> Union[DevicesResponse, ErrorResponse]: devices = [] for device_dict in parsed_dict["devices"]: try: device = Device.from_dict(device_dict) except ValueError: continue devices.append(device) return cls(devices) @dataclass class RoomKeyRequestError(ErrorResponse): """Response representing a failed room key request.""" pass @dataclass class RoomKeyRequestResponse(Response): """Response representing a successful room key request. Attributes: request_id (str): The id of the that uniquely identifies this key request that was requested, if we receive a to_device event it will contain the same request id. session_id (str): The id of the session that we requested. room_id (str): The id of the room that the session belongs to. algorithm (str): The encryption algorithm of the session. 
""" request_id: str = field() session_id: str = field() room_id: str = field() algorithm: str = field() @classmethod @verify(Schemas.empty, RoomKeyRequestError, False) def from_dict(cls, _, request_id, session_id, room_id, algorithm): """Create a RoomKeyRequestResponse from a json response. Args: parsed_dict (Dict): The dictionary containing the json response. request_id (str): The id of that uniquely identifies this key request that was requested, if we receive a to_device event it will contain the same request id. session_id (str): The id of the session that we requested. room_id (str): The id of the room that the session belongs to. algorithm (str): The encryption algorithm of the session. """ return cls(request_id, session_id, room_id, algorithm) class UpdateDeviceResponse(EmptyResponse): @staticmethod def create_error(parsed_dict): return UpdateDeviceError.from_dict(parsed_dict) @dataclass class ProfileGetResponse(Response): """Response representing a successful get profile request. Attributes: displayname (str, optional): The display name of the user. None if the user doesn't have a display name. avatar_url (str, optional): The matrix content URI for the user's avatar. None if the user doesn't have an avatar. other_info (dict): Contains any other information returned for the user's profile. 
""" displayname: Optional[str] = None avatar_url: Optional[str] = None other_info: Dict[Any, Any] = field(default_factory=dict) def __str__(self) -> str: return f"Display name: {self.displayname}, avatar URL: {self.avatar_url}, other info: {self.other_info}" @classmethod @verify(Schemas.get_profile, ProfileGetError) def from_dict( cls, parsed_dict: Dict[Any, Any] ) -> Union[ProfileGetResponse, ErrorResponse]: return cls( parsed_dict.get("displayname"), parsed_dict.get("avatar_url"), { k: v for k, v in parsed_dict.items() if k not in ("displayname", "avatar_url") }, ) @dataclass class ProfileGetDisplayNameResponse(Response): """Response representing a successful get display name request. Attributes: displayname (str, optional): The display name of the user. None if the user doesn't have a display name. """ displayname: Optional[str] = None def __str__(self) -> str: return f"Display name: {self.displayname}" @classmethod @verify(Schemas.get_displayname, ProfileGetDisplayNameError) def from_dict( cls, parsed_dict: (Dict[Any, Any]), ) -> Union[ProfileGetDisplayNameResponse, ErrorResponse]: return cls(parsed_dict.get("displayname")) class ProfileSetDisplayNameResponse(EmptyResponse): @staticmethod def create_error(parsed_dict): return ProfileSetDisplayNameError.from_dict(parsed_dict) @dataclass class ProfileGetAvatarResponse(Response): """Response representing a successful get avatar request. Attributes: avatar_url (str, optional): The matrix content URI for the user's avatar. None if the user doesn't have an avatar. 
""" avatar_url: Optional[str] = None def __str__(self) -> str: return f"Avatar URL: {self.avatar_url}" @classmethod @verify(Schemas.get_avatar, ProfileGetAvatarError) def from_dict( cls, parsed_dict: (Dict[Any, Any]), ) -> Union[ProfileGetAvatarResponse, ErrorResponse]: return cls(parsed_dict.get("avatar_url")) class ProfileSetAvatarResponse(EmptyResponse): @staticmethod def create_error(parsed_dict): return ProfileSetAvatarError.from_dict(parsed_dict) @dataclass class PresenceGetResponse(Response): """Response representing a successful get presence request. Attributes: user_id (str): The user´s id presence (str): The user's presence state. One of: ["online", "offline", "unavailable"] last_active_ago (int, optional): The length of time in milliseconds since an action was performed by this user. None if not set. currently_active (bool, optional): Whether the user is currently active. None if not set. status_msg (str, optional): The state message for this user. None if not set. """ user_id: str presence: str last_active_ago: Optional[int] currently_active: Optional[bool] status_msg: Optional[str] @classmethod @verify(Schemas.get_presence, PresenceGetError, pass_arguments=False) def from_dict( cls, parsed_dict: Dict[Any, Any], user_id: str ) -> Union[PresenceGetResponse, PresenceGetError]: return cls( user_id, parsed_dict.get("presence", "offline"), parsed_dict.get("last_active_ago"), parsed_dict.get("currently_active"), parsed_dict.get("status_msg"), ) class PresenceSetResponse(EmptyResponse): """Response representing a successful set presence request.""" @staticmethod def create_error(parsed_dict): return PresenceSetError.from_dict(parsed_dict) @dataclass class ToDeviceError(ErrorResponse): """Response representing a unsuccessful room key request.""" to_device_message: Optional[ToDeviceMessage] = None @classmethod def from_dict(cls, parsed_dict, message): try: validate_json(parsed_dict, Schemas.error) except (SchemaError, ValidationError): return cls("unknown 
error", None, message) return cls(parsed_dict["error"], parsed_dict["errcode"], message) @dataclass class ToDeviceResponse(Response): """Response representing a successful room key request.""" to_device_message: ToDeviceMessage = field() @classmethod @verify(Schemas.empty, ToDeviceError) def from_dict(cls, parsed_dict, message): """Create a ToDeviceResponse from a json response.""" return cls(message) @dataclass class RoomContextError(_ErrorWithRoomId): """Response representing a unsuccessful room context request.""" @dataclass class RoomContextResponse(Response): """Room event context response. This Response holds a number of events that happened just before and after a specified event. Attributes: room_id(str): The room id of the room which the events belong to. start(str): A token that can be used to paginate backwards with. end(str): A token that can be used to paginate forwards with. events_before(List[Event]): A list of room events that happened just before the requested event, in reverse-chronological order. event(Event): Details of the requested event. events_after(List[Event]): A list of room events that happened just after the requested event, in chronological order. state(List[Event]): The state of the room at the last event returned. 
""" room_id: str = field() start: str = field() end: str = field() event: Optional[Union[Event, BadEventType]] = field() events_before: List[Union[Event, BadEventType]] = field() events_after: List[Union[Event, BadEventType]] = field() state: List[Union[Event, BadEventType]] = field() @classmethod @verify(Schemas.room_context, RoomContextError) def from_dict( cls, parsed_dict: Dict[Any, Any], room_id: str, ) -> Union[RoomContextResponse, ErrorResponse]: events_before = SyncResponse._get_room_events(parsed_dict["events_before"]) events_after = SyncResponse._get_room_events(parsed_dict["events_after"]) event = Event.parse_event(parsed_dict["event"]) state = SyncResponse._get_room_events(parsed_dict["state"]) return cls( room_id, parsed_dict["start"], parsed_dict["end"], event, events_before, events_after, state, ) @dataclass class SyncResponse(Response): next_batch: str = field() rooms: Rooms = field() device_key_count: DeviceOneTimeKeyCount = field() device_list: DeviceList = field() to_device_events: List[ToDeviceEvent] = field() presence_events: List[PresenceEvent] = field() account_data_events: List[AccountDataEvent] = field(default_factory=list) def __str__(self) -> str: result = [] for room_id, room_info in self.rooms.join.items(): room_header = f" Messages for room {room_id}:\n " messages = (str(event) for event in room_info.timeline.events) room_message = room_header + "\n ".join(messages) result.append(room_message) if len(self.to_device_events) > 0: result.append(" Device messages:") for event in self.to_device_events: result.append(f" {event}") # noqa: PERF401 body = "\n".join(result) string = f"Sync response until batch: {self.next_batch}:\n{body}" return string @staticmethod def _get_room_events( parsed_dict: List[Dict[Any, Any]] ) -> List[Union[Event, BadEventType]]: events: List[Union[Event, BadEventType]] = [] for event_dict in parsed_dict: event = Event.parse_event(event_dict) if event: events.append(event) return events @staticmethod def 
_get_to_device(parsed_dict: Dict[Any, Any]) -> List[ToDeviceEvent]: return [ ToDeviceEvent.parse_event(event_dict) for event_dict in parsed_dict.get("events", []) ] @staticmethod def _get_timeline(parsed_dict: Dict[Any, Any]) -> Timeline: validate_json(parsed_dict, Schemas.room_timeline) events = SyncResponse._get_room_events(parsed_dict.get("events", [])) return Timeline( events, parsed_dict.get("limited", False), parsed_dict.get("prev_batch") ) @staticmethod def _get_state(parsed_dict: Dict[Any, Any]) -> List[Union[Event, BadEventType]]: validate_json(parsed_dict, Schemas.sync_room_state) events = SyncResponse._get_room_events( parsed_dict.get("events", []), ) return events @staticmethod def _get_invite_state(parsed_dict): validate_json(parsed_dict, Schemas.sync_room_state) events = [] for event_dict in parsed_dict.get("events", []): event = InviteEvent.parse_event(event_dict) if event: events.append(event) return events @staticmethod def _get_ephemeral_events(parsed_dict): events = [] for event_dict in parsed_dict: event = EphemeralEvent.parse_event(event_dict) if event: events.append(event) return events @staticmethod def _get_join_info( state_events: List[Any], timeline_events: List[Any], prev_batch: Optional[str], limited: bool, ephemeral_events: List[Any], summary_events: Dict[str, Any], unread_notification_events: Dict[str, Any], account_data_events: List[Any], ) -> RoomInfo: state = SyncResponse._get_room_events(state_events) events = SyncResponse._get_room_events(timeline_events) timeline = Timeline(events, limited, prev_batch) ephemeral_event_list = SyncResponse._get_ephemeral_events(ephemeral_events) summary = RoomSummary( summary_events.get("m.invited_member_count"), summary_events.get("m.joined_member_count"), summary_events.get("m.heroes"), ) unread_notifications = UnreadNotifications( unread_notification_events.get("notification_count"), unread_notification_events.get("highlight_count"), ) account_data = 
RoomInfo.parse_account_data(account_data_events) return RoomInfo( timeline, state, ephemeral_event_list, account_data, summary, unread_notifications, ) @staticmethod def _get_room_info(parsed_dict: Dict[Any, Any]) -> Rooms: joined_rooms: Dict[str, RoomInfo] = {} invited_rooms: Dict[str, InviteInfo] = {} left_rooms: Dict[str, RoomInfo] = {} for room_id, room_dict in parsed_dict.get("invite", {}).items(): state = SyncResponse._get_invite_state(room_dict.get("invite_state", {})) invite_info = InviteInfo(state) invited_rooms[room_id] = invite_info for room_id, room_dict in parsed_dict.get("leave", {}).items(): state = SyncResponse._get_state(room_dict.get("state", {})) timeline = SyncResponse._get_timeline(room_dict.get("timeline", {})) leave_info = RoomInfo(timeline, state, [], []) left_rooms[room_id] = leave_info for room_id, room_dict in parsed_dict.get("join", {}).items(): join_info = SyncResponse._get_join_info( room_dict.get("state", {}).get("events", []), room_dict.get("timeline", {}).get("events", []), room_dict.get("timeline", {}).get("prev_batch"), room_dict.get("timeline", {}).get("limited", False), room_dict.get("ephemeral", {}).get("events", []), room_dict.get("summary", {}), room_dict.get("unread_notifications", {}), room_dict.get("account_data", {}).get("events", []), ) joined_rooms[room_id] = join_info return Rooms(invited_rooms, joined_rooms, left_rooms) @staticmethod def _get_presence(parsed_dict) -> List[PresenceEvent]: presence_dicts = parsed_dict.get("presence", {}).get("events", []) return [ PresenceEvent.from_dict(presence_dict) for presence_dict in presence_dicts ] @staticmethod def _get_account_data( parsed_dict: Dict[str, Any], ) -> Generator[AccountDataEvent, None, None]: for ev_dict in parsed_dict.get("account_data", {}).get("events", []): yield AccountDataEvent.parse_event(ev_dict) @classmethod @verify(Schemas.sync, SyncError, False) def from_dict( cls, parsed_dict: Dict[Any, Any], ) -> Union[SyncResponse, ErrorResponse]: to_device = 
cls._get_to_device(parsed_dict.get("to_device", {})) key_count_dict = parsed_dict.get("device_one_time_keys_count", {}) key_count = DeviceOneTimeKeyCount( key_count_dict.get("curve25519"), key_count_dict.get("signed_curve25519") ) devices = DeviceList( parsed_dict.get("device_lists", {}).get("changed", []), parsed_dict.get("device_lists", {}).get("left", []), ) presence_events = SyncResponse._get_presence(parsed_dict) rooms = SyncResponse._get_room_info(parsed_dict.get("rooms", {})) return SyncResponse( parsed_dict["next_batch"], rooms, key_count, devices, to_device, presence_events, list(SyncResponse._get_account_data(parsed_dict)), ) class UploadFilterError(ErrorResponse): pass @dataclass class UploadFilterResponse(Response): """Response representing a successful filter upload request. Attributes: filter_id (str): A filter ID that may be used in future requests to restrict which events are returned to the client. """ filter_id: str = field() @classmethod @verify(Schemas.upload_filter, UploadFilterError) def from_dict( cls, parsed_dict: Dict[Any, Any], ) -> Union[UploadFilterResponse, UploadFilterError]: return cls(parsed_dict["filter_id"]) class WhoamiError(ErrorResponse): pass @dataclass class WhoamiResponse(Response): user_id: str = field() device_id: Optional[str] = field() is_guest: Optional[bool] = field() @classmethod @verify(Schemas.whoami, WhoamiError) def from_dict( cls, parsed_dict: Dict[Any, Any], ) -> Union[WhoamiResponse, WhoamiError]: return cls( parsed_dict["user_id"], parsed_dict.get("device_id"), parsed_dict.get("is_guest", False), ) @dataclass class SetPushRuleResponse(EmptyResponse): @staticmethod def create_error(parsed_dict: Dict[str, Any]): return SetPushRuleError.from_dict(parsed_dict) class SetPushRuleError(ErrorResponse): pass @dataclass class DeletePushRuleResponse(EmptyResponse): @staticmethod def create_error(parsed_dict: Dict[str, Any]): return DeletePushRuleError.from_dict(parsed_dict) class DeletePushRuleError(ErrorResponse): pass 
@dataclass class EnablePushRuleResponse(EmptyResponse): @staticmethod def create_error(parsed_dict: Dict[str, Any]): return EnablePushRuleError.from_dict(parsed_dict) class EnablePushRuleError(ErrorResponse): pass @dataclass class SetPushRuleActionsResponse(EmptyResponse): @staticmethod def create_error(parsed_dict: Dict[str, Any]): return SetPushRuleActionsError.from_dict(parsed_dict) class SetPushRuleActionsError(ErrorResponse): pass @dataclass class DeleteAliasResponse(EmptyResponse): @staticmethod def create_error(parsed_dict: Dict[str, Any]): return DeletePushRuleError.from_dict(parsed_dict) class DeleteAliasError(ErrorResponse): pass @dataclass class PutAliasResponse(EmptyResponse): @staticmethod def create_error(parsed_dict: Dict[str, Any]): return DeletePushRuleError.from_dict(parsed_dict) class PutAliasError(ErrorResponse): pass class RoomUpdateAliasError(ErrorResponse): pass class RoomUpdateAliasResponse(EmptyResponse): pass class RoomUpgradeError(ErrorResponse): pass class RoomUpgradeResponse(RoomCreateResponse): pass matrix-nio-0.24.0/nio/rooms.py000066400000000000000000000440511455215747700162070ustar00rootroot00000000000000# Copyright © 2018 Damir Jelić # Copyright © 2021 Famedly GmbH # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
from __future__ import annotations import logging from collections import defaultdict from typing import DefaultDict, Dict, List, Optional, Set, Tuple, Union from .events import ( AccountDataEvent, EphemeralEvent, Event, FullyReadEvent, InviteAliasEvent, InviteMemberEvent, InviteNameEvent, PowerLevels, PowerLevelsEvent, Receipt, ReceiptEvent, RoomAliasEvent, RoomAvatarEvent, RoomCreateEvent, RoomEncryptionEvent, RoomGuestAccessEvent, RoomHistoryVisibilityEvent, RoomJoinRulesEvent, RoomMemberEvent, RoomNameEvent, RoomSpaceChildEvent, RoomSpaceParentEvent, RoomTopicEvent, RoomUpgradeEvent, TagEvent, TypingNoticeEvent, ) from .responses import RoomSummary, UnreadNotifications logger = logging.getLogger(__name__) __all__ = [ "MatrixRoom", "MatrixInvitedRoom", "MatrixUser", ] class MatrixRoom: """Represents a Matrix room.""" def __init__(self, room_id: str, own_user_id: str, encrypted: bool = False) -> None: """Initialize a MatrixRoom object.""" # yapf: disable self.room_id: str = room_id self.own_user_id = own_user_id self.creator: str = "" self.federate: bool = True self.room_version: str = "1" self.room_type: Optional[str] = None self.guest_access: str = "forbidden" self.join_rule: str = "invite" self.history_visibility: str = "shared" self.canonical_alias: Optional[str] = None self.topic: Optional[str] = None self.name: Optional[str] = None self.parents: Set[str] = set() self.children: Set[str] = set() self.users: Dict[str, MatrixUser] = {} self.invited_users: Dict[str, MatrixUser] = {} self.names: DefaultDict[str, List[str]] = defaultdict(list) self.encrypted: bool = encrypted self.power_levels: PowerLevels = PowerLevels() self.typing_users: List[str] = [] self.read_receipts: Dict[str, Receipt] = {} self.summary: Optional[RoomSummary] = None self.room_avatar_url: Optional[str] = None self.fully_read_marker: Optional[str] = None self.tags: Dict[str, Optional[Dict[str, float]]] = {} self.unread_notifications: int = 0 self.unread_highlights: int = 0 
self.members_synced: bool = False self.replacement_room: Union[str, None] = None # yapf: enable @property def display_name(self) -> str: """Calculate display name for a room. Prefer returning the room name if it exists, falling back to a group-style name if not. Follows: https://matrix.org/docs/spec/client_server/r0.6.0#id342 """ return self.named_room_name() or self.group_name() def named_room_name(self) -> Optional[str]: """Return the name of the room if it's a named room, otherwise None.""" return self.name or self.canonical_alias or None def group_name(self) -> str: """Return the group-style name of the room. In other words, a display name based on the names of room members. This is used for ad-hoc groups of people (usually direct chats). """ empty, user_ids, others = self.group_name_structure() names = [self.user_name(u) or u for u in user_ids] if others: text = f"{', '.join(names)} and {others} other{'' if others == 1 else 's'}" elif len(names) == 0: text = "" elif len(names) == 1: text = names[0] else: text = f"{', '.join(names[:-1])} and {names[-1]}" if empty and text: text = f"Empty Room (had {text})" elif empty: text = "Empty Room" return text def group_name_structure(self) -> Tuple[bool, List[str], int]: """Get if room is empty, ID for listed users and the N others count.""" try: heroes, joined, invited = self._summary_details() except ValueError: users = [ u for u in sorted(self.users, key=lambda u: self.user_name(u)) if u != self.own_user_id ] empty = not users if len(users) <= 5: return (empty, users, 0) return (empty, users[:5], len(users) - 5) empty = self.member_count <= 1 if len(heroes) >= self.member_count - 1: return (empty, heroes, 0) return (empty, heroes, self.member_count - 1 - len(heroes)) def user_name(self, user_id: str) -> Optional[str]: """Get disambiguated display name for a user. Returns display name of a user if display name is unique or returns a display name in form " ()" if there is more than one user with same display name. 
""" if user_id not in self.users: return None user = self.users[user_id] if len(self.names[user.name]) > 1: return user.disambiguated_name return user.name def user_name_clashes(self, name: str) -> List[str]: """Get a list of users that have same display name.""" return self.names[name] def avatar_url(self, user_id: str) -> Optional[str]: """Get avatar url for a user. Returns a matrix content URI, or None if the user has no avatar. """ if user_id not in self.users: return None return self.users[user_id].avatar_url @property def gen_avatar_url(self) -> Optional[str]: """ Get the calculated room's avatar url. Either the room's avatar if one is set, or the avatar of the first user that's not ourselves if the room is an unnamed group or has exactly two users. """ if self.room_avatar_url: return self.room_avatar_url try: heroes, _, _ = self._summary_details() except ValueError: if self.is_group and len(self.users) == 2: return self.avatar_url( next( u for u in sorted(self.users, key=lambda u: self.user_name(u)) if u != self.own_user_id ) ) return None if self.is_group and self.member_count == 2 and len(heroes) >= 1: return self.avatar_url(heroes[0]) return None @property def machine_name(self) -> str: """Calculate an unambiguous, unique machine name for a room. Either use the more human-friendly canonical alias, if it exists, or the internal room ID if not. """ return self.canonical_alias or self.room_id @property def is_named(self) -> bool: """Determine whether a room is named. A named room is a room with either the name or a canonical alias set. """ return bool(self.canonical_alias or self.name) @property def is_group(self) -> bool: """Determine whether a room is an ad-hoc group (often a direct chat). A group is an unnamed room with no canonical alias. 
""" return not self.is_named def add_member( self, user_id: str, display_name: Optional[str], avatar_url: Optional[str], invited: bool = False, ) -> bool: if user_id in self.users: return False level = self.power_levels.users.get( user_id, self.power_levels.defaults.users_default, ) user = MatrixUser(user_id, display_name, avatar_url, level, invited) self.users[user_id] = user if invited: self.invited_users[user_id] = user name = display_name if display_name else user_id self.names[name].append(user_id) return True def remove_member(self, user_id: str) -> bool: user = self.users.pop(user_id, None) if user: self.names[user.name].remove(user.user_id) invited_user = self.invited_users.pop(user_id, None) if invited_user: try: self.names[invited_user.name].remove(invited_user.user_id) except ValueError: pass return bool(user or invited_user) def handle_membership( self, event: Union[RoomMemberEvent, InviteMemberEvent], ) -> bool: """Handle a membership event for the room. Args: event (RoomMemberEvent): The event that should be handled that updates the room state. Returns True if the member list of the room has changed False otherwise. 
""" target_user = event.state_key invited = event.membership == "invite" if event.membership in ("invite", "join"): # Add member if not already present in self.users, # or the member is invited but not present in self.invited_users if target_user not in self.users or ( invited and target_user not in self.invited_users ): display_name = event.content.get("displayname", None) avatar_url = event.content.get("avatar_url", None) return self.add_member( target_user, display_name, avatar_url, invited, ) user = self.users[target_user] # Handle membership change user.invited = invited if not invited and target_user in self.invited_users: del self.invited_users[target_user] # Handle profile changes if "displayname" in event.content: self.names[user.name].remove(user.user_id) user.display_name = event.content["displayname"] self.names[user.name].append(user.user_id) if "avatar_url" in event.content: user.avatar_url = event.content["avatar_url"] return False elif event.membership in ("leave", "ban"): return self.remove_member(target_user) return False def handle_ephemeral_event(self, event: EphemeralEvent) -> None: if isinstance(event, TypingNoticeEvent): self.typing_users = event.users if isinstance(event, ReceiptEvent): read_receipts = filter(lambda x: x.receipt_type == "m.read", event.receipts) for read_receipt in read_receipts: self.read_receipts[read_receipt.user_id] = read_receipt def handle_event(self, event: Event) -> None: logger.info( f"Room {self.room_id} handling event of type {type(event).__name__}" ) if isinstance(event, RoomCreateEvent): self.creator = event.creator self.federate = event.federate self.room_version = event.room_version self.room_type = event.room_type elif isinstance(event, RoomGuestAccessEvent): self.guest_access = event.guest_access elif isinstance(event, RoomHistoryVisibilityEvent): self.history_visibility = event.history_visibility elif isinstance(event, RoomJoinRulesEvent): self.join_rule = event.join_rule elif isinstance(event, 
RoomNameEvent): self.name = event.name elif isinstance(event, RoomAliasEvent): self.canonical_alias = event.canonical_alias elif isinstance(event, RoomTopicEvent): self.topic = event.topic elif isinstance(event, RoomAvatarEvent): self.room_avatar_url = event.avatar_url elif isinstance(event, RoomEncryptionEvent): self.encrypted = True elif isinstance(event, RoomUpgradeEvent): self.replacement_room = event.replacement_room elif isinstance(event, PowerLevelsEvent): self.power_levels.update(event.power_levels) # Update the power levels of the joined users for user_id, level in self.power_levels.users.items(): if user_id in self.users: logger.info( f"Changing power level for user {user_id} from {self.users[user_id].power_level} to {level}" ) self.users[user_id].power_level = level elif isinstance(event, RoomSpaceParentEvent): if "via" in event.source.get("content", {}): self.parents.add(event.state_key) else: self.parents.discard(event.state_key) elif isinstance(event, RoomSpaceChildEvent): if "via" in event.source.get("content", {}): self.children.add(event.state_key) else: self.children.discard(event.state_key) def handle_account_data(self, event: AccountDataEvent) -> None: if isinstance(event, FullyReadEvent): self.fully_read_marker = event.event_id if isinstance(event, TagEvent): self.tags = event.tags def update_unread_notifications(self, unread: UnreadNotifications) -> None: if unread.notification_count is not None: self.unread_notifications = unread.notification_count if unread.highlight_count is not None: self.unread_highlights = unread.highlight_count def update_summary(self, summary: RoomSummary) -> None: if not self.summary: self.summary = summary return if summary.joined_member_count is not None: self.summary.joined_member_count = summary.joined_member_count if summary.invited_member_count is not None: self.summary.invited_member_count = summary.invited_member_count if summary.heroes is not None: self.summary.heroes = summary.heroes def 
_summary_details(self) -> Tuple[List[str], int, int]: """Return the summary attributes if it can be used for calculations.""" valid = bool( self.summary is not None and self.summary.joined_member_count is not None and self.summary.invited_member_count is not None, ) if not valid: raise ValueError("Unusable summary") return ( # type: ignore self.summary.heroes or [], # type: ignore self.summary.joined_member_count, # type: ignore self.summary.invited_member_count, # type: ignore ) @property def joined_count(self) -> int: try: return self._summary_details()[1] except ValueError: return len(tuple(u for u in self.users.values() if not u.invited)) @property def invited_count(self) -> int: try: return self._summary_details()[2] except ValueError: return len(tuple(u for u in self.users.values() if u.invited)) @property def member_count(self) -> int: try: _, joined, invited = self._summary_details() except ValueError: return len(self.users) return joined + invited class MatrixInvitedRoom(MatrixRoom): def __init__(self, room_id: str, own_user_id: str) -> None: self.inviter: Optional[str] = None super().__init__(room_id, own_user_id) def handle_membership( self, event: Union[RoomMemberEvent, InviteMemberEvent], ) -> bool: """Handle a membership event for the invited room. Args: event (RoomMemberEvent): The event that should be handled that updates the room state. Returns True if the member list of the room has changed False otherwise. 
""" if event.membership == "invite" and event.state_key == self.own_user_id: self.inviter = event.sender return super().handle_membership(event) def handle_event(self, event: Event) -> None: logger.info( f"Room {self.room_id} handling event of type {type(event).__name__}" ) if isinstance(event, InviteMemberEvent): self.handle_membership(event) elif isinstance(event, InviteNameEvent): self.name = event.name elif isinstance(event, InviteAliasEvent): self.canonical_alias = event.canonical_alias class MatrixUser: def __init__( self, user_id: str, display_name: Optional[str] = None, avatar_url: Optional[str] = None, power_level: int = 0, invited: bool = False, presence: str = "offline", last_active_ago: Optional[int] = None, currently_active: Optional[bool] = None, status_msg: Optional[str] = None, ): # yapf: disable self.user_id = user_id self.display_name = display_name self.avatar_url = avatar_url self.power_level = power_level self.invited = invited self.presence = presence self.last_active_ago = last_active_ago self.currently_active = currently_active self.status_msg = status_msg # yapf: enable @property def name(self) -> str: return self.display_name or self.user_id @property def disambiguated_name(self) -> str: # as per https://matrix.org/docs/spec/client_server/r0.4.0.html#id346 if self.display_name: return f"{self.display_name} ({self.user_id})" return self.user_id matrix-nio-0.24.0/nio/schemas.py000066400000000000000000002074351455215747700165020ustar00rootroot00000000000000# Copyright © 2018 Damir Jelić # Copyright © 2020-2021 Famedly GmbH # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. import re from jsonschema import Draft4Validator, FormatChecker, validators RoomRegex = "^!.+:.+$" UserIdRegex = "^@.*:.+$" EventTypeRegex = r"^.+\..+" Base64Regex = r"[^-A-Za-z0-9+/=]|=[^=]|={3,}$" KeyRegex = r"(ed25519|curve25519):.+" SignedCurveRegex = r"(signed_curve25519|curve25519):.+" def extend_with_default(validator_class): validate_properties = validator_class.VALIDATORS["properties"] def set_defaults(validator, properties, instance, schema): for property, subschema in properties.items(): if "default" in subschema: instance.setdefault(property, subschema["default"]) yield from validate_properties(validator, properties, instance, schema) return validators.extend(validator_class, {"properties": set_defaults}) Validator = extend_with_default(Draft4Validator) Checker = FormatChecker() @Checker.checks("user_id", ValueError) def check_user_id(value: str) -> bool: if not value.startswith("@"): raise ValueError("UserIDs start with @") if ":" not in value: raise ValueError("UserIDs must have a domain component, separated by a :") return True @Checker.checks("http_url", ValueError) def check_http_url(value: str) -> bool: if not re.match(r"^https?://.+", value): raise ValueError("Must be http://... or https://... 
URL") return True def validate_json(instance, schema): Validator(schema, format_checker=Checker).validate(instance) class Schemas: room_message = { "type": "object", "properties": { "content": { "type": "object", "properties": {"msgtype": {"type": "string"}}, "required": ["msgtype"], } }, "not": {"required": ["state_key"]}, } room_message_text = { "type": "object", "properties": { "content": { "type": "object", "properties": { "msgtype": {"type": "string", "const": "m.text"}, "body": {"type": "string"}, "formatted_body": {"type": "string"}, "format": {"type": "string"}, }, "required": ["msgtype", "body"], } }, } room_message_emote = { "type": "object", "properties": { "content": { "type": "object", "properties": { "msgtype": {"type": "string", "const": "m.emote"}, "body": {"type": "string"}, "formatted_body": {"type": "string"}, "format": {"type": "string"}, }, "required": ["msgtype", "body"], } }, } room_message_notice = { "type": "object", "properties": { "content": { "type": "object", "properties": { "msgtype": {"type": "string", "const": "m.notice"}, "body": {"type": "string"}, "formatted_body": {"type": "string"}, "format": {"type": "string"}, }, "required": ["msgtype", "body"], }, }, } room_message_media = { "type": "object", "properties": { "content": { "type": "object", "properties": { "body": {"type": "string"}, "url": {"type": "string"}, "msgtype": { "type": "string", "enum": ["m.image", "m.audio", "m.video", "m.file"], }, }, "required": ["body", "url", "msgtype"], } }, } room_encrypted_media = { "type": "object", "properties": { "content": { "type": "object", "properties": { "body": {"type": "string"}, "msgtype": { "type": "string", "enum": ["m.image", "m.audio", "m.video", "m.file"], }, "file": { "type": "object", "properties": { "url": {"type": "string"}, "hashes": { "type": "object", "properties": {"sha256": {"type": "string"}}, }, "iv": {"type": "string"}, "key": { "type": "object", "properties": { "alg": {"type": "string"}, "k": {"type": "string"}, 
}, "required": ["alg", "k"], }, }, "required": ["url", "hashes", "iv", "key"], }, "info": { "type": "object", "properties": { "thumbnail_file": { "type": "object", "properties": { "url": {"type": "string"}, "hashes": { "type": "object", "properties": {"sha256": {"type": "string"}}, }, "iv": {"type": "string"}, "key": { "type": "object", "properties": { "alg": {"type": "string"}, "k": {"type": "string"}, }, "required": ["alg", "k"], }, }, "required": ["url", "hashes", "iv", "key"], }, }, }, }, "required": ["body", "file", "msgtype"], } }, "required": ["content"], } redacted_event = { "type": "object", "properties": { "unsigned": { "type": "object", "properties": { "redacted_because": { "type": "object", "properties": { "sender": {"type": "string", "format": "user_id"}, "content": { "type": "object", "properties": {"reason": {"type": "string"}}, }, }, "required": ["sender", "content"], } }, "required": ["redacted_because"], } }, "required": ["unsigned"], } register = { "type": "object", "properties": { "user_id": {"type": "string", "format": "user_id"}, "device_id": {"type": "string"}, "access_token": {"type": "string"}, }, "required": ["user_id", "device_id", "access_token"], } register_flows = { "type": "object", "properties": { "flows": { "type": "array", "items": { "type": "object", "properties": { "stages": { "type": "array", "items": {"type": "string"}, } }, "required": ["stages"], }, }, "params": {"type": "object"}, "session": {"type": "string"}, "completed": {"type": "array", "items": {"type": "string"}}, "user_id": {"type": "string", "format": "user_id"}, "device_id": {"type": "string"}, "access_token": {"type": "string"}, }, "required": ["flows", "params", "session"], } login = { "type": "object", "properties": { "user_id": {"type": "string", "format": "user_id"}, "device_id": {"type": "string"}, "access_token": {"type": "string"}, }, "required": ["user_id", "device_id", "access_token"], } discovery_info = { "type": "object", "properties": { "m.homeserver": 
{ "type": "object", "properties": { "base_url": {"type": "string", "format": "http_url"}, }, "required": ["base_url"], }, "m.identity_server": { "type": "object", "properties": { "base_url": {"type": "string", "format": "http_url"}, }, "required": ["base_url"], }, }, "required": ["m.homeserver"], } login_info = { "type": "object", "properties": { "flows": { "type": "array", "items": { "type": "object", "properties": {"type": {"type": "string"}}, "required": ["type"], }, }, }, "required": ["flows"], } error = { "type": "object", "properties": { "error": {"type": "string"}, "errcode": {"type": "string"}, "retry_after_ms": {"type": "integer"}, }, "required": ["error", "errcode"], } room_timeline = { "type": "object", "properties": { "events": {"type": "array"}, "limited": {"type": "boolean"}, "prev_batch": {"type": "string"}, }, "required": ["events"], } sync = { "type": "object", "properties": { "device_one_time_keys_count": { "type": "object", "default": {}, "properties": { "curve25519": {"type": "integer"}, "signed_curve25519": {"type": "integer"}, }, }, "device_lists": { "type": "object", "default": {}, "properties": { "changed": {"type": "array", "items": {"type": "string"}}, "left": {"type": "array", "items": {"type": "string"}}, }, }, "next_batch": {"type": "string"}, "rooms": { "type": "object", "default": {}, "properties": { "invite": { "type": "object", "default": {}, "patternProperties": { RoomRegex: { "type": "object", "properties": { "invite_state": { "type": "object", "default": {}, "properties": { "events": { "type": "array", "default": [], } }, } }, } }, }, "join": { "type": "object", "default": {}, "patternProperties": { RoomRegex: { "type": "object", "properties": { "timeline": room_timeline, "state": { "type": "object", "default": {}, "properties": { "events": { "type": "array", "default": [], }, }, }, "ephemeral": { "type": "object", "default": {}, "properties": { "events": { "type": "array", "default": [], } }, }, "summary": { "type": "object", 
"properties": { "m.invited_member_count": { "type": "integer" }, "m.joined_member_count": { "type": "integer" }, "m.heroes": { "type": "array", "items": {"type": "string"}, }, }, }, "account_data": { "type": "object", "default": {}, "properties": { "events": { "type": "array", "default": [], }, }, }, }, } }, }, "leave": { "type": "object", "default": {}, "patternProperties": { RoomRegex: { "type": "object", "properties": { "timeline": { "type": "object", "default": {}, "properties": { "events": { "type": "array", "default": [], }, }, }, "state": { "type": "object", "default": {}, "properties": { "events": { "type": "array", "default": [], }, }, }, }, } }, }, }, }, "to_device": { "type": "object", "default": {}, "properties": {"events": {"type": "array", "default": []}}, }, "presence": { "type": "object", "default": {}, "properties": { "events": {"type": "array", "default": []}, }, }, }, "required": ["next_batch"], } room_event = { "type": "object", "properties": { "event_id": {"type": "string"}, "sender": {"type": "string", "format": "user_id"}, "type": {"type": "string"}, "origin_server_ts": {"type": "integer", "minimum": 0}, "unsigned": { "type": "object", "properties": { "transaction_id": {"type": "string"}, }, }, }, "required": ["event_id", "sender", "type", "origin_server_ts"], } state_event = { "type": "object", "properties": { "event_id": {"type": "string"}, "sender": {"type": "string", "format": "user_id"}, "type": {"type": "string"}, "state_key": {"type": "string"}, "origin_server_ts": {"type": "integer", "minimum": 0}, "unsigned": { "type": "object", "properties": { "transaction_id": {"type": "string"}, }, }, }, "required": ["event_id", "sender", "type", "state_key", "origin_server_ts"], } room_state = { "type": "array", "items": state_event, } sync_room_state = { "type": "object", "properties": {"events": {"type": "array"}}, "required": ["events"], } to_device = { "type": "object", "properties": { "sender": {"type": "string", "format": "user_id"}, 
"type": {"type": "string"}, "content": {"type": "object"}, }, "required": ["sender", "type", "content"], } room_encrypted = { "type": "object", "properties": { "content": { "type": "object", "properties": { "sender_key": {"type": "string"}, "algorithm": {"type": "string"}, }, "required": ["sender_key", "algorithm"], } }, "required": ["content"], } room_olm_encrypted = { "type": "object", "properties": { "type": {"type": "string", "enum": ["m.room.encrypted"]}, "content": { "type": "object", "properties": { "sender_key": {"type": "string"}, "algorithm": { "type": "string", "enum": ["m.olm.v1.curve25519-aes-sha2"], }, "ciphertext": { "type": "object", "patternProperties": { Base64Regex: { "type": "object", "properties": { "body": {"type": "string"}, "type": {"type": "integer"}, }, "required": ["type", "body"], } }, }, }, "required": ["sender_key", "algorithm", "ciphertext"], }, }, "required": [ "type", "content", ], } room_megolm_decrypted = { "type": "object", "properties": {"type": {"type": "string"}, "content": {"type": "object"}}, "required": [ "type", "content", ], } room_megolm_encrypted = { "type": "object", "properties": { "type": {"type": "string", "enum": ["m.room.encrypted"]}, "event_id": {"type": "string"}, "sender": {"type": "string", "format": "user_id"}, "origin_server_ts": {"type": "integer", "minimum": 0}, "room_id": {"type": "string"}, "content": { "type": "object", "properties": { "sender_key": {"type": "string"}, "algorithm": {"type": "string", "enum": ["m.megolm.v1.aes-sha2"]}, "ciphertext": {"type": "string"}, "session_id": {"type": "string"}, "device_id": {"type": "string"}, }, "required": [ "sender_key", "algorithm", "ciphertext", "session_id", "device_id", ], }, }, "required": ["type", "content", "event_id", "sender", "origin_server_ts"], } olm_event = { "type": "object", "properties": { "sender": {"type": "string", "format": "user_id"}, "sender_device": {"type": "string"}, "keys": { "type": "object", "properties": {"ed25519": {"type": 
"string"}}, }, "recipient": {"type": "string", "format": "user_id"}, "recipient_keys": { "type": "object", "properties": {"ed25519": {"type": "string"}}, }, "type": {"type": "string"}, "content": {"type": "object"}, }, "required": [ "type", "sender", "keys", "recipient", "recipient_keys", "content", ], } dummy_event = { "type": "object", "properties": { "sender": {"type": "string", "format": "user_id"}, "sender_device": {"type": "string"}, "type": {"type": "string", "enum": ["m.dummy"]}, "content": { "type": "object", }, "keys": {"type": "object"}, }, "required": ["type", "sender", "keys", "sender_device"], } room_key_request = { "type": "object", "properties": { "sender": {"type": "string", "format": "user_id"}, "type": {"type": "string", "enum": ["m.room_key_request"]}, "content": { "type": "object", "properties": { "body": { "type": "object", "properties": { "algorithm": {"type": "string"}, "room_id": {"type": "string", "format": "room_id"}, "sender_key": {"type": "string"}, "session_id": {"type": "string"}, }, "required": [ "algorithm", "room_id", "sender_key", "session_id", ], }, "requesting_device_id": {"type": "string"}, "action": { "type": "string", "enum": ["request", "request_cancellation"], }, "request_id": {"type": "string"}, }, "required": ["requesting_device_id", "request_id", "action", "body"], }, }, "required": ["type", "sender", "content"], } room_key_request_cancel = { "type": "object", "properties": { "sender": {"type": "string", "format": "user_id"}, "type": {"type": "string", "enum": ["m.room_key_request"]}, "content": { "type": "object", "properties": { "requesting_device_id": {"type": "string"}, "action": { "type": "string", "enum": ["request", "request_cancellation"], }, "request_id": {"type": "string"}, }, "required": ["requesting_device_id", "request_id", "action"], }, }, "required": ["type", "sender", "content"], } room_key_event = { "type": "object", "properties": { "sender": {"type": "string", "format": "user_id"}, "sender_device": 
{"type": "string"}, "type": {"type": "string", "enum": ["m.room_key"]}, "content": { "type": "object", "properties": { "algorithm": {"type": "string"}, "room_id": {"type": "string", "format": "room_id"}, "session_id": {"type": "string"}, "session_key": {"type": "string"}, }, "required": [ "algorithm", "room_id", "session_id", "session_key", ], }, "keys": {"type": "object"}, }, "required": ["type", "sender", "content", "keys"], } forwarded_room_key_event = { "type": "object", "properties": { "sender": {"type": "string", "format": "user_id"}, "sender_device": {"type": "string"}, "type": {"type": "string", "enum": ["m.forwarded_room_key"]}, "content": { "type": "object", "properties": { "algorithm": {"type": "string"}, "room_id": {"type": "string", "format": "room_id"}, "sender_key": {"type": "string"}, "sender_claimed_ed25519_key": {"type": "string"}, "forwarding_curve25519_key_chain": { "type": "array", "items": {"type": "string"}, }, "session_id": {"type": "string"}, "session_key": {"type": "string"}, }, "required": [ "algorithm", "room_id", "session_id", "session_key", "sender_key", "sender_claimed_ed25519_key", "forwarding_curve25519_key_chain", ], }, }, "required": ["type", "sender", "content"], } room_create = { "type": "object", "properties": { "sender": {"type": "string", "format": "user_id"}, "state_key": {"type": "string"}, "type": {"type": "string"}, "content": { "type": "object", "properties": { "creator": {"type": "string", "format": "user_id"}, "m.federate": {"type": "boolean", "default": True}, "room_version": {"type": "string", "default": "1"}, "type": {"type": "string", "default": ""}, "predecessor": { "type": "object", "properties": { "event_id": {"type": "string"}, "room_id": {"type": "string", "format": "room_id"}, }, "required": ["event_id", "room_id"], }, }, "required": ["creator"], }, }, "required": ["type", "sender", "content", "state_key"], } room_guest_access = { "type": "object", "properties": { "sender": {"type": "string", "format": 
"user_id"}, "state_key": {"type": "string"}, "type": {"type": "string"}, "content": { "type": "object", "properties": { "guest_access": { "type": "string", "enum": ["can_join", "forbidden"], "default": "forbidden", }, }, "required": ["guest_access"], }, }, "required": ["type", "sender", "content", "state_key"], } room_join_rules = { "type": "object", "properties": { "sender": {"type": "string", "format": "user_id"}, "state_key": {"type": "string"}, "type": {"type": "string"}, "content": { "type": "object", "properties": { "join_rule": { "type": "string", "enum": [ "public", "knock", "invite", "private", "restricted", "knock_restricted", ], "default": "invite", }, }, "required": ["join_rule"], }, }, "required": ["type", "sender", "content", "state_key"], } room_history_visibility = { "type": "object", "properties": { "sender": {"type": "string", "format": "user_id"}, "state_key": {"type": "string"}, "type": {"type": "string"}, "content": { "type": "object", "properties": { "history_visibility": { "type": "string", "enum": [ "invited", "joined", "shared", "world_readable", ], "default": "shared", }, }, "required": ["history_visibility"], }, }, "required": ["type", "sender", "content", "state_key"], } room_canonical_alias = { "type": "object", "properties": { "sender": {"type": "string", "format": "user_id"}, "state_key": {"type": "string"}, "type": {"type": "string"}, "content": { "type": "object", "properties": {"alias": {"type": ["string", "null"]}}, "required": [], }, }, "required": ["type", "sender", "content", "state_key"], } room_name = { "type": "object", "properties": { "sender": {"type": "string", "format": "user_id"}, "state_key": {"type": "string"}, "type": {"type": "string"}, "content": { "type": "object", "properties": {"name": {"type": "string"}}, "required": ["name"], }, }, "required": ["type", "sender", "content", "state_key"], } room_encryption = { "type": "object", "properties": { "sender": {"type": "string", "format": "user_id"}, "state_key": 
{"type": "string"}, "type": {"type": "string"}, "content": { "type": "object", }, }, "required": ["type", "sender", "content", "state_key"], } room_topic = { "type": "object", "properties": { "sender": {"type": "string", "format": "user_id"}, "state_key": {"type": "string"}, "type": {"type": "string"}, "content": { "type": "object", "properties": {"topic": {"type": "string"}}, "required": ["topic"], }, }, "required": ["type", "sender", "content", "state_key"], } room_space_parent = { "type": "object", "properties": { "sender": {"type": "string", "format": "user_id"}, "state_key": {"type": "string"}, "type": {"type": "string"}, "content": { "type": "object", "properties": { "canonical": {"type": "boolean", "default": False}, "via": {"type": "array", "items": {"type": "string"}}, }, }, }, "required": ["type", "sender", "content", "state_key"], } room_space_child = { "type": "object", "properties": { "sender": {"type": "string", "format": "user_id"}, "state_key": {"type": "string"}, "type": {"type": "string"}, "content": { "type": "object", "properties": { "suggested": {"type": "boolean", "default": False}, "via": {"type": "array", "items": {"type": "string"}}, "order": {"type": "string"}, }, }, }, "required": ["type", "sender", "content", "state_key"], } room_avatar = { "type": "object", "properties": { "sender": {"type": "string", "format": "user_id"}, "state_key": {"type": "string"}, "type": {"type": "string"}, "content": { "type": "object", "properties": { "info": { "h": {"type": "integer"}, "w": {"type": "integer"}, "mimetype": {"type", "string"}, "size": {"type": "integer"}, }, "url": {"type": "string"}, }, "required": [], }, }, "required": ["type", "sender", "content", "state_key"], } room_power_levels = { "type": "object", "properties": { "sender": {"type": "string", "format": "user_id"}, "state_key": {"type": "string"}, "type": {"type": "string"}, "content": { "type": "object", "properties": { "ban": {"type": "integer", "default": 50}, "kick": {"type": 
"integer", "default": 50}, "invite": {"type": "integer", "default": 50}, "redact": {"type": "integer", "default": 50}, "users_default": {"type": "integer", "default": 0}, "events_default": {"type": "integer", "default": 0}, "state_default": {"type": "integer", "default": 50}, "events": { "type": "object", "default": {}, "patternProperties": {EventTypeRegex: {"type": "integer"}}, }, "users": { "type": "object", "default": {}, "patternProperties": {UserIdRegex: {"type": "integer"}}, }, "notifications": { "type": "object", "default": {}, "properties": { "room": {"type": "integer", "default": 50}, }, }, }, }, }, "required": ["type", "sender", "content", "state_key"], } room_membership = { "type": "object", "properties": { "sender": {"type": "string", "format": "user_id"}, "state_key": {"type": "string", "format": "user_id"}, "type": {"type": "string", "enum": ["m.room.member"]}, "prev_content": { "type": "object", "properties": { "membership": { "type": "string", "enum": ["invite", "join", "knock", "leave", "ban"], }, "avatar_url": {"type": ["string", "null"]}, "displayname": {"type": ["string", "null"]}, }, "required": ["membership"], }, "content": { "type": "object", "properties": { "membership": { "type": "string", "enum": ["invite", "join", "knock", "leave", "ban"], }, "reason": {"type": ["string", "null"]}, "avatar_url": {"type": ["string", "null"]}, "displayname": {"type": ["string", "null"]}, }, "required": ["membership"], }, }, "required": ["type", "sender", "state_key", "content"], } room_redaction = { "type": "object", "properties": { "sender": {"type": "string", "format": "user_id"}, "redacts": {"type": "string"}, "content": { "type": "object", "properties": {"reason": {"type": "string"}}, }, }, "required": ["sender", "redacts"], } sticker = { "type": "object", "properties": { "sender": {"type": "string", "format": "user_id"}, "content": { "type": "object", "properties": { "body": {"type": "string"}, "url": {"type": "string"}, }, "required": ["body", "url"], 
}, }, "required": ["sender"], } reaction = { "type": "object", "properties": { "sender": { "type": "string", "format": "user_id", }, "content": { "type": "object", "properties": { "m.relates_to": { "type": "object", "properties": { "rel_type": { "type": "string", "const": "m.annotation", }, "event_id": {"type": "string"}, "key": {"type": "string"}, }, "required": ["rel_type", "event_id", "key"], }, }, "required": ["m.relates_to"], }, }, "required": ["sender", "content"], } room_resolve_alias = { "type": "object", "properties": { "room_id": {"type": "string"}, "servers": {"type": "array", "items": {"type": "string"}}, }, "required": ["room_id", "servers"], } room_get_visibility = { "type": "object", "properties": { "room_id": {"type": "string"}, "visibility": {"type": "string", "enum": ["private", "public"]}, }, "required": ["visibility"], } room_event_id = { "type": "object", "properties": {"event_id": {"type": "string"}}, "required": ["event_id"], } room_id = { "type": "object", "properties": {"room_id": {"type": "string"}}, "required": ["room_id"], } room_create_response = { "type": "object", "properties": {"room_id": {"type": "string"}}, "required": ["room_id"], } room_messages = { "type": "object", "properties": { "chunk": {"type": "array"}, "start": {"type": "string"}, "end": {"type": "string"}, }, "required": ["chunk", "start"], } room_context = { "type": "object", "properties": { "start": {"type": "string"}, "end": {"type": "string"}, "state": {"type": "array"}, "events_before": {"type": "array"}, "events_after": {"type": "array"}, "event": {"type": "object"}, }, "required": [ "start", "end", "state", "events_before", "events_after", "event", ], } invite_event = { "type": "object", "properties": {"content": {"type": "object"}, "type": {"type": "string"}}, "required": ["content", "type"], } ephemeral_event = { "type": "object", "properties": {"content": {"type": "object"}, "type": {"type": "string"}}, "required": ["content", "type"], } m_typing = { "type": 
"object", "properties": { "content": { "type": "object", "properties": { "user_ids": {"type": "array", "items": {"type": "string"}} }, "required": ["user_ids"], }, "type": {"type": "string"}, "room_id": {"type": "string"}, }, "required": ["content", "type"], } m_receipt = { "type": "object", "properties": { "content": { "type": "object", "patternProperties": { r".*": { "type": "object", "properties": { "m.read": { "type": "object", "patternProperties": { UserIdRegex: { "type": ["object", "string"], "properties": {"ts": {"type": "integer"}}, "required": ["ts"], } }, } }, } }, }, "type": {"type": "string"}, "room_id": {"type": "string"}, }, "required": ["content", "type"], } get_openid_token = { "type": "object", "properties": { "access_token": {"type": "string"}, "expires_in": {"type": "integer"}, "matrix_server_name": {"type": "string"}, "token_type": {"type": "string"}, }, "required": ["access_token", "expires_in", "matrix_server_name", "token_type"], } keys_upload = { "type": "object", "properties": { "one_time_key_counts": { "type": "object", "properties": { "curve25519": {"type": "integer", "default": 0}, "signed_curve25519": {"type": "integer", "default": 0}, }, }, }, "required": ["one_time_key_counts"], } keys_query = { "type": "object", "properties": { "device_keys": { "type": "object", "patternProperties": { UserIdRegex: { "type": "object", "patternProperties": { r".+": { "type": "object", "properties": { "algorithms": { "type": "array", "items": {"type": "string"}, }, "device_id": {"type": "string"}, "user_id": {"type": "string"}, "keys": { "type": "object", "patternProperties": { KeyRegex: {"type": "string"} }, }, "signatures": { "type": "object", "patternProperties": { UserIdRegex: { "type": "object", "patternProperties": { KeyRegex: {"type": "string"} }, } }, }, }, "required": ["algorithms", "device_id", "keys"], } }, } }, }, "failures": {"type": "object"}, }, "required": ["device_keys", "failures"], } keys_claim = { "type": "object", "properties": { 
"one_time_keys": { "type": "object", "patternProperties": { UserIdRegex: { "type": "object", "patternProperties": { r".+": { "type": "object", "properties": { "patternProperties": { SignedCurveRegex: { "type": "object", "properties": { "key": {"type": "str"}, "signatures": { "type": "object", "patternProperties": { UserIdRegex: { "type": "object", "patternProperties": { KeyRegex: { "type": "string" } }, } }, }, }, "required": ["key", "signatures"], } }, }, } }, } }, }, "failures": {"type": "object"}, }, "required": ["one_time_keys", "failures"], } devices = { "type": "object", "properties": { "devices": { "type": "array", "items": { "type": "object", "properties": { "device_id": {"type": "string"}, "display_name": {"type": ["string", "null"]}, "last_seen_ip": {"type": ["string", "null"]}, "last_seen_ts": {"type": ["integer", "null"]}, }, "required": [ "device_id", "display_name", "last_seen_ip", "last_seen_ts", ], }, }, }, "required": ["devices"], } delete_devices = { "type": "object", "properties": { "session": {"type": "string"}, "flows": { "type": "array", "items": { "type": "object", "properties": { "stages": {"type": "array", "items": {"type": "string"}}, "required": ["stages"], }, }, }, "params": { "type": "object", "patternProperties": { r".+": { "type": "object", "patternProperties": {r".+": {"type": "string"}}, } }, }, "required": ["session", "flows", "params"], }, } joined_members = { "type": "object", "properties": { "joined": { "type": "object", "patternProperties": { UserIdRegex: { "type": "object", "properties": { "avatar_url": {"type": ["string", "null"]}, "display_name": {"type": ["string", "null"]}, }, "required": ["display_name"], } }, } }, "required": ["joined"], } joined_rooms = { "type": "object", "properties": {"joined_rooms": {"type": "array", "items": {"type": "string"}}}, "required": ["joined_rooms"], } call_invite = { "type": "object", "properties": { "type": {"type": "string"}, "content": { "type": "object", "properties": { "call_id": 
{"type": "string"}, "lifetime": {"type": "integer"}, "version": {"type": "integer"}, "offer": { "type": "object", "properties": { "type": {"type": "string", "enum": ["offer"]}, "sdp": {"type": "string"}, }, "required": ["type", "sdp"], }, }, "required": [ "call_id", "lifetime", "version", "offer", ], }, }, "required": [ "type", "content", ], } call_answer = { "type": "object", "properties": { "type": {"type": "string"}, "content": { "type": "object", "properties": { "call_id": {"type": "string"}, "version": {"type": "integer"}, "answer": { "type": "object", "properties": { "type": {"type": "string", "enum": ["answer"]}, "sdp": {"type": "string"}, }, "required": ["type", "sdp"], }, }, "required": [ "call_id", "version", "answer", ], }, }, "required": [ "type", "content", ], } call_hangup = { "type": "object", "properties": { "type": {"type": "string"}, "content": { "type": "object", "properties": { "call_id": {"type": "string"}, "version": {"type": "integer"}, }, "required": [ "call_id", "version", ], }, }, "required": [ "type", "content", ], } call_candidates = { "type": "object", "properties": { "type": {"type": "string"}, "content": { "type": "object", "properties": { "call_id": {"type": "string"}, "version": {"type": "integer"}, "candidates": { "type": "array", "items": { "type": "object", "properties": { "candidate": {"type": "string"}, "sdpMLineIndex": {"type": "integer"}, "sdpMid": {"type": "string"}, }, "required": ["candidate", "sdpMLineIndex", "sdpMid"], }, }, }, "required": ["call_id", "version", "candidates"], }, }, "required": [ "type", "content", ], } account_data = { "type": "object", "properties": {"type": {"type": "string"}, "content": {"type": "object"}}, "required": [ "type", "content", ], } fully_read = { "type": "object", "properties": { "type": {"type": "string"}, "content": { "type": "object", "properties": { "event_id": {"type": "string"}, }, "required": [ "event_id", ], }, }, "required": [ "type", "content", ], } tags = { "type": "object", 
"properties": { "type": {"type": "string"}, "content": { "type": "object", "properties": { "tags": { "type:": "object", "patternProperties": { r".*": { "type": "object", "properties": {"order": {"type": "number"}}, }, }, }, }, "required": ["tags"], }, }, "required": [ "type", "content", ], } push_rules = { "type": "object", "properties": { "type": { "type": "string", "const": "m.push_rules", }, "content": { "type": "object", "properties": { "global": {"type": "object"}, "device": {"type": "object"}, }, "required": ["global"], }, }, "required": ["type", "content"], } push_ruleset = { "type": "object", "properties": { "override": {"type": "array", "items": {"type": "object"}}, "content": {"type": "array", "items": {"type": "object"}}, "room": {"type": "array", "items": {"type": "object"}}, "sender": {"type": "array", "items": {"type": "object"}}, "underride": {"type": "array", "items": {"type": "object"}}, }, } push_rule = { "type": "object", "properties": { "rule_id": {"type": "string"}, "default": {"type": "boolean", "default": False}, "enabled": {"type": "boolean"}, "pattern": {"type": "string"}, "conditions": { "type": "array", "items": { "type": "object", "properties": { "kind": {"type": "string"}, "key": {"type": "string"}, "pattern": {"type": "string"}, "is": { "type": "string", "pattern": r"(==|<=|>=|<|>)?[0-9.-]+", }, }, "required": ["kind"], }, }, "actions": { "type": "array", "items": {"type": ["string", "object"]}, }, }, "required": ["rule_id", "default", "enabled", "actions"], } upload = { "type": "object", "properties": {"content_uri": {"type": "string"}}, "required": ["content_uri"], } content_repository_config = { "type": "object", "properties": {"m.upload.size": {"type": ["number", "null"]}}, } megolm_key_import = { "type": "array", "items": { "type": "object", "properties": { "algorithm": {"type": "string"}, "session_key": {"type": "string"}, "sender_key": {"type": "string"}, "room_id": {"type": "string"}, "sender_claimed_keys": { "type": "object", 
"properties": { "ed25519": {"type": "string"}, }, "required": ["ed25519"], }, "forwarding_curve25519_key_chain": { "type": "array", "items": {"type": "string"}, }, }, "required": [ "algorithm", "session_key", "sender_key", "room_id", "sender_claimed_keys", "forwarding_curve25519_key_chain", ], }, } get_profile = { "type": "object", "properties": { "displayname": {"type": "string"}, "avatar_url": {"type": "string"}, }, "not": {"required": ["errcode"]}, } get_displayname = { "type": "object", "properties": { "displayname": {"type": ["string", "null"]}, }, "required": ["displayname"], } get_avatar = { "type": "object", "properties": { "avatar_url": {"type": ["string", "null"]}, }, "required": ["avatar_url"], } key_verification_start = { "type": "object", "properties": { "sender": {"type": "string"}, "content": { "type": "object", "properties": { "transaction_id": {"type": "string"}, "from_device": {"type": "string"}, "method": {"type": "string"}, "key_agreement_protocols": { "type": "array", "items": {"type": "string"}, }, "hashes": {"type": "array", "items": {"type": "string"}}, "message_authentication_codes": { "type": "array", "items": {"type": "string"}, }, "short_authentication_string": { "type": "array", "items": {"type": "string"}, }, }, "required": [ "transaction_id", "from_device", "method", "key_agreement_protocols", "hashes", "message_authentication_codes", "short_authentication_string", ], }, }, "required": [ "sender", "content", ], } key_verification_accept = { "type": "object", "properties": { "sender": {"type": "string"}, "content": { "type": "object", "properties": { "transaction_id": {"type": "string"}, "commitment": {"type": "string"}, "key_agreement_protocol": {"type": "string"}, "hash": {"type": "string"}, "message_authentication_code": {"type": "string"}, "short_authentication_string": { "type": "array", "items": {"type": "string"}, }, }, "required": [ "transaction_id", "commitment", "key_agreement_protocol", "hash", "message_authentication_code", 
"short_authentication_string", ], }, }, "required": [ "sender", "content", ], } key_verification_key = { "type": "object", "properties": { "sender": {"type": "string"}, "content": { "type": "object", "properties": { "transaction_id": {"type": "string"}, "key": {"type": "string"}, }, "required": [ "transaction_id", "key", ], }, }, "required": [ "sender", "content", ], } key_verification_mac = { "type": "object", "properties": { "sender": {"type": "string"}, "content": { "type": "object", "properties": { "mac": { "type": "object", "patternProperties": {r".+": {"type": "string"}}, }, "keys": {"type": "string"}, }, "required": [ "transaction_id", "mac", "keys", ], }, }, "required": [ "sender", "content", ], } key_verification_cancel = { "type": "object", "properties": { "sender": {"type": "string"}, "content": { "type": "object", "properties": { "transaction_id": {"type": "string"}, "code": {"type": "string"}, "reason": {"type": "string"}, }, "required": [ "transaction_id", "code", "reason", ], }, }, "required": ["sender", "content"], } presence = { "type": "object", "properties": { "sender": {"type": "string"}, "type": {"type": "string", "const": "m.presence"}, "content": { "type": "object", "properties": { "presence": {"type": "string"}, "currently_active": {"type": "boolean"}, "last_active_ago": {"type": "integer"}, "status_msg": {"type": "string"}, }, "required": [ "presence", ], }, }, "required": ["sender", "type", "content"], } get_presence = { "type": "object", "properties": { "presence": {"type": "string"}, "last_active_ago": {"type": "integer"}, "status_msg": {"type": "string"}, "currently_active": {"type": "boolean"}, }, "required": [ "presence", ], } empty = {"type": "object", "properties": {}, "additionalProperties": False} upload_filter = { "type": "object", "properties": {"filter_id": {"type": "string"}}, "required": ["filter_id"], } whoami = { "type": "object", "properties": { "user_id": {"type": "string", "format": "user_id"}, "device_id": {"type": 
"string"}, "is_guest": {"type": "boolean"}, }, "required": ["user_id"], } room_tombstone = { "type": "object", "properties": { "type": {"type": "string", "const": "m.room.tombstone"}, "state_key": {"type": "string", "const": ""}, "content": { "type": "object", "properties": { "body": {"type": "string"}, "replacement_room": {"type": "string", "format": "room_id"}, }, "required": [ "body", "replacement_room", ], }, }, "required": [ "sender", "type", "content", "state_key", ], } space_hierarchy = { "type": "object", "properties": { "next_batch": {"type": "string"}, "rooms": { "type": "array", "items": { "type": "object", "properties": { "avatar_url": {"type": "string"}, "canonical_alias": {"type": "string"}, "children_state": { "type": "array", "items": { "type": "object", "properties": { "content": { "type": "object", "properties": { "via": { "type": "array", "items": {"type": "string"}, }, }, "required": ["via"], }, "origin_server_ts": {"type": "integer"}, "sender": {"type": "string"}, "state_key": {"type": "string"}, "type": {"type": "string"}, }, "required": [ "content", "origin_server_ts", "sender", "state_key", "type", ], }, }, "guest_can_join": {"type": "boolean"}, "join_rule": {"type": "string"}, "name": {"type": "string"}, "num_joined_members": {"type": "integer"}, "room_id": {"type": "string"}, "room_type": {"type": "string"}, "topic": {"type": "string"}, "world_readable": {"type": "boolean"}, }, "required": [ "children_state", "guest_can_join", "num_joined_members", "room_id", "world_readable", ], }, }, }, "required": ["rooms"], } matrix-nio-0.24.0/nio/store/000077500000000000000000000000001455215747700156265ustar00rootroot00000000000000matrix-nio-0.24.0/nio/store/__init__.py000066400000000000000000000022311455215747700177350ustar00rootroot00000000000000"""Nio storage module. This module contains storage classes that are used to store encryption devices, encryption keys and the trust state of devices. 
The module contains three store implementations one using a Sqlite database and plaintext files to store keys and the truststate of devices, one that uses a pure Sqlite database and one that stores the Sqlite database in memory. User provided store types can be implemented by overriding the methods provided in the MatrixStore base class. isort:skip_file """ from .._compat import package_installed if package_installed("olm"): from .log import logger from .file_trustdb import Ed25519Key, Key, KeyStore from .models import ( Accounts, DeviceKeys, DeviceKeys_v1, DeviceTrustField, DeviceTrustState, EncryptedRooms, ForwardedChains, Keys, MegolmInboundSessions, OlmSessions, OutgoingKeyRequests, StoreVersion, SyncTokens, ) from .database import ( DefaultStore, MatrixStore, SqliteMemoryStore, SqliteStore, use_database, use_database_atomic, ) matrix-nio-0.24.0/nio/store/database.py000066400000000000000000000730301455215747700177470ustar00rootroot00000000000000# Copyright 2018 Zil0 # Copyright © 2018, 2019 Damir Jelić # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import annotations import os import sqlite3 from dataclasses import dataclass, field from functools import wraps from typing import List, Optional from peewee import DoesNotExist, SqliteDatabase from playhouse.sqliteq import SqliteQueueDatabase from ..crypto import ( DeviceStore, GroupSessionStore, InboundGroupSession, OlmAccount, OlmDevice, OutgoingKeyRequest, Session, SessionStore, TrustState, ) from . 
import ( Accounts, DeviceKeys, DeviceKeys_v1, DeviceTrustState, EncryptedRooms, ForwardedChains, Key, Keys, KeyStore, MegolmInboundSessions, OlmSessions, OutgoingKeyRequests, StoreVersion, SyncTokens, ) def use_database(fn): """ Ensure that the correct database context is used for the wrapped function. """ @wraps(fn) def inner(self, *args, **kwargs): with self.database.bind_ctx(self.models): return fn(self, *args, **kwargs) return inner def use_database_atomic(fn): """ Ensure that the correct database context is used for the wrapped function. This also ensures that the database transaction will be atomic. """ @wraps(fn) def inner(self, *args, **kwargs): with self.database.bind_ctx(self.models): if isinstance(self.database, SqliteQueueDatabase): return fn(self, *args, **kwargs) else: with self.database.atomic(): return fn(self, *args, **kwargs) return inner @dataclass class MatrixStore: """Storage class for matrix state.""" models = [ Accounts, OlmSessions, MegolmInboundSessions, ForwardedChains, DeviceKeys, EncryptedRooms, OutgoingKeyRequests, StoreVersion, Keys, SyncTokens, ] store_version = 2 user_id: str = field() device_id: str = field() store_path: str = field() pickle_key: str = "" database_name: str = "" database_path: str = field(init=False) database: SqliteDatabase = field(init=False) def _create_database(self): return SqliteDatabase( self.database_path, pragmas={ "foreign_keys": 1, "secure_delete": 1, }, ) def upgrade_to_v2(self): with self.database.bind_ctx([DeviceKeys_v1]): self.database.drop_tables( [ DeviceTrustState, DeviceKeys_v1, ], safe=True, ) with self.database.bind_ctx(self.models): self.database.create_tables([DeviceKeys, DeviceTrustState]) self._update_version(2) def __post_init__(self): self.database_name = self.database_name or f"{self.user_id}_{self.device_id}.db" self.database_path = os.path.join(self.store_path, self.database_name) self.database = self._create_database() self.database.connect() store_version = self._get_store_version() # 
Update the store if it's an old version here. if store_version == 1: self.upgrade_to_v2() with self.database.bind_ctx(self.models): self.database.create_tables(self.models) def _get_store_version(self): with self.database.bind_ctx([StoreVersion]): self.database.create_tables([StoreVersion]) v, _ = StoreVersion.get_or_create(defaults={"version": self.store_version}) return v.version def _update_version(self, new_version): with self.database.bind_ctx([StoreVersion]): v, _ = StoreVersion.get_or_create(defaults={"version": new_version}) v.version = new_version v.save() @use_database def _get_account(self): try: return Accounts.get( Accounts.user_id == self.user_id, Accounts.device_id == self.device_id ) except DoesNotExist: return None def load_account(self) -> Optional[OlmAccount]: """Load the Olm account from the database. Returns: ``OlmAccount`` object, or ``None`` if it wasn't found for the current device_id. """ account = self._get_account() if not account: return None return OlmAccount.from_pickle(account.account, self.pickle_key, account.shared) @use_database def save_account(self, account): """Save the provided Olm account to the database. Args: account (OlmAccount): The olm account that will be pickled and saved in the database. """ Accounts.insert( user_id=self.user_id, device_id=self.device_id, shared=account.shared, account=account.pickle(self.pickle_key), ).on_conflict_ignore().execute() Accounts.update( { Accounts.account: account.pickle(self.pickle_key), Accounts.shared: account.shared, } ).where( (Accounts.user_id == self.user_id) & (Accounts.device_id == self.device_id) ).execute() @use_database def load_sessions(self) -> SessionStore: """Load all Olm sessions from the database. Returns: ``SessionStore`` object, containing all the loaded sessions. 
""" session_store = SessionStore() account = self._get_account() if not account: return session_store for s in account.olm_sessions: session = Session.from_pickle(s.session, s.creation_time, self.pickle_key) session_store.add(s.sender_key, session) return session_store @use_database def save_session(self, curve_key, session): """Save the provided Olm session to the database. Args: curve_key (str): The curve key that owns the Olm session. session (Session): The Olm session that will be pickled and saved in the database. """ account = self._get_account() assert account OlmSessions.replace( account=account, sender_key=curve_key, session=session.pickle(self.pickle_key), session_id=session.id, creation_time=session.creation_time, last_usage_date=session.use_time, ).execute() @use_database def load_inbound_group_sessions(self) -> GroupSessionStore: """Load all Olm sessions from the database. Returns: ``GroupSessionStore`` object, containing all the loaded sessions. """ store = GroupSessionStore() account = self._get_account() if not account: return store for s in account.inbound_group_sessions: session = InboundGroupSession.from_pickle( s.session, s.fp_key, s.sender_key, s.room_id, self.pickle_key, [chain.sender_key for chain in s.forwarded_chains], ) store.add(session) return store @use_database def save_inbound_group_session(self, session): """Save the provided Megolm inbound group session to the database. Args: session (InboundGroupSession): The session to save. 
""" account = self._get_account() assert account MegolmInboundSessions.insert( sender_key=session.sender_key, account=account, fp_key=session.ed25519, room_id=session.room_id, session=session.pickle(self.pickle_key), session_id=session.id, ).on_conflict_ignore().execute() MegolmInboundSessions.update( {MegolmInboundSessions.session: session.pickle(self.pickle_key)} ).where(MegolmInboundSessions.session_id == session.id).execute() # TODO, use replace many here for chain in session.forwarding_chain: ForwardedChains.replace(sender_key=chain, session=session.id).execute() @use_database def load_device_keys(self) -> DeviceStore: """Load all the device keys from the database. Returns DeviceStore containing the OlmDevices with the device keys. """ store = DeviceStore() account = self._get_account() if not account: return store for d in account.device_keys: store.add( OlmDevice( d.user_id, d.device_id, {k.key_type: k.key for k in d.keys}, display_name=d.display_name, deleted=d.deleted, ) ) return store @use_database_atomic def save_device_keys(self, device_keys): """Save the provided device keys to the database. Args: device_keys (Dict[str, Dict[str, OlmDevice]]): A dictionary containing a mapping from a user id to a dictionary containing a mapping of a device id to a OlmDevice. 
""" account = self._get_account() assert account rows = [] for user_id, devices_dict in device_keys.items(): for device_id, device in devices_dict.items(): rows.append( { "account": account, "user_id": user_id, "device_id": device_id, "display_name": device.display_name, "deleted": device.deleted, } ) if not rows: return for idx in range(0, len(rows), 100): data = rows[idx : idx + 100] DeviceKeys.insert_many(data).on_conflict_ignore().execute() for user_id, devices_dict in device_keys.items(): for device_id, device in devices_dict.items(): d = DeviceKeys.get( (DeviceKeys.account == account) & (DeviceKeys.user_id == user_id) & (DeviceKeys.device_id == device_id) ) d.deleted = device.deleted d.save() for key_type, key in device.keys.items(): Keys.replace(key_type=key_type, key=key, device=d).execute() @use_database def load_encrypted_rooms(self): """Load the set of encrypted rooms for this account. Returns: ``Set`` containing room ids of encrypted rooms. """ account = self._get_account() if not account: return set() return {room.room_id for room in account.encrypted_rooms} @use_database def load_outgoing_key_requests(self): """Load the set of outgoing key requests for this account. Returns: ``Set`` containing request ids of key requests. 
""" account = self._get_account() if not account: return {} return { request.request_id: OutgoingKeyRequest.from_database(request) for request in account.out_key_requests } @use_database def add_outgoing_key_request(self, key_request: OutgoingKeyRequest) -> None: """Add an outgoing key request to the store.""" account = self._get_account() assert account OutgoingKeyRequests.insert( request_id=key_request.request_id, session_id=key_request.session_id, room_id=key_request.room_id, algorithm=key_request.algorithm, account=account, ).on_conflict_ignore().execute() @use_database def remove_outgoing_key_request(self, key_request: OutgoingKeyRequest) -> None: """Remove an active outgoing key request from the store.""" account = self._get_account() assert account db_key_request = OutgoingKeyRequests.get_or_none( OutgoingKeyRequests.request_id == key_request.request_id, OutgoingKeyRequests.account == account, ) if db_key_request: db_key_request.delete_instance() @use_database_atomic def save_encrypted_rooms(self, rooms): """Save the set of room ids for this account.""" account = self._get_account() assert account data = [(room_id, account) for room_id in rooms] for idx in range(0, len(data), 400): rows = data[idx : idx + 400] EncryptedRooms.insert_many( rows, fields=[EncryptedRooms.room_id, EncryptedRooms.account] ).on_conflict_ignore().execute() @use_database def save_sync_token(self, token: str) -> None: """Save the given token""" account = self._get_account() assert account SyncTokens.replace(account=account, token=token).execute() @use_database def load_sync_token(self) -> Optional[str]: account = self._get_account() if not account: return None token = SyncTokens.get_or_none( SyncTokens.account == account.id, ) if token: return token.token return None @use_database def delete_encrypted_room(self, room: str) -> None: """Delete an encrypted room from the store.""" db_room = EncryptedRooms.get_or_none(EncryptedRooms.room_id == room) if db_room: db_room.delete_instance() 
    # The following trust-state methods form the abstract interface that the
    # concrete stores (DefaultStore, SqliteStore) must implement.

    def blacklist_device(self, device: OlmDevice) -> bool:
        """Mark a device as blacklisted.

        Args:
            device (OlmDevice): The device that will be marked as blacklisted

        Returns True if the device was blacklisted, False otherwise, e.g. if
        the device was already blacklisted.

        """
        raise NotImplementedError

    def unblacklist_device(self, device: OlmDevice) -> bool:
        """Unmark a device as blacklisted.

        Args:
            device (OlmDevice): The device that will be unmarked as
                blacklisted

        """
        raise NotImplementedError

    def verify_device(self, device: OlmDevice) -> bool:
        """Mark a device as verified.

        Args:
            device (OlmDevice): The device that will be marked as verified

        Returns True if the device was verified, False otherwise, e.g. if the
        device was already verified.

        """
        raise NotImplementedError

    def is_device_verified(self, device: OlmDevice) -> bool:
        """Check if a device is verified.

        Args:
            device (OlmDevice): The device that will be checked if it's
                verified.

        """
        raise NotImplementedError

    def is_device_blacklisted(self, device: OlmDevice) -> bool:
        """Check if a device is blacklisted.

        Args:
            device (OlmDevice): The device that will be checked if it's
                blacklisted.

        """
        raise NotImplementedError

    def unverify_device(self, device: OlmDevice) -> bool:
        """Unmark a device as verified.

        Args:
            device (OlmDevice): The device that will be unmarked as verified

        Returns True if the device was unverified, False otherwise, e.g. if
        the device wasn't verified.

        """
        raise NotImplementedError

    def ignore_device(self, device: OlmDevice) -> bool:
        """Mark a device as ignored.

        Args:
            device (OlmDevice): The device that will be marked as blacklisted

        Returns True if the device was ignored, False otherwise, e.g. if the
        device was already ignored.

        """
        raise NotImplementedError

    def unignore_device(self, device: OlmDevice) -> bool:
        """Unmark a device as ignored.

        Args:
            device (OlmDevice): The device that will be marked as blacklisted

        Returns True if the device was unignored, False otherwise, e.g.
        if the device wasn't ignored in the first place.

        """
        raise NotImplementedError

    def ignore_devices(self, devices: List[OlmDevice]) -> None:
        """Mark a list of devices as ignored.

        This is a more efficient way to mark multiple devices as ignored.

        Args:
            devices (list[OlmDevice]): A list of OlmDevices that will be
                marked as ignored.

        """
        raise NotImplementedError

    def is_device_ignored(self, device: OlmDevice) -> bool:
        """Check if a device is ignored.

        Args:
            device (OlmDevice): The device that will be checked if it's
                ignored.

        """
        raise NotImplementedError


@dataclass
class DefaultStore(MatrixStore):
    """The default nio Matrix Store.

    This store uses an Sqlite database as the main storage format while
    device trust state is stored in plaintext files using a format similar
    to the ssh known_hosts file format. The files will be created in the same
    directory as the main Sqlite database.

    One such file is created for each of the 3 valid states (verified,
    blacklisted, ignored). If a device isn't found in any of those files the
    verification state is considered to be unset.

    Args:
        user_id (str): The fully-qualified ID of the user that owns the
            store.
        device_id (str): The device id of the user's device.
        store_path (str): The path where the store should be stored.
        pickle_key (str, optional): A passphrase that will be used to encrypt
            encryption keys while they are in storage.
        database_name (str, optional): The file-name of the database that
            should be used.
""" trust_db: KeyStore = field(init=False) blacklist_db: KeyStore = field(init=False) def __post_init__(self): super().__post_init__() trust_file_path = f"{self.user_id}_{self.device_id}.trusted_devices" self.trust_db = KeyStore(os.path.join(self.store_path, trust_file_path)) blacklist_file_path = f"{self.user_id}_{self.device_id}.blacklisted_devices" self.blacklist_db = KeyStore(os.path.join(self.store_path, blacklist_file_path)) ignore_file_path = f"{self.user_id}_{self.device_id}.ignored_devices" self.ignore_db = KeyStore(os.path.join(self.store_path, ignore_file_path)) def blacklist_device(self, device: OlmDevice) -> bool: key = Key.from_olmdevice(device) self.trust_db.remove(key) self.ignore_db.remove(key) device.trust_state = TrustState.blacklisted return self.blacklist_db.add(key) def unblacklist_device(self, device: OlmDevice) -> bool: key = Key.from_olmdevice(device) if self.blacklist_db.remove(key): device.trust_state = TrustState.unset return True return False def verify_device(self, device: OlmDevice) -> bool: key = Key.from_olmdevice(device) self.blacklist_db.remove(key) self.ignore_db.remove(key) device.trust_state = TrustState.verified return self.trust_db.add(key) def is_device_verified(self, device: OlmDevice) -> bool: key = Key.from_olmdevice(device) return key in self.trust_db def is_device_blacklisted(self, device: OlmDevice) -> bool: key = Key.from_olmdevice(device) return key in self.blacklist_db def unverify_device(self, device: OlmDevice) -> bool: key = Key.from_olmdevice(device) if self.trust_db.remove(key): device.trust_state = TrustState.unset return True return False def ignore_device(self, device: OlmDevice) -> bool: key = Key.from_olmdevice(device) self.blacklist_db.remove(key) self.trust_db.remove(key) device.trust_state = TrustState.ignored return self.ignore_db.add(key) def unignore_device(self, device: OlmDevice) -> bool: key = Key.from_olmdevice(device) if self.ignore_db.remove(key): device.trust_state = TrustState.unset return 
True return False def ignore_devices(self, devices: List[OlmDevice]) -> None: keys = [Key.from_olmdevice(device) for device in devices] self.blacklist_db.remove_many(keys) self.trust_db.remove_many(keys) self.ignore_db.add_many(keys) for device in devices: device.trust_state = TrustState.ignored return def is_device_ignored(self, device: OlmDevice) -> bool: key = Key.from_olmdevice(device) return key in self.ignore_db @use_database def load_device_keys(self) -> DeviceStore: store = DeviceStore() account = self._get_account() if not account: return store for d in account.device_keys: device = OlmDevice( d.user_id, d.device_id, {k.key_type: k.key for k in d.keys}, display_name=d.display_name, deleted=d.deleted, ) trust_state = TrustState.unset key = Key.from_olmdevice(device) if key in self.trust_db: trust_state = TrustState.verified elif key in self.blacklist_db: trust_state = TrustState.blacklisted elif key in self.ignore_db: trust_state = TrustState.ignored device.trust_state = trust_state store.add(device) return store @dataclass class SqliteStore(MatrixStore): """The Sqlite only nio Matrix Store. This store uses an Sqlite database as the main storage format as well as the store format for the trust state. Args: user_id (str): The fully-qualified ID of the user that owns the store. device_id (str): The device id of the user's device. store_path (str): The path where the store should be stored. pickle_key (str, optional): A passphrase that will be used to encrypt encryption keys while they are in storage. database_name (str, optional): The file-name of the database that should be used. 
""" models = MatrixStore.models + [DeviceTrustState] def _get_device(self, device): acc = self._get_account() if not acc: return None try: return DeviceKeys.get( DeviceKeys.user_id == device.user_id, DeviceKeys.device_id == device.id, DeviceKeys.account == acc, ) except DoesNotExist: return None @use_database def verify_device(self, device: OlmDevice) -> bool: if self.is_device_verified(device): return False d = self._get_device(device) assert d DeviceTrustState.replace(device=d, state=TrustState.verified).execute() device.trust_state = TrustState.verified return True @use_database def unverify_device(self, device: OlmDevice) -> bool: if not self.is_device_verified(device): return False d = self._get_device(device) assert d DeviceTrustState.replace(device=d, state=TrustState.unset).execute() device.trust_state = TrustState.unset return True @use_database def is_device_verified(self, device: OlmDevice) -> bool: d = self._get_device(device) if not d: return False try: trust_state = d.trust_state[0].state except IndexError: return False return trust_state == TrustState.verified @use_database def blacklist_device(self, device: OlmDevice) -> bool: if self.is_device_blacklisted(device): return False d = self._get_device(device) assert d DeviceTrustState.replace(device=d, state=TrustState.blacklisted).execute() device.trust_state = TrustState.blacklisted return True @use_database def unblacklist_device(self, device: OlmDevice) -> bool: if not self.is_device_blacklisted(device): return False d = self._get_device(device) assert d DeviceTrustState.replace(device=d, state=TrustState.unset).execute() device.trust_state = TrustState.unset return True @use_database def is_device_blacklisted(self, device: OlmDevice) -> bool: d = self._get_device(device) if not d: return False try: trust_state = d.trust_state[0].state except IndexError: return False return trust_state == TrustState.blacklisted @use_database def ignore_device(self, device: OlmDevice) -> bool: if 
self.is_device_ignored(device): return False d = self._get_device(device) assert d DeviceTrustState.replace(device=d, state=TrustState.ignored).execute() device.trust_state = TrustState.ignored return True @use_database def unignore_device(self, device: OlmDevice) -> bool: if not self.is_device_ignored(device): return False d = self._get_device(device) assert d DeviceTrustState.replace(device=d, state=TrustState.unset).execute() device.trust_state = TrustState.unset return True def _legacy_get_device_ids(self, account, devices): device_ids = [] for device in devices: d = DeviceKeys.get_or_none( DeviceKeys.account == account.id, DeviceKeys.user_id == device.user_id, DeviceKeys.device_id == device.id, ) assert d device_ids.append(d.id) return device_ids def _get_device_ids(self, account, devices): device_ids = [] tuple_values = [(d.user_id, d.id) for d in devices] values = [item for sublist in tuple_values for item in sublist] for idx in range(0, len(values), 300): data = values[idx : idx + 300] query_string = ( "SELECT devicekeys.* from devicekeys " "JOIN accounts ON devicekeys.account_id=accounts.id " "WHERE accounts.id == ? 
AND " "(devicekeys.user_id, devicekeys.device_id) IN " f"(VALUES {','.join(['(?, ?)'] * (len(data) // 2))})" ) query = DeviceKeys.raw(query_string, account.id, *data) device_ids += [device_key.id for device_key in query] return device_ids @use_database_atomic def ignore_devices(self, devices: List[OlmDevice]) -> None: acc = self._get_account() if not acc: return None if sqlite3.sqlite_version_info >= (3, 15, 2): device_ids = self._get_device_ids(acc, devices) else: device_ids = self._legacy_get_device_ids(acc, devices) rows = [ {"device_id": device_id, "state": TrustState.ignored} for device_id in device_ids ] assert len(rows) == len(devices) for idx in range(0, len(rows), 100): trust_data = rows[idx : idx + 100] DeviceTrustState.replace_many(trust_data).execute() for device in devices: device.trust_state = TrustState.ignored @use_database def is_device_ignored(self, device: OlmDevice) -> bool: d = self._get_device(device) if not d: return False try: trust_state = d.trust_state[0].state except IndexError: return False return trust_state == TrustState.ignored @use_database def load_device_keys(self) -> DeviceStore: store = DeviceStore() account = self._get_account() if not account: return store for d in account.device_keys: try: trust_state = d.trust_state[0].state except IndexError: trust_state = TrustState.unset store.add( OlmDevice( d.user_id, d.device_id, {k.key_type: k.key for k in d.keys}, display_name=d.display_name, deleted=d.deleted, trust_state=trust_state, ) ) return store class SqliteMemoryStore(SqliteStore): """The Sqlite only nio Matrix Store. This store uses a Sqlite database as the main storage format as well as the store format for the trust state. The Sqlite database will be stored only in memory and all the data will be lost after the object is deleted. Args: user_id (str): The fully-qualified ID of the user that owns the store. device_id (str): The device id of the user's device. 
        pickle_key (str, optional): A passphrase that will be used to encrypt
            encryption keys while they are in storage.

    """

    def __init__(self, user_id, device_id, pickle_key=""):
        # store_path is irrelevant for an in-memory database, pass "".
        super().__init__(user_id, device_id, "", pickle_key=pickle_key)

    def _create_database(self):
        return SqliteDatabase(
            ":memory:",
            pragmas={
                "foreign_keys": 1,
                "secure_delete": 1,
            },
        )
matrix-nio-0.24.0/nio/store/file_trustdb.py000066400000000000000000000107121455215747700206670ustar00rootroot00000000000000
from __future__ import annotations

from functools import wraps
from typing import Any, Iterator, List, Optional

from atomicwrites import atomic_write

from ..crypto import OlmDevice
from ..exceptions import OlmTrustError
from . import logger


class Key:
    """A single (user_id, device_id, key) entry of a known_hosts-style file."""

    def __init__(self, user_id: str, device_id: str, key: str):
        self.user_id = user_id
        self.device_id = device_id
        self.key = key

    @classmethod
    def from_line(cls, line: str) -> Optional[Key]:
        """Parse one file line; returns None for malformed/unknown key types."""
        fields = line.split(" ")

        if len(fields) < 4:
            return None

        user_id, device_id, key_type, key = fields[:4]

        # Only ed25519 fingerprint keys are supported currently.
        if key_type == "matrix-ed25519":
            return Ed25519Key(user_id.strip(), device_id.strip(), key.strip())
        else:
            return None

    def to_line(self) -> str:
        """Serialize this entry back into its single-line file format."""
        key_type = ""

        if isinstance(self, Ed25519Key):
            key_type = "matrix-ed25519"
        else:  # pragma: no cover
            raise NotImplementedError(f"Invalid key type {type(self.key)}")

        line = f"{self.user_id} {self.device_id} {key_type} {str(self.key)}\n"
        return line

    @classmethod
    def from_olmdevice(cls, device: OlmDevice) -> Ed25519Key:
        """Build an entry from a device's ed25519 fingerprint key."""
        user_id = device.user_id
        device_id = device.id
        return Ed25519Key(user_id, device_id, device.ed25519)


class Ed25519Key(Key):
    def __eq__(self, value: Any) -> bool:
        # Two entries match only if user, device and key all agree.
        if not isinstance(value, Ed25519Key):
            return NotImplemented

        if (
            self.user_id == value.user_id
            and self.device_id == value.device_id
            and self.key == value.key
        ):
            return True

        return False


class KeyStore:
    """A file-backed list of Key entries (ssh known_hosts-like format)."""

    def __init__(self, filename: str):
        self._entries: List[Key] = []
        self._filename: str = filename

        self._load(filename)

    def __iter__(self) -> Iterator[Key]:
        yield from self._entries

    def __repr__(self) -> str:
        return f"KeyStore object, file: {self._filename}"

    def _load(self, filename: str):
        # A missing file is treated as an empty store; blank lines and
        # '#'-comments are skipped, unparsable lines are silently dropped.
        try:
            with open(filename) as f:
                for line in f:
                    line = line.strip()

                    if not line or line.startswith("#"):
                        continue

                    entry = Key.from_line(line)

                    if not entry:
                        continue

                    self._entries.append(entry)
        except FileNotFoundError:
            pass

    def get_key(self, user_id: str, device_id: str) -> Optional[Key]:
        """Return the first entry matching the user/device pair, if any."""
        for entry in self._entries:
            if user_id == entry.user_id and device_id == entry.device_id:
                return entry

        return None

    def _save_store(f):
        # Decorator: run the mutating method, then persist the store to disk.
        @wraps(f)
        def decorated(self, *args, **kwargs):
            ret = f(self, *args, **kwargs)
            self._save()
            return ret

        return decorated

    def _save(self):
        # Atomic rewrite of the whole file so a crash can't truncate it.
        with atomic_write(self._filename, overwrite=True) as f:
            for entry in self._entries:
                line = entry.to_line()
                f.write(line)

    @_save_store  # type: ignore
    def add_many(self, keys: List[Key]):
        for key in keys:
            self._add_without_save(key)

    def _add_without_save(self, key: Key) -> bool:
        # Raises OlmTrustError if an entry for the same user/device exists
        # with a DIFFERENT fingerprint (possible impersonation).
        # NOTE(review): if an identical entry already exists this still
        # appends a duplicate — verify that is intentional upstream.
        existing_key = self.get_key(key.user_id, key.device_id)

        if existing_key:
            if (
                existing_key.user_id == key.user_id
                and existing_key.device_id == key.device_id
                and type(existing_key) is type(key)
            ):
                if existing_key.key != key.key:
                    message = (
                        f"Error: adding existing device to trust store with "
                        f"mismatching fingerprint {key.key} {existing_key.key}"
                    )
                    logger.error(message)
                    raise OlmTrustError(message)

        self._entries.append(key)
        return True

    @_save_store  # type: ignore
    def add(self, key: Key) -> bool:
        """Add a key entry and persist the store."""
        return self._add_without_save(key)

    @_save_store  # type: ignore
    def remove_many(self, keys: List[Key]):
        for key in keys:
            if key in self._entries:
                self._entries.remove(key)

    @_save_store  # type: ignore
    def remove(self, key: Key) -> bool:
        """Remove a key entry and persist; returns False if absent."""
        if key in self._entries:
            self._entries.remove(key)
            return True

        return False

    def check(self, key: Key) -> bool:
        """Return True if the exact entry is present in the store."""
        return key in self._entries
matrix-nio-0.24.0/nio/store/log.py000066400000000000000000000015001455215747700167650ustar00rootroot00000000000000
import logging

# Copyright © 2019
Damir Jelić # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. logger = logging.getLogger(__name__) matrix-nio-0.24.0/nio/store/models.py000066400000000000000000000127711455215747700174730ustar00rootroot00000000000000# Copyright 2018 Zil0 # Copyright © 2018, 2019 Damir Jelić # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
import time
from datetime import datetime

from peewee import (
    SQL,
    BlobField,
    BooleanField,
    ForeignKeyField,
    IntegerField,
    Model,
    TextField,
)

from ..crypto import TrustState


class ByteField(BlobField):
    """Blob field that normalizes str/bytearray values to bytes."""

    def python_value(self, value):  # pragma: no cover
        if isinstance(value, bytes):
            return value

        return bytes(value, "utf-8")

    def db_value(self, value):  # pragma: no cover
        if isinstance(value, bytearray):
            return bytes(value)

        return value


class DeviceTrustField(IntegerField):
    """Database field to hold a TrustState enum value."""

    def python_value(self, value):  # pragma: no cover
        return TrustState(value)

    def db_value(self, value):  # pragma: no cover
        return value.value


# Please don't remove this.
# This is a workaround for this bug: https://bugs.python.org/issue27400
class DateField(TextField):
    def python_value(self, value):  # pragma: no cover
        format = "%Y-%m-%d %H:%M:%S.%f"
        try:
            return datetime.strptime(value, format)
        except TypeError:
            # Fallback parse via time.strptime (see the bug linked above).
            return datetime(*(time.strptime(value, format)[0:6]))

    def db_value(self, value):  # pragma: no cover
        return value.strftime("%Y-%m-%d %H:%M:%S.%f")


class StoreVersion(Model):
    # Schema version of the store, used for migrations.
    version = IntegerField()


class Accounts(Model):
    # Pickled olm account blob plus its owning user/device pair.
    account = ByteField()
    user_id = TextField()
    device_id = TextField()
    shared = BooleanField()

    class Meta:
        constraints = [SQL("UNIQUE(user_id,device_id)")]


class OlmSessions(Model):
    creation_time = DateField()
    last_usage_date = DateField()
    sender_key = TextField()
    account = ForeignKeyField(
        model=Accounts, backref="olm_sessions", on_delete="CASCADE"
    )
    session = ByteField()
    session_id = TextField(primary_key=True)


class DeviceKeys_v1(Model):
    # Legacy (v1) device-keys table layout, kept for migration purposes;
    # note it shares the "devicekeys" table name with the current model.
    sender_key = TextField()
    deleted = BooleanField()
    account = ForeignKeyField(
        model=Accounts,
        column_name="account_id",
        backref="device_keys",
        on_delete="CASCADE",
    )
    fp_key = TextField()
    device_id = TextField()
    user_id = TextField()

    class Meta:
        constraints = [SQL("UNIQUE(account_id,user_id,device_id)")]
        table_name = "devicekeys"


class DeviceKeys(Model):
    device_id = TextField()
    user_id = TextField()
    display_name = TextField(default="")
    deleted = BooleanField()
    account = ForeignKeyField(
        model=Accounts,
        column_name="account_id",
        backref="device_keys",
        on_delete="CASCADE",
    )

    class Meta:
        constraints = [SQL("UNIQUE(account_id,user_id,device_id)")]


class Keys(Model):
    # Individual identity keys (one row per key type) of a device.
    key_type = TextField()
    key = TextField()
    device = ForeignKeyField(
        model=DeviceKeys,
        column_name="device_id",
        backref="keys",
    )

    class Meta:
        constraints = [SQL("UNIQUE(device_id,key_type)")]


class DeviceTrustState(Model):
    # One trust-state row per device (device is the primary key).
    state = DeviceTrustField()
    device = ForeignKeyField(
        model=DeviceKeys,
        primary_key=True,
        backref="trust_state",
        column_name="device_id",
    )


class MegolmInboundSessions(Model):
    sender_key = TextField()
    account = ForeignKeyField(
        model=Accounts,
        backref="inbound_group_sessions",
        on_delete="CASCADE",
    )
    fp_key = TextField()
    room_id = TextField()
    session = ByteField()
    session_id = TextField(primary_key=True)


class ForwardedChains(Model):
    # Forwarding chain entries of a forwarded megolm session.
    sender_key = TextField()
    session = ForeignKeyField(
        model=MegolmInboundSessions,
        column_name="session_id",
        backref="forwarded_chains",
        on_delete="CASCADE",
    )

    class Meta:
        constraints = [SQL("UNIQUE(sender_key,session_id)")]


class EncryptedRooms(Model):
    room_id = TextField()
    account = ForeignKeyField(
        model=Accounts,
        column_name="account_id",
        on_delete="CASCADE",
        backref="encrypted_rooms",
    )

    class Meta:
        constraints = [SQL("UNIQUE(room_id,account_id)")]


class OutgoingKeyRequests(Model):
    request_id = TextField()
    session_id = TextField()
    room_id = TextField()
    algorithm = TextField()
    account = ForeignKeyField(
        model=Accounts,
        column_name="account_id",
        on_delete="CASCADE",
        backref="out_key_requests",
    )

    class Meta:
        constraints = [SQL("UNIQUE(request_id,account_id)")]


class SyncTokens(Model):
    # At most one sync token per account (see the UNIQUE constraint).
    token = TextField()
    account = ForeignKeyField(
        model=Accounts,
        on_delete="CASCADE",
        backref="sync_token",
    )

    class Meta:
        constraints = [SQL("UNIQUE(account_id)")]


class TrackedUsers(Model):
    user_id = TextField()
    account = ForeignKeyField(
        model=Accounts,
column_name="account_id", on_delete="CASCADE", backref="tracked_users", ) class Meta: constraints = [SQL("UNIQUE(account_id,user_id)")] matrix-nio-0.24.0/pyproject.toml000066400000000000000000000051741455215747700166300ustar00rootroot00000000000000[build-system] requires = ["poetry_core>=1.0.0"] build-backend = "poetry.core.masonry.api" [tool.poetry] name = "matrix-nio" version = "0.24.0" description = "A Python Matrix client library, designed according to sans I/O principles." authors = [ "Damir Jelić ", "Paarth Shah " ] license = "ISC" readme = "README.md" repository = "https://github.com/poljar/matrix-nio" documentation = "https://matrix-nio.readthedocs.io/en/latest/" packages = [ {include = "nio"} ] [tool.poetry.dependencies] python = "^3.8.0" aiohttp = "^3.9.0" aiofiles = "^23.1.0" h11 = "^0.14.0" h2 = "^4.0.0" jsonschema = "^4.14.0" unpaddedbase64 = "^2.1.0" pycryptodome = "^3.10.1" python-olm = { version = "^3.1.3", optional = true } peewee = { version = "^3.14.4", optional = true } cachetools = { version = "^4.2.1", optional = true } atomicwrites = { version = "^1.4.0", optional = true } aiohttp-socks = "^0.8.4" [tool.poetry.extras] e2e = ["python-olm", "peewee", "cachetools", "atomicwrites"] [tool.poetry.dev-dependencies] pytest = "^6.2.3" pytest-isort = "^1.3.0" pytest-cov = "^2.11.1" hyperframe = "^6.0.0" hypothesis = "^6.8.9" hpack = "^4.0.0" faker = "^8.0.0" mypy = "^0.812" pytest-aiohttp = "^0.3.0" types-peewee = "^3.16.0.2" aioresponses = "^0.7.4" pytest-benchmark = "^3.2.3" ruff = "^0.0.291" [tool.ruff] select = [ "E", # pycodestyle "F", # Pyflakes "I001", # isort "UP", # pyupgrade "ASYNC", # flake8-async "C4", # flake8-comprehensions "T10", # flake8-debugger "FA", # flake8-future-annotations "PT", # flake8-pytest-style "RSE", # flake8-raise "PERF", # Perflint "FURB", # refurb ] ignore = [ "E501", # https://www.flake8rules.com/rules/E501.html - Let `black` handle this. ] # Allow autofix for all enabled rules (when `--fix`) is provided. 
fixable = ["ALL"] unfixable = [] # Exclude a variety of commonly ignored directories. exclude = [ ".bzr", ".direnv", ".eggs", ".git", ".git-rewrite", ".hg", ".mypy_cache", ".nox", ".pants.d", ".pytype", ".ruff_cache", ".svn", ".tox", ".venv", "__pypackages__", "_build", "buck-out", "build", "dist", "node_modules", "venv", "venv*", ] # Assume Python 3.8. target-version = "py38" [tool.ruff.pyupgrade] # Preserve types, even if a file imports `from __future__ import annotations`. # Needed for python < 3.10, should be removed afterwards. keep-runtime-typing = true [tool.ruff.per-file-ignores] "__init__.py" = [ "F401", "F403", "I001", ] [tool.ruff.flake8-pytest-style] fixture-parentheses = false matrix-nio-0.24.0/rtd-requirements.txt000066400000000000000000000003621455215747700177610ustar00rootroot00000000000000peewee==3.16.3 aiohttp==3.9.0 h11 h2 jsonschema==4.19.1 atomicwrites cachetools==5.3.1 unpaddedbase64 pycryptodome==3.19.1 sphinx==7.2.6 aiohttp_socks==0.8.3 aioresponses; python_version >= '3.5' aiofiles==23.2.1 m2r2 sphinx_rtd_theme==1.3.0 matrix-nio-0.24.0/setup.cfg000066400000000000000000000001461455215747700155270ustar00rootroot00000000000000[tool:pytest] testpaths = tests flake8-ignore = nio/*.py F401 nio/*.py F403 nio/*.py W503 matrix-nio-0.24.0/test-requirements.txt000066400000000000000000000004471455215747700201530ustar00rootroot00000000000000setuptools poetry pytest pytest-flake8 pytest-isort pytest-cov pytest-benchmark hyperframe hypothesis hpack faker mypy mypy_extensions aiohttp >= 3.9.0b1 pytest-aiohttp aioresponses python-olm>=3.2.15 peewee>=3.17.0 atomicwrites cachetools unpaddedbase64 pycryptodome aiofiles jsonschema h2 h11 matrix-nio-0.24.0/tests/000077500000000000000000000000001455215747700150475ustar00rootroot00000000000000matrix-nio-0.24.0/tests/api_test.py000066400000000000000000000061361455215747700172370ustar00rootroot00000000000000# Copyright © 2021 Famedly GmbH # # Permission to use, copy, modify, and/or distribute this software for # any 
# purpose with or without fee is hereby granted, provided that the
# above copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
# SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from nio.api import Api


class TestClass:
    def test_profile_get(self) -> None:
        """Test that profile_get returns the HTTP path for the request."""
        api = Api()
        encode_pairs = [
            # a normal username
            ("@bob:example.com", "%40bob%3Aexample.com"),
            # an irregular but legal username
            (
                "@a-z0-9._=-/:example.com",
                "%40a-z0-9._%3D-%2F%3Aexample.com"
                # Why include this? https://github.com/poljar/matrix-nio/issues/211
                # There were issues with a username that included slashes, which is
                # legal by the standard: https://matrix.org/docs/spec/appendices#user-identifiers
            ),
        ]
        for unencoded, encoded in encode_pairs:
            expected_path = f"/_matrix/client/r0/profile/{encoded}"
            (method, actual_path) = api.profile_get(unencoded)
            assert actual_path == expected_path

    def test_profile_get_authed(self) -> None:
        """Test that profile_get sets access_token in query param"""
        api = Api()
        user_id = "@bob:example.com"
        encoded = "%40bob%3Aexample.com"
        token = "SECRET_TOKEN"
        expected = f"/_matrix/client/r0/profile/{encoded}?access_token={token}"
        resp = api.profile_get(user_id, token)
        assert resp == ("GET", expected)

    def test_delete_room_alias(self) -> None:
        """Test that delete_room_alias builds the path and access_token query param."""
        api = Api()
        room_alias = "#room:example.com"
        encoded = "%23room%3Aexample.com"
        token = "SECRET_TOKEN"
        expected = f"/_matrix/client/r0/directory/room/{encoded}?access_token={token}"
        resp = api.delete_room_alias(token, room_alias)
        assert resp == ("DELETE", expected)

    def test_put_room_alias(self) -> None:
        """Test that put_room_alias builds the path, query param and JSON body."""
        api = Api()
        room_alias = "#room:example.com"
        encoded = "%23room%3Aexample.com"
        room_id = "!room_id:example.com"
        token = "SECRET_TOKEN"
        expected_path = (
            f"/_matrix/client/r0/directory/room/{encoded}?access_token={token}"
        )
        expected_data = '{"room_id":"!room_id:example.com"}'
        resp = api.put_room_alias(token, room_alias, room_id)
        assert resp == ("PUT", expected_path, expected_data)
matrix-nio-0.24.0/tests/async_attachment_test.py000066400000000000000000000075711455215747700220150ustar00rootroot00000000000000
from pathlib import Path

import aiofiles
import pytest
import unpaddedbase64
from Crypto import Random  # nosec

from nio import EncryptionError
from nio.crypto import async_encrypt_attachment, decrypt_attachment

FILEPATH = "tests/data/test_bytes"


@pytest.mark.asyncio()
class TestClass:
    async def _get_data_cypher_keys(self, data=b"Test bytes"):
        # Run the async encryption generator to completion; the final yielded
        # item is the keys dict, everything before it is ciphertext chunks.
        *chunks, keys = [i async for i in async_encrypt_attachment(data)]
        return (data, b"".join(chunks), keys)

    async def test_encrypt(self, data=b"Test bytes", large=False):
        # Round-trip: encrypt `data` (any supported input form), then decrypt
        # and compare against the canonical plaintext.
        _, ciphertext, keys = await self._get_data_cypher_keys(data)

        plaintext = decrypt_attachment(
            ciphertext,
            keys["key"]["k"],
            keys["hashes"]["sha256"],
            keys["iv"],
        )

        assert plaintext == b"Test bytes" * (16384 if large else 1)

    async def test_encrypt_large_bytes(self):
        # Makes sure our bytes chunking in async_generator_from_data
        # is working correctly
        await self.test_encrypt(b"Test bytes" * 16384, large=True)

    async def test_encrypt_str(self):
        await self.test_encrypt(FILEPATH)

    async def test_encrypt_path_object(self):
        await self.test_encrypt(Path(FILEPATH))

    async def test_encrypt_iterable(self):
        await self.test_encrypt([b"Test ", b"bytes"])

    async def test_encrypt_async_iterable(self):
        async def async_gen():
            yield b"Test "
            yield b"bytes"

        await self.test_encrypt(async_gen())

    async def test_encrypt_file_object(self):
        await self.test_encrypt(open(FILEPATH, "rb"))  # noqa: ASYNC101

    async def test_encrypt_async_file_object(self):
        await self.test_encrypt(await aiofiles.open(FILEPATH, "rb"))

    async def test_encrypt_bad_argument_type(self):
        with pytest.raises(TypeError):
            await self.test_encrypt(123)

    async def test_hash_verification(self):
        # A wrong SHA-256 must make decryption fail.
        data, ciphertext, keys = await self._get_data_cypher_keys()

        with pytest.raises(EncryptionError):
            decrypt_attachment(
                ciphertext,
                keys["key"]["k"],
                "Fake hash",
                keys["iv"],
            )

    async def test_invalid_key(self):
        data, ciphertext, keys = await self._get_data_cypher_keys()

        with pytest.raises(EncryptionError):
            decrypt_attachment(
                ciphertext,
                "Fake key",
                keys["hashes"]["sha256"],
                keys["iv"],
            )

    async def test_invalid_iv(self):
        data, ciphertext, keys = await self._get_data_cypher_keys()

        with pytest.raises(EncryptionError):
            decrypt_attachment(
                ciphertext,
                keys["key"]["k"],
                keys["hashes"]["sha256"],
                "Fake iv",
            )

    async def test_short_key(self):
        # A key of the wrong length must be rejected.
        data, ciphertext, keys = await self._get_data_cypher_keys()

        with pytest.raises(EncryptionError):
            decrypt_attachment(
                ciphertext,
                unpaddedbase64.encode_base64(b"Fake key", urlsafe=True),
                keys["hashes"]["sha256"],
                keys["iv"],
            )

    async def test_short_iv(self):
        # A valid-length but wrong IV decrypts to garbage, not an error.
        data, ciphertext, keys = await self._get_data_cypher_keys()

        plaintext = decrypt_attachment(
            ciphertext,
            keys["key"]["k"],
            keys["hashes"]["sha256"],
            unpaddedbase64.encode_base64(b"F" + b"\x00" * 8),
        )
        assert plaintext != data

    async def test_fake_key(self):
        # A random (but well-formed) key decrypts to garbage, not an error.
        data, ciphertext, keys = await self._get_data_cypher_keys()

        fake_key = Random.new().read(32)

        plaintext = decrypt_attachment(
            ciphertext,
            unpaddedbase64.encode_base64(fake_key, urlsafe=True),
            keys["hashes"]["sha256"],
            keys["iv"],
        )
        assert plaintext != data
matrix-nio-0.24.0/tests/async_client_test.py000066400000000000000000004467331455215747700211470ustar00rootroot00000000000000
import asyncio
import json
import math
import re
import sys import time from datetime import datetime, timedelta from os import path from pathlib import Path from typing import Tuple from unittest.mock import AsyncMock from urllib.parse import urlparse from uuid import uuid4 import aiofiles import pytest from aiohttp import ( ClientRequest, ClientSession, ClientTimeout, TraceRequestChunkSentParams, ) from aioresponses import CallbackResult, aioresponses from helpers import faker from yarl import URL from nio import ( AsyncClient, AsyncClientConfig, ContentRepositoryConfigResponse, DeleteDevicesAuthResponse, DeleteDevicesResponse, DeletePushRuleResponse, DeviceList, DeviceOneTimeKeyCount, DevicesResponse, DirectRoomsErrorResponse, DirectRoomsResponse, DiscoveryInfoError, DiscoveryInfoResponse, DownloadError, DownloadResponse, EnablePushRuleResponse, ErrorResponse, FullyReadEvent, GetOpenIDTokenResponse, JoinedMembersResponse, JoinedRoomsResponse, JoinResponse, KeysClaimResponse, KeysUploadResponse, LocalProtocolError, LoginError, LoginInfoResponse, LoginResponse, LogoutError, LogoutResponse, MegolmEvent, OlmTrustError, PresenceEvent, PresenceGetResponse, PresenceSetResponse, ProfileGetAvatarResponse, ProfileGetDisplayNameResponse, ProfileGetError, ProfileGetResponse, ProfileSetAvatarResponse, ProfileSetDisplayNameResponse, PushCoalesce, PushContainsDisplayName, PushDontNotify, PushEventMatch, PushNotify, PushRoomMemberCount, PushRule, PushRuleKind, PushRuleset, PushRulesEvent, PushSenderNotificationPermission, PushSetTweak, PushUnknownAction, PushUnknownCondition, RegisterResponse, RoomBanResponse, RoomContextResponse, RoomCreateResponse, RoomDeleteAliasResponse, RoomEncryptionEvent, RoomForgetResponse, RoomGetEventError, RoomGetEventResponse, RoomGetStateEventResponse, RoomGetStateResponse, RoomGetVisibilityResponse, RoomInfo, RoomInviteResponse, RoomKeyRequest, RoomKickResponse, RoomKnockResponse, RoomLeaveResponse, RoomMemberEvent, RoomMessagesResponse, RoomMessageText, RoomPutAliasResponse, 
RoomPutStateResponse, RoomReadMarkersResponse, RoomRedactResponse, RoomResolveAliasResponse, Rooms, RoomSendResponse, RoomSummary, RoomTypingResponse, RoomUnbanResponse, SetPushRuleActionsResponse, SetPushRuleResponse, ShareGroupSessionResponse, SpaceGetHierarchyError, SpaceGetHierarchyResponse, SyncResponse, ThumbnailError, ThumbnailResponse, Timeline, TransferCancelledError, TransferMonitor, UpdateDeviceResponse, UpdateReceiptMarkerResponse, UploadFilterResponse, UploadResponse, ) from nio.api import EventFormat, ResizingMethod, RoomPreset, RoomVisibility from nio.client.async_client import connect_wrapper, on_request_chunk_sent from nio.crypto import OlmDevice, Session, decrypt_attachment TEST_ROOM_ID = "!testroom:example.org" ALICE_ID = "@alice:example.org" ALICE_DEVICE_ID = "JLAFKJWSCS" CAROL_ID = "@carol:example.org" DAVE_ID = "@dave:example.org" EIRIN_ID = "@eirin:example.org" @pytest.mark.asyncio() class TestClass: @staticmethod def _load_bytes(filename): with open(filename, "rb") as f: return f.read() @staticmethod def olm_message_to_event(message_dict, recipient, sender, type="m.room.encrypted"): olm_content = message_dict["messages"][recipient.user_id][recipient.device_id] return { "sender": sender.user_id, "type": type, "content": olm_content, } @staticmethod def _load_response(filename): with open(filename) as f: return json.loads(f.read()) @property def register_response(self): return self._load_response("tests/data/register_response.json") @property def login_response(self): return self._load_response("tests/data/login_response.json") @property def hierarchy_response(self): return self._load_response("tests/data/get_hierarchy_response.json") @property def logout_response(self): return self._load_response("tests/data/logout_response.json") @property def keys_upload_response(self): return self._load_response("tests/data/keys_upload.json") @property def final_keys_upload_response(self): return {"one_time_key_counts": {"curve25519": 10, 
"signed_curve25519": 50}} @property def sync_response(self): return self._load_response("tests/data/sync.json") @property def context_response(self): return self._load_response("tests/data/context.json") @property def messages_response(self): return self._load_response("tests/data/room_messages.json") @property def get_openid_token_response(self): return { "access_token": "SomeT0kenHere", "expires_in": 3600, "matrix_server_name": "example.com", "token_type": "Bearer", } @property def keys_query_response(self): return self._load_response("tests/data/keys_query.json") @property def joined_members_response(self): return { "joined": { # joined ALICE_ID: {"avatar_url": None, "display_name": "Alice"}, EIRIN_ID: {"avatar_url": None, "display_name": "Eirin"}, } } @property def joined_rooms_response(self): return {"joined_rooms": [TEST_ROOM_ID]} @property def room_get_state_response(self): return self._load_response("tests/data/room_state.json") @property def encryption_sync_response(self): timeline = Timeline( [ RoomMemberEvent( { "event_id": "event_id_1", "sender": ALICE_ID, "origin_server_ts": 1516809890615, }, ALICE_ID, "join", None, {"membership": "join"}, ), RoomMemberEvent( { "event_id": "event_id_2", "sender": ALICE_ID, "origin_server_ts": 1516809890615, }, CAROL_ID, "invite", None, {"membership": "invite"}, ), RoomEncryptionEvent( { "event_id": "event_id_3", "sender": ALICE_ID, "origin_server_ts": 1516809890615, } ), ], False, "prev_batch_token", ) test_room_info = RoomInfo(timeline, [], [], [], RoomSummary(1, 2, [])) rooms = Rooms({}, {TEST_ROOM_ID: test_room_info}, {}) return SyncResponse( "token123", rooms, DeviceOneTimeKeyCount(49, 50), DeviceList([ALICE_ID], []), [], [], ) def synce_response_for(self, own_user, other_user): timeline = Timeline( [ RoomMemberEvent( { "event_id": "event_id_1", "sender": own_user, "origin_server_ts": 1516809890615, }, own_user, "join", None, {"membership": "join"}, ), RoomMemberEvent( { "event_id": "event_id_1", "sender": 
other_user, "origin_server_ts": 1516809890615, }, other_user, "join", None, {"membership": "join"}, ), RoomEncryptionEvent( { "event_id": "event_id_2", "sender": other_user, "origin_server_ts": 1516809890615, } ), ], False, "prev_batch_token", ) test_room_info = RoomInfo(timeline, [], [], [], RoomSummary(0, 2, [])) rooms = Rooms({}, {TEST_ROOM_ID: test_room_info}, {}) return SyncResponse( "token123", rooms, DeviceOneTimeKeyCount(50, 50), DeviceList([other_user], []), [], [], ) @property def empty_sync(self): return { "account_data": {"events": []}, "device_lists": {"changed": [], "left": []}, "device_one_time_keys_count": {"signed_curve25519": 50}, "groups": {"invite": {}, "join": {}, "leave": {}}, "next_batch": "s1059_133339_44_763_246_1_586_12411_1", "presence": {"events": []}, "rooms": {"invite": {}, "join": {}, "leave": {}}, "to_device": {"events": []}, } def sync_with_to_device_events(self, event, sync_token=None): response = self.empty_sync response["to_device"]["events"].append(event) if sync_token: response["next_batch"] += sync_token return response def sync_with_room_event(self, event, sync_token=None): response = self.empty_sync response["rooms"]["join"][TEST_ROOM_ID] = { "timeline": {"events": [event], "limited": False, "prev_batch": "12345"}, "state": {"events": []}, "ephemeral": {"events": []}, "account_data": {"events": []}, } if sync_token: response["next_batch"] += sync_token return response @property def limit_exceeded_error_response(self): return self._load_response("tests/data/limit_exceeded_error.json") @property def upload_response(self): return self._load_response("tests/data/upload_response.json") @property def file_response(self): return self._load_bytes("tests/data/file_response") @staticmethod def room_id_response(room_id): return {"room_id": room_id} @staticmethod def get_profile_response(displayname, avatar_url): return {"displayname": displayname, "avatar_url": avatar_url} @staticmethod def get_profile_unauth_error_response(): return 
{"errcode": "M_MISSING_TOKEN", "error": "Missing access token"} @staticmethod def get_displayname_response(displayname): return {"displayname": displayname} @staticmethod def get_avatar_response(avatar_url): return {"avatar_url": avatar_url} @property def room_resolve_alias_response(self): return {"room_id": TEST_ROOM_ID, "servers": ["example.org", "matrix.org"]} @property def whoami_response(self): return self._load_response("tests/data/whoami_response.json") async def test_mxc_to_http(self, async_client): mxc = "mxc://privacytools.io/123foo" url_path = "/_matrix/media/r0/download/privacytools.io/123foo" async_client.homeserver = "https://chat.privacytools.io" expected = f"{async_client.homeserver}{url_path}" assert await async_client.mxc_to_http(mxc) == expected other_server = "http://localhost:8081" expected = f"{other_server}{url_path}" assert await async_client.mxc_to_http(mxc, other_server) == expected async def test_register(self, async_client, aioresponse): assert not async_client.access_token aioresponse.post( "https://example.org/_matrix/client/r0/register", status=200, payload=self.register_response, ) resp = await async_client.register("user", "password") assert isinstance(resp, RegisterResponse) assert async_client.access_token async def test_register_with_token(self, async_client, aioresponse): assert not async_client.access_token # first response should return session token + flows aioresponse.post( "https://example.org/_matrix/client/r0/register", status=401, payload={ "session": "abc1234", "flows": [{"stages": ["m.login.registration_token", "m.login.dummy"]}], "params": {}, }, ) # second response indicates that registration_token flow is completed aioresponse.post( "https://example.org/_matrix/client/r0/register", status=401, payload={ "session": "abc1234", "flows": [{"stages": ["m.login.registration_token", "m.login.dummy"]}], "params": {}, "completed": [ "m.login.registration_token", ], }, ) # third response should return access token 
aioresponse.post( "https://example.org/_matrix/client/r0/register", status=200, payload=self.register_response, ) resp = await async_client.register_with_token("user", "password", "token") assert isinstance(resp, RegisterResponse) assert async_client.access_token async def test_discovery_info(self, async_client, aioresponse): aioresponse.get( "https://example.org/.well-known/matrix/client", status=200, payload={ "m.homeserver": {"base_url": "https://an.example.org"}, "m.identity_server": {"base_url": "https://foo.bar"}, }, ) resp = await async_client.discovery_info() assert isinstance(resp, DiscoveryInfoResponse) assert resp.homeserver_url == "https://an.example.org" assert resp.identity_server_url == "https://foo.bar" async def test_discovery_info_trailing_slashes( self, async_client, aioresponse, ): aioresponse.get( "https://example.org/.well-known/matrix/client", status=200, payload={ "m.homeserver": {"base_url": "https://an.example.org/"}, "m.identity_server": {"base_url": "https://foo.bar/"}, }, ) resp = await async_client.discovery_info() assert isinstance(resp, DiscoveryInfoResponse) assert resp.homeserver_url == "https://an.example.org" assert resp.identity_server_url == "https://foo.bar" async def test_discovery_info_invalid_content_type( # matrix.org does this self, async_client, aioresponse, ): aioresponse.get( "https://example.org/.well-known/matrix/client", status=200, payload={"m.homeserver": {"base_url": "https://an.example.org"}}, content_type="", ) resp = await async_client.discovery_info() assert isinstance(resp, DiscoveryInfoResponse) assert resp.homeserver_url == "https://an.example.org" assert resp.identity_server_url is None async def test_discovery_info_bad_url(self, async_client, aioresponse): aioresponse.get( "https://example.org/.well-known/matrix/client", status=200, payload={"m.homeserver": {"base_url": "invalid://example.org"}}, ) resp2 = await async_client.discovery_info() assert isinstance(resp2, DiscoveryInfoError) async def 
test_login_info(self, async_client, aioresponse): """Test that we can get login info""" aioresponse.get( "https://example.org/_matrix/client/r0/login", status=200, payload={"flows": [{"type": "m.login.password"}]}, ) resp = await async_client.login_info() assert isinstance(resp, LoginInfoResponse) async def test_login(self, async_client, aioresponse): assert not async_client.access_token assert not async_client.logged_in aioresponse.post( "https://example.org/_matrix/client/r0/login", status=200, payload=self.login_response, ) resp = await async_client.login("wordpass") assert isinstance(resp, LoginResponse) assert async_client.access_token assert async_client.logged_in async def test_failed_login(self, async_client, aioresponse): assert not async_client.access_token assert not async_client.logged_in aioresponse.post( "https://example.org/_matrix/client/r0/login", status=400, body="" ) resp = await async_client.login("wordpass") assert isinstance(resp, LoginError) assert not async_client.logged_in assert async_client.client_session await async_client.close() assert not async_client.client_session async def test_login_raw(self, async_client, aioresponse): assert not async_client.access_token assert not async_client.logged_in aioresponse.post( "https://example.org/_matrix/client/r0/login", status=200, payload=self.login_response, ) auth_dict = { "type": "m.login.password", "identifier": { "type": "m.id.thirdparty", "medium": "email", "address": "testemail@mail.org", }, "password": "PASSWORDABCD", "initial_device_display_name": "Test user", } resp = await async_client.login_raw(auth_dict) assert isinstance(resp, LoginResponse) assert async_client.access_token assert async_client.logged_in async def test_failed_login_raw(self, async_client, aioresponse): assert not async_client.access_token assert not async_client.logged_in aioresponse.post( "https://example.org/_matrix/client/r0/login", status=400, body="" ) auth_dict = { "type": "m.login.password", "identifier": { 
"type": "m.id.thirdparty", "medium": "email", "address": "testemail@mail.org", }, "password": "WRONGPASSWORD", "initial_device_display_name": "Test user", } resp = await async_client.login_raw(auth_dict) assert isinstance(resp, LoginError) assert not async_client.logged_in assert async_client.client_session await async_client.close() assert not async_client.client_session async def test_login_raw_with_empty_dict(self, async_client, aioresponse): assert not async_client.access_token assert not async_client.logged_in auth_dict = {} resp = None with pytest.raises(ValueError, match="Auth dictionary shall not be empty"): resp = await async_client.login_raw(auth_dict) assert not resp assert not async_client.logged_in assert not async_client.client_session await async_client.close() assert not async_client.client_session async def test_login_raw_with_none_dict(self, async_client, aioresponse): assert not async_client.access_token assert not async_client.logged_in auth_dict = None resp = None with pytest.raises(ValueError, match="Auth dictionary shall not be empty"): resp = await async_client.login_raw(auth_dict) assert not resp assert not async_client.logged_in assert not async_client.client_session await async_client.close() assert not async_client.client_session async def test_whoami(self, async_client, aioresponse): async_client.restore_login( user_id="unknown", device_id="unknown", access_token="abc123", ) aioresponse.get( "https://example.org/_matrix/client/r0/account/whoami?access_token=abc123", status=200, payload=self.whoami_response, ) await async_client.whoami() assert async_client.user_id != "unknown" assert async_client.device_id != "unknown" async def test_logout(self, async_client, aioresponse): aioresponse.post( "https://example.org/_matrix/client/r0/login", status=200, payload=self.login_response, ) aioresponse.post( "https://example.org/_matrix/client/r0/logout?access_token=abc123", status=200, payload=self.logout_response, ) resp = await 
async_client.login("wordpass") assert async_client.access_token assert async_client.logged_in resp2 = await async_client.logout() assert isinstance(resp, LoginResponse) assert isinstance(resp2, LogoutResponse) assert not async_client.access_token assert not async_client.logged_in async def test_failed_logout(self, async_client, aioresponse): aioresponse.post( "https://example.org/_matrix/client/r0/login", status=200, payload=self.login_response, ) aioresponse.post( "https://example.org/_matrix/client/r0/logout?access_token=abc123", status=400, body="", ) resp = await async_client.login("wordpass") assert async_client.access_token assert async_client.logged_in resp2 = await async_client.logout() assert isinstance(resp, LoginResponse) assert isinstance(resp2, LogoutError) assert async_client.access_token assert async_client.logged_in async def test_logout_all_devices(self, async_client, aioresponse): aioresponse.post( "https://example.org/_matrix/client/r0/login", status=200, payload=self.login_response, ) aioresponse.post( "https://example.org/_matrix/client/r0/logout/all?access_token=abc123", status=200, payload=self.logout_response, ) resp = await async_client.login("wordpass") assert async_client.access_token assert async_client.logged_in resp2 = await async_client.logout(all_devices=True) assert isinstance(resp, LoginResponse) assert isinstance(resp2, LogoutResponse) assert not async_client.access_token assert not async_client.logged_in async def test_failed_logout_all_devices(self, async_client, aioresponse): aioresponse.post( "https://example.org/_matrix/client/r0/login", status=200, payload=self.login_response, ) aioresponse.post( "https://example.org/_matrix/client/r0/logout/all?access_token=abc123", status=400, body="", ) resp = await async_client.login("wordpass") assert async_client.access_token assert async_client.logged_in resp2 = await async_client.logout(all_devices=True) assert isinstance(resp, LoginResponse) assert isinstance(resp2, LogoutError) 
assert async_client.access_token assert async_client.logged_in async def test_sync(self, async_client: AsyncClient, aioresponse: aioresponses): aioresponse.post( "https://example.org/_matrix/client/r0/login", status=200, payload=self.login_response, ) url = r"^https://example\.org/_matrix/client/r0/" r"sync\?access_token=abc123" aioresponse.get(re.compile(rf"{url}$"), status=200, payload=self.sync_response) with pytest.raises(LocalProtocolError): resp2 = await async_client.sync() resp = await async_client.login("wordpass") resp2 = await async_client.sync() assert isinstance(resp, LoginResponse) assert isinstance(resp2, SyncResponse) # Test with filter ID aioresponse.get( re.compile(rf"{url}&filter=test_id&since=[\w\d_]*"), status=200, payload=self.sync_response, ) resp3 = await async_client.sync(sync_filter="test_id") assert isinstance(resp3, SyncResponse) # Test with filter dict aioresponse.get( re.compile(rf"{url}&filter=[\w\d%]*&since=[\w\d_]*"), status=200, payload=self.sync_response, ) resp4 = await async_client.sync(sync_filter={}) assert isinstance(resp4, SyncResponse) # Test with timeout aioresponse.get( re.compile(rf"{url}&since=[\w\d_]*&timeout=60000"), status=200, payload=self.sync_response, ) resp5 = await async_client.sync(timeout=None) assert isinstance(resp5, SyncResponse) async def test_sync_presence(self, async_client, aioresponse): """Test if prsences info in sync events are parsed correctly""" await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in aioresponse.get( f"https://example.org/_matrix/client/r0/sync?access_token={async_client.access_token}", status=200, payload=self.sync_response, ) resp = await async_client.sync() assert isinstance(resp, SyncResponse) user = async_client.rooms["!SVkFJHzfwvuaIEawgC:localhost"].users[ "@example:localhost" ] assert user.currently_active assert user.last_active_ago == 1337 assert user.presence == "online" assert user.status_msg == "I am here." 
async def test_sync_notification_counts(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in aioresponse.get( "https://example.org/_matrix/client/r0/sync?access_token=abc123", status=200, payload=self.sync_response, ) resp = await async_client.sync() assert isinstance(resp, SyncResponse) room = async_client.rooms["!SVkFJHzfwvuaIEawgC:localhost"] assert room.unread_notifications == 11 assert room.unread_highlights == 1 async def test_sync_push_rules(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in aioresponse.get( "https://example.org/_matrix/client/r0/sync?access_token=abc123", status=200, payload=self.sync_response, ) resp = await async_client.sync() assert isinstance(resp, SyncResponse) rules = resp.account_data_events[0] assert isinstance(rules, PushRulesEvent) assert isinstance(rules.global_rules, PushRuleset) assert isinstance(rules.device_rules, PushRuleset) # Test __bool__ implementations assert bool(rules) is True assert bool(rules.device_rules) is False assert rules.global_rules.override == [ PushRule( kind=PushRuleKind.override, id=".m.rule.suppress_notices", default=True, enabled=False, actions=[PushDontNotify()], conditions=[PushEventMatch("content.msgtype", "m.notice")], ), ] assert rules.global_rules.content == [ PushRule( kind=PushRuleKind.content, id=".m.rule.contains_user_name", default=True, pattern="alice", actions=[ PushNotify(), PushUnknownAction("do_special_thing"), PushSetTweak("sound", "default"), PushSetTweak("highlight", True), ], ), ] assert not rules.global_rules.room assert not rules.global_rules.sender assert rules.global_rules.underride == [ PushRule( kind=PushRuleKind.underride, id=".m.rule.special_call", default=True, conditions=[ PushUnknownCondition({"kind": "special_kind"}), PushEventMatch("type", "m.call.invite"), ], actions=[ 
PushCoalesce(), PushSetTweak("sound", "ring"), PushSetTweak("highlight", False), ], ), PushRule( kind=PushRuleKind.underride, id=".m.rule.room_less_than_10_room_perm", default=True, conditions=[ PushSenderNotificationPermission("room"), PushRoomMemberCount(10, "<"), PushEventMatch("type", "m.room.message"), ], actions=[PushNotify()], ), PushRule( kind=PushRuleKind.underride, id=".m.rule.room_one_to_one", default=True, conditions=[ PushRoomMemberCount(2, "=="), PushEventMatch("type", "m.room.message"), ], actions=[ PushNotify(), PushSetTweak("sound", "default"), PushSetTweak("highlight", False), ], ), ] async def test_keys_upload(self, async_client, aioresponse): with pytest.raises(LocalProtocolError): resp2 = await async_client.keys_upload() assert not async_client.should_upload_keys aioresponse.post( "https://example.org/_matrix/client/r0/login", status=200, payload=self.login_response, ) aioresponse.post( "https://example.org/_matrix/client/r0/keys/upload?access_token=abc123", status=200, payload=self.keys_upload_response, ) await async_client.login("wordpass") assert async_client.should_upload_keys assert not async_client.olm_account_shared resp2 = await async_client.keys_upload() assert isinstance(resp2, KeysUploadResponse) assert async_client.olm_account_shared assert async_client.should_upload_keys async def test_keys_query(self, async_client, aioresponse): aioresponse.post( "https://example.org/_matrix/client/r0/login", status=200, payload=self.login_response, ) aioresponse.post( "https://example.org/_matrix/client/r0/keys/query?access_token=abc123", status=200, payload=self.keys_query_response, ) await async_client.login("wordpass") assert not async_client.should_query_keys await async_client.receive_response(self.encryption_sync_response) assert async_client.should_query_keys await async_client.keys_query() assert not async_client.should_query_keys async def test_message_sending(self, async_client, aioresponse): aioresponse.post( 
"https://example.org/_matrix/client/r0/login", status=200, payload=self.login_response, ) aioresponse.put( "https://example.org/_matrix/client/r0/rooms/!testroom:example.org/send/m.room.encrypted/1?access_token=abc123", status=200, payload={"event_id": "$1555:example.org"}, ) aioresponse.get( f"https://example.org/_matrix/client/r0/rooms/{TEST_ROOM_ID}/joined_members?access_token=abc123", status=200, payload=self.joined_members_response, ) aioresponse.post( "https://example.org/_matrix/client/r0/keys/query?access_token=abc123", status=200, payload=self.keys_query_response, ) await async_client.login("wordpass") await async_client.receive_response(self.encryption_sync_response) response = await async_client.joined_members(TEST_ROOM_ID) async_client.olm.create_outbound_group_session(TEST_ROOM_ID) async_client.olm.outbound_group_sessions[TEST_ROOM_ID].shared = True response = await async_client.room_send( TEST_ROOM_ID, "m.room.message", {"body": "hello"}, "1" ) assert isinstance(response, RoomSendResponse) async def test_room_get_event(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in base_url = "https://example.org/_matrix/client/r0" response = { "content": { "body": "This is an example text message", "msgtype": "m.text", "format": "org.matrix.custom.html", "formatted_body": "This is an example text message", }, "type": "m.room.message", "event_id": "$15163622445EBvZJ:localhost", "room_id": TEST_ROOM_ID, "sender": "@example:example.org", "origin_server_ts": 1432735824653, "unsigned": {"age": 1234}, } aioresponse.get( f"{base_url}/rooms/{TEST_ROOM_ID}/event/$15163622445EBvZJ:localhost?access_token=abc123", status=200, payload=response, ) resp = await async_client.room_get_event( TEST_ROOM_ID, "$15163622445EBvZJ:localhost" ) assert isinstance(resp, RoomGetEventResponse) assert isinstance(resp.event, RoomMessageText) aioresponse.get( 
f"{base_url}/rooms/{TEST_ROOM_ID}/event/$not-found:localhost?access_token=abc123", status=200, payload={"errcode": "M_NOT_FOUND", "error": "Event not found."}, ) resp = await async_client.room_get_event(TEST_ROOM_ID, "$not-found:localhost") assert isinstance(resp, RoomGetEventError) async def test_list_direct_rooms(self, async_client, aioresponse: aioresponses): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in base_url = "https://example.org/_matrix/client/r0" response = { "@alice:example.org": ["!foobar:example.org"], "@bob:example.org": ["!dingle:example.org", "!dongle:example.org"], } aioresponse.get( f"{base_url}/user/{async_client.user_id}/account_data/m.direct?access_token=abc123", status=200, payload=response, ) resp = await async_client.list_direct_rooms() assert isinstance(resp, DirectRoomsResponse) response = { "errcode": "M_NOT_FOUND", "error": "Account data not found", } aioresponse.get( f"{base_url}/user/{async_client.user_id}/account_data/m.direct?access_token=abc123", status=404, payload=response, ) resp = await async_client.list_direct_rooms() assert isinstance(resp, DirectRoomsErrorResponse) async def test_room_put_state(self, async_client, aioresponse: aioresponses): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in base_url = "https://example.org/_matrix/client/r0" # Test when key is set state_key = "a-state-key" aioresponse.put( f"{base_url}/rooms/{TEST_ROOM_ID}/state/org.example.event_type/{state_key}?access_token=abc123", status=200, payload={"event_id": "$1337stateeventid2342:example.org"}, ) resp = await async_client.room_put_state( room_id=TEST_ROOM_ID, event_type="org.example.event_type", content={}, state_key=state_key, ) assert isinstance(resp, RoomPutStateResponse) # Test when key is empty (and slash is optional) aioresponse.put( 
f"{base_url}/rooms/{TEST_ROOM_ID}/state/org.example.event_type?access_token=abc123", status=200, payload={"event_id": "$1337stateeventid2342:example.org"}, ) resp = await async_client.room_put_state( room_id=TEST_ROOM_ID, event_type="org.example.event_type", content={}, state_key="", ) assert isinstance(resp, RoomPutStateResponse) async def test_room_get_state_event(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in base_url = "https://example.org/_matrix/client/r0" # Test when state key is set state_key = "a-state-key" aioresponse.get( f"{base_url}/rooms/{TEST_ROOM_ID}/state/m.room.name/{state_key}?access_token=abc123", status=200, payload={"name": "Test Room"}, ) resp = await async_client.room_get_state_event( room_id=TEST_ROOM_ID, event_type="m.room.name", state_key=state_key ) assert isinstance(resp, RoomGetStateEventResponse) # without state key aioresponse.get( f"{base_url}/rooms/{TEST_ROOM_ID}/state/m.room.name?access_token=abc123", status=200, payload={"name": "Test Room"}, ) resp = await async_client.room_get_state_event( room_id=TEST_ROOM_ID, event_type="m.room.name", state_key="" ) assert isinstance(resp, RoomGetStateEventResponse) async def test_room_get_state(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in base_url = "https://example.org/_matrix/client/r0" aioresponse.get( f"{base_url}/rooms/{TEST_ROOM_ID}/state?access_token=abc123", status=200, payload=self.room_get_state_response, ) resp = await async_client.room_get_state( TEST_ROOM_ID, ) assert isinstance(resp, RoomGetStateResponse) def keys_claim_dict(self, client): to_share = client.olm.share_keys() one_time_key = list(to_share["one_time_keys"].items())[0] return { "one_time_keys": { ALICE_ID: { ALICE_DEVICE_ID: {one_time_key[0]: one_time_key[1]}, }, }, "failures": {}, } async def 
test_key_claiming(self, alice_client, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in await async_client.receive_response(self.encryption_sync_response) alice_client.load_store() alice_device = OlmDevice( ALICE_ID, ALICE_DEVICE_ID, alice_client.olm.account.identity_keys ) async_client.device_store.add(alice_device) missing = async_client.get_missing_sessions(TEST_ROOM_ID) assert ALICE_ID in missing assert ALICE_DEVICE_ID in missing[ALICE_ID] aioresponse.post( "https://example.org/_matrix/client/r0/keys/claim?access_token=abc123", status=200, payload=self.keys_claim_dict(alice_client), ) response = await async_client.keys_claim(missing) assert isinstance(response, KeysClaimResponse) assert not async_client.get_missing_sessions(TEST_ROOM_ID) assert async_client.olm.session_store.get(alice_device.curve25519) async def test_session_sharing(self, alice_client, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in await async_client.receive_response(self.encryption_sync_response) alice_client.load_store() alice_device = OlmDevice( ALICE_ID, ALICE_DEVICE_ID, alice_client.olm.account.identity_keys ) async_client.device_store.add(alice_device) async_client.verify_device(alice_device) missing = async_client.get_missing_sessions(TEST_ROOM_ID) assert ALICE_ID in missing assert ALICE_DEVICE_ID in missing[ALICE_ID] to_share = alice_client.olm.share_keys() one_time_key = list(to_share["one_time_keys"].items())[0] key_claim_dict = { "one_time_keys": { ALICE_ID: { ALICE_DEVICE_ID: {one_time_key[0]: one_time_key[1]}, }, }, "failures": {}, } aioresponse.post( "https://example.org/_matrix/client/r0/keys/claim?access_token=abc123", status=200, payload=key_claim_dict, ) aioresponse.put( "https://example.org/_matrix/client/r0/sendToDevice/m.room.encrypted/1?access_token=abc123", status=200, payload={}, ) with 
pytest.raises(KeyError): session = async_client.olm.outbound_group_sessions[TEST_ROOM_ID] response = await async_client.share_group_session(TEST_ROOM_ID) session = async_client.olm.outbound_group_sessions[TEST_ROOM_ID] assert session.shared assert isinstance(response, ShareGroupSessionResponse) assert not async_client.get_missing_sessions(TEST_ROOM_ID) assert async_client.olm.session_store.get(alice_device.curve25519) async def test_session_sharing_2(self, alice_client, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in await async_client.receive_response(self.encryption_sync_response) alice_client.load_store() aioresponse.put( "https://example.org/_matrix/client/r0/sendToDevice/m.room_key_request/1?access_token=abc123", status=200, payload={}, ) event = MegolmEvent.from_dict( self._load_response("tests/data/events/megolm.json") ) await async_client.request_room_key(event, "1") assert ( "X3lUlvLELLYxeTx4yOVu6UDpasGEVO0Jbu+QFnm0cKQ" in async_client.outgoing_key_requests ) async def test_get_openid_token(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in aioresponse.post( f"https://example.org/_matrix/client/r0/user/{ALICE_ID}/openid/request_token?access_token=abc123", status=200, payload=self.get_openid_token_response, ) resp = await async_client.get_openid_token(ALICE_ID) assert isinstance(resp, GetOpenIDTokenResponse) async def test_joined_members(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in resp = self.encryption_sync_response # Mimic an outdated initial sync (synapse bug?) with a member that # was present before, but already left and is absent from # joined_members_response. 
resp.rooms.join[TEST_ROOM_ID].timeline.events.append( RoomMemberEvent( { "event_id": "event_id_4", "sender": DAVE_ID, "origin_server_ts": 1516809890699, }, DAVE_ID, "join", None, {"membership": "join"}, ), ) await async_client.receive_response(resp) aioresponse.get( f"https://example.org/_matrix/client/r0/rooms/{TEST_ROOM_ID}/" "joined_members?access_token=abc123", status=200, payload=self.joined_members_response, ) room = async_client.rooms[TEST_ROOM_ID] assert not room.members_synced assert tuple(room.users) == (ALICE_ID, CAROL_ID, DAVE_ID) assert tuple(room.invited_users) == (CAROL_ID,) response = await async_client.joined_members(TEST_ROOM_ID) assert isinstance(response, JoinedMembersResponse) assert room.members_synced assert tuple(room.users) == (ALICE_ID, CAROL_ID, EIRIN_ID) assert tuple(room.invited_users) == (CAROL_ID,) async def test_joined_rooms(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in aioresponse.get( "https://example.org/_matrix/client/r0/joined_rooms?access_token=abc123", status=200, payload=self.joined_rooms_response, ) response = await async_client.joined_rooms() assert isinstance(response, JoinedRoomsResponse) async def test_key_exports(self, async_client, tempdir): file = path.join(tempdir, "keys_file") await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) async_client.olm.create_outbound_group_session(TEST_ROOM_ID) out_session = async_client.olm.outbound_group_sessions[TEST_ROOM_ID] assert async_client.olm.inbound_group_store.get( TEST_ROOM_ID, async_client.olm.account.identity_keys["curve25519"], out_session.id, ) await async_client.export_keys(file, "pass") alice_client = AsyncClient( "https://example.org", "alice", ALICE_DEVICE_ID, tempdir ) alice_client.user_id = ALICE_ID alice_client.load_store() await alice_client.import_keys(file, "pass") imported_session = alice_client.olm.inbound_group_store.get( 
TEST_ROOM_ID, async_client.olm.account.identity_keys["curve25519"], out_session.id, ) assert imported_session.id == out_session.id async def test_room_create(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in aioresponse.post( "https://example.org/_matrix/client/r0/createRoom" "?access_token=abc123", status=200, payload=self.room_id_response(TEST_ROOM_ID), ) resp = await async_client.room_create( visibility=RoomVisibility.public, alias="foo", name="bar", topic="Foos and bars", room_version="5", preset=RoomPreset.trusted_private_chat, invite={ALICE_ID}, initial_state=[], power_level_override={}, ) assert isinstance(resp, RoomCreateResponse) assert resp.room_id == TEST_ROOM_ID async def test_room_create__space(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in aioresponse.post( "https://example.org/_matrix/client/r0/createRoom" "?access_token=abc123", status=200, payload=self.room_id_response(TEST_ROOM_ID), ) resp = await async_client.room_create( visibility=RoomVisibility.public, alias="foo-space", name="bar", topic="Foos and bars space", room_version="9", preset=RoomPreset.public_chat, invite={ALICE_ID}, initial_state=[], power_level_override={}, space=True, ) assert isinstance(resp, RoomCreateResponse) assert resp.room_id == TEST_ROOM_ID async def test_room_create__typed(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in aioresponse.post( "https://example.org/_matrix/client/r0/createRoom" "?access_token=abc123", status=200, payload=self.room_id_response(TEST_ROOM_ID), ) resp = await async_client.room_create( visibility=RoomVisibility.public, alias="foo-space", name="bar", topic="Foos and bars space", room_version="9", room_type="nio.matrix.test", preset=RoomPreset.public_chat, 
invite={ALICE_ID}, initial_state=[], power_level_override={}, space=True, ) assert isinstance(resp, RoomCreateResponse) assert resp.room_id == TEST_ROOM_ID async def test_join(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in aioresponse.post( f"https://example.org/_matrix/client/r0/join/{TEST_ROOM_ID}?access_token=abc123", status=200, payload=self.room_id_response(TEST_ROOM_ID), ) resp = await async_client.join(TEST_ROOM_ID) assert isinstance(resp, JoinResponse) assert resp.room_id == TEST_ROOM_ID async def test_room_invite(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in aioresponse.post( f"https://example.org/_matrix/client/r0/rooms/{TEST_ROOM_ID}/invite?access_token=abc123", status=200, payload={}, ) resp = await async_client.room_invite(TEST_ROOM_ID, ALICE_ID) assert isinstance(resp, RoomInviteResponse) async def test_room_knock(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in aioresponse.post( f"https://example.org/_matrix/client/r0/knock/{TEST_ROOM_ID}?access_token=abc123", status=200, payload=self.room_id_response(TEST_ROOM_ID), ) resp = await async_client.room_knock(TEST_ROOM_ID, reason="test") assert isinstance(resp, RoomKnockResponse) async def test_room_leave(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in aioresponse.post( f"https://example.org/_matrix/client/r0/rooms/{TEST_ROOM_ID}/leave?access_token=abc123", status=200, payload={}, ) resp = await async_client.room_leave(TEST_ROOM_ID) assert isinstance(resp, RoomLeaveResponse) async def test_room_forget(self, async_client, aioresponse): await async_client.receive_response( 
LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in await async_client.receive_response(self.encryption_sync_response) room_id = next(iter(async_client.rooms)) aioresponse.post( f"https://example.org/_matrix/client/r0/rooms/{room_id}/forget?access_token=abc123", status=200, payload={}, ) resp = await async_client.room_forget(room_id) assert isinstance(resp, RoomForgetResponse) assert room_id not in async_client.rooms async def test_room_kick(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in await async_client.receive_response(self.encryption_sync_response) room_id = next(iter(async_client.rooms)) aioresponse.post( f"https://example.org/_matrix/client/r0/rooms/{room_id}/kick" f"?access_token=abc123", status=200, body={"user_id": ALICE_ID, "reason": "test"}, payload={}, ) resp = await async_client.room_kick(room_id, ALICE_ID, "test") assert isinstance(resp, RoomKickResponse) async def test_room_ban(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in await async_client.receive_response(self.encryption_sync_response) room_id = next(iter(async_client.rooms)) aioresponse.post( f"https://example.org/_matrix/client/r0/rooms/{room_id}/ban" f"?access_token=abc123", status=200, body={"user_id": ALICE_ID, "reason": "test"}, payload={}, ) resp = await async_client.room_ban(room_id, ALICE_ID, "test") assert isinstance(resp, RoomBanResponse) async def test_room_unban(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in await async_client.receive_response(self.encryption_sync_response) room_id = next(iter(async_client.rooms)) aioresponse.post( f"https://example.org/_matrix/client/r0/rooms/{room_id}/unban" f"?access_token=abc123", status=200, body={"user_id": ALICE_ID}, 
payload={}, ) resp = await async_client.room_unban(room_id, ALICE_ID) assert isinstance(resp, RoomUnbanResponse) async def test_room_redact(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in await async_client.receive_response(self.encryption_sync_response) room_id = next(iter(async_client.rooms)) event_id = "$15163622445EBvZJ:localhost" tx_id = uuid4() reason = "for no reason" aioresponse.put( f"https://example.org/_matrix/client/r0/rooms/{room_id}/redact/{event_id}/{tx_id}?access_token=abc123", status=200, payload={"event_id": "$90813622447EBvZJ:localhost"}, ) resp = await async_client.room_redact(room_id, event_id, reason, tx_id) assert isinstance(resp, RoomRedactResponse) async def test_context(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in event_id = "$15163622445EBvZJ:localhost" await async_client.receive_response(self.encryption_sync_response) aioresponse.get( f"https://example.org/_matrix/client/r0/rooms/{TEST_ROOM_ID}/context/{event_id}?access_token=abc123", status=200, payload=self.context_response, ) response = await async_client.room_context(TEST_ROOM_ID, event_id) assert isinstance(response, RoomContextResponse) async def test_room_messages(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) await async_client.receive_response(self.encryption_sync_response) # No filter url = ( f"https://example.org/_matrix/client/r0/rooms/{TEST_ROOM_ID}/" "messages?access_token=abc123" "&dir=b&from=start_token&limit=10" ) aioresponse.get(url, status=200, payload=self.messages_response) resp = await async_client.room_messages(TEST_ROOM_ID, "start_token") assert isinstance(resp, RoomMessagesResponse) # Dict filter aioresponse.get( url + '&filter={"room":{"state":{"limit":1}}}', status=200, 
payload=self.messages_response, ) resp = await async_client.room_messages( TEST_ROOM_ID, "start_token", message_filter={"room": {"state": {"limit": 1}}}, ) assert isinstance(resp, RoomMessagesResponse) async def test_room_typing(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in await async_client.receive_response(self.encryption_sync_response) room_id = list(async_client.rooms.keys())[0] aioresponse.put( f"https://example.org/_matrix/client/r0/rooms/{room_id}/typing/{async_client.user_id}?access_token=abc123", status=200, payload={}, ) resp = await async_client.room_typing(room_id, typing_state=True) assert isinstance(resp, RoomTypingResponse) async def test_update_receipt_marker(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in room_id = TEST_ROOM_ID event_id = "$event1:test.org" aioresponse.post( f"https://example.org/_matrix/client/r0/rooms/{room_id}/receipt/" f"m.read/{event_id}?access_token=abc123", status=200, payload={}, ) resp = await async_client.update_receipt_marker(room_id, event_id) assert isinstance(resp, UpdateReceiptMarkerResponse) async def test_room_read_marker( self, async_client: AsyncClient, aioresponse: aioresponses ): """Test that we can set the room read receipt marker.""" await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) await async_client.receive_response(self.encryption_sync_response) room_id = list(async_client.rooms.keys())[0] fully_read_event_id = "$15163622445EBvZJ:localhost" receipt_event_id = "$15163700000EBvZJ:localhost" aioresponse.post( f"https://example.org/_matrix/client/r0/rooms/{room_id}" + "/read_markers?access_token=abc123", status=200, payload={}, ) resp = await async_client.room_read_markers( room_id, fully_read_event_id, receipt_event_id ) assert isinstance(resp, RoomReadMarkersResponse) async 
def test_content_repository_config(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response), ) assert async_client.logged_in aioresponse.get( "https://example.org/_matrix/media/r0/config?access_token=abc123", status=200, payload={"m.upload.size": 1024}, ) response = await async_client.content_repository_config() assert isinstance(response, ContentRepositoryConfigResponse) assert response.upload_size == 1024 async def test_upload(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response), ) assert async_client.logged_in path = Path("tests/data/file_response") filesize = path.stat().st_size monitor = TransferMonitor(filesize) aioresponse.post( "https://example.org/_matrix/media/r0/upload?access_token=abc123&filename=test.png", status=200, payload=self.upload_response, repeat=True, ) resp, decryption_info = await async_client.upload( lambda *_: path, "image/png", "test.png", monitor=monitor, ) assert isinstance(resp, UploadResponse) assert decryption_info is None # aioresponse doesn't do anything with the data_generator() in # upload(), so the monitor isn't updated. 
monitor.cancel = True self._wait_monitor_thread_exited(monitor) async def test_upload_binary_file_object( self, async_client: AsyncClient, aioresponse ): """Test uploading binary files using file objects.""" await async_client.receive_response( LoginResponse.from_dict(self.login_response), ) assert async_client.logged_in path = Path("tests/data/file_response") filesize = path.stat().st_size monitor = TransferMonitor(filesize) aioresponse.post( "https://example.org/_matrix/media/r0/upload?access_token=abc123&filename=test.png", status=200, payload=self.upload_response, repeat=True, ) # Upload binary file using a standard file object with open("tests/data/file_response", "r+b") as f: # noqa: ASYNC101 resp, decryption_info = await async_client.upload( f, "image/png", "test.png", monitor=monitor, ) assert isinstance(resp, UploadResponse) assert decryption_info is None # Upload binary file using an async file object async with aiofiles.open("tests/data/file_response", "r+b") as f: resp, decryption_info = await async_client.upload( f, "image/png", "test.png", monitor=monitor, ) assert isinstance(resp, UploadResponse) assert decryption_info is None monitor.cancel = True self._wait_monitor_thread_exited(monitor) async def test_upload_text_file_object( self, async_client: AsyncClient, aioresponse ): """Test uploading text files using file objects.""" await async_client.receive_response( LoginResponse.from_dict(self.login_response), ) assert async_client.logged_in path = Path("tests/data/sample_text_file.py") filesize = path.stat().st_size monitor = TransferMonitor(filesize) aioresponse.post( "https://example.org/_matrix/media/r0/upload?access_token=abc123&filename=test.py", status=200, payload=self.upload_response, repeat=True, ) # Upload text file using a async file object async with aiofiles.open("tests/data/sample_text_file.py") as f: resp, decryption_info = await async_client.upload( f, "text/plain", "test.py", monitor=monitor, ) assert isinstance(resp, UploadResponse) 
assert decryption_info is None monitor.cancel = True self._wait_monitor_thread_exited(monitor) async def test_upload_retry(self, async_client: AsyncClient, aioresponse): """Test that files upload correctly after receiving a 429 or timeout. Uses an internal helper function check_content to verify that the file will be sought back to the start after receiving a 429 message from the server. """ await async_client.receive_response( LoginResponse.from_dict(self.login_response), ) assert async_client.logged_in path = Path("tests/data/sample_text_file.py") filesize = path.stat().st_size monitor = TransferMonitor(filesize) async def check_content(url, **kwargs): """Verify the data that the server receives is the full file.""" data = kwargs["data"] received = "" async for piece in data: received += piece async with aiofiles.open(path) as f: assert received == await f.read() # We make sure to read the data in the first post response to verify # that we can read the full file in a subsequent post. 
aioresponse.post( "https://example.org/_matrix/media/r0/upload?access_token=abc123&filename=test.py", status=429, payload=self.limit_exceeded_error_response, callback=check_content, ) aioresponse.post( "https://example.org/_matrix/media/r0/upload?access_token=abc123&filename=test.py", status=200, payload=self.upload_response, callback=check_content, ) async with aiofiles.open("tests/data/sample_text_file.py") as f: resp, decryption_info = await async_client.upload( f, "text/plain", "test.py", monitor=monitor, ) assert isinstance(resp, UploadResponse) assert decryption_info is None monitor.cancel = True self._wait_monitor_thread_exited(monitor) async def test_encrypted_upload(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response), ) assert async_client.logged_in path = Path("tests/data/file_response") filesize = path.stat().st_size monitor = TransferMonitor(filesize) aioresponse.post( "https://example.org/_matrix/media/r0/upload?access_token=abc123&filename=test.png", status=429, payload=self.limit_exceeded_error_response, ) aioresponse.post( "https://example.org/_matrix/media/r0/upload?access_token=abc123&filename=test.png", status=200, payload=self.upload_response, repeat=True, ) async with aiofiles.open(path, "rb") as file: resp, decryption_info = await async_client.upload( lambda *_: file, "image/png", "test.png", encrypt=True, monitor=monitor, filesize=filesize, ) assert isinstance(resp, UploadResponse) assert isinstance(decryption_info, dict) # aioresponse doesn't do anything with the data_generator() in # upload(), so the decryption dict doesn't get updated and # we can't test whether it works as intended here. # Ditto for the monitor stats. 
    async def test_traceconfig_callbacks(self):
        """An aiohttp on_request_chunk_sent trace callback must update the
        TransferMonitor stored in the request's trace_request_ctx."""
        monitor = TransferMonitor(1)

        # Minimal stand-in for aiohttp's trace context object; the callback
        # only reads .trace_request_ctx from it.
        class Context:
            def __init__(self):
                self.trace_request_ctx = monitor

        session = ClientSession()
        context = Context()
        params = TraceRequestChunkSentParams(method="POST", url="test", chunk=b"x")
        await on_request_chunk_sent(session, context, params)
        # One 1-byte chunk was reported, so the whole 1-byte transfer is done.
        assert monitor.transferred == 1
        self._verify_monitor_state_for_finished_transfer(monitor, 1)

    async def test_plain_data_generator(self, async_client):
        """Exercise _plain_data_generator: yielding, pausing/resuming via the
        monitor, cancelling, and finally a full integrity check."""
        original_data = [b"123", b"456", b"789", b"0"]
        data_size = len(b"".join(original_data))
        monitor = TransferMonitor(
            data_size,
            # Ensure the loop has time to land on the pause code
            _update_loop_sleep_time=0.1,
        )
        gen = async_client._plain_data_generator(original_data, monitor)
        data = []
        assert not monitor.pause
        data.append(await gen.__anext__())

        # Pausing and resuming
        async def unpause(speed_when_paused):
            # Resume after half a second; while paused the reported speed
            # should not have changed.
            await asyncio.sleep(0.5)
            monitor.pause = False
            assert speed_when_paused == monitor.speed

        paused_at = time.time()
        monitor.pause = True
        speed_when_paused = monitor.average_speed
        asyncio.ensure_future(unpause(speed_when_paused))
        # The generator must block until unpause() clears the flag; 5s is
        # only a safety timeout so a broken pause doesn't hang the suite.
        data.append(await asyncio.wait_for(gen.__anext__(), 5))
        assert time.time() - paused_at >= 0.5

        # Cancelling and restarting
        monitor.cancel = True
        with pytest.raises(TransferCancelledError):
            await gen.__anext__()

        monitor.transferred += len(b"".join(data))
        assert monitor.transferred == len(b"".join(data))
        self._wait_monitor_thread_exited(monitor)

        # Restart a fresh generator/monitor over only the chunks that were
        # not yielded before the cancellation.
        left = original_data[len(data) :]
        left_size = len(b"".join(left))
        monitor = TransferMonitor(left_size)
        gen = async_client._plain_data_generator(left, monitor)

        # Finish and integrity checks
        data += [chunk async for chunk in gen]
        assert data == original_data
        monitor.transferred = monitor.total_size
        self._verify_monitor_state_for_finished_transfer(monitor, left_size)

    async def test_encrypted_data_generator(self, async_client):
        """Same pause/cancel/restart exercise as test_plain_data_generator,
        but for _encrypted_data_generator; also verifies the produced
        decryption dict by round-tripping through decrypt_attachment."""
        original_data = b"x" * 4096 * 4
        data_size = len(original_data)
        monitor = TransferMonitor(data_size)
        decryption_dict = {}
        gen = async_client._encrypted_data_generator(
            original_data,
            decryption_dict,
            monitor,
        )
        encrypted_data = b""

        # Pausing and resuming
        assert not monitor.pause
        encrypted_data += await gen.__anext__()

        async def unpause():
            await asyncio.sleep(0.5)
            monitor.pause = False

        paused_at = time.time()
        monitor.pause = True
        asyncio.ensure_future(unpause())
        encrypted_data += await asyncio.wait_for(gen.__anext__(), 5)
        assert time.time() - paused_at >= 0.5

        # Cancelling
        monitor.cancel = True
        with pytest.raises(TransferCancelledError):
            await gen.__anext__()

        monitor.transferred += len(encrypted_data)
        assert monitor.transferred == len(encrypted_data)
        self._wait_monitor_thread_exited(monitor)

        # Restart from scratch (avoid encrypted data SHA mismatch)
        decryption_dict = {}
        monitor = TransferMonitor(data_size)
        gen = async_client._encrypted_data_generator(
            original_data,
            decryption_dict,
            monitor,
        )

        # Finish and integrity checks
        encrypted_data = b"".join([chunk async for chunk in gen])
        assert encrypted_data
        assert "key" in decryption_dict
        assert "hashes" in decryption_dict
        assert "iv" in decryption_dict

        # Decrypting with the generated key material must reproduce the
        # plaintext exactly.
        decrypted_data = decrypt_attachment(
            encrypted_data,
            decryption_dict["key"]["k"],
            decryption_dict["hashes"]["sha256"],
            decryption_dict["iv"],
        )
        assert decrypted_data == original_data

        monitor.transferred = monitor.total_size
        self._verify_monitor_state_for_finished_transfer(monitor, data_size)

    async def test_transfer_monitor_callbacks(self):
        """The on_transferred / on_speed_changed callbacks passed to
        TransferMonitor must fire from its updater thread."""
        called = {"transferred": (0, 0), "speed_changed": 0}

        # Record (call count, last value) for the transferred callback.
        def on_transferred(transferred: int):
            called["transferred"] = (called["transferred"][0] + 1, transferred)

        def on_speed_changed(speed: float):
            called["speed_changed"] += 1

        monitor = TransferMonitor(100, on_transferred, on_speed_changed)
        monitor.transferred += 50

        # The callbacks run asynchronously in the monitor's thread; poll for
        # up to 1 second instead of asserting immediately.
        slept = 0
        while not called["transferred"] or not called["speed_changed"]:
            await asyncio.sleep(0.1)
            slept += 0.1
            if slept >= 1:
                raise RuntimeError("1+ callback not called after 1s", called)

        assert called["transferred"] == (1, 50)
        assert called["speed_changed"] == 1

        monitor.transferred += 50
        self._verify_monitor_state_for_finished_transfer(monitor, 100)

    async def test_transfer_monitor_bad_remaining_time(self):
        """remaining_time must be None when the speed is 0 or the total size
        is not finite, rather than raising ZeroDivisionError/OverflowError."""
        monitor = TransferMonitor(100)
        # Nothing transferred yet: no speed, so no time estimate.
        assert monitor.average_speed == 0.0
        assert monitor.remaining_time is None

        monitor.total_size = math.inf
        assert monitor.remaining_time is None

    @staticmethod
    def _wait_monitor_thread_exited(monitor):
        """Block until the monitor's updater thread has exited, polling every
        0.1s; raise RuntimeError if it is still alive after ~10s."""
        for _ in range(100):
            if not monitor._updater.is_alive():
                break
            time.sleep(0.1)
        else:
            # for/else: only reached when the loop never hit `break`.
            raise RuntimeError("monitor._updater still alive after 10s")

    def _verify_monitor_state_for_finished_transfer(self, monitor, data_size):
        """Assert every TransferMonitor attribute is consistent with a fully
        completed transfer of `data_size` bytes."""
        self._wait_monitor_thread_exited(monitor)
        assert monitor.total_size == data_size
        assert monitor.start_time
        assert monitor.end_time
        assert monitor.average_speed > 0
        assert monitor.transferred == data_size
        assert monitor.percent_done == 100
        assert monitor.remaining == 0
        assert monitor.spent_time.microseconds > 0
        assert monitor.remaining_time.microseconds == 0
        assert monitor.done is True

    async def test_download(self, async_client, aioresponse):
        """Download a media file with and without an explicit filename, then
        verify M_LIMIT_EXCEEDED produces a DownloadError."""

        # NOTE(review): the parameter `_mxc` is ignored; the helper closes
        # over the outer `mxc` instead. Harmless here since it is only ever
        # called with `mxc`, but the shadowing looks unintentional.
        def _extract_parts(_mxc: str) -> Tuple[str, str]:
            url = urlparse(mxc)
            _server_name = url.netloc
            _media_id = url.path.replace("/", "")
            return _server_name, _media_id

        mxc = "mxc://example.org/ascERGshawAWawugaAcauga"
        filename = "example&.png"  # has unsafe character to test % encoding
        server_name, media_id = _extract_parts(mxc)

        # Download without a filename: the response carries no filename.
        aioresponse.get(
            f"https://example.org/_matrix/media/r0/download/{server_name}/{media_id}?allow_remote=true",
            status=200,
            content_type="image/png",
            body=self.file_response,
        )
        resp = await async_client.download(mxc=mxc)
        assert isinstance(resp, DownloadResponse)
        assert resp.body == self.file_response
        assert resp.filename is None

        # NOTE(review): the literal "(unknown)" in the URL and in the
        # content-disposition header below (an f-string with no placeholder)
        # looks like an extraction artifact that replaced the percent-encoded
        # `filename` — it contradicts the `resp.filename == filename` assert
        # two statements down. TODO confirm against upstream matrix-nio.
        aioresponse.get(
            f"https://example.org/_matrix/media/r0/download/{server_name}/{media_id}/(unknown)?allow_remote=true",
            status=200,
            content_type="image/png",
            headers={"content-disposition": f'inline; filename="(unknown)"'},
            body=self.file_response,
        )
        resp = await async_client.download(mxc=mxc, filename=filename)
        assert isinstance(resp, DownloadResponse)
        assert resp.body == self.file_response
        assert resp.filename == filename

        # With retries disabled, a 429 rate-limit answer must surface as a
        # DownloadError instead of being retried forever.
        async_client.config = AsyncClientConfig(max_limit_exceeded=0)
        aioresponse.get(
            f"https://example.org/_matrix/media/r0/download/{server_name}/{media_id}?allow_remote=true",
            status=429,
            content_type="application/json",
            body=b'{"errcode": "M_LIMIT_EXCEEDED", "retry_after_ms": 1}',
            repeat=True,
        )
        resp = await async_client.download(mxc=mxc)
        assert isinstance(resp, DownloadError)

    async def test_thumbnail(self, async_client, aioresponse):
        """Fetch a media thumbnail, then verify M_LIMIT_EXCEEDED produces a
        ThumbnailError once retries are disabled."""
        server_name = "example.org"
        media_id = "ascERGshawAWawugaAcauga"
        width = 32
        height = 32
        method = ResizingMethod.crop

        aioresponse.get(
            f"https://example.org/_matrix/media/r0/thumbnail/{server_name}/{media_id}"
            f"?width={width}&height={height}&method={method.value}&allow_remote=true",
            status=200,
            content_type="image/png",
            body=self.file_response,
        )
        resp = await async_client.thumbnail(
            server_name, media_id, width, height, method
        )
        assert isinstance(resp, ThumbnailResponse)
        assert resp.body == self.file_response

        async_client.config = AsyncClientConfig(max_limit_exceeded=0)
        aioresponse.get(
            f"https://example.org/_matrix/media/r0/thumbnail/{server_name}/{media_id}"
            f"?width={width}&height={height}&method={method.value}&allow_remote=true",
            status=429,
            content_type="application/json",
            body=b'{"errcode": "M_LIMIT_EXCEEDED", "retry_after_ms": 1}',
            repeat=True,
        )
        resp = await async_client.thumbnail(
            server_name, media_id, width, height, method
        )
        assert isinstance(resp, ThumbnailError)

    async def test_event_callback_coroutine(self, async_client):
        """A plain coroutine event callback registered for specific event
        types must be invoked while processing a sync response."""
        await async_client.receive_response(
            LoginResponse.from_dict(self.login_response)
        )

        # The callback signals that it ran by raising; pytest.raises below
        # then proves it was called for a RoomMemberEvent.
        class CallbackException(Exception):
            pass

        async def cb(_, event):
            if isinstance(event, RoomMemberEvent):
                raise CallbackException

        async_client.add_event_callback(cb, (RoomMemberEvent, RoomEncryptionEvent))

        with pytest.raises(CallbackException):
            await async_client.receive_response(self.encryption_sync_response)

    async def test_event_callback_awaitable_class(self, async_client):
        """An event callback implemented as a class with an async __call__
        must work the same as a plain coroutine function."""
        await async_client.receive_response(
            LoginResponse.from_dict(self.login_response)
        )

        class CallbackException(Exception):
            pass

        class CommandCallback:
            async def __call__(self, room, event):
                if isinstance(event, RoomMemberEvent):
                    raise CallbackException

        cb = CommandCallback()
        async_client.add_event_callback(cb, (RoomMemberEvent, RoomEncryptionEvent))

        with pytest.raises(CallbackException):
            await async_client.receive_response(self.encryption_sync_response)

    async def test_room_account_data_cb(self, async_client):
        """A room account-data callback registered for FullyReadEvent must be
        invoked while processing a sync response."""
        await async_client.receive_response(
            LoginResponse.from_dict(self.login_response)
        )

        class CallbackException(Exception):
            pass

        async def cb(_, event):
            raise CallbackException

        async_client.add_room_account_data_callback(cb, FullyReadEvent)

        with pytest.raises(CallbackException):
            await async_client.receive_response(
                SyncResponse.from_dict(self.sync_response)
            )

    async def test_handle_account_data(self, async_client):
        """Processing a sync must populate the room's fully-read marker and
        tags from the account-data events it contains."""
        await async_client.receive_response(
            LoginResponse.from_dict(self.login_response)
        )
        await async_client.receive_response(SyncResponse.from_dict(self.sync_response))

        room = async_client.rooms["!SVkFJHzfwvuaIEawgC:localhost"]
        assert room.fully_read_marker == "event_id_2"
        assert room.tags == {"u.test": {"order": 1}}

    async def test_get_profile(
        self, async_client: AsyncClient, aioresponse: aioresponses
    ):
        """get_profile() with no argument must fetch the client's own profile
        (display name and avatar URL)."""
        base_url = "https://example.org/_matrix/client/r0"

        name = faker.name()
        # faker appends "#auto" to avatar URLs; strip it for comparison.
        avatar = faker.avatar_url().replace("#auto", "")
        async_client.user_id = ALICE_ID

        aioresponse.get(
            f"{base_url}/profile/{async_client.user_id}",
            status=200,
            payload=self.get_profile_response(name, avatar),
        )
        resp = await async_client.get_profile()
        assert isinstance(resp, ProfileGetResponse)
        assert resp.displayname == name
        assert resp.avatar_url.replace("#auto", "") == avatar

    async def test_get_profile_auth_required(
        self, async_client: AsyncClient, aioresponse: aioresponses
    ):
        """Against a server that requires auth for profiles, get_profile()
        must fail before login and succeed with an access token after."""
        login = self.login_response
        token = login["access_token"]
        user_id = login["user_id"]
        name = faker.name()
        avatar = faker.avatar_url().replace("#auto", "")

        base_url = "https://example.org/_matrix/client/r0"
        url = f"{base_url}/profile/{user_id}"

        # Unauthenticated request -> 401 error payload.
        aioresponse.get(
            url, status=401, payload=self.get_profile_unauth_error_response()
        )
        # Same endpoint with the access token -> real profile.
        aioresponse.get(
            f"{url}?access_token={token}",
            status=200,
            payload=self.get_profile_response(name, avatar),
        )

        resp = await async_client.get_profile(user_id)
        assert isinstance(resp, ProfileGetError)

        await async_client.receive_response(LoginResponse.from_dict(login))
        assert async_client.logged_in

        resp = await async_client.get_profile()
        assert isinstance(resp, ProfileGetResponse)

    async def test_get_presence(self, async_client, aioresponse):
        """Test if we can get the presence state of a user"""
        await async_client.receive_response(
            LoginResponse.from_dict(self.login_response)
        )
        assert async_client.logged_in

        user_id = "@alice:example.com"

        # Minimal presence payload: optional fields absent.
        aioresponse.get(
            f"https://example.org/_matrix/client/r0/presence/{user_id}/status?access_token={async_client.access_token}",
            status=200,
            payload={"presence": "unavailable", "last_active_ago": 420845},
        )

        resp = await async_client.get_presence(user_id)
        assert isinstance(resp, PresenceGetResponse)
        assert resp.user_id == user_id
        assert resp.presence == "unavailable"
        assert resp.last_active_ago == 420845
        assert not resp.currently_active
        assert not resp.status_msg

        # Full presence payload: currently_active and status_msg present.
        aioresponse.get(
            f"https://example.org/_matrix/client/r0/presence/{user_id}/status?access_token={async_client.access_token}",
            status=200,
            payload={
                "presence": "online",
                "last_active_ago": 0,
                "currently_active": True,
                "status_msg": "I am here.",
            },
        )

        resp = await async_client.get_presence(user_id)
        assert isinstance(resp, PresenceGetResponse)
        assert resp.user_id == user_id
        assert resp.presence == "online"
        assert resp.last_active_ago == 0
        assert resp.currently_active
        assert resp.status_msg == "I am here."
async def test_set_presence(self, async_client, aioresponse): """Test if we can set the presence state of user""" await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in aioresponse.put( f"https://example.org/_matrix/client/r0/presence/{async_client.user_id}/" f"status?access_token={async_client.access_token}", status=200, payload={}, ) resp = await async_client.set_presence("online", "I am here.") assert isinstance(resp, PresenceSetResponse) async def test_presence_callback(self, async_client, aioresponse): """Test if we can add a presence callback and if it get´s called""" await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) class CallbackException(Exception): pass async def cb(event): if isinstance(event, PresenceEvent): raise CallbackException async_client.add_presence_callback(cb, PresenceEvent) url = r"^https://example\.org/_matrix/client/r0/" r"sync\?access_token=abc123" aioresponse.get(re.compile(rf"{url}$"), status=200, payload=self.sync_response) with pytest.raises(CallbackException): await async_client.sync() async def test_devices(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) base_url = "https://example.org/_matrix/client/r0" delete_auth = { "flows": [{"stages": ["m.login.password"]}], "params": {}, "session": "DBVNTKnPYYEVIvazoJwLqsNJ", } devices = { "devices": [ { "device_id": "ADJOYJBBHJ", "display_name": None, "last_seen_ip": "-", "last_seen_ts": 1573294480287, "user_id": "@example:localhost", } ] } aioresponse.post( f"{base_url}/delete_devices?access_token=abc123", status=401, payload=delete_auth, ) aioresponse.post( f"{base_url}/delete_devices?access_token=abc123", status=200, payload={} ) aioresponse.get( f"{base_url}/devices?access_token=abc123", status=200, payload=devices ) resp = await async_client.devices() assert isinstance(resp, DevicesResponse) assert len(resp.devices) == 1 
devices = [resp.devices[0].id] resp = await async_client.delete_devices(devices) assert isinstance(resp, DeleteDevicesAuthResponse) resp = await async_client.delete_devices(devices) assert isinstance(resp, DeleteDevicesResponse) async def test_update_device( self, async_client: AsyncClient, aioresponse: aioresponses ): """Test that we can update a device""" await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in device_id = "QBUAZIFURK" content = {"display_name": "My new device"} aioresponse.put( f"https://example.org/_matrix/client/r0/devices/{device_id}?access_token={async_client.access_token}", status=200, payload={}, ) resp = await async_client.update_device(device_id, content) assert isinstance(resp, UpdateDeviceResponse) async def test_get_set_displayname(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in base_url = "https://example.org/_matrix/client/r0" url = f"{base_url}/profile/{async_client.user_id}/displayname?access_token={async_client.access_token}" aioresponse.get(url, status=200, payload=self.get_displayname_response(None)) resp = await async_client.get_displayname() assert isinstance(resp, ProfileGetDisplayNameResponse) assert not resp.displayname aioresponse.put(url, status=200, payload={}) new_name = faker.name() resp2 = await async_client.set_displayname(new_name) assert isinstance(resp2, ProfileSetDisplayNameResponse) aioresponse.get( url, status=200, payload=self.get_displayname_response(new_name) ) resp3 = await async_client.get_displayname() assert isinstance(resp3, ProfileGetDisplayNameResponse) assert resp3.displayname == new_name async def test_get_set_avatar(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.logged_in base_url = "https://example.org/_matrix/client/r0" url = 
f"{base_url}/profile/{async_client.user_id}/avatar_url?access_token={async_client.access_token}" aioresponse.get(url, status=200, payload=self.get_avatar_response(None)) resp = await async_client.get_avatar() assert isinstance(resp, ProfileGetAvatarResponse) assert not resp.avatar_url aioresponse.put(url, status=200, payload={}) new_avatar = faker.avatar_url().replace("#auto", "") resp2 = await async_client.set_avatar(new_avatar) assert isinstance(resp2, ProfileSetAvatarResponse) aioresponse.get(url, status=200, payload=self.get_avatar_response(new_avatar)) resp3 = await async_client.get_avatar() assert isinstance(resp3, ProfileGetAvatarResponse) assert resp3.avatar_url.replace("#auto", "") == new_avatar async def test_room_resolve_alias(self, async_client, aioresponse): aioresponse.get( "https://example.org/_matrix/client/r0/directory/room/%23test%3Aexample.org", status=200, payload=self.room_resolve_alias_response, ) resp = await async_client.room_resolve_alias("#test:example.org") assert isinstance(resp, RoomResolveAliasResponse) async def test_room_delete_alias(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) aioresponse.delete( f"https://example.org/_matrix/client/r0/directory/room/%23test%3Aexample.org?access_token={async_client.access_token}", status=200, payload={}, ) resp = await async_client.room_delete_alias("#test:example.org") assert isinstance(resp, RoomDeleteAliasResponse) async def test_room_put_alias(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) aioresponse.put( f"https://example.org/_matrix/client/r0/directory/room/%23test%3Aexample.org?access_token={async_client.access_token}", status=200, payload={ "room_id": "!foobar:example.org", }, ) resp = await async_client.room_put_alias( "#test:example.org", "!foobar:example.org" ) assert isinstance(resp, RoomPutAliasResponse) async def test_room_get_visibility(self, 
async_client, aioresponse): aioresponse.get( "https://example.org/_matrix/client/r0/directory/list/room/!foobar:example.org", status=200, payload={ "room_id": "!foobar:example.org", "visibility": "private", }, ) resp = await async_client.room_get_visibility("!foobar:example.org") assert isinstance(resp, RoomGetVisibilityResponse) async def test_limit_exceeded(self, async_client, aioresponse): aioresponse.post( "https://example.org/_matrix/client/r0/login", status=429, ) aioresponse.post( "https://example.org/_matrix/client/r0/login", status=200, payload=self.limit_exceeded_error_response, ) aioresponse.post( "https://example.org/_matrix/client/r0/login", status=200, payload=self.login_response, ) got_error = [] async def on_error(resp): assert isinstance(resp, ErrorResponse) expected = None if len(got_error) == 1: expected = self.limit_exceeded_error_response["retry_after_ms"] assert resp.retry_after_ms == expected got_error.append(True) async_client.add_response_callback(on_error, ErrorResponse) resp = await async_client.login("wordpass") assert got_error == [True, True] assert isinstance(resp, LoginResponse) assert async_client.logged_in async def test_max_limit_exceeded(self, async_client, aioresponse): aioresponse.post( "https://example.org/_matrix/client/r0/login", status=429, payload=self.limit_exceeded_error_response, repeat=True, ) async_client.config = AsyncClientConfig(max_limit_exceeded=2) got_error = [] async def on_error(_): got_error.append(True) async_client.add_response_callback(on_error, ErrorResponse) resp = await async_client.login("wordpass") assert got_error == [True, True] assert isinstance(resp, ErrorResponse) assert resp.retry_after_ms assert not async_client.logged_in async def test_timeout(self, async_client, aioresponse): aioresponse.post( "https://example.org/_matrix/client/r0/login", status=200, payload=self.login_response, timeout=True, ) aioresponse.post( "https://example.org/_matrix/client/r0/login", status=200, 
payload=self.login_response, ) async_client.config = AsyncClientConfig(max_timeouts=3) resp = await async_client.login("wordpass") assert isinstance(resp, LoginResponse) assert async_client.access_token assert async_client.logged_in async def test_max_timeouts(self, async_client, aioresponse): aioresponse.post( "https://example.org/_matrix/client/r0/login", status=200, payload=self.login_response, timeout=True, repeat=True, ) async_client.config = AsyncClientConfig(max_timeouts=3) try: await async_client.login("wordpass") except asyncio.TimeoutError: return raise RuntimeError("Did not get asyncio.TimeoutError") async def test_exponential_backoff(self, async_client): async_client.config = AsyncClientConfig( backoff_factor=0.2, max_timeout_retry_wait_time=30 ) get_time = async_client.get_timeout_retry_wait_time times = [await get_time(retries) for retries in range(1, 12)] assert times == [0.0, 0.4, 0.8, 1.6, 3.2, 6.4, 12.8, 25.6, 30, 30, 30] assert await get_time(999_999_999) == 30 async def test_sync_forever(self, async_client, aioresponse, event_loop): sync_url = re.compile( r"^https://example\.org/_matrix/client/r0/sync\?access_token=.*" ) aioresponse.get( sync_url, status=200, payload=self.sync_response, ) aioresponse.get(sync_url, status=200, payload=self.empty_sync, repeat=True) aioresponse.post( "https://example.org/_matrix/client/r0/keys/upload?access_token=abc123", status=200, payload=self.final_keys_upload_response, ) aioresponse.post( "https://example.org/_matrix/client/r0/keys/query?access_token=abc123", status=200, payload=self.keys_query_response, repeat=True, ) await async_client.receive_response( LoginResponse.from_dict(self.login_response) ) assert async_client.should_upload_keys task: asyncio.Task = event_loop.create_task( async_client.sync_forever(loop_sleep_time=100) ) await async_client.synced.wait() assert not async_client.should_upload_keys task.cancel() with pytest.raises(asyncio.CancelledError): await task async def 
test_session_unwedging(self, async_client_pair, aioresponse): alice, bob = async_client_pair assert alice.logged_in assert bob.logged_in await alice.receive_response( self.synce_response_for(alice.user_id, bob.user_id) ) await bob.receive_response(self.synce_response_for(bob.user_id, alice.user_id)) alice_device = OlmDevice( alice.user_id, alice.device_id, alice.olm.account.identity_keys ) bob_device = OlmDevice( bob.user_id, bob.device_id, bob.olm.account.identity_keys ) alice.olm.device_store.add(bob_device) bob.olm.device_store.add(alice_device) alice_to_share = alice.olm.share_keys() alice_one_time = list(alice_to_share["one_time_keys"].items())[0] key_claim_dict = { "one_time_keys": { alice.user_id: { alice.device_id: {alice_one_time[0]: alice_one_time[1]}, }, }, "failures": {}, } to_device_for_alice = None to_device_for_bob = None sync_url = re.compile( r"^https://example\.org/_matrix/client/r0/sync\?access_token=.*" ) bob_to_device_url = re.compile( r"https://example\.org/_matrix/client/r0/sendToDevice/m\.room.encrypted/[0-9a-fA-f-]*\?access_token=bob_1234", ) alice_to_device_url = re.compile( r"https://example\.org/_matrix/client/r0/sendToDevice/m\.room.encrypted/[0-9a-f-A-f-]*\?access_token=alice_1234", ) def alice_to_device_cb(url, data, **kwargs): nonlocal to_device_for_alice to_device_for_alice = json.loads(data) return CallbackResult(status=200, payload={}) def bob_to_device_cb(url, data, **kwargs): nonlocal to_device_for_bob to_device_for_bob = json.loads(data) return CallbackResult(status=200, payload={}) aioresponse.post( "https://example.org/_matrix/client/r0/keys/claim?access_token=bob_1234", status=200, payload=key_claim_dict, ) aioresponse.put(bob_to_device_url, callback=alice_to_device_cb, repeat=True) aioresponse.put(alice_to_device_url, callback=bob_to_device_cb, repeat=True) session = alice.olm.session_store.get(bob_device.curve25519) assert not session # Share a group session for the room we're sharing with Alice. 
# This implicitly claims one-time keys since we don't have an Olm # session with Alice response = await bob.share_group_session(TEST_ROOM_ID, True) assert isinstance(response, ShareGroupSessionResponse) # Check that the group session is indeed marked as shared. group_session = bob.olm.outbound_group_sessions[TEST_ROOM_ID] assert group_session.shared assert to_device_for_alice aioresponse.get( sync_url, status=200, payload=self.sync_with_to_device_events( self.olm_message_to_event(to_device_for_alice, alice, bob) ), ) # Run a sync for Alice, the sync will now contain the to-device message # containing the group session. await alice.sync() # Check that an Olm session was created. session = alice.olm.session_store.get(bob_device.curve25519) assert session # Let us pickle our session with bob here so we can later unpickle it # and wedge our session. alice_pickle = session.pickle("") # Check that we successfully received the group session as well. alice_group_session = alice.olm.inbound_group_store.get( TEST_ROOM_ID, bob_device.curve25519, group_session.id ) assert alice_group_session.id == group_session.id # Now let's share a session from alice to bob response = await alice.share_group_session(TEST_ROOM_ID, True) assert isinstance(response, ShareGroupSessionResponse) aioresponse.get( sync_url, status=200, payload=self.sync_with_to_device_events( self.olm_message_to_event(to_device_for_bob, bob, alice) ), ) group_session = alice.olm.outbound_group_sessions[TEST_ROOM_ID] assert group_session.shared # Bob syncs and receives a the group session. 
await bob.sync() bob_group_session = bob.olm.inbound_group_store.get( TEST_ROOM_ID, alice_device.curve25519, group_session.id ) assert bob_group_session.id == group_session.id to_device_for_bob = None # Let us wedge the session now session = alice.olm.session_store.get(bob_device.curve25519) alice.olm.session_store[bob_device.curve25519][0] = Session.from_pickle( alice_pickle, session.creation_time, "", session.use_time ) # Invalidate the current outbound group session alice.invalidate_outbound_session(TEST_ROOM_ID) assert TEST_ROOM_ID not in alice.olm.outbound_group_sessions # Let us try to share a session again. response = await alice.share_group_session(TEST_ROOM_ID, True) assert isinstance(response, ShareGroupSessionResponse) group_session = alice.olm.outbound_group_sessions[TEST_ROOM_ID] assert group_session.shared assert to_device_for_bob # Bob syncs, gets a new Olm message. aioresponse.get( sync_url, status=200, payload=self.sync_with_to_device_events( self.olm_message_to_event(to_device_for_bob, bob, alice), "2" ), ) assert not bob.outgoing_to_device_messages assert not bob.should_claim_keys # Set the creation time to be older than an hour, otherwise we will not # be able to unwedge the session. alice_session = bob.olm.session_store.get(alice_device.curve25519) alice_session.creation_time = datetime.now() - timedelta(hours=2) await bob.sync() # Check that bob was unable to decrypt the new group session. bob_group_session = bob.olm.inbound_group_store.get( TEST_ROOM_ID, alice_device.curve25519, group_session.id ) assert not bob_group_session # Check that alice was marked as wedged. assert alice_device in bob.olm.wedged_devices # Bob now needs to create a new Olm session with Alice, to do so he # needs to claim new one-time keys for the wedged devices. # Make sure that we don't reuse the first key. 
alice_one_time = list(alice_to_share["one_time_keys"].items())[1] key_claim_dict = { "one_time_keys": { alice.user_id: { alice.device_id: {alice_one_time[0]: alice_one_time[1]}, }, }, "failures": {}, } aioresponse.post( "https://example.org/_matrix/client/r0/keys/claim?access_token=bob_1234", status=200, payload=key_claim_dict, ) assert not bob.outgoing_to_device_messages assert bob.should_claim_keys await bob.keys_claim(bob.get_users_for_key_claiming()) # Now that bob created a new session, there should be a to-device # message waiting to be sent out to Alice assert not bob.olm.wedged_devices assert bob.outgoing_to_device_messages to_device_for_alice = None # Let's send out that message. await bob.send_to_device_messages() aioresponse.get( sync_url, status=200, payload=self.sync_with_to_device_events( self.olm_message_to_event(to_device_for_alice, alice, bob), "3" ), ) # Take out the wedged session assert len(alice.olm.session_store[bob_device.curve25519]) == 1 wedged_session = alice.olm.session_store.get(bob_device.curve25519) await alice.sync() # Check that there are now two sessions with bob assert len(alice.olm.session_store[bob_device.curve25519]) == 2 # Check that the preferred session isn't the wedged one. 
new_session = alice.olm.session_store.get(bob_device.curve25519) assert new_session != wedged_session assert new_session.use_time > wedged_session.use_time async def test_key_sharing(self, async_client_pair_same_user, aioresponse): alice, bob = async_client_pair_same_user assert alice.logged_in assert bob.logged_in await alice.receive_response( self.synce_response_for(alice.user_id, bob.user_id) ) await bob.receive_response(self.synce_response_for(bob.user_id, alice.user_id)) alice_to_share = alice.olm.share_keys() alice_one_time = list(alice_to_share["one_time_keys"].items())[0] key_claim_dict = { "one_time_keys": { alice.user_id: { alice.device_id: {alice_one_time[0]: alice_one_time[1]}, }, }, "failures": {}, } to_device_for_alice = None to_device_for_bob = None sync_url = re.compile( r"^https://example\.org/_matrix/client/r0/sync\?access_token=.*" ) bob_to_device_url = re.compile( r"https://example\.org/_matrix/client/r0/sendToDevice/m\.room.encrypted/[0-9a-fA-f-]*\?access_token=bob_1234", ) alice_to_device_url = re.compile( r"https://example\.org/_matrix/client/r0/sendToDevice/m\.room[\._][_a-z]+/[0-9a-fA-f-]*\?access_token=alice_1234", ) def alice_to_device_cb(url, data, **kwargs): nonlocal to_device_for_alice to_device_for_alice = json.loads(data) return CallbackResult(status=200, payload={}) def bob_to_device_cb(url, data, **kwargs): nonlocal to_device_for_bob to_device_for_bob = json.loads(data) return CallbackResult(status=200, payload={}) aioresponse.post( "https://example.org/_matrix/client/r0/keys/claim?access_token=bob_1234", status=200, payload=key_claim_dict, ) aioresponse.put(bob_to_device_url, callback=alice_to_device_cb, repeat=True) aioresponse.put(alice_to_device_url, callback=bob_to_device_cb, repeat=True) bob_device = alice.device_store[bob.user_id][bob.device_id] session = alice.olm.session_store.get(bob_device.curve25519) assert not session # Share a group session for the room we're sharing with Alice. 
# This implicitly claims one-time keys since we don't have an Olm # session with Alice response = await bob.share_group_session(TEST_ROOM_ID, True) assert isinstance(response, ShareGroupSessionResponse) # Check that the group session is indeed marked as shared. group_session = bob.olm.outbound_group_sessions[TEST_ROOM_ID] assert group_session.shared assert to_device_for_alice to_device_for_alice = None to_device_for_bob = None # We deliberately don't share the message with alice message = { "type": "m.room.message", "content": {"msgtype": "m.text", "body": "It's a secret to everybody."}, } encrypted_content = bob.olm.group_encrypt(TEST_ROOM_ID, message) encrypted_message = { "event_id": "!event_id", "type": "m.room.encrypted", "sender": bob.user_id, "origin_server_ts": int(time.time()), "content": encrypted_content, "room_id": TEST_ROOM_ID, } aioresponse.get( sync_url, status=200, payload=self.sync_with_room_event(encrypted_message, "3"), ) response = await alice.sync() assert isinstance(response, SyncResponse) # Alice received the event but wasn't able to decrypt it. event = response.rooms.join[TEST_ROOM_ID].timeline.events[0] assert isinstance(event, MegolmEvent) assert not to_device_for_bob # Let us request the key from bob again. await alice.request_room_key(event) # Check that bob will receive a message. assert to_device_for_bob # The client doesn't for now know how to re-request keys from bob, so # modify the message here. to_device_for_bob = { "messages": { bob.user_id: { bob.device_id: to_device_for_bob["messages"][alice.user_id]["*"] } } } aioresponse.get( sync_url, status=200, payload=self.sync_with_to_device_events( self.olm_message_to_event( to_device_for_bob, bob, alice, "m.room_key_request" ), "4", ), ) assert not bob.outgoing_to_device_messages # Bob syncs and receives a message. await bob.sync() # The key is now queued up for alice. assert bob.outgoing_to_device_messages assert not to_device_for_alice # Let's send out that message. 
await bob.send_to_device_messages() assert to_device_for_alice aioresponse.get( sync_url, status=200, payload=self.sync_with_to_device_events( self.olm_message_to_event(to_device_for_alice, alice, bob), "5" ), ) # Alice syncs and receives the forwarded key. await alice.sync() # Alice tries to decrypt the previous event again. decrypted_event = alice.decrypt_event(event) assert isinstance(decrypted_event, RoomMessageText) assert decrypted_event.body == "It's a secret to everybody." async def test_sas_verification(self, async_client_pair, aioresponse): alice, bob = async_client_pair assert alice.logged_in assert bob.logged_in await alice.receive_response( self.synce_response_for(alice.user_id, bob.user_id) ) await bob.receive_response(self.synce_response_for(bob.user_id, alice.user_id)) alice_device = OlmDevice( alice.user_id, alice.device_id, alice.olm.account.identity_keys ) bob_device = OlmDevice( bob.user_id, bob.device_id, bob.olm.account.identity_keys ) alice.olm.device_store.add(bob_device) bob.olm.device_store.add(alice_device) alice_to_share = alice.olm.share_keys() alice_one_time = list(alice_to_share["one_time_keys"].items())[0] key_claim_dict = { "one_time_keys": { alice.user_id: { alice.device_id: {alice_one_time[0]: alice_one_time[1]}, }, }, "failures": {}, } to_device_for_alice = None to_device_for_bob = None sync_url = re.compile( r"^https://example\.org/_matrix/client/r0/sync\?access_token=.*" ) bob_to_device_url = re.compile( r"https://example\.org/_matrix/client/r0/sendToDevice/m\.(room|key)[a-z_\.]+/[0-9a-fA-f-]*\?access_token=bob_1234", ) alice_to_device_url = re.compile( r"https://example\.org/_matrix/client/r0/sendToDevice/m\.(room|key)[a-z_\.]+/[0-9a-fA-f-]*\?access_token=alice_1234", ) def alice_to_device_cb(url, data, **kwargs): nonlocal to_device_for_alice to_device_for_alice = json.loads(data) return CallbackResult(status=200, payload={}) def bob_to_device_cb(url, data, **kwargs): nonlocal to_device_for_bob to_device_for_bob = 
json.loads(data) return CallbackResult(status=200, payload={}) aioresponse.post( "https://example.org/_matrix/client/r0/keys/claim?access_token=bob_1234", status=200, payload=key_claim_dict, ) aioresponse.put(bob_to_device_url, callback=alice_to_device_cb, repeat=True) aioresponse.put(alice_to_device_url, callback=bob_to_device_cb, repeat=True) session = alice.olm.session_store.get(bob_device.curve25519) assert not session # Share a group session for the room we're sharing with Alice. # This implicitly claims one-time keys since we don't have an Olm # session with Alice with pytest.raises(OlmTrustError): await bob.share_group_session(TEST_ROOM_ID) to_device_for_alice = None await bob.start_key_verification(alice_device) assert to_device_for_alice aioresponse.get( sync_url, status=200, payload=self.sync_with_to_device_events( self.olm_message_to_event( to_device_for_alice, alice, bob, "m.key.verification.start" ), "4", ), ) assert not alice.key_verifications await alice.sync() assert alice.key_verifications assert not to_device_for_bob await alice.accept_key_verification(list(alice.key_verifications.keys())[0]) assert to_device_for_bob aioresponse.get( sync_url, status=200, payload=self.sync_with_to_device_events( self.olm_message_to_event( to_device_for_bob, bob, alice, "m.key.verification.accept" ), "5", ), ) to_device_for_alice = None assert not bob.outgoing_to_device_messages await bob.sync() assert bob.outgoing_to_device_messages await bob.send_to_device_messages() assert to_device_for_alice aioresponse.get( sync_url, status=200, payload=self.sync_with_to_device_events( self.olm_message_to_event( to_device_for_alice, alice, bob, "m.key.verification.key" ), "6", ), ) assert not bob.outgoing_to_device_messages await alice.sync() assert alice.outgoing_to_device_messages await alice.send_to_device_messages() aioresponse.get( sync_url, status=200, payload=self.sync_with_to_device_events( self.olm_message_to_event( to_device_for_bob, bob, alice, 
"m.key.verification.key" ), "7", ), ) await bob.sync() alice_sas = list(alice.key_verifications.values())[0] bob_sas = list(bob.key_verifications.values())[0] assert alice_sas.get_emoji() == bob_sas.get_emoji() assert not alice_device.verified assert not bob_device.verified await alice.confirm_short_auth_string(alice_sas.transaction_id) aioresponse.get( sync_url, status=200, payload=self.sync_with_to_device_events( self.olm_message_to_event( to_device_for_bob, bob, alice, "m.key.verification.mac" ), "8", ), ) await bob.sync() await bob.confirm_short_auth_string(bob_sas.transaction_id) aioresponse.get( sync_url, status=200, payload=self.sync_with_to_device_events( self.olm_message_to_event( to_device_for_alice, alice, bob, "m.key.verification.mac" ), "8", ), ) await alice.sync() assert alice_device.verified assert bob_device.verified await bob.share_group_session(TEST_ROOM_ID) async def test_key_sharing_callbacks( self, async_client_pair_same_user, aioresponse ): alice, bob = async_client_pair_same_user assert alice.logged_in assert bob.logged_in # Key sharing callbacks will only be called for our own users and if a # device isn't trusted. Change the clients user names here. 
bob.user_id = alice.user_id bob.olm.user_id = alice.user_id await alice.receive_response( self.synce_response_for(alice.user_id, bob.user_id) ) await bob.receive_response(self.synce_response_for(bob.user_id, alice.user_id)) alice_device = OlmDevice( alice.user_id, alice.device_id, alice.olm.account.identity_keys ) bob_device = OlmDevice( bob.user_id, bob.device_id, bob.olm.account.identity_keys ) bob.olm.verify_device(alice_device) alice.olm.verify_device(bob_device) def key_request_cb(event): print(event) bob.verify_device(alice_device) for key_share in bob.get_active_key_requests( event.sender, event.requesting_device_id ): bob.continue_key_share(key_share) bob.add_to_device_callback(key_request_cb, RoomKeyRequest) alice.olm.device_store.add(bob_device) bob.olm.device_store.add(alice_device) alice_to_share = alice.olm.share_keys() alice_one_time = list(alice_to_share["one_time_keys"].items())[0] key_claim_dict = { "one_time_keys": { alice.user_id: { alice.device_id: {alice_one_time[0]: alice_one_time[1]}, }, }, "failures": {}, } to_device_for_alice = None to_device_for_bob = None sync_url = re.compile( r"^https://example\.org/_matrix/client/r0/sync\?access_token=.*" ) bob_to_device_url = re.compile( r"https://example\.org/_matrix/client/r0/sendToDevice/m\.room.encrypted/[0-9a-fA-f-]*\?access_token=bob_1234", ) alice_to_device_url = re.compile( r"https://example\.org/_matrix/client/r0/sendToDevice/m\.room[\._][_a-z]+/[0-9a-fA-f-]*\?access_token=alice_1234", ) def alice_to_device_cb(url, data, **kwargs): nonlocal to_device_for_alice to_device_for_alice = json.loads(data) return CallbackResult(status=200, payload={}) def bob_to_device_cb(url, data, **kwargs): nonlocal to_device_for_bob to_device_for_bob = json.loads(data) return CallbackResult(status=200, payload={}) aioresponse.post( "https://example.org/_matrix/client/r0/keys/claim?access_token=bob_1234", status=200, payload=key_claim_dict, ) aioresponse.put(bob_to_device_url, callback=alice_to_device_cb, 
repeat=True) aioresponse.put(alice_to_device_url, callback=bob_to_device_cb, repeat=True) session = alice.olm.session_store.get(bob_device.curve25519) assert not session # Share a group session for the room we're sharing with Alice. # This implicitly claims one-time keys since we don't have an Olm # session with Alice response = await bob.share_group_session(TEST_ROOM_ID, True) assert isinstance(response, ShareGroupSessionResponse) # Check that the group session is indeed marked as shared. group_session = bob.olm.outbound_group_sessions[TEST_ROOM_ID] assert group_session.shared assert to_device_for_alice to_device_for_alice = None to_device_for_bob = None # We deliberately don't share the message with alice message = { "type": "m.room.message", "content": {"msgtype": "m.text", "body": "It's a secret to everybody."}, } encrypted_content = bob.olm.group_encrypt(TEST_ROOM_ID, message) encrypted_message = { "event_id": "!event_id", "type": "m.room.encrypted", "sender": bob.user_id, "origin_server_ts": int(time.time()), "content": encrypted_content, "room_id": TEST_ROOM_ID, } aioresponse.get( sync_url, status=200, payload=self.sync_with_room_event(encrypted_message, "3"), ) response = await alice.sync() assert isinstance(response, SyncResponse) # Alice received the event but wasn't able to decrypt it. event = response.rooms.join[TEST_ROOM_ID].timeline.events[0] assert isinstance(event, MegolmEvent) assert not to_device_for_bob # Let us request the key from bob again. await alice.request_room_key(event) # Check that bob will receive a message. assert to_device_for_bob # The client doesn't for now know how to re-request keys from bob, so # modify the message here. 
to_device_for_bob = { "messages": { bob_device.user_id: { bob_device.device_id: to_device_for_bob["messages"][ alice_device.user_id ]["*"] } } } aioresponse.get( sync_url, status=200, payload=self.sync_with_to_device_events( self.olm_message_to_event( to_device_for_bob, bob, alice, "m.room_key_request" ), "4", ), ) assert not bob.outgoing_to_device_messages # Bob syncs and receives a message. await bob.sync() # The key is now queued up for alice. assert bob.outgoing_to_device_messages assert not to_device_for_alice # Let's send out that message. await bob.send_to_device_messages() assert to_device_for_alice aioresponse.get( sync_url, status=200, payload=self.sync_with_to_device_events( self.olm_message_to_event(to_device_for_alice, alice, bob), "5" ), ) # Alice syncs and receives the forwarded key. await alice.sync() # Alice tries to decrypt the previous event again. decrypted_event = alice.decrypt_event(event) assert isinstance(decrypted_event, RoomMessageText) assert decrypted_event.body == "It's a secret to everybody." 
async def test_key_invalidation(self, async_client_pair, aioresponse): alice, bob = async_client_pair await alice.receive_response( self.synce_response_for(alice.user_id, bob.user_id) ) await bob.receive_response(self.synce_response_for(bob.user_id, alice.user_id)) alice_device = OlmDevice( alice.user_id, alice.device_id, alice.olm.account.identity_keys ) bob_device = OlmDevice( bob.user_id, bob.device_id, bob.olm.account.identity_keys ) alice.olm.device_store.add(bob_device) bob.olm.device_store.add(alice_device) alice_to_share = alice.olm.share_keys() alice_one_time = list(alice_to_share["one_time_keys"].items())[0] key_claim_dict = { "one_time_keys": { alice.user_id: { alice.device_id: {alice_one_time[0]: alice_one_time[1]}, }, }, "failures": {}, } bob_to_device_url = re.compile( r"https://example\.org/_matrix/client/r0/sendToDevice/m\.(room|key)[a-z_\.]+/[0-9a-fA-f-]*\?access_token=bob_1234", ) aioresponse.post( "https://example.org/_matrix/client/r0/keys/claim?access_token=bob_1234", status=200, payload=key_claim_dict, ) aioresponse.put(bob_to_device_url, payload={}, repeat=True) await bob.share_group_session(TEST_ROOM_ID, True) assert TEST_ROOM_ID in bob.olm.outbound_group_sessions bob.unignore_device(alice_device) assert TEST_ROOM_ID not in bob.olm.outbound_group_sessions bob.verify_device(alice_device) await bob.share_group_session(TEST_ROOM_ID) assert TEST_ROOM_ID in bob.olm.outbound_group_sessions bob.unverify_device(alice_device) assert TEST_ROOM_ID not in bob.olm.outbound_group_sessions bob.blacklist_device(alice_device) await bob.share_group_session(TEST_ROOM_ID) assert TEST_ROOM_ID in bob.olm.outbound_group_sessions bob.unblacklist_device(alice_device) assert TEST_ROOM_ID not in bob.olm.outbound_group_sessions bob.ignore_device(alice_device) await bob.share_group_session(TEST_ROOM_ID) assert TEST_ROOM_ID in bob.olm.outbound_group_sessions bob.verify_device(alice_device) assert TEST_ROOM_ID not in bob.olm.outbound_group_sessions async def 
test_key_sharing_cancellation(self, async_client_pair, aioresponse): alice, bob = async_client_pair alice.user_id = bob.user_id alice.olm.user_id = bob.user_id assert alice.logged_in assert bob.logged_in await alice.receive_response( self.synce_response_for(alice.user_id, bob.user_id) ) await bob.receive_response(self.synce_response_for(bob.user_id, alice.user_id)) alice_device = OlmDevice( alice.user_id, alice.device_id, alice.olm.account.identity_keys ) bob_device = OlmDevice( bob.user_id, bob.device_id, bob.olm.account.identity_keys ) alice.olm.device_store.add(bob_device) bob.olm.device_store.add(alice_device) alice_to_share = alice.olm.share_keys() alice_one_time = list(alice_to_share["one_time_keys"].items())[0] key_claim_dict = { "one_time_keys": { alice.user_id: { alice.device_id: {alice_one_time[0]: alice_one_time[1]}, }, }, "failures": {}, } to_device_for_alice = None to_device_for_bob = None sync_url = re.compile( r"^https://example\.org/_matrix/client/r0/sync\?access_token=.*" ) bob_to_device_url = re.compile( r"https://example\.org/_matrix/client/r0/sendToDevice/m\.room.encrypted/[0-9a-fA-f-]*\?access_token=bob_1234", ) alice_to_device_url = re.compile( r"https://example\.org/_matrix/client/r0/sendToDevice/m\.room[\._][_a-z]+/[0-9a-fA-f-]*\?access_token=alice_1234", ) def alice_to_device_cb(url, data, **kwargs): nonlocal to_device_for_alice to_device_for_alice = json.loads(data) return CallbackResult(status=200, payload={}) def bob_to_device_cb(url, data, **kwargs): nonlocal to_device_for_bob to_device_for_bob = json.loads(data) return CallbackResult(status=200, payload={}) aioresponse.post( "https://example.org/_matrix/client/r0/keys/claim?access_token=bob_1234", status=200, payload=key_claim_dict, ) aioresponse.put(bob_to_device_url, callback=alice_to_device_cb, repeat=True) aioresponse.put(alice_to_device_url, callback=bob_to_device_cb, repeat=True) session = alice.olm.session_store.get(bob_device.curve25519) assert not session # Share a group 
session for the room we're sharing with Alice. # This implicitly claims one-time keys since we don't have an Olm # session with Alice response = await bob.share_group_session(TEST_ROOM_ID, True) assert isinstance(response, ShareGroupSessionResponse) # Check that the group session is indeed marked as shared. group_session = bob.olm.outbound_group_sessions[TEST_ROOM_ID] assert group_session.shared assert to_device_for_alice to_device_for_alice = None to_device_for_bob = None # We deliberately don't share the message with alice message = { "type": "m.room.message", "content": {"msgtype": "m.text", "body": "It's a secret to everybody."}, } encrypted_content = bob.olm.group_encrypt(TEST_ROOM_ID, message) encrypted_message = { "event_id": "!event_id", "type": "m.room.encrypted", "sender": bob.user_id, "origin_server_ts": int(time.time()), "content": encrypted_content, "room_id": TEST_ROOM_ID, } aioresponse.get( sync_url, status=200, payload=self.sync_with_room_event(encrypted_message, "3"), ) bob.invalidate_outbound_session(TEST_ROOM_ID) assert TEST_ROOM_ID not in bob.olm.outbound_group_sessions response = await alice.sync() assert isinstance(response, SyncResponse) # Alice received the event but wasn't able to decrypt it. event = response.rooms.join[TEST_ROOM_ID].timeline.events[0] assert isinstance(event, MegolmEvent) assert not to_device_for_bob # Let us request the key from bob again. await alice.request_room_key(event) # Check that bob will receive a message. assert to_device_for_bob # The client doesn't for now know how to re-request keys from bob, so # modify the message here. 
to_device_for_bob = { "messages": { bob_device.user_id: { bob_device.device_id: to_device_for_bob["messages"][ alice_device.user_id ]["*"] } } } aioresponse.get( sync_url, status=200, payload=self.sync_with_to_device_events( self.olm_message_to_event( to_device_for_bob, bob, alice, "m.room_key_request" ), "4", ), ) assert not bob.outgoing_to_device_messages # Bob syncs and receives a message. await bob.sync() assert not bob.outgoing_to_device_messages assert bob.olm.key_request_from_untrusted key_share = bob.get_active_key_requests(alice.user_id, alice.device_id) bob.cancel_key_share(key_share[0]) assert not bob.outgoing_to_device_messages assert not bob.olm.key_request_from_untrusted async def test_sas_verification_cancel(self, async_client_pair, aioresponse): alice, bob = async_client_pair assert alice.logged_in assert bob.logged_in await alice.receive_response( self.synce_response_for(alice.user_id, bob.user_id) ) await bob.receive_response(self.synce_response_for(bob.user_id, alice.user_id)) alice_device = OlmDevice( alice.user_id, alice.device_id, alice.olm.account.identity_keys ) bob_device = OlmDevice( bob.user_id, bob.device_id, bob.olm.account.identity_keys ) alice.olm.device_store.add(bob_device) bob.olm.device_store.add(alice_device) alice_to_share = alice.olm.share_keys() alice_one_time = list(alice_to_share["one_time_keys"].items())[0] key_claim_dict = { "one_time_keys": { alice.user_id: { alice.device_id: {alice_one_time[0]: alice_one_time[1]}, }, }, "failures": {}, } to_device_for_alice = None to_device_for_bob = None sync_url = re.compile( r"^https://example\.org/_matrix/client/r0/sync\?access_token=.*" ) bob_to_device_url = re.compile( r"https://example\.org/_matrix/client/r0/sendToDevice/m\.(room|key)[a-z_\.]+/[0-9a-fA-f-]*\?access_token=bob_1234", ) alice_to_device_url = re.compile( r"https://example\.org/_matrix/client/r0/sendToDevice/m\.(room|key)[a-z_\.]+/[0-9a-fA-f-]*\?access_token=alice_1234", ) def alice_to_device_cb(url, data, **kwargs): 
nonlocal to_device_for_alice to_device_for_alice = json.loads(data) return CallbackResult(status=200, payload={}) def bob_to_device_cb(url, data, **kwargs): nonlocal to_device_for_bob to_device_for_bob = json.loads(data) return CallbackResult(status=200, payload={}) aioresponse.post( "https://example.org/_matrix/client/r0/keys/claim?access_token=bob_1234", status=200, payload=key_claim_dict, ) aioresponse.put(bob_to_device_url, callback=alice_to_device_cb, repeat=True) aioresponse.put(alice_to_device_url, callback=bob_to_device_cb, repeat=True) session = alice.olm.session_store.get(bob_device.curve25519) assert not session # Share a group session for the room we're sharing with Alice. # This implicitly claims one-time keys since we don't have an Olm # session with Alice with pytest.raises(OlmTrustError): await bob.share_group_session(TEST_ROOM_ID) to_device_for_alice = None await bob.start_key_verification(alice_device) assert to_device_for_alice aioresponse.get( sync_url, status=200, payload=self.sync_with_to_device_events( self.olm_message_to_event( to_device_for_alice, alice, bob, "m.key.verification.start" ), "4", ), ) assert not alice.key_verifications await alice.sync() assert alice.key_verifications assert not to_device_for_bob await alice.accept_key_verification(list(alice.key_verifications.keys())[0]) assert to_device_for_bob aioresponse.get( sync_url, status=200, payload=self.sync_with_to_device_events( self.olm_message_to_event( to_device_for_bob, bob, alice, "m.key.verification.accept" ), "5", ), ) to_device_for_alice = None assert not bob.outgoing_to_device_messages await bob.sync() assert bob.outgoing_to_device_messages await bob.send_to_device_messages() assert to_device_for_alice aioresponse.get( sync_url, status=200, payload=self.sync_with_to_device_events( self.olm_message_to_event( to_device_for_alice, alice, bob, "m.key.verification.key" ), "6", ), ) assert not bob.outgoing_to_device_messages await alice.sync() assert 
alice.outgoing_to_device_messages await alice.send_to_device_messages() aioresponse.get( sync_url, status=200, payload=self.sync_with_to_device_events( self.olm_message_to_event( to_device_for_bob, bob, alice, "m.key.verification.key" ), "7", ), ) await bob.sync() alice_sas = list(alice.key_verifications.values())[0] bob_sas = list(bob.key_verifications.values())[0] assert alice_sas.get_emoji() == bob_sas.get_emoji() assert not alice_device.verified assert not bob_device.verified await alice.cancel_key_verification(alice_sas.transaction_id) aioresponse.get( sync_url, status=200, payload=self.sync_with_to_device_events( self.olm_message_to_event( to_device_for_bob, bob, alice, "m.key.verification.cancel" ), "8", ), ) await bob.sync() assert not alice_device.verified assert not bob_device.verified assert alice_sas.canceled assert bob_sas.canceled async def test_e2e_sending(self, async_client_pair, aioresponse): alice, bob = async_client_pair assert alice.logged_in assert bob.logged_in await alice.receive_response( self.synce_response_for(alice.user_id, bob.user_id) ) await bob.receive_response(self.synce_response_for(bob.user_id, alice.user_id)) cb_ran = False def alice_event_cb(room, event): nonlocal cb_ran cb_ran = True assert isinstance(event, RoomMessageText) assert event.body == "It's a secret to everybody." 
alice.add_event_callback(alice_event_cb, (RoomMessageText, MegolmEvent)) alice_device = OlmDevice( alice.user_id, alice.device_id, alice.olm.account.identity_keys ) bob_device = OlmDevice( bob.user_id, bob.device_id, bob.olm.account.identity_keys ) alice.olm.device_store.add(bob_device) bob.olm.device_store.add(alice_device) alice_to_share = alice.olm.share_keys() alice_one_time = list(alice_to_share["one_time_keys"].items())[0] key_claim_dict = { "one_time_keys": { alice.user_id: { alice.device_id: {alice_one_time[0]: alice_one_time[1]}, }, }, "failures": {}, } to_device_for_alice = None to_device_for_bob = None room_event_for_alice = None sync_url = re.compile( r"^https://example\.org/_matrix/client/r0/sync\?access_token=.*" ) bob_to_device_url = re.compile( r"https://example\.org/_matrix/client/r0/sendToDevice/m\.room.encrypted/[0-9a-fA-f-]*\?access_token=bob_1234", ) alice_to_device_url = re.compile( r"https://example\.org/_matrix/client/r0/sendToDevice/m\.room\.encrypted/[0-9]\?access_token=alice_1234", ) bob_room_send_url = re.compile( rf"https://example\.org/_matrix/client/r0/rooms/{TEST_ROOM_ID}/" rf"send/m\.room\.encrypted/[0-9]\?access_token=bob_1234", ) def alice_to_device_cb(url, data, **kwargs): nonlocal to_device_for_alice to_device_for_alice = json.loads(data) return CallbackResult(status=200, payload={}) def bob_to_device_cb(url, data, **kwargs): nonlocal to_device_for_bob to_device_for_bob = json.loads(data) return CallbackResult(status=200, payload={}) def alice_room_send_cb(url, data, **kwargs): nonlocal room_event_for_alice room_event_for_alice = json.loads(data) return CallbackResult(status=200, payload={}) aioresponse.get( f"https://example.org/_matrix/client/r0/rooms/{TEST_ROOM_ID}/" f"joined_members?access_token=bob_1234", status=200, payload=self.joined_members_response, ) aioresponse.post( "https://example.org/_matrix/client/r0/keys/query?" 
"access_token=bob_1234", status=200, payload=self.keys_query_response, ) aioresponse.post( "https://example.org/_matrix/client/r0/keys/claim?access_token=bob_1234", status=200, payload=key_claim_dict, ) aioresponse.put(bob_to_device_url, callback=alice_to_device_cb, repeat=True) aioresponse.put(alice_to_device_url, callback=bob_to_device_cb, repeat=True) aioresponse.put(bob_room_send_url, callback=alice_room_send_cb, repeat=True) session = alice.olm.session_store.get(bob_device.curve25519) assert not session await bob.room_send( TEST_ROOM_ID, "m.room.message", {"msgtype": "m.text", "body": "It's a secret to everybody."}, "1", ignore_unverified_devices=True, ) group_session = bob.olm.outbound_group_sessions[TEST_ROOM_ID] assert group_session.shared assert to_device_for_alice aioresponse.get( sync_url, status=200, payload=self.sync_with_to_device_events( self.olm_message_to_event(to_device_for_alice, alice, bob) ), ) # Run a sync for Alice, the sync will now contain the to-device message # containing the group session. await alice.sync() # Check that an Olm session was created. session = alice.olm.session_store.get(bob_device.curve25519) assert session # Check that we successfully received the group session as well. alice_group_session = alice.olm.inbound_group_store.get( TEST_ROOM_ID, bob_device.curve25519, group_session.id ) assert alice_group_session.id == group_session.id encrypted_message = { "event_id": "!event_id", "type": "m.room.encrypted", "sender": bob.user_id, "origin_server_ts": int(time.time()), "content": room_event_for_alice, "room_id": TEST_ROOM_ID, } aioresponse.get( sync_url, status=200, payload=self.sync_with_room_event(encrypted_message, "3"), ) response = await alice.sync() assert isinstance(response, SyncResponse) # Alice received the event but wasn't able to decrypt it. event = response.rooms.join[TEST_ROOM_ID].timeline.events[0] assert isinstance(event, RoomMessageText) assert event.body == "It's a secret to everybody." 
assert cb_ran async def test_connect_wrapper(self, async_client, aioresponse): domain = "https://example.org" aioresponse.post( f"{domain}/_matrix/client/r0/login", status=200, payload=self.login_response ) await async_client.login("wordpass") assert async_client.client_session conn = await connect_wrapper( self=async_client.client_session.connector, req=ClientRequest(method="GET", url=URL(domain)), traces=[], timeout=ClientTimeout(), ) # Python 3.9 fixes [a bug](https://github.com/python/cpython/issues/90645) for correctly accessing buffer limits # from SSL transport ssl_transport = ( conn.transport if sys.version_info[0:2] >= (3, 9) else conn.transport._ssl_protocol._transport ) assert ssl_transport.get_write_buffer_limits() == (4 * 1024, 16 * 1024) async def test_upload_filter(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response), ) assert async_client.logged_in aioresponse.post( f"https://example.org/_matrix/client/r0/user/" f"{async_client.user_id}/filter?access_token=abc123", status=200, payload={"filter_id": "abc123"}, ) resp = await async_client.upload_filter( event_fields=["content.body"], event_format=EventFormat.federation, room={"timeline": {"limit": 1}}, ) assert isinstance(resp, UploadFilterResponse) assert resp.filter_id == "abc123" async def test_global_account_data_cb(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response), ) assert async_client.logged_in class CallbackCalled(Exception): pass async def cb(_event): raise CallbackCalled async_client.add_global_account_data_callback(cb, PushRulesEvent) aioresponse.get( "https://example.org/_matrix/client/r0/sync?access_token=abc123", status=200, payload=self.sync_response, ) with pytest.raises(CallbackCalled): await async_client.sync() async def test_set_pushrule(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response), ) 
assert async_client.logged_in override = ("global", PushRuleKind.override, "foo") content = ("global", PushRuleKind.content, "bar") # Ensure before and after can't be specified together with pytest.raises(TypeError): await async_client.set_pushrule(*override, before="x", after="y") # Test before + override with condition aioresponse.put( "https://example.org/_matrix/client/r0/pushrules/" "global/override/foo?access_token=abc123&before=ov1", body={ "actions": [], "conditions": [{"kind": "contains_display_name"}], }, status=200, payload={}, ) resp = await async_client.set_pushrule( *override, before="ov1", conditions=[PushContainsDisplayName()], ) assert isinstance(resp, SetPushRuleResponse) # Test after + override with action aioresponse.put( "https://example.org/_matrix/client/r0/pushrules/" "global/override/foo?access_token=abc123&after=ov1", body={"actions": ["notify"], "conditions": []}, status=200, payload={}, ) resp = await async_client.set_pushrule( *override, after="ov1", actions=[PushNotify()], conditions=[], ) assert isinstance(resp, SetPushRuleResponse) # Ensure conditions can't be specified with non-override/underride rule with pytest.raises(TypeError): await async_client.set_pushrule(*content, conditions=()) # Ensure pattern can't be specified with non-content rule with pytest.raises(TypeError): await async_client.set_pushrule(*override, pattern="notContent!") # Test content pattern rule aioresponse.put( "https://example.org/_matrix/client/r0/pushrules/" "global/content/bar?access_token=abc123", body={"actions": [], "pattern": "foo*bar"}, status=200, payload={}, ) resp = await async_client.set_pushrule(*content, pattern="foo*bar") assert isinstance(resp, SetPushRuleResponse) async def test_delete_pushrule(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response), ) assert async_client.logged_in aioresponse.delete( "https://example.org/_matrix/client/r0/pushrules/" 
"global/override/foo?access_token=abc123", status=200, payload={}, ) resp = await async_client.delete_pushrule( "global", PushRuleKind.override, "foo", ) assert isinstance(resp, DeletePushRuleResponse) async def test_enable_pushrule(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response), ) assert async_client.logged_in aioresponse.put( "https://example.org/_matrix/client/r0/pushrules/" "global/override/foo/enabled?access_token=abc123", body={"enabled": True}, status=200, payload={}, ) resp = await async_client.enable_pushrule( "global", PushRuleKind.override, "foo", enable=True, ) assert isinstance(resp, EnablePushRuleResponse) async def test_set_pushrule_actions(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response), ) assert async_client.logged_in aioresponse.put( "https://example.org/_matrix/client/r0/pushrules/" "global/override/foo/actions?access_token=abc123", body={"actions": [{"set_tweak": "highlight", "value": True}]}, status=200, payload={}, ) tweak = PushSetTweak("highlight", True) resp = await async_client.set_pushrule_actions( "global", PushRuleKind.override, "foo", [tweak], ) assert isinstance(resp, SetPushRuleActionsResponse) async def test_async_mockable(self): mock = AsyncMock(spec=AsyncClient) assert asyncio.iscoroutinefunction( mock.room_send ), "logged_in method should be awaitable" assert not asyncio.iscoroutinefunction( mock.restore_login ), "not logged_in method should not be awaitable" async def test_space_get_hierarchy(self, async_client, aioresponse): await async_client.receive_response( LoginResponse.from_dict(self.login_response), ) assert async_client.logged_in base_url = "https://example.org/_matrix/client/v1" aioresponse.get( f"{base_url}/rooms/{TEST_ROOM_ID}/hierarchy?access_token=abc123", status=200, payload=self.hierarchy_response, ) resp = await async_client.space_get_hierarchy(TEST_ROOM_ID) assert 
isinstance(resp, SpaceGetHierarchyResponse) assert isinstance(resp.rooms, list) aioresponse.get( f"{base_url}/rooms/{TEST_ROOM_ID}/hierarchy?access_token=abc123", status=403, payload={ "errcode": "M_FORBIDDEN", "error": "You are not allowed to view this room.", }, ) resp = await async_client.space_get_hierarchy(TEST_ROOM_ID) assert isinstance(resp, SpaceGetHierarchyError) aioresponse.get( f"{base_url}/rooms/{TEST_ROOM_ID}/hierarchy?access_token=abc123&from=invalid", status=400, payload={ "errcode": "M_INVALID_PARAM", "error": "suggested_only and max_depth cannot change on paginated requests", }, ) resp = await async_client.space_get_hierarchy(TEST_ROOM_ID, from_page="invalid") assert isinstance(resp, SpaceGetHierarchyError) async_client.config = AsyncClientConfig(max_limit_exceeded=0) aioresponse.get( f"{base_url}/rooms/{TEST_ROOM_ID}/hierarchy?access_token=abc123", status=429, payload={ "errcode": "M_LIMIT_EXCEEDED", "error": "Too many requests", "retry_after_ms": 1, }, repeat=True, ) resp = await async_client.space_get_hierarchy(TEST_ROOM_ID) assert isinstance(resp, SpaceGetHierarchyError) matrix-nio-0.24.0/tests/attachment_test.py000066400000000000000000000045271455215747700206200ustar00rootroot00000000000000import pytest import unpaddedbase64 from Crypto import Random from nio import EncryptionError from nio.crypto import decrypt_attachment, encrypt_attachment class TestClass: def test_encrypt(self): data = b"Test bytes" ciphertext, keys = encrypt_attachment(data) plaintext = decrypt_attachment( ciphertext, keys["key"]["k"], keys["hashes"]["sha256"], keys["iv"] ) assert data == plaintext def test_hash_verification(self): data = b"Test bytes" ciphertext, keys = encrypt_attachment(data) with pytest.raises(EncryptionError): decrypt_attachment(ciphertext, keys["key"]["k"], "Fake hash", keys["iv"]) def test_invalid_key(self): data = b"Test bytes" ciphertext, keys = encrypt_attachment(data) with pytest.raises(EncryptionError): decrypt_attachment( ciphertext, "Fake 
key", keys["hashes"]["sha256"], keys["iv"] ) def test_invalid_iv(self): data = b"Test bytes" ciphertext, keys = encrypt_attachment(data) with pytest.raises(EncryptionError): decrypt_attachment( ciphertext, keys["key"]["k"], keys["hashes"]["sha256"], "Fake iv" ) def test_short_key(self): data = b"Test bytes" ciphertext, keys = encrypt_attachment(data) with pytest.raises(EncryptionError): decrypt_attachment( ciphertext, unpaddedbase64.encode_base64(b"Fake key", urlsafe=True), keys["hashes"]["sha256"], keys["iv"], ) def test_short_iv(self): data = b"Test bytes" ciphertext, keys = encrypt_attachment(data) plaintext = decrypt_attachment( ciphertext, keys["key"]["k"], keys["hashes"]["sha256"], unpaddedbase64.encode_base64(b"F" + b"\x00" * 8), ) assert plaintext != data def test_fake_key(self): data = b"Test bytes" ciphertext, keys = encrypt_attachment(data) fake_key = Random.new().read(32) plaintext = decrypt_attachment( ciphertext, unpaddedbase64.encode_base64(fake_key, urlsafe=True), keys["hashes"]["sha256"], keys["iv"], ) assert plaintext != data matrix-nio-0.24.0/tests/client_test.py000066400000000000000000001223701455215747700177430ustar00rootroot00000000000000import json import random from uuid import uuid4 import pytest from helpers import FrameFactory, ephemeral, ephemeral_dir, faker from nio import ( Client, ClientConfig, DeviceList, DeviceOneTimeKeyCount, DownloadResponse, EncryptionError, FullyReadEvent, HttpClient, InviteInfo, InviteMemberEvent, JoinedMembersResponse, KeysQueryResponse, KeysUploadResponse, LocalProtocolError, LoginResponse, LogoutResponse, MegolmEvent, PresenceEvent, ProfileGetAvatarResponse, ProfileGetDisplayNameResponse, ProfileGetResponse, ProfileSetAvatarResponse, ProfileSetDisplayNameResponse, PushRulesEvent, Receipt, ReceiptEvent, RoomCreateResponse, RoomEncryptionEvent, RoomForgetResponse, RoomInfo, RoomKeyRequestResponse, RoomMember, RoomMemberEvent, RoomRedactResponse, Rooms, RoomSummary, RoomTypingResponse, 
ShareGroupSessionResponse, SyncResponse, TagEvent, ThumbnailResponse, Timeline, TransportType, TypingNoticeEvent, ) from nio.event_builders import ToDeviceMessage HOST = "example.org" USER = "example" DEVICE_ID = "DEVICEID" BOB_ID = "@bob:example.org" TEST_ROOM_ID = "!testroom:example.org" TEST_EVENT_ID = "$15163622445EBvZJ:localhost" ALICE_ID = "@alice:example.org" ALICE_DEVICE_ID = "JLAFKJWSCS" CAROL_ID = "@carol:example.org" @pytest.fixture def synced_client(tempdir): http_client = HttpClient("example.org", "ephemeral", "DEVICEID", tempdir) http_client.connect(TransportType.HTTP2) http_client.login("1234") http_client.receive(TestClass().login_byte_response) response = http_client.next_response() assert isinstance(response, LoginResponse) assert http_client.access_token == "ABCD" http_client.sync() http_client.receive(TestClass().sync_byte_response) response = http_client.next_response() assert isinstance(response, SyncResponse) assert http_client.access_token == "ABCD" return http_client class TestClass: example_response_headers = [(":status", "200"), ("server", "fake-serv/0.1.0")] @property def login_response(self): return LoginResponse("@ephemeral:example.org", "DEVICEID", "abc123") @property def logout_response(self): return LogoutResponse() @staticmethod def _load_response(filename): with open(filename) as f: return json.loads(f.read()) @staticmethod def _load_byte_response(filename): with open(filename, "rb") as f: return f.read() @property def login_byte_response(self): frame_factory = FrameFactory() f = frame_factory.build_headers_frame( headers=self.example_response_headers, stream_id=1 ) login_body = json.dumps( { "user_id": "@ephemeral:example.org", "access_token": "ABCD", "device_id": "DEVICEID", } ).encode("utf-8") data = frame_factory.build_data_frame( data=login_body, stream_id=1, flags=["END_STREAM"] ) return f.serialize() + data.serialize() @property def sync_byte_response(self): frame_factory = FrameFactory() f = 
frame_factory.build_headers_frame( headers=self.example_response_headers, stream_id=3 ) body = self._load_byte_response("tests/data/sync.json") data = frame_factory.build_data_frame( data=body, stream_id=3, flags=["END_STREAM"] ) return f.serialize() + data.serialize() def file_byte_response(self, stream_id=5, header_filename=""): frame_factory = FrameFactory() headers = self.example_response_headers + [("content-type", "image/png")] if header_filename: headers.append( ( "content-disposition", f'inline; filename="{header_filename}"', ), ) f = frame_factory.build_headers_frame(headers=headers, stream_id=stream_id) body = self._load_byte_response("tests/data/file_response") data = frame_factory.build_data_frame( data=body, stream_id=stream_id, flags=["END_STREAM"] ) return f.serialize() + data.serialize() def empty_response(self, stream_id=5): frame_factory = FrameFactory() f = frame_factory.build_headers_frame( headers=self.example_response_headers, stream_id=stream_id ) body = b"{}" data = frame_factory.build_data_frame( data=body, stream_id=stream_id, flags=["END_STREAM"] ) return f.serialize() + data.serialize() def room_id_response(self, stream_id=5, room_id=TEST_ROOM_ID): frame_factory = FrameFactory() f = frame_factory.build_headers_frame( headers=self.example_response_headers, stream_id=stream_id ) body = json.dumps({"room_id": room_id}).encode() data = frame_factory.build_data_frame( data=body, stream_id=stream_id, flags=["END_STREAM"] ) return f.serialize() + data.serialize() def event_id_response(self, stream_id=5, event_id=TEST_EVENT_ID): frame_factory = FrameFactory() f = frame_factory.build_headers_frame( headers=self.example_response_headers, stream_id=stream_id ) body = json.dumps({"event_id": event_id}).encode() data = frame_factory.build_data_frame( data=body, stream_id=stream_id, flags=["END_STREAM"], ) return f.serialize() + data.serialize() def get_displayname_byte_response(self, displayname, stream_id=5): frame_factory = FrameFactory() f = 
frame_factory.build_headers_frame( headers=self.example_response_headers, stream_id=stream_id ) body = json.dumps({"displayname": displayname}).encode("utf-8") data = frame_factory.build_data_frame( data=body, stream_id=stream_id, flags=["END_STREAM"] ) return f.serialize() + data.serialize() def get_avatar_byte_response(self, avatar_url, stream_id=5): frame_factory = FrameFactory() f = frame_factory.build_headers_frame( headers=self.example_response_headers, stream_id=stream_id ) body = json.dumps({"avatar_url": avatar_url}).encode("utf-8") data = frame_factory.build_data_frame( data=body, stream_id=stream_id, flags=["END_STREAM"] ) return f.serialize() + data.serialize() def get_profile_byte_response(self, displayname, avatar_url, stream_id=5): frame_factory = FrameFactory() f = frame_factory.build_headers_frame( headers=self.example_response_headers, stream_id=stream_id ) body = json.dumps( {"displayname": displayname, "avatar_url": avatar_url} ).encode("utf-8") data = frame_factory.build_data_frame( data=body, stream_id=stream_id, flags=["END_STREAM"] ) return f.serialize() + data.serialize() @property def sync_response(self): timeline = Timeline( [ RoomMemberEvent( { "event_id": "event_id_1", "sender": ALICE_ID, "origin_server_ts": 1516809890615, }, ALICE_ID, "join", None, {"membership": "join"}, ), RoomMemberEvent( { "event_id": "event_id_2", "sender": ALICE_ID, "origin_server_ts": 1516809890615, }, CAROL_ID, "invite", None, {"membership": "invite"}, ), RoomEncryptionEvent( { "event_id": "event_id_3", "sender": ALICE_ID, "origin_server_ts": 1516809890615, } ), ], False, "prev_batch_token", ) test_room_info = RoomInfo( timeline=timeline, state=[], ephemeral=[ TypingNoticeEvent([ALICE_ID]), ReceiptEvent( [ Receipt( event_id="event_id_3", receipt_type="m.read", user_id=ALICE_ID, timestamp=1516809890615, ) ] ), ], account_data=[ FullyReadEvent(event_id="event_id_2"), TagEvent(tags={"u.test": {"order": 1}}), ], summary=RoomSummary( invited_member_count=1, 
joined_member_count=2, ), ) rooms = Rooms(invite={}, join={TEST_ROOM_ID: test_room_info}, leave={}) return SyncResponse( next_batch="token123", rooms=rooms, device_key_count=DeviceOneTimeKeyCount(49, 50), device_list=DeviceList([ALICE_ID], []), to_device_events=[ RoomEncryptionEvent( { "event_id": "event_id_2", "sender": ALICE_ID, "origin_server_ts": 1516809890615, } ) ], presence_events=[ PresenceEvent(ALICE_ID, "online", 1337, True, "I am here.") ], account_data_events=[ PushRulesEvent(), ], ) @property def sync_invite_response(self): state = [ InviteMemberEvent( {}, "@BOB:example.org", ALICE_ID, "invite", None, { "membership": "invite", "display_name": None, }, ) ] test_room_info = InviteInfo(state) rooms = Rooms({TEST_ROOM_ID: test_room_info}, {}, {}) return SyncResponse( "token123", rooms, DeviceOneTimeKeyCount(49, 50), DeviceList([ALICE_ID], []), [], [], ) @property def downgrade_sync(self): timeline = Timeline( [ RoomMemberEvent( { "event_id": "event_id_1", "sender": ALICE_ID, "origin_server_ts": 1516809890615, }, ALICE_ID, "join", None, {"membership": "join"}, ), ], False, "prev_batch_token", ) test_room_info = RoomInfo(timeline, [], [], [], RoomSummary(1, 2, [])) rooms = Rooms({}, {TEST_ROOM_ID: test_room_info}, {}) return SyncResponse( "token123", rooms, DeviceOneTimeKeyCount(49, 50), DeviceList([ALICE_ID], []), [], [], ) @property def second_sync(self): timeline = Timeline( [ RoomMemberEvent( { "event_id": "event_id_1", "sender": ALICE_ID, "origin_server_ts": 1516809890615, }, ALICE_ID, "join", None, {"membership": "join"}, ), RoomEncryptionEvent( { "event_id": "event_id_2", "sender": ALICE_ID, "origin_server_ts": 1516809890615, } ), ], True, "prev_batch_token", ) test_room_info = RoomInfo(timeline, [], [], [], RoomSummary(1, 2, [])) rooms = Rooms({}, {TEST_ROOM_ID: test_room_info}, {}) return SyncResponse( "token123", rooms, DeviceOneTimeKeyCount(49, 50), DeviceList([], []), [], [] ) @property def keys_query_response(self): parsed_dict = 
TestClass._load_response("tests/data/keys_query.json") return KeysQueryResponse.from_dict(parsed_dict) @property def joined_members(self): return JoinedMembersResponse( [ RoomMember(BOB_ID, None, None), # joined RoomMember(ALICE_ID, None, None), # joined RoomMember(CAROL_ID, None, None), # invited ], TEST_ROOM_ID, ) def test_client_protocol_error(self): client = Client(USER, DEVICE_ID) with pytest.raises(LocalProtocolError): client.olm_account_shared with pytest.raises(LocalProtocolError): client.blacklist_device(faker.olm_device()) with pytest.raises(LocalProtocolError): client.unblacklist_device(faker.olm_device()) with pytest.raises(LocalProtocolError): client.verify_device(faker.olm_device()) with pytest.raises(LocalProtocolError): client.unverify_device(faker.olm_device()) with pytest.raises(LocalProtocolError): client.decrypt_event(None) with pytest.raises(LocalProtocolError): client.decrypt_event(None) with pytest.raises(LocalProtocolError): client.device_store client = HttpClient(HOST, USER, DEVICE_ID) with pytest.raises(LocalProtocolError): client.share_group_session(None) with pytest.raises(LocalProtocolError): client.keys_claim(None) with pytest.raises(LocalProtocolError): client.keys_query(None) def test_client_create(self, client): assert isinstance(client, Client) assert not client.store def test_client_invalid_response(self, client): with pytest.raises(ValueError, match="Invalid response received"): client.receive_response(None) def test_client_login(self, client): assert not client.access_token assert not client.store assert not client.olm client.receive_response(self.login_response) assert client.access_token assert client.store assert client.olm def test_client_restore_login(self, tempdir): client = Client(BOB_ID, store_path=tempdir) assert not client.user_id assert not client.device_id assert not client.access_token assert not client.store assert not client.olm client.restore_login(BOB_ID, DEVICE_ID, "ABCD") assert client.user_id assert 
client.device_id assert client.access_token assert client.store assert client.olm def test_client_logout(self, client): client.receive_response(self.login_response) assert client.access_token client.receive_response(self.logout_response) assert client.access_token == "" def test_client_account_sharing(self, client): client.receive_response(self.login_response) with pytest.raises( ValueError, match="Invalid event, this function can only decrypt MegolmEvents", ): client.decrypt_event(None) assert not client.olm_account_shared assert client.should_upload_keys assert client.device_store client.receive_response(KeysUploadResponse(49, 49)) assert client.should_upload_keys client.receive_response(KeysUploadResponse(50, 50)) assert not client.should_upload_keys def test_client_room_creation(self, client): client.receive_response(self.login_response) client.receive_response(KeysUploadResponse(50, 50)) assert not client.should_query_keys client.receive_response(self.sync_response) assert client.rooms[TEST_ROOM_ID] room = client.rooms[TEST_ROOM_ID] assert room.encrypted assert client.should_query_keys def test_device_store(self, tempdir): client = Client("ephemeral", "DEVICEID", tempdir) client.receive_response(self.login_response) client.receive_response(KeysUploadResponse(50, 50)) assert not client.should_query_keys client.receive_response(self.sync_response) client.receive_response(self.keys_query_response) assert list(client.device_store.users) == [ALICE_ID, CAROL_ID] alice_device = client.device_store[ALICE_ID][ALICE_DEVICE_ID] assert alice_device client = Client("ephemeral", "DEVICEID", tempdir) client.receive_response(self.login_response) assert list(client.device_store.users) == [ALICE_ID] alice_device = client.device_store[ALICE_ID][ALICE_DEVICE_ID] assert alice_device def test_client_key_query(self, client): assert not client.should_query_keys client.receive_response(self.login_response) client.receive_response(KeysUploadResponse(50, 50)) assert not 
client.should_query_keys client.receive_response(self.sync_response) assert not client.device_store.users assert client.rooms[TEST_ROOM_ID] room = client.rooms[TEST_ROOM_ID] assert room.encrypted assert room.summary assert len(room.users) == 2 assert room.invited_count == 1 assert room.joined_count == 2 assert room.member_count == 3 assert room.summary.invited_member_count == 1 assert room.summary.joined_member_count == 2 assert client.should_query_keys assert not client.device_store.users client.receive_response(self.keys_query_response) assert not client.should_query_keys assert client.device_store.users assert not room.members_synced client.receive_response(self.joined_members) assert room.members_synced assert client.should_query_keys assert client.users_for_key_query == {BOB_ID} @ephemeral def test_query_rule(self): client = Client("ephemeral", "DEVICEID", ephemeral_dir) client.receive_response(self.login_response) assert client.store is not None client.receive_response(KeysUploadResponse(50, 50)) assert not client.should_query_keys client.receive_response(self.sync_response) assert client.should_query_keys client.receive_response(self.keys_query_response) assert client.olm.tracked_users == {ALICE_ID, CAROL_ID} assert list(client.device_store.users) == [ALICE_ID, CAROL_ID] assert not client.should_query_keys del client client = Client("ephemeral", "DEVICEID", ephemeral_dir) client.receive_response(self.login_response) assert not client.should_upload_keys assert not client.should_query_keys assert list(client.device_store.users) == [ALICE_ID] assert client.device_store.active_user_devices(ALICE_ID) alice_device = client.device_store[ALICE_ID][ALICE_DEVICE_ID] assert alice_device client.receive_response(self.second_sync) assert client.should_query_keys client.users_for_key_query == {ALICE_ID} client.receive_response(self.joined_members) client.users_for_key_query == {ALICE_ID, BOB_ID} client.receive_response(self.keys_query_response) assert 
client.olm.tracked_users == {ALICE_ID, CAROL_ID} assert client.users_for_key_query == {BOB_ID} assert client.should_query_keys @ephemeral def test_early_store_loading(self): client = Client("ephemeral") with pytest.raises(LocalProtocolError): client.load_store() client = Client("ephemeral", store_path=ephemeral_dir) client.user_id = "@ephemeral:example.org" with pytest.raises(LocalProtocolError): client.load_store() client.user_id = None client.device_id = "DEVICEID" with pytest.raises(LocalProtocolError): client.load_store() client.receive_response(self.login_response) del client client = Client("ephemeral", "DEVICEID", ephemeral_dir) client.user_id = "@ephemeral:example.org" assert not client.store assert not client.olm client.load_store() assert client.store assert client.olm def test_marking_sessions_as_shared(self, client): client.receive_response(self.login_response) client.receive_response(self.sync_response) client.receive_response(self.joined_members) client.receive_response(self.keys_query_response) room = client.rooms[TEST_ROOM_ID] assert room.encrypted assert len(room.users) == 3 assert ALICE_ID in client.device_store.users assert BOB_ID not in client.device_store.users with pytest.raises(EncryptionError): client.olm.share_group_session(TEST_ROOM_ID, room.users) shared_with, to_device = client.olm.share_group_session( TEST_ROOM_ID, room.users, True ) session = client.olm.outbound_group_sessions[TEST_ROOM_ID] assert (ALICE_ID, ALICE_DEVICE_ID) in session.users_ignored response = ShareGroupSessionResponse.from_dict({}, TEST_ROOM_ID, set()) client.receive_response(response) assert session.shared def test_storing_room_encryption_state(self, client): client.receive_response(self.login_response) assert not client.encrypted_rooms client.receive_response(self.sync_response) assert TEST_ROOM_ID in client.encrypted_rooms encrypted_rooms = client.store.load_encrypted_rooms() assert TEST_ROOM_ID in encrypted_rooms client2 = Client(client.user, client.device_id, 
client.store_path) client2.receive_response(self.login_response) assert TEST_ROOM_ID in client2.encrypted_rooms client2.receive_response(self.downgrade_sync) room = client2.rooms[TEST_ROOM_ID] assert room.encrypted def test_http_client_login(self, http_client): http_client.connect(TransportType.HTTP2) _, _ = http_client.login("1234") http_client.receive(self.login_byte_response) response = http_client.next_response() assert isinstance(response, LoginResponse) assert http_client.access_token == "ABCD" def test_http_client_login_raw(self, http_client): http_client.connect(TransportType.HTTP2) auth_dict = { "type": "m.login.password", "identifier": { "type": "m.id.thirdparty", "medium": "email", "address": "testemail@mail.org", }, "password": "PASSWORDABCD", "initial_device_display_name": "Citadel bot", } _, _ = http_client.login_raw(auth_dict) http_client.receive(self.login_byte_response) response = http_client.next_response() assert isinstance(response, LoginResponse) assert http_client.access_token == "ABCD" def test_http_client_login_raw_with_empty_dict(self, http_client): http_client.connect(TransportType.HTTP2) auth_dict = {} with pytest.raises(ValueError, match="Auth dictionary shall not be empty"): _, _ = http_client.login_raw(auth_dict) assert not http_client.access_token == "ABCD" def test_http_client_login_raw_with_none_dict(self, http_client): http_client.connect(TransportType.HTTP2) auth_dict = None with pytest.raises(ValueError, match="Auth dictionary shall not be empty"): _, _ = http_client.login_raw(auth_dict) assert not http_client.access_token == "ABCD" def test_http_client_sync(self, http_client): http_client.connect(TransportType.HTTP2) _, _ = http_client.login("1234") http_client.receive(self.login_byte_response) response = http_client.next_response() assert isinstance(response, LoginResponse) assert http_client.access_token == "ABCD" _, _ = http_client.sync() http_client.receive(self.sync_byte_response) response = http_client.next_response() 
assert isinstance(response, SyncResponse) assert http_client.access_token == "ABCD" def test_http_client_keys_query(self, http_client): http_client.connect(TransportType.HTTP2) _, _ = http_client.login("1234") http_client.receive(self.login_byte_response) response = http_client.next_response() assert isinstance(response, LoginResponse) assert http_client.access_token == "ABCD" _, _ = http_client.sync() http_client.receive(self.sync_byte_response) response = http_client.next_response() assert isinstance(response, SyncResponse) assert http_client.access_token == "ABCD" event = MegolmEvent.from_dict( self._load_response("tests/data/events/megolm.json") ) http_client.request_room_key(event) http_client.receive(self.empty_response(5)) response = http_client.next_response() assert isinstance(response, RoomKeyRequestResponse) assert ( "X3lUlvLELLYxeTx4yOVu6UDpasGEVO0Jbu+QFnm0cKQ" in http_client.outgoing_key_requests ) def test_http_client_room_create(self, http_client): http_client.connect(TransportType.HTTP2) _, _ = http_client.login("1234") http_client.receive(self.login_byte_response) response = http_client.next_response() assert isinstance(response, LoginResponse) assert http_client.access_token == "ABCD" _, _ = http_client.sync() http_client.receive(self.sync_byte_response) response = http_client.next_response() assert isinstance(response, SyncResponse) assert http_client.access_token == "ABCD" _, _ = http_client.room_create() http_client.receive(self.room_id_response(5)) response = http_client.next_response() assert isinstance(response, RoomCreateResponse) assert response.room_id == TEST_ROOM_ID def test_http_client_room_forget(self, http_client): http_client.connect(TransportType.HTTP2) _, _ = http_client.login("1234") http_client.receive(self.login_byte_response) response = http_client.next_response() assert isinstance(response, LoginResponse) assert http_client.access_token == "ABCD" _, _ = http_client.sync() http_client.receive(self.sync_byte_response) response 
= http_client.next_response() assert isinstance(response, SyncResponse) assert http_client.access_token == "ABCD" room_id = next(iter(http_client.rooms)) _, _ = http_client.room_forget(room_id) http_client.receive(self.empty_response(5)) response = http_client.next_response() assert isinstance(response, RoomForgetResponse) def test_http_client_room_redact(self, synced_client): room_id = next(iter(synced_client.rooms)) event_id = "$15163622445EBvZJ:localhost" tx_id = uuid4() reason = "for no reason" synced_client.room_redact(room_id, event_id, reason, tx_id) synced_client.receive(self.event_id_response(5)) response = synced_client.next_response() assert isinstance(response, RoomRedactResponse) def test_http_client_room_typing(self, http_client): http_client.connect(TransportType.HTTP2) _, _ = http_client.login("1234") http_client.receive(self.login_byte_response) response = http_client.next_response() assert isinstance(response, LoginResponse) assert http_client.access_token == "ABCD" _, _ = http_client.sync() http_client.receive(self.sync_byte_response) response = http_client.next_response() assert isinstance(response, SyncResponse) assert http_client.access_token == "ABCD" assert http_client.rooms room_id = list(http_client.rooms.keys())[0] _, _ = http_client.room_typing(room_id, typing_state=False) http_client.receive(self.empty_response(5)) response = http_client.next_response() assert isinstance(response, RoomTypingResponse) def test_http_client_download(self, http_client): http_client.connect(TransportType.HTTP2) server_name = "example.og" media_id = ("ascERGshawAWawugaAcauga",) filename = "example&.png" # has unsafe character to test % encoding _, _ = http_client.download(server_name, media_id, allow_remote=False) http_client.receive(self.file_byte_response(1)) response = http_client.next_response() assert isinstance(response, DownloadResponse) assert response.body == self._load_byte_response("tests/data/file_response") assert response.content_type == 
"image/png" assert response.filename is None _, _ = http_client.download(server_name, media_id, filename) http_client.receive(self.file_byte_response(3, filename)) response = http_client.next_response() assert isinstance(response, DownloadResponse) assert response.body == self._load_byte_response("tests/data/file_response") assert response.content_type == "image/png" assert response.filename == filename def test_http_client_thumbnail(self, http_client): http_client.connect(TransportType.HTTP2) _, _ = http_client.thumbnail( "example.org", "ascERGshawAWawugaAcauga", 32, 32, allow_remote=False ) http_client.receive(self.file_byte_response(1)) response = http_client.next_response() assert isinstance(response, ThumbnailResponse) assert response.body == self._load_byte_response("tests/data/file_response") assert response.content_type == "image/png" def test_http_client_get_profile(self, http_client: HttpClient): http_client.connect(TransportType.HTTP2) name = faker.name() avatar = faker.avatar_url().replace("#auto", "") http_client.user_id = ALICE_ID _, _ = http_client.get_profile() http_client.receive(self.get_profile_byte_response(name, avatar, 1)) response = http_client.next_response() assert isinstance(response, ProfileGetResponse) assert response.displayname == name assert response.avatar_url.replace("#auto", "") == avatar def test_http_client_get_set_displayname(self, http_client): http_client.connect(TransportType.HTTP2) _, _ = http_client.login("1234") http_client.receive(self.login_byte_response) response = http_client.next_response() assert isinstance(response, LoginResponse) assert http_client.access_token == "ABCD" _, _ = http_client.sync() http_client.receive(self.sync_byte_response) response = http_client.next_response() assert isinstance(response, SyncResponse) assert http_client.access_token == "ABCD" _, _ = http_client.get_displayname() http_client.receive(self.get_displayname_byte_response(None, 5)) response = http_client.next_response() assert 
isinstance(response, ProfileGetDisplayNameResponse) assert not response.displayname new_name = faker.name() _, _ = http_client.set_displayname(new_name) http_client.receive(self.empty_response(7)) response = http_client.next_response() assert isinstance(response, ProfileSetDisplayNameResponse) _, _ = http_client.get_displayname() http_client.receive(self.get_displayname_byte_response(new_name, 9)) response = http_client.next_response() assert isinstance(response, ProfileGetDisplayNameResponse) assert response.displayname == new_name def test_http_client_get_set_avatar(self, http_client): http_client.connect(TransportType.HTTP2) _, _ = http_client.login("1234") http_client.receive(self.login_byte_response) response = http_client.next_response() assert isinstance(response, LoginResponse) assert http_client.access_token == "ABCD" _, _ = http_client.sync() http_client.receive(self.sync_byte_response) response = http_client.next_response() assert isinstance(response, SyncResponse) assert http_client.access_token == "ABCD" _, _ = http_client.get_avatar() http_client.receive(self.get_avatar_byte_response(None, 5)) response = http_client.next_response() assert isinstance(response, ProfileGetAvatarResponse) assert not response.avatar_url new_avatar = faker.avatar_url().replace("#auto", "") _, _ = http_client.set_avatar(new_avatar) http_client.receive(self.empty_response(7)) response = http_client.next_response() assert isinstance(response, ProfileSetAvatarResponse) _, _ = http_client.get_avatar() http_client.receive(self.get_avatar_byte_response(new_avatar, 9)) response = http_client.next_response() assert isinstance(response, ProfileGetAvatarResponse) assert response.avatar_url.replace("#auto", "") == new_avatar def test_event_callback(self, client): client.receive_response(self.login_response) class CallbackException(Exception): pass def cb(room, event): if isinstance(event, RoomMemberEvent): raise CallbackException client.add_event_callback(cb, (RoomMemberEvent, 
RoomEncryptionEvent)) with pytest.raises(CallbackException): client.receive_response(self.sync_response) def test_to_device_cb(self, client): client.receive_response(self.login_response) class CallbackException(Exception): pass def cb(event): if isinstance(event, RoomEncryptionEvent): raise CallbackException client.add_to_device_callback(cb, RoomEncryptionEvent) with pytest.raises(CallbackException): client.receive_response(self.sync_response) def test_ephemeral_cb(self, client): client.receive_response(self.login_response) class CallbackException(Exception): pass def cb(_, event): raise CallbackException client.add_ephemeral_callback(cb, TypingNoticeEvent) with pytest.raises(CallbackException): client.receive_response(self.sync_response) def test_many_ephemeral_cb(self, client): """Test that callbacks for multiple ephemeral events are properly handled. Generates a random selection of ephemeral events and produces unique callbacks and exceptions for each. Verifies that all of the callbacks are called, including for duplicate events. """ client.receive_response(self.login_response) ephemeral_events = [TypingNoticeEvent, ReceiptEvent] event_selection = random.choices( population=ephemeral_events, # By the pigeonhole princple, we'll have at least one duplicate Event k=len(ephemeral_events) + 1, ) # This will only print during a failure, at which point we want to know # what event selection caused an error. 
print(f"Random selection of EphemeralEvents: {event_selection}") exceptions = [] for index, event in enumerate(event_selection): exception_class = type( f"CbException{event.__name__}_{index}", (Exception,), {} ) exceptions.append(exception_class) def callback(_, event): raise exception_class client.add_ephemeral_callback(callback, event) with pytest.raises(tuple(exceptions)): client.receive_response(self.sync_response) def test_room_account_data_cb(self, client): client.receive_response(self.login_response) class CallbackException(Exception): pass def cb(_, event): raise CallbackException client.add_room_account_data_callback(cb, FullyReadEvent) with pytest.raises(CallbackException): client.receive_response(self.sync_response) def test_global_account_data_cb(self, client): client.receive_response(self.login_response) class CallbackCalled(Exception): pass def cb(_event): raise CallbackCalled client.add_global_account_data_callback(cb, PushRulesEvent) with pytest.raises(CallbackCalled): client.receive_response(self.sync_response) def test_handle_account_data(self, client): client.receive_response(self.login_response) client.receive_response(self.sync_response) room = client.rooms[TEST_ROOM_ID] assert room.fully_read_marker == "event_id_2" assert room.tags == {"u.test": {"order": 1}} def test_no_encryption(self, client_no_e2e): client_no_e2e.receive_response(self.login_response) assert client_no_e2e.logged_in assert not client_no_e2e.olm client_no_e2e.receive_response(self.sync_response) assert len(client_no_e2e.rooms) == 1 room = list(client_no_e2e.rooms.values())[0] assert room.encrypted client_no_e2e.receive_response(self.second_sync) with pytest.raises(LocalProtocolError): client_no_e2e.device_store with pytest.raises(LocalProtocolError): client_no_e2e.olm_account_shared assert not client_no_e2e.should_query_keys assert not client_no_e2e.users_for_key_query assert not client_no_e2e.key_verifications assert not client_no_e2e.outgoing_to_device_messages assert not 
client_no_e2e.get_active_sas(ALICE_ID, ALICE_DEVICE_ID) ToDeviceMessage("m.test", ALICE_ID, ALICE_DEVICE_ID, {}) client_no_e2e.room_contains_unverified(room.room_id) with pytest.raises(LocalProtocolError): client_no_e2e.invalidate_outbound_session(room.room_id) client_no_e2e.receive_response(self.keys_query_response) def test_event_cb_for_invited_rooms(self, client): client.receive_response(self.login_response) class CallbackException(Exception): pass def cb(_, event): raise CallbackException client.add_event_callback(cb, InviteMemberEvent) with pytest.raises(CallbackException): client.receive_response(self.sync_invite_response) def test_homeserver_url_parsing(self): host, path = HttpClient._parse_homeserver("https://example.org:8080") assert host == "example.org:8080" assert path == "" host, path = HttpClient._parse_homeserver("example.org:8080") assert host == "example.org:8080" assert path == "" host, path = HttpClient._parse_homeserver("example.org/_matrix") assert host == "example.org:443" assert path == "_matrix" host, path = HttpClient._parse_homeserver("https://example.org:8008/_matrix") assert host == "example.org:8008" assert path == "_matrix" def test_room_devices(self, client): client.receive_response(self.login_response) client.receive_response(self.sync_response) client.receive_response(self.keys_query_response) room_devices = client.room_devices(TEST_ROOM_ID) assert ALICE_ID in room_devices assert ALICE_DEVICE_ID in room_devices[ALICE_ID] alice_device = room_devices[ALICE_ID][ALICE_DEVICE_ID] assert alice_device def test_soft_logout(self, client): client.receive_response(self.login_response) assert client.logged_in error_response = SyncResponse.from_dict( { "errcode": "M_UNKNOWN_TOKEN", "error": "Access token has expired", "soft_logout": True, } ) client.receive_response(error_response) assert not client.logged_in def test_sync_token_restoring(self, client): user = client.user_id device_id = client.device_id path = client.store_path del client config 
= ClientConfig(store_sync_tokens=True) client = Client(user, device_id, path, config=config) client.receive_response(self.login_response) assert not client.next_batch assert not client.loaded_sync_token client.receive_response(self.sync_response) assert client.next_batch client = Client(user, device_id, path, config=config) client.receive_response(self.login_response) assert client.loaded_sync_token def test_presence_callback(self, client): client.receive_response(self.login_response) class CallbackException(Exception): pass def cb(event): if isinstance(event, PresenceEvent): raise CallbackException client.add_presence_callback(cb, PresenceEvent) client.add_presence_callback(cb, PresenceEvent) with pytest.raises(CallbackException): client.receive_response(self.sync_response) matrix-nio-0.24.0/tests/conftest.py000066400000000000000000000027671455215747700172620ustar00rootroot00000000000000import shutil import tempfile import helpers import pytest from conftest_async import ( # noqa: F401 aioresponse, async_client, async_client_pair, async_client_pair_same_user, ) from olm import Account from nio import Client, ClientConfig, HttpClient from nio.crypto import Olm, OlmDevice from nio.store import SqliteMemoryStore ALICE_ID = "@alice:example.org" ALICE_DEVICE_ID = "JLAFKJWSCS" BOB_DEVICE = "@bob:example.org" BOB_DEVICE_ID = "JLAFKJWSRS" @pytest.fixture def tempdir(): newpath = tempfile.mkdtemp() yield newpath shutil.rmtree(newpath) @pytest.fixture def client(tempdir): return Client("ephemeral", "DEVICEID", tempdir) @pytest.fixture def client_no_e2e(tempdir): config = ClientConfig(encryption_enabled=False) return Client("ephemeral", "DEVICEID", tempdir, config) @pytest.fixture def olm_machine(): key_pair = Account().identity_keys bob_device = OlmDevice(BOB_DEVICE, BOB_DEVICE_ID, key_pair) store = SqliteMemoryStore(ALICE_ID, ALICE_DEVICE_ID) client = Olm(ALICE_ID, ALICE_DEVICE_ID, store) client.device_store.add(bob_device) store.save_device_keys(client.device_store) 
return client @pytest.fixture def alice_client(tempdir): client = Client(ALICE_ID, ALICE_DEVICE_ID, tempdir) client.user_id = ALICE_ID return client @pytest.fixture def http_client(tempdir): return HttpClient("example.org", "ephemeral", "DEVICEID", tempdir) @pytest.fixture def frame_factory(): return helpers.FrameFactory() matrix-nio-0.24.0/tests/conftest_async.py000066400000000000000000000045061455215747700204500ustar00rootroot00000000000000import pytest_asyncio from aioresponses import aioresponses from nio import AsyncClient, AsyncClientConfig, LoginResponse from nio.crypto import OlmDevice @pytest_asyncio.fixture async def async_client(tempdir): client = AsyncClient( "https://example.org", "ephemeral", "DEVICEID", tempdir, config=AsyncClientConfig(max_timeouts=3), ) yield client await client.close() @pytest_asyncio.fixture async def async_client_pair(tempdir): ALICE_ID = "@alice:example.org" ALICE_DEVICE = "JLAFKJWSCS" BOB_ID = "@bob:example.org" BOB_DEVICE = "ASDFOEAK" config = AsyncClientConfig(max_timeouts=3) alice = AsyncClient( "https://example.org", ALICE_ID, ALICE_DEVICE, tempdir, config=config, ) bob = AsyncClient( "https://example.org", BOB_ID, BOB_DEVICE, tempdir, config=config, ) await alice.receive_response(LoginResponse(ALICE_ID, ALICE_DEVICE, "alice_1234")) await bob.receive_response(LoginResponse(BOB_ID, BOB_DEVICE, "bob_1234")) yield (alice, bob) await alice.close() await bob.close() @pytest_asyncio.fixture async def async_client_pair_same_user(tempdir): ALICE_ID = "@alice:example.org" FIRST_DEVICE = "JLAFKJWSCS" SECOND_DEVICE = "ASDFOEAK" config = AsyncClientConfig(max_timeouts=3) alice = AsyncClient( "https://example.org", ALICE_ID, FIRST_DEVICE, tempdir, config=config, ) bob = AsyncClient( "https://example.org", ALICE_ID, SECOND_DEVICE, tempdir, config=config, ) await alice.receive_response(LoginResponse(ALICE_ID, FIRST_DEVICE, "alice_1234")) await bob.receive_response(LoginResponse(ALICE_ID, SECOND_DEVICE, "bob_1234")) alice_device = 
OlmDevice( alice.user_id, alice.device_id, alice.olm.account.identity_keys ) bob_device = OlmDevice(bob.user_id, bob.device_id, bob.olm.account.identity_keys) alice.olm.device_store.add(bob_device) bob.olm.device_store.add(alice_device) alice.verify_device(bob_device) bob.verify_device(alice_device) yield (alice, bob) await alice.close() await bob.close() @pytest_asyncio.fixture def aioresponse(): with aioresponses() as m: yield m matrix-nio-0.24.0/tests/data/000077500000000000000000000000001455215747700157605ustar00rootroot00000000000000matrix-nio-0.24.0/tests/data/context.json000066400000000000000000000130571455215747700203450ustar00rootroot00000000000000{ "events_before": [], "events_after": [ { "content": { "body": "yeah, let's do that", "msgtype": "m.text" }, "event_id": "$15163623196QOZxj:localhost", "origin_server_ts": 1516362319505, "room_id": "!SVkFJHzfwvuaIEawgC:localhost", "sender": "@example:localhost", "type": "m.room.message", "unsigned": { "age": 43464955731 }, "user_id": "@example:localhost", "age": 43464955731 } ], "start": "t182-189_0_0_0_0_0_0_0_0", "end": "t184-190_0_0_0_0_0_0_0_0", "event": { "content": { "body": "ok, let's handle invites, joins and parts", "msgtype": "m.text" }, "event_id": "$15163622445EBvZJ:localhost", "origin_server_ts": 1516362244026, "room_id": "!SVkFJHzfwvuaIEawgC:localhost", "sender": "@example2:localhost", "type": "m.room.message", "unsigned": { "age": 43465031210 }, "user_id": "@example2:localhost", "age": 43465031210 }, "state": [ { "content": { "topic": "amazing work" }, "event_id": "$151568196747dxLZM:localhost", "origin_server_ts": 1515681967443, "room_id": "!SVkFJHzfwvuaIEawgC:localhost", "sender": "@example:localhost", "state_key": "", "type": "m.room.topic", "unsigned": { "replaces_state": "$151567214844LzHAk:localhost", "age": 44145307793 }, "user_id": "@example:localhost", "age": 44145307793, "replaces_state": "$151567214844LzHAk:localhost" }, { "content": { "aliases": [ "#tutorial:localhost" ] }, "event_id": 
"$15139375516NUgtD:localhost", "origin_server_ts": 1513937551720, "room_id": "!SVkFJHzfwvuaIEawgC:localhost", "sender": "@example:localhost", "state_key": "localhost", "type": "m.room.aliases", "unsigned": { "age": 45889723516 }, "user_id": "@example:localhost", "age": 45889723516 }, { "content": { "history_visibility": "shared" }, "event_id": "$15139375515VaJEY:localhost", "origin_server_ts": 1513937551613, "room_id": "!SVkFJHzfwvuaIEawgC:localhost", "sender": "@example:localhost", "state_key": "", "type": "m.room.history_visibility", "unsigned": { "age": 45889723623 }, "user_id": "@example:localhost", "age": 45889723623 }, { "content": { "join_rule": "public" }, "event_id": "$15139375514WsgmR:localhost", "origin_server_ts": 1513937551539, "room_id": "!SVkFJHzfwvuaIEawgC:localhost", "sender": "@example:localhost", "state_key": "", "type": "m.room.join_rules", "unsigned": { "age": 45889723697 }, "user_id": "@example:localhost", "age": 45889723697 }, { "content": { "alias": "#tutorial:localhost" }, "event_id": "$15139375513VdeRF:localhost", "origin_server_ts": 1513937551461, "room_id": "!SVkFJHzfwvuaIEawgC:localhost", "sender": "@example:localhost", "state_key": "", "type": "m.room.canonical_alias", "unsigned": { "age": 45889723775 }, "user_id": "@example:localhost", "age": 45889723775 }, { "content": { "ban": 50, "events": { "m.room.avatar": 50, "m.room.canonical_alias": 50, "m.room.history_visibility": 100, "m.room.name": 50, "m.room.power_levels": 100 }, "events_default": 0, "invite": 0, "kick": 50, "redact": 50, "state_default": 50, "users": { "@example:localhost": 100 }, "users_default": 0 }, "event_id": "$15139375512JaHAW:localhost", "origin_server_ts": 1513937551359, "room_id": "!SVkFJHzfwvuaIEawgC:localhost", "sender": "@example:localhost", "state_key": "", "type": "m.room.power_levels", "unsigned": { "age": 45889723877 }, "user_id": "@example:localhost", "age": 45889723877 }, { "content": { "creator": "@example:localhost" }, "event_id": 
"$15139375510KUZHi:localhost", "origin_server_ts": 1513937551203, "room_id": "!SVkFJHzfwvuaIEawgC:localhost", "sender": "@example:localhost", "state_key": "", "type": "m.room.create", "unsigned": { "age": 45889724033 }, "user_id": "@example:localhost", "age": 45889724033 }, { "content": { "avatar_url": null, "displayname": "example2", "membership": "join" }, "event_id": "$151396611913abyeC:localhost", "membership": "join", "origin_server_ts": 1513966119908, "room_id": "!SVkFJHzfwvuaIEawgC:localhost", "sender": "@example2:localhost", "state_key": "@example2:localhost", "type": "m.room.member", "unsigned": { "age": 45861155328 }, "user_id": "@example2:localhost", "age": 45861155328 }, { "content": { "avatar_url": null, "displayname": "example", "membership": "join" }, "event_id": "$15139375511GBYDY:localhost", "membership": "join", "origin_server_ts": 1513937551274, "room_id": "!SVkFJHzfwvuaIEawgC:localhost", "sender": "@example:localhost", "state_key": "@example:localhost", "type": "m.room.member", "unsigned": { "age": 45889723962 }, "user_id": "@example:localhost", "age": 45889723962 } ] } matrix-nio-0.24.0/tests/data/delete_devices.json000066400000000000000000000003551455215747700216220ustar00rootroot00000000000000{ "completed": [ "example.type.foo" ], "flows": [ { "stages": [ "example.type.foo" ] } ], "params": { "example.type.baz": { "example_key": "foobar" } }, "session": "xxxxxxyz" } matrix-nio-0.24.0/tests/data/devices.json000066400000000000000000000002671455215747700203020ustar00rootroot00000000000000{ "devices": [ { "device_id": "QBUAZIFURK", "display_name": "android", "last_seen_ip": "1.2.3.4", "last_seen_ts": 1474491775024 } ] } matrix-nio-0.24.0/tests/data/encryption/000077500000000000000000000000001455215747700201525ustar00rootroot00000000000000matrix-nio-0.24.0/tests/data/encryption/example_DEVICEID.db000066400000000000000000005300001455215747700233460ustar00rootroot00000000000000SQLite format 3@ K++CK.8  
x+Lb8VI7YYbXxZimV7YB+LlizcpdrNMv477kR2MNocrfic6OFvQgABsfT4+gEPvZZgI/SnyrDWsZi+Lo9JCXZLxJVnOS3tlZlXsjvje6Y/cj7MLoJS0zkJvmMBpeUx90MN+HM8OMKztKYKT/uS/MG16VD3VTXCVx08Ob0Q6Mh0TnPWrsPM79nK7qB2a7y43l3wkDPDqDUGFtJZLqxjmF4o9iC5JVYhE8YZx6y842kKoYkh/rYv231gAexampleDEVICEID  exampleDEVICEID                        /+ 5 w G=!Q+indexsqlite_autoindex_forwardedchains_1forwardedchainsM7?indexmegolminbou/M7?indexmegolminboundsessions_account_idmegolminboundsessionsCREATE INDEX "megolminboundsessions_account_id" ON "megolminboundsessions" ("account_id")T77GtablemegolminboundsessionsmegolminboundsessionsCREATE TABLE "megolminboundsessions" ("session_id" TEXT NOT NULL PRIMARY KEY, "sender_key" TEXT NOT NULL, "account_id" INTEGER NOT NULL, "fp_key" TEXT NOT NULL, "room_id" TEXT NOT NULL, "session" BLOB NOT NULL, FOREIGN KEY ("account_id") REFERENCES "accounts" ("id") ON DELETE CASCADE)?)#indexencryptedrooms_account_idencryptedroomsCREATE INDEX "encryptedrooms_account_id" ON "encryptedrooms" ("account_id") ))MtableencryptedroomsencryptedroomsCREATE TABLE "encryptedrooms" ("id" INTEGER NOT NULL PRIMARY KEY, "room_id" TEXT NOT NULL, "account_id" INTEGER NOT NULL, FOREIGN KEY ("account_id") REFERENCES "accounts" ("id") ON DELETE CASCADE, UNIQUE(room_id,account_id))[ tableaccountsaccountsCREATE TABLE "accounts" ("id" INTEGER NOT NULL PRIMARY KEY, "account" BLOB NOT NULL, "user_id" TEXT NOT NULL, "device_id" TEXT NOT NULL, "shared" INTEGER NOT NULL, UNIQUE(user_id,device_id))I]7indexsqlite_autoindex_outgoing_key_requests_1outgoing_key_requestsY77Qtableoutgoing_key_requestsoutgoing_key_requestsCREATE TABLE outgoing_key_requests( device_id TEXT PRIMARY KEY, session_id TEXT, FOREIGN KEY(device_id) REFERENCES accounts(device_id) ON DELETE CASCADE )5I#indexsqlite_autoindex_sync_tokens_1sync_tokens6##3tablesync_tokenssync_tokensCREATE TABLE sync_tokens( device_id TEXT PRIMARY KEY, token TEXT, FOREIGN KEY(device_id) REFERENCES accounts(device_id) ON DELETE CASCADE 
)9M'indexsqlite_autoindex_tracked_users_1tracked_usersW''mtabletracked_userstracked_usersCREATE TABLE tracked_users( device_id TEXT, user_id TEXT, PRIMARY KEY(device_id, user_id), FOREIGN KEY(device_id) REFERENCES accounts(device_id) ON DELETE CASCADE )M a;indexsqlite_autoindex_megolm_outbound_devices_1megolm_outbound_devices @ ;;tablemegolm_outbound_devicesmegolm_outbound_devices CREATE TABLE megolm_outbound_devices( device_id TEXT, room_id TEXT, user_device_id TEXT, PRIMARY KEY(device_id, room_id, user_device_id), FOREIGN KEY(device_id, room_id) REFERENCES megolm_outbound_sessions(device_id, room_id) ON DELETE CASCADE )O c=indexsqlite_autoindex_megolm_outbound_sessions_1megolm_outbound_sessions a ==Utablemegolm_outbound_sessionsmegolm_outbound_sessions CREATE TABLE megolm_outbound_sessions( device_id TEXT, room_id TEXT, session BLOB, max_age_s FLOAT, max_messages INTEGER, creation_time TIMESTAMP, message_count INTEGER, PRIMARY KEY(device_id, room_id), FOREIGN KEY(device_id) REFERENCES accounts(device_id) ON DELETE CASCADE )?S-indexsqlite_autoindex_forwarded_chains_1forwarded_chains --5tableforwarded_chainsforwarded_chainsCREATE TABLE forwarded_chains( device_id TEXT, session_id TEXT, curve_key TEXT, PRIMARY KEY(device_id, session_id, curve_key), FOREIGN KEY(device_id) REFERENCES accounts(device_id) ON DELETE CASCADE )I]7indexsqlite_autoindex_megolminboundsessions_1megolminboundsessions;O)indexsqlite_autoindex_encryptedrooms_1encryptedrooms/Cindexsqlite_autoindex_accounts_1accounts D7 {t; # S0)windexkeys_device_idkeys(CREATE INDEX "keys_device_id" ON "keys" ("device_id")o.Atablekeyskeys&CREATE TABLE "keys" ("id" INTEGER NOT NULL PRIMARY KEY, "key_type" TEXT NOT NULL, "key" TEXT NOT NULL, "device_id" INTEGER NOT NULL, FOREIGN KEY ("device_id") REFERENCES "devicekeys" ("id"), UNIQUE(device_id,key_type))'/;indexsqlite_autoindex_keys_1keys'K---Itabledevicetruststatedevicetruststate%CREATE TABLE "devicetruststate" ("device_id" INTEGER NOT NULL PRIMARY KEY, "state" 
INTEGER NOT NULL, FOREIGN KEY ("device_id") REFERENCES "devicekeys" ("id")))%%Ctablestoreversionstoreversion$CREATE TABLE "storeversion" ("id" INTEGER NOT NULL PRIMARY KEY, "version" INTEGER NOT NULL)(I37indexoutgoingkeyrequests_account_idoutgoingkeyrequests#CREATE INDEX "outgoingkeyrequests_account_id" ON "outgoingkeyrequests" ("account_id")n&33tableoutgoingkeyrequestsoutgoingkeyrequests!CREATE TABLE "outgoingkeyrequests" ("id" INTEGER NOT NULL PRIMARY KEY, "request_id" TEXT NOT NULL, "session_id" TEXT NOT NULL, "room_id" TEXT NOT NULL, "algorithm" TEXT NOT NULL, "account_id" INTEGER NOT NULL, FOREIGN KEY ("account_id") REFERENCES "accounts" ("id") ON DELETE CASCADE, UNIQUE(request_id,account_id))E'Y3indexsqlite_autoindex_outgoingkeyrequests_1outgoingkeyrequests"s%9#indexolmsessions_account_idolmsessions CREATE INDEX "olmsessions_account_id" ON "olmsessions" ("account_id")E###QtableolmsessionsolmsessionsCREATE TABLE "olmsessions" ("session_id" TEXT NOT NULL PRIMARY KEY, "creation_time" TEXT NOT NULL, "last_usage_date" TEXT NOT NULL, "sender_key" TEXT NOT NULL, "account_id" INTEGER NOT NULL, "session" BLOB NOT NULL, FOREIGN KEY ("account_id") REFERENCES "accounts" ("id") ON DELETE CASCADE)5$I#indexsqlite_autoindex_olmsessions_1olmsessions "A+'indexforwardedchains_session_idforwardedchainsCREATE INDEX "forwardedchains_session_id" ON "forwardedchains" ("session_id")=!Q+indexsqlite_autoindex_forwardedchains_1forwardedchains$ ++tableforwardedchainsforwardedchainsCREATE TABLE "forwardedchains" ("id" INTEGER NOT NULL PRIMARY KEY, "sender_key" TEXT NOT NULL, "session_id" TEXT NOT NULL, FOREIGN KEY ("session_id") REFERENCES "megolminboundsessions" ("session_id") ON DELETE CASCADE, UNIQUE(sender_key,session_id))o27!indexsynctokens_account_idsynctokens*CREATE INDEX "synctokens_account_id" ON "synctokens" ("account_id")H1!![tablesynctokenssynctokens)CREATE TABLE "synctokens" ("account_id" INTEGER NOT NULL PRIMARY KEY, "token" TEXT NOT NULL, FOREIGN KEY ("account_id") 
REFERENCES "accounts" ("id") ON DELETE CASCADE)o,7!indexdevicekeys_account_iddevicekeysCREATE INDEX "devicekeys_account_id" ON "devicekeys" ("account_id")\*!!tabledevicekeysdevicekeysCREATE TABLE "devicekeys" ("id" INTEGER NOT NULL PRIMARY KEY, "device_id" TEXT NOT NULL, "user_id" TEXT NOT NULL, "display_name" TEXT NOT NULL, "deleted" INTEGER NOT NULL, "account_id" INTEGER NOT NULL, FOREIGN KEY ("account_id") REFERENCES "accounts" ("id") ON DELETE CASCADE, UNIQUE(account_id,user_id,device_id))3+G!indexsqlite_autoindex_devicekeys_1devicekeys1    rcAAc LEeEiqT9LjCtECaN7WTqcBQ7D5Dwm4+/L9Uxr1IyPAts2018-10-02 15:51:30.0929582018-10-02 15:51:30.092958+Qs131S/odNdWG6VJ8hiy9YZW0us24wnsDjYQbaxLk4xzJqGCndmbJVQ7N8zb9lVhye/sL/cXwOyXr5P/Xo/tfK9qMLXdY7vYMZlEA5AjIiIc35cLwDo3HDpRPCTcUH4Vd6naL+33iPQPuc0cY3A2lJVfYiOddqHkzi15WhZM/OZKgHYkGLx9My1lClVRjG378FTbQNK5nKgDE8UqMJUNVzVse3ujo+j0jo9dvEsc0EQ+z+bqMAb5Qp8mWYW7lrUqjR7sGHxt+yccifgeSRbMx5G7Y8TZS8LUe+N+cvY2H/vTXkbFSmGcbdSumFZ4S2XzJH3fjY2/+/8Cm8rtHnrc/UL+TZHU4j4frdmuXPTPCTS4A3Mv6Qapb+ADzcBaO2DV1BkbLscykf .c EeEiqT9LjCtECaN7WTqcBQ7D5Dwm4+/L9Uxr1IyPAts           ***))%(%#'&%$ #matrix-nio-0.24.0/tests/data/encryption/example_DEVICEID.known_devices000066400000000000000000000001261455215747700256200ustar00rootroot00000000000000@bob:example.org BOBDEVICE matrix-ed25519 hcqqtqTY9tfDkXViQmzyNp8LU0ExuHFCUUo25kU0SVc matrix-nio-0.24.0/tests/data/encryption/known_devices000066400000000000000000000004471455215747700227400ustar00rootroot00000000000000# Test device store file example DEVICEID matrix-ed25519 2MX1WOCAmE9eyywGdiMsQ4RxL2SIKVeyJXiSjVFycpA # Malformed line that gets ignored example DEVICEID matrix-ed25519 # Line with unknown key type, gets ignored as well example DEVICEID matrix-rsa AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 
matrix-nio-0.24.0/tests/data/events/000077500000000000000000000000001455215747700172645ustar00rootroot00000000000000matrix-nio-0.24.0/tests/data/events/alias.json000066400000000000000000000004521455215747700212510ustar00rootroot00000000000000{ "content": { "alias": "#tutorial:localhost" }, "event_id": "$15139375513VdeRF:localhost", "origin_server_ts": 1513937551461, "sender": "@example:localhost", "state_key": "", "type": "m.room.canonical_alias", "unsigned": { "age": 7034220433 } } matrix-nio-0.24.0/tests/data/events/call_answer.json000066400000000000000000000007531455215747700224560ustar00rootroot00000000000000{ "content": { "answer": { "sdp": "v=0\r\no=- 6584580628695956864 2 IN IP4 127.0.0.1[...]", "type": "answer" }, "call_id": "12345", "lifetime": 60000, "version": 0 }, "event_id": "$143273582443PhrSn:example.org", "origin_server_ts": 1432735824653, "room_id": "!jEsUZKDJdhlrceRyVU:example.org", "sender": "@example:example.org", "type": "m.call.answer", "unsigned": { "age": 1234 } } matrix-nio-0.24.0/tests/data/events/call_candidates.json000066400000000000000000000010751455215747700232540ustar00rootroot00000000000000{ "content": { "call_id": "12345", "candidates": [ { "candidate": "candidate:863018703 1 udp 2122260223 10.9.64.156 43670 typ host generation 0", "sdpMLineIndex": 0, "sdpMid": "audio" } ], "version": 0 }, "event_id": "$143273582443PhrSn:example.org", "origin_server_ts": 1432735824653, "room_id": "!jEsUZKDJdhlrceRyVU:example.org", "sender": "@example:example.org", "type": "m.call.candidates", "unsigned": { "age": 1234 } } matrix-nio-0.24.0/tests/data/events/call_hangup.json000066400000000000000000000005071455215747700224360ustar00rootroot00000000000000{ "content": { "call_id": "12345", "version": 0 }, "event_id": "$143273582443PhrSn:example.org", "origin_server_ts": 1432735824653, "room_id": "!jEsUZKDJdhlrceRyVU:example.org", "sender": "@example:example.org", "type": "m.call.hangup", "unsigned": { "age": 1234 } } 
matrix-nio-0.24.0/tests/data/events/call_invite.json000066400000000000000000000007511455215747700224530ustar00rootroot00000000000000{ "content": { "call_id": "12345", "lifetime": 60000, "offer": { "sdp": "v=0\r\no=- 6584580628695956864 2 IN IP4 127.0.0.1[...]", "type": "offer" }, "version": 0 }, "event_id": "$143273582443PhrSn:example.org", "origin_server_ts": 1432735824653, "room_id": "!jEsUZKDJdhlrceRyVU:example.org", "sender": "@example:example.org", "type": "m.call.invite", "unsigned": { "age": 1234 } } matrix-nio-0.24.0/tests/data/events/create.json000066400000000000000000000005321455215747700214220ustar00rootroot00000000000000{ "content": { "creator": "@example:localhost", "m.federate": true, "room_version": "1" }, "event_id": "$151957878228ekrDs:localhost", "origin_server_ts": 1519578782185, "sender": "@example:localhost", "state_key": "", "type": "m.room.create", "unsigned": { "age": 1392989709 } } matrix-nio-0.24.0/tests/data/events/create_typed.json000066400000000000000000000005751455215747700226360ustar00rootroot00000000000000{ "content": { "creator": "@example:localhost", "m.federate": true, "room_version": "1", "type": "nio.matrix.test" }, "event_id": "$151957878228ekrDs:localhost", "origin_server_ts": 1519578782185, "sender": "@example:localhost", "state_key": "", "type": "m.room.create", "unsigned": { "age": 1392989709 } } matrix-nio-0.24.0/tests/data/events/dummy.json000066400000000000000000000002071455215747700213110ustar00rootroot00000000000000{ "content": { }, "sender": "@alice:example.org", "sender_device": "DEVICEID", "keys": {}, "type": "m.dummy" } matrix-nio-0.24.0/tests/data/events/forwarded_room_key.json000066400000000000000000000011461455215747700240420ustar00rootroot00000000000000{ "content": { "algorithm": "m.megolm.v1.aes-sha2", "forwarding_curve25519_key_chain": [ "hPQNcabIABgGnx3/ACv/jmMmiQHoeFfuLB17tzWp6Hw" ], "room_id": "!Cuyf34gef24t:localhost", "sender_claimed_ed25519_key": "aj40p+aw64yPIdsxoog8jhPu9i7l7NcFRecuOQblE3Y", 
"sender_key": "RF3s+E7RkTQTGF2d8Deol0FkQvgII2aJDf3/Jp5mxVU", "session_id": "X3lUlvLELLYxeTx4yOVu6UDpasGEVO0Jbu+QFnm0cKQ", "session_key": "AgAAAADxKHa9uFxcXzwYoNueL5Xqi69IkD4sni8Llf..." }, "sender": "@alice:example.org", "keys": {}, "type": "m.forwarded_room_key" } matrix-nio-0.24.0/tests/data/events/fully_read.json000066400000000000000000000002151455215747700223030ustar00rootroot00000000000000{ "content": { "event_id": "$someplace:example.org" }, "room_id": "!somewhere:example.org", "type": "m.fully_read" } matrix-nio-0.24.0/tests/data/events/guest_access.json000066400000000000000000000004421455215747700226270ustar00rootroot00000000000000{ "content": { "guest_access": "can_join" }, "event_id": "$151957878229oehdx:localhost", "origin_server_ts": 1519578782190, "sender": "@example:localhost", "state_key": "", "type": "m.room.guest_access", "unsigned": { "age": 1392989711 } } matrix-nio-0.24.0/tests/data/events/history_visibility.json000066400000000000000000000004641455215747700241330ustar00rootroot00000000000000{ "content": { "history_visibility": "world_readable" }, "event_id": "$151957878235ricnD:localhost", "origin_server_ts": 1519578782195, "sender": "@example:localhost", "state_key": "", "type": "m.room.history_visibility", "unsigned": { "age": 1392989715 } } matrix-nio-0.24.0/tests/data/events/join_rules.json000066400000000000000000000004331455215747700223300ustar00rootroot00000000000000{ "content": { "join_rule": "public" }, "event_id": "$151957878231iejdB:localhost", "origin_server_ts": 1519578782192, "sender": "@example:localhost", "state_key": "", "type": "m.room.join_rules", "unsigned": { "age": 1392989713 } } matrix-nio-0.24.0/tests/data/events/key_accept.json000066400000000000000000000010161455215747700222640ustar00rootroot00000000000000{ "content": { "commitment": "fQpGIW1Snz+pwLZu6sTy2aHy/DYWWTspTJRPyNp0PKkymfIsNffysMl6ObMMFdIJhk6g6pwlIqZ54rxo8SLmAg", "hash": "sha256", "key_agreement_protocol": "curve25519", "message_authentication_code": 
"hkdf-hmac-sha256", "method": "m.sas.v1", "short_authentication_string": [ "decimal", "emoji" ], "transaction_id": "S0meUniqueAndOpaqueString" }, "type": "m.key.verification.accept", "sender": "@bob:example.org" } matrix-nio-0.24.0/tests/data/events/key_cancel.json000066400000000000000000000003661455215747700222610ustar00rootroot00000000000000{ "content": { "code": "m.user", "reason": "User rejected the key verification request", "transaction_id": "S0meUniqueAndOpaqueString" }, "type": "m.key.verification.cancel", "sender": "@bob:example.org" } matrix-nio-0.24.0/tests/data/events/key_key.json000066400000000000000000000004021455215747700216130ustar00rootroot00000000000000{ "content": { "key": "fQpGIW1Snz+pwLZu6sTy2aHy/DYWWTspTJRPyNp0PKkymfIsNffysMl6ObMMFdIJhk6g6pwlIqZ54rxo8SLmAg", "transaction_id": "S0meUniqueAndOpaqueString" }, "type": "m.key.verification.key", "sender": "@bob:example.org" } matrix-nio-0.24.0/tests/data/events/key_mac.json000066400000000000000000000006261455215747700215730ustar00rootroot00000000000000{ "content": { "keys": "2Wptgo4CwmLo/Y8B8qinxApKaCkBG2fjTWB7AbP5Uy+aIbygsSdLOFzvdDjww8zUVKCmI02eP9xtyJxc/cLiBA", "mac": { "ed25519:ABCDEF": "fQpGIW1Snz+pwLZu6sTy2aHy/DYWWTspTJRPyNp0PKkymfIsNffysMl6ObMMFdIJhk6g6pwlIqZ54rxo8SLmAg" }, "transaction_id": "S0meUniqueAndOpaqueString" }, "type": "m.key.verification.mac", "sender": "@bob:example.org" } matrix-nio-0.24.0/tests/data/events/key_start.json000066400000000000000000000010161455215747700221620ustar00rootroot00000000000000{ "content": { "from_device": "BobDevice1", "hashes": [ "sha256" ], "key_agreement_protocols": [ "curve25519" ], "message_authentication_codes": [ "hkdf-hmac-sha256" ], "method": "m.sas.v1", "short_authentication_string": [ "decimal", "emoji" ], "transaction_id": "S0meUniqueAndOpaqueString" }, "type": "m.key.verification.start", "sender": "@bob:example.org" } matrix-nio-0.24.0/tests/data/events/megolm.json000066400000000000000000000010741455215747700214410ustar00rootroot00000000000000{ 
"content": { "algorithm": "m.megolm.v1.aes-sha2", "ciphertext": "AwgAEnACgAkLmt6qF84IK++J7UDH2Za1YVchHyprqTqsg...", "device_id": "RJYKSTBOIE", "sender_key": "IlRMeOPX2e0MurIyfWEucYBRVOEEUMrOHqn/8mLqMjA", "session_id": "X3lUlvLELLYxeTx4yOVu6UDpasGEVO0Jbu+QFnm0cKQ" }, "event_id": "$143273582443PhrSn:example.org", "origin_server_ts": 1432735824653, "room_id": "!jEsUZKDJdhlrceRyVU:example.org", "sender": "@example:example.org", "type": "m.room.encrypted", "unsigned": { "age": 1234 } } matrix-nio-0.24.0/tests/data/events/member.json000066400000000000000000000006741455215747700214350ustar00rootroot00000000000000{ "content": { "avatar_url": null, "displayname": "example", "membership": "join" }, "event_id": "$151800140517rfvjc:localhost", "membership": "join", "origin_server_ts": 1518001405556, "sender": "@example:localhost", "state_key": "@example:localhost", "type": "m.room.member", "unsigned": { "age": 2970366338, "replaces_state": "$151800111315tsynI:localhost" } } matrix-nio-0.24.0/tests/data/events/message_emote.json000066400000000000000000000005761455215747700230040ustar00rootroot00000000000000{ "content": { "body": "is dancing", "format": "org.matrix.custom.html", "formatted_body": "is dancing", "msgtype": "m.emote" }, "event_id": "$152037280074GZeOm:localhost", "origin_server_ts": 1520372800469, "sender": "@example:localhost", "type": "m.room.message", "unsigned": { "age": 598971425 } } matrix-nio-0.24.0/tests/data/events/message_notice.json000066400000000000000000000011021455215747700231360ustar00rootroot00000000000000{ "origin_server_ts": 1533565163841, "sender": "@_neb_github:matrix.org", "event_id": "$153356516319138IHRIC:matrix.org", "unsigned": { "age": 743 }, "content": { "body": "https://github.com/matrix-org/matrix-python-sdk/issues/266 : Consider allowing MatrixClient.__init__ to take sync_token kwarg", "format": "org.matrix.custom.html", "formatted_body": "313: nio wins!", "msgtype": "m.notice" }, "type": "m.room.message", "room_id": 
"!YHhmBTmGBHGQOlGpaZ:matrix.org" } matrix-nio-0.24.0/tests/data/events/message_text.json000066400000000000000000000005751455215747700226560ustar00rootroot00000000000000{ "content": { "body": "is dancing", "format": "org.matrix.custom.html", "formatted_body": "is dancing", "msgtype": "m.text" }, "event_id": "$152037280074GZeOm:localhost", "origin_server_ts": 1520372800469, "sender": "@example:localhost", "type": "m.room.message", "unsigned": { "age": 598971425 } } matrix-nio-0.24.0/tests/data/events/name.json000066400000000000000000000004361455215747700211020ustar00rootroot00000000000000{ "content": { "name": "#tutorial:localhost" }, "event_id": "$15139375513VdeRF:localhost", "origin_server_ts": 1513937551461, "sender": "@example:localhost", "state_key": "", "type": "m.room.name", "unsigned": { "age": 7034220433 } } matrix-nio-0.24.0/tests/data/events/olm.json000066400000000000000000000011461455215747700207500ustar00rootroot00000000000000{ "content": { "algorithm": "m.olm.v1.curve25519-aes-sha2", "ciphertext": { "7qZcfnBmbEGzxxaWfBjElJuvn7BZx+lSz/SvFrDF/z8": { "body": "AwogGJJzMhf/S3GQFXAOrCZ3iKyGU5ZScVtjI0KypTYrW...", "type": 0 } }, "sender_key": "Szl29ksW/L8yZGWAX+8dY1XyFi+i5wm+DRhTGkbMiwU" }, "event_id": "$143273582443PhrSn:example.org", "origin_server_ts": 1432735824653, "room_id": "!jEsUZKDJdhlrceRyVU:example.org", "sender": "@example:example.org", "type": "m.room.encrypted", "unsigned": { "age": 1234 } } matrix-nio-0.24.0/tests/data/events/olm_event.json000066400000000000000000000076571455215747700221660ustar00rootroot00000000000000{ "content": { "sender_key": "ilismY63NcPzJjZDIm97WyA11nhRNHpE5y8p3IDp6Ws", "ciphertext": { "UDQYDtHVcQg5r6DvfEYF5PZzqm8eoxrCRkTH53EVYgk": { "body": 
"Awogd/Pq16+WDAi19Oro+gOi8xuV8xCKboRTrBxYuOne6AsSIEhNQ375C5BuqKDqHljskBjc5GRvCz90d1q+gfTMa4MrGiCKWKyZjrc1w/MmNkMib3tbIDXWeFE0ekTnLyncgOnpayKgBgMKILrQfa+mouoiCn0ts5Xf78USx6iS12NNHMFOvagrPVweEAMi8AWf2o8Ms63Shf8pw/PDA/miKyAbKWCIr3tX9smoZ1b1+JgBy7gFNyA3xi+yBmAi4iWNXP23tBLWvhVEQHGKarGqRmVc/GC2Buv1iNFrpaI/xFLuhGUFhQVoNfgV9mewb1XS/BAKx5MI/cUTOseZmEenHpvVYGh1jFvw/2Z1CGqG800VXlvAMsdD2Qx9YyRO9KsQqWmy4+AVVGsyWxg+9XhnxlO/60hv7gMrIwM8rGDG7wbCYf/bXCAxXVqB7/I6zhN5McOqlgbI6B6kPdwjT5L1F4qXMQQmaQnuSimz2VOmv30B5NsmkKWHpwcojMJaV0YZIzNEMswd8K2hGtzDHM92YIoc3d9jVfd1Uu3Ci72hyoJTyAVayNGCAAbDXbUzjQuOcC5GRgwvwl7rJL1yppIfXUNEAk2TvLcsnOcDuyiN52I8Y9BPLglkpUFbzcgzN+zdLtzNMjCahLo24h/oBJ/Ka+wDNOt3BKWcZlWOyi6jxWKI1yJo3M0LYZIo8vcohtzOJZa6LXwdMCVkNCgJHkT10sApQ5HnSGA1Ok6IcXwr3CUhOdp7gcNsm3ZQWe/El7ro6bkFhpDyJY0ChFkMAwe8me5ncx9P6nwaX4lE57rxCn+jkxVC7OhJq3DE75h2JkQvKQbdh1nFMq8UZudhShEI/HAZB81t1qKnknVwme84RygMDtHQPKmkEL2WEl7AIIKhUL2PAWS9PztqTmP5rP7kc03Ddk19yRN+cx+rn7beW7YVkWkdRQRDxPaP1wYt4/+kAsKFRZilhdR4fpNv2VN4FkShUFAIWlHhKIl6dTpMeipyw2adqQvASuFf9sW2z6cJ1XcMVX85OFSOPTOPcmBRG7ouMK/MUAG6pBEDQnkU4oBlthxcktXLynqxcMfg1eN+flYgv/G5wX62AiB1Se6Vi3OvHd1MduLEQMbZ79vdCaBnKaYKbK9PJFiksLVKkfMG+DzVCgvuRiA1XLa6kZYq0vDW0vJpoO21S5qb2N7R/nyqjwtHNfBY", "type": 0 }, "232f76N7lW5pQs+g1VXjuOfvSagxhQrFrTzHGRVunzs": { "body": 
"AwogTYBOrEhjLyjXWIHRiBtshor4Iz4xH2etdAn/ltxOGXgSIEzNU+4ck3p1rTjQg/RN+aVUPCV0HKQrYTEnnRr4QHI7GiCKWKyZjrc1w/MmNkMib3tbIDXWeFE0ekTnLyncgOnpayKgBgMKIGJSw4tptgnFNw4dqhq8NUizMqmICjrLKWs0pF6myF5wEAAi8AXLEDvn9UP1A+UnUMZQSCaXY4PBFdl3sBaakeGFE4gFW1LG378P8glrFU94w1geV9WHB8HAT0lEA1RncwwnTwlCs9Ui9qFrlNIrEPw/sPirvwW9Z2cuS+P1hwQv055+5ifRBD5aO7eTVDVwqdBLmPlzdfP1EGWqgTfbG7pCSrp+b02B8LaLhv/4lYuM/NYOGkbmVtT4VUikWl0VyC7s6mjNxo/EEsZZ1wAQAKHR2tjn1E5r859Jc2/c/7PACMpx04N9cL3KIgWMwPadm1VVipWL7YSEM38n201wXAk3fFzb+Kw4GADBNe3GhIL+JgG0NfDITycMEnLozn60HYqJlKyW4D8mjReZXoIxV37lsllC8dsubfsrxbyUUZAjsGcs8+aCdg7egpk+oDLA5WvbSYV7vLe9m+i0EWVuEd95EPzXx2Y0x5yTuOU2u49gG5ZhQHvaHrxfV6qgm72vJV4Va5I3KrmCez6Su4L7BtYI/iBso5edunZ65536VH3HMPrYyI5oQNwVO037shUxTQZcY6naCDQl0mk7VFVwbLzjBu+Pp7aNorAteJLictyo3GHE77NCDWjYTU8XUEXOClbIY8eblGMg/2VtVnHSn+cs0Nvct9P2ILezVu7WimMpmb3/zXYLBtmyhrLptloLX2EzTSl0tF5RNj5ILtmL1VKwuHGdB+FAhC2b70rxRdOU2++1Sy4wBdpU8p1Oa6CMYJMTDFhTcoYh4HaWDgpU31czxy9TyVaC17r5tb4AW5ZqpdKUILnK5q2hfpNl5YIvGLrHr+bRlPjIRDd9drRlG8JQvPExANJCCGkpw23bn4YbIbuIBrceiEFQLBm4f1fW5jwdNX9izUeM2n4EMGlQO45qbaxFVzZf5cplDELEQ9sFQnj/V2E5xhTA093i1CAcjQILi1nI9C1gAiFdaT9l3/PHksNbN4Dn0usNvcYXJvSBUWA+rGBgGO3TRgTg6UBS0hzsjmsqayyPMFEe2WIk2r9wrqLtzPr1X4b1+kS2", "type": 0 }, "nnHQ0S2C/vEuMvJW/sk1mz5KXZ+kQ4bnesGWLmGpcFs": { "body": 
"AwogaOkrbdfU3wvl3wMTUYVFHc6vA8AgS2kW/tSKnvlWIUMQASLwBV5gJJBknoGO0pk7psp0au4Vxa22doPBKUfiLHW5Vnz+Ro/O9Tcr+MSrNH8/dYY74Jr9/7g+BCYUys8JKwWFhvIbLgP6k+suXsBO3hKOOvL7nJGIRFBGo65nLwNRuJnyNoaW2vFaPseWdoRwLUaoIXLU1XiIm6FVxQpD9znxM6jnZCIrbM/gxxRJ1g46BG7Ky2DwGVmZZI1aJBiaCOUrUkQ/gSXQKjmiF+XYb8/KIQFmeXfRPqc/G91o9Pmdc++Idfpb1kQtV6BECN1vrCsZc7ie6zyXQUTXLxHCLcBYPtCoJvj/iijSmT4C8DEvUA6eqXQjYqneHpANRy1YRULYb4sXnIhHhukTgLIRjUQHVUxrYFXO16DZw0qMpyC2r4AWLXnHboq1xfrLTiSLB08uNNzbrmZwTRPBPZB9qpCtQy6im9cwl1S6ahEyYYaxkxyArmlCPVcKIiK9/x0OJXPGnq4HX2VMqyevvVOA0hW2DnJVtZsls+/1c5BU/2/CaZABD5AgNjmbD3ltWTImVJ4QgJ7QA8SGHx81sxgYCVH4gGEZFvyieDsBEBZ87qXYhhEoZqaLv0XUHLOBUC2wD449SfkDRuSO4O7TalyCJaoEzUJpwIhmW1Ir/IF9qay8FmaxwWQJ3FpqQShcFGKDk7ttEACD4nzqdJCWimMUPJNcVSlWuV9A6CUW7VoBYfza0ClSMjsjFKRPl7fZiuoGcwJbIoYIH9HuJ/42R52aSiTEVTdLBzBgIuGC/GqYMSQzvl7qYV7X/mPeEyLYHUVlpg9hZ7NJGNRrMspAjghbe29jtHc72CogLtEk1k9v09OFK3hMY4L6giVX+nfjS/gDJcJe6iVYOhP0ze0YnWbLKCGc9DY6yPDrKrTdnIkPPdNoh9HcVdHndaFcu4edBwHOBAjQgMYUGEinZVoWSKK6OVkBjCpf+GW2zmZI39IE18f1tLHKzEwm5pr/kp6zIyTNpVPH99aE5YkJme4JEHdJIDmNVfozE4b7DcCzWBM", "type": 1 } }, "algorithm": "m.olm.v1.curve25519-aes-sha2" }, "type": "m.room.encrypted", "sender": "@example:localhost" } matrix-nio-0.24.0/tests/data/events/power_levels.json000066400000000000000000000015731455215747700226730ustar00rootroot00000000000000{ "content": { "ban": 50, "events": { "m.room.avatar": 50, "m.room.canonical_alias": 50, "m.room.history_visibility": 100, "m.room.name": 50, "m.room.power_levels": 100, "m.room.message": 25 }, "events_default": 0, "invite": 0, "kick": 50, "redact": 50, "state_default": 50, "users": { "@example:localhost": 100, "@alice:localhost": 50, "@carol:localhost": 25, "@bob:localhost": 0 }, "users_default": 0, "notifications": {"room": 60} }, "event_id": "$15139375512JaHAW:localhost", "origin_server_ts": 1513937551359, "sender": "@example:localhost", "state_key": "", "type": "m.room.power_levels", "unsigned": { "age": 7034220535 } } 
matrix-nio-0.24.0/tests/data/events/push_rules.json000066400000000000000000000020221455215747700223440ustar00rootroot00000000000000{ "type": "m.push_rules", "content": { "global": { "override": [ { "actions": [ "notify", "dont_notify", "coalesce", {"set_tweak": "sound", "value": "default"}, {"unknown": "abc"} ], "conditions": [ {"kind": "event_match", "key": "foo", "pattern": "bar"}, {"kind": "contains_display_name"}, {"kind": "room_member_count", "is": "10"}, {"kind": "room_member_count", "is": "<10"}, {"kind": "sender_notification_permission", "key": "foo"}, {"kind": "unknown", "foo": "bar"} ], "default": true, "enabled": true, "rule_id": "actions_conditions_test" } ] } } } matrix-nio-0.24.0/tests/data/events/reaction.json000066400000000000000000000006041455215747700217630ustar00rootroot00000000000000{ "type": "m.reaction", "sender": "@example:localhost", "content": { "m.relates_to": { "rel_type": "m.annotation", "event_id": "$B4UkDqYQ7JRC3GEMl7pTyu0x2vLFKgm_wgCR1mEw2CM", "key": "❤️" } }, "origin_server_ts": 1520372800469, "unsigned": {"age": 598971425}, "event_id": "$152037280074GZeOm:localhost" } matrix-nio-0.24.0/tests/data/events/receipt.json000066400000000000000000000003551455215747700216150ustar00rootroot00000000000000{ "content": { "$152037280074GZeOm:localhost": { "m.read": { "@bob:example.com": { "ts": 1520372804619 } } } }, "type":"m.receipt" } matrix-nio-0.24.0/tests/data/events/receipt_invalid.json000066400000000000000000000004541455215747700233230ustar00rootroot00000000000000{ "content": { "$152037280074GZeOm:localhost": { "m.read": { "@bob:example.com": { "ts": 1520372804619 }, "@alice:example.com": "\"ts\": 1520372804619" } } }, "type":"m.receipt" } matrix-nio-0.24.0/tests/data/events/redacted.json000066400000000000000000000012131455215747700217270ustar00rootroot00000000000000{ "content": {}, "event_id": "$15275046980maRLj:localhost", "origin_server_ts": 1527504698685, "sender": "@example:localhost", "type": "m.room.message", "unsigned": { "age": 
19334, "redacted_because": { "content": {}, "event_id": "$15275047031IXQRi:localhost", "origin_server_ts": 1527504703496, "redacts": "$15275046980maRLj:localhost", "sender": "@example:localhost", "type": "m.room.redaction", "unsigned": { "age": 14523 } }, "redacted_by": "$15275047031IXQRi:localhost" } } matrix-nio-0.24.0/tests/data/events/redacted_invalid.json000066400000000000000000000002561455215747700234430ustar00rootroot00000000000000{ "content": {}, "event_id": "$15275046980maRLj:localhost", "origin_server_ts": 1527504698685, "sender": "@example:localhost", "type": "m.room.message" } matrix-nio-0.24.0/tests/data/events/redacted_state.json000066400000000000000000000012421455215747700231310ustar00rootroot00000000000000{ "content": {}, "event_id": "$example_id:example.org", "origin_server_ts": 1532324933640, "sender": "@example:example.org", "state_key": "test_state_key", "type": "m.some.state", "unsigned": { "age": 30693154231, "redacted_because": { "content": {}, "event_id": "$redaction_example_id:example.org", "origin_server_ts": 1532324940702, "redacts": "$example_id:example.org", "sender": "@example:example:org", "type": "m.room.redaction", "unsigned": {"age": 30693147169} }, "redacted_by": "$redaction_example_id:example.org" } } matrix-nio-0.24.0/tests/data/events/redaction.json000066400000000000000000000003761455215747700221350ustar00rootroot00000000000000{ "content": { "reason": "😀" }, "event_id": "$151957878228ssqrJ:localhost", "origin_server_ts": 1519578782185, "sender": "@example:localhost", "type": "m.room.redaction", "redacts": "$151957878228ssqrj:localhost" } matrix-nio-0.24.0/tests/data/events/room_avatar.json000066400000000000000000000007451455215747700224770ustar00rootroot00000000000000{ "content": { "info": { "h": 398, "mimetype": "image/jpeg", "size": 31037, "w": 394 }, "url": "mxc://domain.com/JWEIFJgwEIhweiWJE" }, "event_id": "$143273582443PhrSn:domain.com", "origin_server_ts": 1432735824653, "room_id": "!jEsUZKDJdhlrceRyVU:domain.com", 
"sender": "@example:domain.com", "state_key": "", "type": "m.room.avatar", "unsigned": { "age": 1234 } } matrix-nio-0.24.0/tests/data/events/room_encrypted_image.json000066400000000000000000000026171455215747700243600ustar00rootroot00000000000000{ "room_id": "!testroom:localhost", "event_id": "$15163622445EBvZK:localhost", "origin_server_ts": 1516362244030, "sender": "@example2:localhost", "type": "m.room.message", "content": { "body": "orange_cat.jpg", "msgtype": "m.image", "file": { "v": "v2", "key": { "alg": "A256CTR", "ext": true, "k": "yx0QvkgYlasdWEsdalkejaHBzCkKEBAp3tB7dGtWgrs", "key_ops": ["encrypt", "decrypt"], "kty": "oct" }, "iv": "0pglXX7fspIBBBBAEERLFd", "hashes": {"sha256": "eXRDFvh+aXsQRj8a+5ZVVWUQ9Y6u9DYiz4tq1NvbLu8"}, "url": "mxc://localhost/maDtasSiPFjROFMnlwxIhhyW", "mimetype": "image/jpeg" }, "info": { "thumbnail_file": { "v": "v2", "key": { "alg": "A256CTR", "ext": true, "k": "E--asljaerlkjmaKLAJD_WN7t6aSfAgiw3wSl_X2Lv0", "key_ops": ["encrypt", "decrypt"], "kty": "oct" }, "iv": "LKAJSAfuJxkAAAAAAAAAAA", "hashes": {"sha256": "lkajsdl-aldsjfasrSlJA9xnr7P2BjWja3vczIyGeN4"}, "url": "mxc://localhost/askdlrioaKLAJKlkdahsdsoM", "mimetype": "image/jpeg" } } } } matrix-nio-0.24.0/tests/data/events/room_encryption.json000066400000000000000000000006511455215747700234070ustar00rootroot00000000000000{ "content": { "algorithm": "m.megolm.v1.aes-sha2", "rotation_period_ms": 604800000, "rotation_period_msgs": 100 }, "event_id": "$143273582443PhrSn:example.org", "origin_server_ts": 1432735824653, "room_id": "!jEsUZKDJdhlrceRyVU:example.org", "sender": "@example:example.org", "state_key": "", "type": "m.room.encryption", "unsigned": { "age": 1234 } } matrix-nio-0.24.0/tests/data/events/room_key.json000066400000000000000000000005301455215747700220010ustar00rootroot00000000000000{ "content": { "algorithm": "m.megolm.v1.aes-sha2", "room_id": "!Cuyf34gef24t:localhost", "session_id": "X3lUlvLELLYxeTx4yOVu6UDpasGEVO0Jbu+QFnm0cKQ", "session_key": 
"AgAAAADxKHa9uFxcXzwYoNueL5Xqi69IkD4sni8LlfJL7qNBEY..." }, "sender": "@alice:example.org", "keys": {}, "type": "m.room_key" } matrix-nio-0.24.0/tests/data/events/room_key_request.json000066400000000000000000000007461455215747700235620ustar00rootroot00000000000000{ "sender": "@alice:example.org", "content": { "action": "request", "body": { "algorithm": "m.megolm.v1.aes-sha2", "room_id": "!Cuyf34gef24t:localhost", "sender_key": "RF3s+E7RkTQTGF2d8Deol0FkQvgII2aJDf3/Jp5mxVU", "session_id": "X3lUlvLELLYxeTx4yOVu6UDpasGEVO0Jbu+QFnm0cKQ" }, "request_id": "1495474790150.19", "requesting_device_id": "RJYKSTBOIE" }, "type": "m.room_key_request" } matrix-nio-0.24.0/tests/data/events/room_key_request_cancel.json000066400000000000000000000003421455215747700250570ustar00rootroot00000000000000{ "sender": "@alice:example.org", "content": { "action": "request_cancellation", "request_id": "1495474790150.19", "requesting_device_id": "RJYKSTBOIE" }, "type": "m.room_key_request" } matrix-nio-0.24.0/tests/data/events/sticker.json000066400000000000000000000013441455215747700216250ustar00rootroot00000000000000{ "content": { "body": "Landing", "info": { "h": 200, "mimetype": "image/png", "size": 73602, "thumbnail_info": { "h": 200, "mimetype": "image/png", "size": 73602, "w": 140 }, "thumbnail_url": "mxc://matrix.org/sHhqkFCvSkFwtmvtETOtKnLP", "w": 140 }, "url": "mxc://matrix.org/sHhqkFCvSkFwtmvtETOtKnLP" }, "event_id": "$143273582443PhrSn:example.org", "origin_server_ts": 1432735824653, "room_id": "!jEsUZKDJdhlrceRyVU:example.org", "sender": "@example:example.org", "type": "m.sticker", "unsigned": { "age": 1234 } } matrix-nio-0.24.0/tests/data/events/tag.json000066400000000000000000000002171455215747700207320ustar00rootroot00000000000000{ "content": { "tags": { "u.work": { "order": 0.9 } } }, "type": "m.tag" } matrix-nio-0.24.0/tests/data/events/to_flatten.json000066400000000000000000000003551455215747700223210ustar00rootroot00000000000000{ "content": { "body": "foo", "m.dotted": { "key": 
"bar" } }, "event_id": "!test:example.org", "origin_server_ts": 0, "sender": "@alice:example.org", "type": "m.flatten_test" } matrix-nio-0.24.0/tests/data/events/topic.json000066400000000000000000000006541455215747700213020ustar00rootroot00000000000000{ "content": { "topic": "😀" }, "event_id": "$151957878228ssqrJ:localhost", "origin_server_ts": 1519578782185, "sender": "@example:localhost", "state_key": "", "type": "m.room.topic", "unsigned": { "age": 1392989709, "prev_content": { "topic": "test" }, "prev_sender": "@example:localhost", "replaces_state": "$151957069225EVYKm:localhost" } } matrix-nio-0.24.0/tests/data/events/typing.json000066400000000000000000000003051455215747700214670ustar00rootroot00000000000000{ "content": { "user_ids": [ "@alice:matrix.org", "@bob:example.com" ] }, "room_id": "!jEsUZKDJdhlrceRyVU:example.org", "type": "m.typing" } matrix-nio-0.24.0/tests/data/events/unknown.json000066400000000000000000000004061455215747700216560ustar00rootroot00000000000000{ "content": { "dingle": "dongle" }, "event_id": "$152037280074GZeOm:localhost", "origin_server_ts": 1520372800469, "sender": "@example:localhost", "type": "org.example.unknown", "unsigned": { "age": 598971425 } } matrix-nio-0.24.0/tests/data/events/unknown_to_device.json000066400000000000000000000001731455215747700237000ustar00rootroot00000000000000{ "content": { "dingle": "dongle" }, "type": "org.example.unknown", "sender": "@bob:example.org" } matrix-nio-0.24.0/tests/data/file_response000066400000000000000000000001731455215747700205410ustar00rootroot00000000000000PNG  IHDRvPLTE`qZ!]a pHYsodIDATc```P`0`pN= IENDB`matrix-nio-0.24.0/tests/data/get_avatar_response.json000066400000000000000000000000701455215747700227030ustar00rootroot00000000000000{ "avatar_url": "mxc://matrix.org/SDGdghriugerRg" } matrix-nio-0.24.0/tests/data/get_displayname_response.json000066400000000000000000000000351455215747700237340ustar00rootroot00000000000000{ "displayname": "Bob" } 
matrix-nio-0.24.0/tests/data/get_hierarchy_response.json000066400000000000000000000013741455215747700234130ustar00rootroot00000000000000{ "next_batch": "next_batch_token", "rooms": [ { "avatar_url": "mxc://example.org/abcdef", "canonical_alias": "#general:example.org", "children_state": [ { "content": { "via": [ "example.org" ] }, "origin_server_ts": 1629413349153, "sender": "@alice:example.org", "state_key": "!a:example.org", "type": "m.space.child" } ], "guest_can_join": false, "join_rule": "public", "name": "The First Space", "num_joined_members": 42, "room_id": "!space:example.org", "room_type": "m.space", "topic": "No other spaces were created first, ever", "world_readable": true } ] } matrix-nio-0.24.0/tests/data/get_profile_response.json000066400000000000000000000001551455215747700230710ustar00rootroot00000000000000{ "displayname": "Bob", "avatar_url": "mxc://matrix.org/SDGdghriugerRg", "something_else": 123 } matrix-nio-0.24.0/tests/data/http_502.txt000066400000000000000000000004051455215747700200650ustar00rootroot00000000000000HTTP/1.1 502 BAD GATEWAY Connection: keep-alive Server: gunicorn/19.8.1 Date: Wed, 23 May 2018 15:54:52 GMT Content-Type: text/html; charset=utf-8 Access-Control-Allow-Origin: * Access-Control-Allow-Credentials: true Content-Length: 0 Via: 1.1 vegur matrix-nio-0.24.0/tests/data/http_503.txt000066400000000000000000000004151455215747700200670ustar00rootroot00000000000000HTTP/1.1 503 SERVICE UNAVAILABLE Connection: keep-alive Server: gunicorn/19.8.1 Date: Wed, 23 May 2018 14:51:27 GMT Content-Type: text/html; charset=utf-8 Access-Control-Allow-Origin: * Access-Control-Allow-Credentials: true Content-Length: 0 Via: 1.1 vegur matrix-nio-0.24.0/tests/data/joined_members_response.json000066400000000000000000000001701455215747700235510ustar00rootroot00000000000000{ "joined": { "@bar:example.com": { "avatar_url": null, "display_name": "Bar" } } } 
matrix-nio-0.24.0/tests/data/keys_claim.json000066400000000000000000000006641455215747700210010ustar00rootroot00000000000000{ "failures": {}, "one_time_keys": { "@alice:example.com": { "JLAFKJWSCS": { "signed_curve25519:AAAAHg": { "key": "zKbLg+NrIjpnagy+pIY6uPL4ZwEG2v+8F9lmgsnlZzs", "signatures": { "@alice:example.com": { "ed25519:JLAFKJWSCS": "FLWxXqGbwrb8SM3Y795eB6OA8bwBcoMZFXBqnTn58AYWZSqiD45tlBVcDa2L7RwdKXebW/VzDlnfVJ+9jok1Bw" } } } } } } } matrix-nio-0.24.0/tests/data/keys_query.json000066400000000000000000000031161455215747700210540ustar00rootroot00000000000000{ "device_keys": { "@alice:example.org": { "JLAFKJWSCS": { "algorithms": [ "m.olm.v1.curve25519-aes-sha2", "m.megolm.v1.aes-sha2" ], "device_id": "JLAFKJWSCS", "user_id": "@alice:example.org", "keys": { "curve25519:JLAFKJWSCS": "wjLpTLRqbqBzLs63aYaEv2Boi6cFEbbM/sSRQ2oAKk4", "ed25519:JLAFKJWSCS": "nE6W2fCblxDcOFmeEtCHNl8/l8bXcu7GKyAswA4r3mM" }, "signatures": { "@alice:example.org": { "ed25519:JLAFKJWSCS": "m53Wkbh2HXkc3vFApZvCrfXcX3AI51GsDHustMhKwlv3TuOJMj4wistcOTM8q2+e/Ro7rWFUb9ZfnNbwptSUBA" } }, "unsigned": { "device_display_name": "Alice's mobile phone" } } }, "@carol:example.org": { "AZERTYUIOP": { "algorithms": [ "m.olm.v1.curve25519-aes-sha2", "m.megolm.v1.aes-sha2" ], "device_id": "AZERTYUIOP", "user_id": "@carol:example.org", "keys": { "curve25519:AZERTYUIOP": "zjLpTLRqbqBzLs63aYaEv2Boi6cFEbbM/sSRQ2oAKk4", "ed25519:AZERTYUIOP": "zE6W2fCblxDcOFmeEtCHNl8/l8bXcu7GKyAswA4r3mM" }, "signatures": { "@carol:example.org": { "ed25519:AZERTYUIOP": "z53Wkbh2HXkc3vFApZvCrfXcX3AI51GsDHustMhKwlv3TuOJMj4wistcOTM8q2+e/Ro7rWFUb9ZfnNbwptSUBA" } }, "unsigned": { "device_display_name": "Carol's typewriter" } } } }, "failures": {} } matrix-nio-0.24.0/tests/data/keys_upload.json000066400000000000000000000001251455215747700211700ustar00rootroot00000000000000{ "one_time_key_counts": { "curve25519": 10, "signed_curve25519": 20 } } 
matrix-nio-0.24.0/tests/data/limit_exceeded_error.json000066400000000000000000000001351455215747700230270ustar00rootroot00000000000000{ "errcode": "M_LIMIT_EXCEEDED", "error": "Too many requests", "retry_after_ms": 500 } matrix-nio-0.24.0/tests/data/login_info.json000066400000000000000000000001671455215747700210020ustar00rootroot00000000000000{ "flows": [ { "type": "m.login.sso", "type": "m.login.password" } ] } matrix-nio-0.24.0/tests/data/login_invalid_format.json000066400000000000000000000001771455215747700230460ustar00rootroot00000000000000{ "access_token": "abc123", "device_id": "GHTYAJCE", "home_server": "matrix.org", "user_id": "cheeky_monkey" } matrix-nio-0.24.0/tests/data/login_response.json000066400000000000000000000002131455215747700216750ustar00rootroot00000000000000{ "access_token": "abc123", "device_id": "GHTYAJCE", "home_server": "matrix.org", "user_id": "@cheeky_monkey:matrix.org" } matrix-nio-0.24.0/tests/data/login_response_error.json000066400000000000000000000000761455215747700231150ustar00rootroot00000000000000{ "errcode": "M_FORBIDDEN", "error": "Invalid password" } matrix-nio-0.24.0/tests/data/logout_response.json000066400000000000000000000000031455215747700220730ustar00rootroot00000000000000{} matrix-nio-0.24.0/tests/data/register_interactive_response.json000066400000000000000000000003611455215747700250120ustar00rootroot00000000000000{ "flows": [{ "stages": [ "m.login.registration_token", "m.login.dummy" ] } ], "session": "abc123", "params": {}, "completed": [ "m.login.registration_token" ] } matrix-nio-0.24.0/tests/data/register_response.json000066400000000000000000000001541455215747700224150ustar00rootroot00000000000000{ "access_token": "abc123", "device_id": "GHTYAJCE", "user_id": "@cheeky_monkey:matrix.org" } matrix-nio-0.24.0/tests/data/room_id.json000066400000000000000000000000531455215747700203010ustar00rootroot00000000000000{ "room_id": "!testroom:example.org" } 
matrix-nio-0.24.0/tests/data/room_messages.json000066400000000000000000000023551455215747700215230ustar00rootroot00000000000000{ "chunk": [ { "age": 1042, "content": { "body": "hello world", "msgtype": "m.text" }, "event_id": "$1444812213350496Caaaa:example.com", "origin_server_ts": 1444812213737, "room_id": "!Xq3620DUiqCaoxq:example.com", "sender": "@alice:example.com", "type": "m.room.message" }, { "age": 20123, "content": { "body": "the world is big", "msgtype": "m.text" }, "event_id": "$1444812213350496Cbbbb:example.com", "origin_server_ts": 1444812194656, "room_id": "!Xq3620DUiqCaoxq:example.com", "sender": "@bob:example.com", "type": "m.room.message" }, { "age": 50789, "content": { "name": "New room name" }, "event_id": "$1444812213350496Ccccc:example.com", "origin_server_ts": 1444812163990, "prev_content": { "name": "Old room name" }, "room_id": "!Xq3620DUiqCaoxq:example.com", "sender": "@bob:example.com", "state_key": "", "type": "m.room.name" } ], "end": "t47409-4357353_219380_26003_2265", "start": "t47429-4392820_219380_26003_2265" } matrix-nio-0.24.0/tests/data/room_state.json000066400000000000000000000044171455215747700210350ustar00rootroot00000000000000[ { "content": { "join_rule": "public" }, "type": "m.room.join_rules", "event_id": "$143273582443PhrSn:example.org", "room_id": "!636q39766251:example.com", "sender": "@example:example.org", "origin_server_ts": 1432735824653, "unsigned": { "age": 1234 }, "state_key": "" }, { "content": { "membership": "join", "avatar_url": "mxc://example.org/SEsfnsuifSDFSSEF", "displayname": "Alice Margatroid" }, "type": "m.room.member", "event_id": "$143273582443PhrSn:example.org", "room_id": "!636q39766251:example.com", "sender": "@example:example.org", "origin_server_ts": 1432735824653, "unsigned": { "age": 1234 }, "state_key": "@alice:example.org" }, { "content": { "creator": "@example:example.org", "room_version": "1", "m.federate": true, "predecessor": { "event_id": "$something:example.org", "room_id": 
"!oldroom:example.org" } }, "type": "m.room.create", "event_id": "$143273582443PhrSn:example.org", "room_id": "!636q39766251:example.com", "sender": "@example:example.org", "origin_server_ts": 1432735824653, "unsigned": { "age": 1234 }, "state_key": "" }, { "content": { "ban": 50, "events": { "m.room.name": 100, "m.room.power_levels": 100 }, "events_default": 0, "invite": 50, "kick": 50, "redact": 50, "state_default": 50, "users": { "@example:localhost": 100 }, "users_default": 0, "notifications": { "room": 20 } }, "type": "m.room.power_levels", "event_id": "$143273582443PhrSn:example.org", "room_id": "!636q39766251:example.com", "sender": "@example:example.org", "origin_server_ts": 1432735824653, "unsigned": { "age": 1234 }, "state_key": "" } ] matrix-nio-0.24.0/tests/data/sample_text_file.py000066400000000000000000000001211455215747700216500ustar00rootroot00000000000000def main(): print("Some code you might want to send using Matrix!") main() matrix-nio-0.24.0/tests/data/sync.json000066400000000000000000000404161455215747700176340ustar00rootroot00000000000000{ "device_one_time_keys_count": {}, "next_batch": "s526_47314_0_7_1_1_1_11444_1", "device_lists": { "changed": [ "@example:example.org" ], "left": [] }, "rooms": { "invite": {}, "join": { "!SVkFJHzfwvuaIEawgC:localhost": { "account_data": { "events": [ { "type": "m.fully_read", "content": { "event_id": "event_id_2" } }, { "type": "m.tag", "content": { "tags": { "u.test": { "order": 1 } } } } ] }, "ephemeral": { "events": [ { "content": { "$151680659217152dPKjd:localhost": { "m.read": { "@example:localhost": { "ts": 1516809890615 } } } }, "type": "m.receipt" } ] }, "state": { "events": [ { "content": { "join_rule": "public" }, "event_id": "$15139375514WsgmR:localhost", "origin_server_ts": 1513937551539, "sender": "@example:localhost", "state_key": "", "type": "m.room.join_rules", "unsigned": { "age": 7034220355 } }, { "content": { "avatar_url": null, "displayname": "example", "membership": "join" }, 
"event_id": "$151800140517rfvjc:localhost", "membership": "join", "origin_server_ts": 1518001405556, "sender": "@example:localhost", "state_key": "@example:localhost", "type": "m.room.member", "unsigned": { "age": 2970366338, "replaces_state": "$151800111315tsynI:localhost" } }, { "content": { "history_visibility": "shared" }, "event_id": "$15139375515VaJEY:localhost", "origin_server_ts": 1513937551613, "sender": "@example:localhost", "state_key": "", "type": "m.room.history_visibility", "unsigned": { "age": 7034220281 } }, { "content": { "creator": "@example:localhost" }, "event_id": "$15139375510KUZHi:localhost", "origin_server_ts": 1513937551203, "sender": "@example:localhost", "state_key": "", "type": "m.room.create", "unsigned": { "age": 7034220691 } }, { "content": { "aliases": [ "#tutorial:localhost" ] }, "event_id": "$15139375516NUgtD:localhost", "origin_server_ts": 1513937551720, "sender": "@example:localhost", "state_key": "localhost", "type": "m.room.aliases", "unsigned": { "age": 7034220174 } }, { "content": { "topic": "\ud83d\ude00" }, "event_id": "$151957878228ssqrJ:localhost", "origin_server_ts": 1519578782185, "sender": "@example:localhost", "state_key": "", "type": "m.room.topic", "unsigned": { "age": 1392989709, "prev_content": { "topic": "test" }, "prev_sender": "@example:localhost", "replaces_state": "$151957069225EVYKm:localhost" } }, { "content": { "ban": 50, "events": { "m.room.avatar": 50, "m.room.canonical_alias": 50, "m.room.history_visibility": 100, "m.room.name": 50, "m.room.power_levels": 100 }, "events_default": 0, "invite": 0, "kick": 50, "redact": 50, "state_default": 50, "users": { "@example:localhost": 100 }, "users_default": 0 }, "event_id": "$15139375512JaHAW:localhost", "origin_server_ts": 1513937551359, "sender": "@example:localhost", "state_key": "", "type": "m.room.power_levels", "unsigned": { "age": 7034220535 } }, { "content": { "alias": "#tutorial:localhost" }, "event_id": "$15139375513VdeRF:localhost", "origin_server_ts": 
1513937551461, "sender": "@example:localhost", "state_key": "", "type": "m.room.canonical_alias", "unsigned": { "age": 7034220433 } }, { "content": { "avatar_url": null, "displayname": "example2", "membership": "join" }, "event_id": "$152034824468gOeNB:localhost", "membership": "join", "origin_server_ts": 1520348244605, "sender": "@example2:localhost", "state_key": "@example2:localhost", "type": "m.room.member", "unsigned": { "age": 623527289, "prev_content": { "membership": "leave" }, "prev_sender": "@example:localhost", "replaces_state": "$152034819067QWJxM:localhost" } } ] }, "timeline": { "events": [ { "content": { "body": "baba", "format": "org.matrix.custom.html", "formatted_body": "baba", "msgtype": "m.text" }, "event_id": "$152037280074GZeOm:localhost", "origin_server_ts": 1520372800469, "sender": "@example:localhost", "type": "m.room.message", "unsigned": { "age": 598971425 } } ], "limited": true, "prev_batch": "t392-516_47314_0_7_1_1_1_11444_1" }, "unread_notifications": { "highlight_count": 1, "notification_count": 11 } } }, "leave": {} }, "to_device": { "events": [] }, "presence": { "events": [ { "type": "m.presence", "sender": "@example:localhost", "content": { "presence": "online", "last_active_ago": 1337, "currently_active": true, "status_msg": "I am here." } }, { "type": "m.presence", "sender": "@example2:localhost", "content": { "presence": "offline", "last_active_ago": 1337, "currently_active": false, "status_msg": "I am gone." 
} } ] }, "account_data": { "events": [ { "type": "m.push_rules", "content": { "global": { "override": [ { "actions": [ "dont_notify" ], "conditions": [ { "key": "content.msgtype", "kind": "event_match", "pattern": "m.notice" } ], "default": true, "enabled": false, "rule_id": ".m.rule.suppress_notices" } ], "content": [ { "actions": [ "notify", "do_special_thing", { "set_tweak": "sound", "value": "default" }, { "set_tweak": "highlight" } ], "default": true, "enabled": true, "pattern": "alice", "rule_id": ".m.rule.contains_user_name" } ], "room": [], "sender": [], "underride": [ { "actions": [ "coalesce", { "set_tweak": "sound", "value": "ring" }, { "set_tweak": "highlight", "value": false } ], "conditions": [ { "kind": "special_kind" }, { "key": "type", "kind": "event_match", "pattern": "m.call.invite" } ], "default": true, "enabled": true, "rule_id": ".m.rule.special_call" }, { "actions": [ "notify" ], "conditions": [ { "key": "room", "kind": "sender_notification_permission" }, { "is": "<10", "kind": "room_member_count" }, { "key": "type", "kind": "event_match", "pattern": "m.room.message" } ], "default": true, "enabled": true, "rule_id": ".m.rule.room_less_than_10_room_perm" }, { "actions": [ "notify", { "set_tweak": "sound", "value": "default" }, { "set_tweak": "highlight", "value": false } ], "conditions": [ { "is": "2", "kind": "room_member_count" }, { "key": "type", "kind": "event_match", "pattern": "m.room.message" } ], "default": true, "enabled": true, "rule_id": ".m.rule.room_one_to_one" } ] } } } ] } } matrix-nio-0.24.0/tests/data/test_bytes000066400000000000000000000000121455215747700200610ustar00rootroot00000000000000Test bytesmatrix-nio-0.24.0/tests/data/upload_response.json000066400000000000000000000001021455215747700220460ustar00rootroot00000000000000{ "content_uri": "mxc://example.com/AQwafuaFswefuhsfAFAgsw" } matrix-nio-0.24.0/tests/data/whoami_response.json000066400000000000000000000001431455215747700220530ustar00rootroot00000000000000{ "user_id": 
"@cheeky_monkey:matrix.org", "is_guest": false, "device_id": "GHTYAJCE" } matrix-nio-0.24.0/tests/encryption_test.py000066400000000000000000001561361455215747700206660ustar00rootroot00000000000000import copy import json import os import time from datetime import datetime, timedelta import pytest from helpers import faker from olm import Account, OutboundGroupSession from nio.crypto import ( DeviceStore, GroupSessionStore, InboundGroupSession, Olm, OlmDevice, OutboundSession, OutgoingKeyRequest, Session, SessionStore, ) from nio.events import ( DummyEvent, ForwardedRoomKeyEvent, MegolmEvent, OlmEvent, RoomKeyEvent, RoomKeyRequest, RoomKeyRequestCancellation, RoomMessageText, ToDeviceEvent, UnknownBadEvent, ) from nio.exceptions import EncryptionError, GroupEncryptionError, OlmTrustError from nio.responses import KeysClaimResponse, KeysQueryResponse, KeysUploadResponse from nio.store import DefaultStore, Ed25519Key, Key, KeyStore AliceId = "@alice:example.org" Alice_device = "ALDEVICE" BobId = "@bob:example.org" Bob_device = "BOBDEVICE" MaloryId = "@malory:example.org" Malory_device = "MALORYDEVICE" PICKLE_KEY = "DEFAULT_KEY" TEST_ROOM = "!test_room" ephemeral_dir = os.path.join(os.curdir, "tests/data/encryption") def ephemeral(func): def wrapper(*args, **kwargs): try: ret = func(*args, **kwargs) finally: os.remove(os.path.join(ephemeral_dir, "@ephemeral:localhost_DEVICEID.db")) return ret return wrapper @pytest.fixture def olm_account(tempdir): return Olm( faker.mx_id(), faker.device_id(), DefaultStore("ephemeral", "DEVICEID", tempdir) ) @pytest.fixture def bob_account(tempdir): return Olm( faker.mx_id(), faker.device_id(), DefaultStore("ephemeral", "DEVICEID", tempdir) ) @pytest.fixture def alice_account_pair(tempdir): first_device_id = faker.device_id() second_device_id = faker.device_id() first = Olm( AliceId, faker.device_id(), DefaultStore(AliceId, first_device_id, tempdir) ) second = Olm( AliceId, faker.device_id(), DefaultStore(AliceId, second_device_id, 
tempdir) ) first_device = OlmDevice( first.user_id, first.device_id, first.account.identity_keys ) second_device = OlmDevice( second.user_id, second.device_id, second.account.identity_keys ) first.device_store.add(second_device) second.device_store.add(first_device) first.verify_device(second_device) second.verify_device(first_device) return (first, second) class TestClass: @staticmethod def _load_response(filename): with open(filename) as f: return json.loads(f.read()) def _get_store(self, user_id, device_id, pickle_key=""): return DefaultStore(user_id, device_id, ephemeral_dir, pickle_key) @staticmethod def olm_message_to_event(message_dict, recipient, sender): olm_content = message_dict["messages"][recipient.user_id][recipient.device_id] return { "sender": sender.user_id, "type": "m.room.encrypted", "content": olm_content, } @property def ephemeral_olm(self): user_id = "@ephemeral:localhost" device_id = "DEVICEID" return Olm(user_id, device_id, self._get_store(user_id, device_id)) @ephemeral def test_new_account_creation(self): olm = self.ephemeral_olm assert isinstance(olm.account, Account) def _load(self, user_id, device_id, pickle_key=""): return Olm(user_id, device_id, self._get_store(user_id, device_id, pickle_key)) def test_account_loading(self): olm = self._load("example", "DEVICEID", PICKLE_KEY) assert isinstance(olm.account, Account) assert ( olm.account.identity_keys["curve25519"] == "Xjuu9d2KjHLGIHpCOCHS7hONQahapiwI1MhVmlPlCFM" ) assert ( olm.account.identity_keys["ed25519"] == "FEfrmWlasr4tcMtbNX/BU5lbdjmpt3ptg8ApTD8YAh4" ) def test_fingerprint_store(self, monkeypatch): def mocksave(self): return monkeypatch.setattr(KeyStore, "_save", mocksave) store = KeyStore(os.path.join(ephemeral_dir, "ephemeral_devices")) account = Account() device = OlmDevice("example", "DEVICEID", account.identity_keys) key = Key.from_olmdevice(device) assert key not in store assert store.add(key) assert key in store assert store.remove(key) assert store.check(key) is False 
def test_fingerprint_store_loading(self): store = KeyStore(os.path.join(ephemeral_dir, "known_devices")) key = Ed25519Key( "example", "DEVICEID", "2MX1WOCAmE9eyywGdiMsQ4RxL2SIKVeyJXiSjVFycpA" ) assert key in store def test_invalid_store_entry_equality(self): entry = Ed25519Key( "example", "DEVICEID", "2MX1WOCAmE9eyywGdiMsQ4RxL2SIKVeyJXiSjVFycpA" ) assert entry != 1 def test_differing_store_entries(self): alice = Ed25519Key( "alice", "DEVICEID", "2MX1WOCAmE9eyywGdiMsQ4RxL2SIKVeyJXiSjVFycpA" ) bob = Ed25519Key( "bob", "DEVICEDI", "3MX1WOCAmE9eyywGdiMsQ4RxL2SIKVeyJXiSjVFycpA" ) assert alice != bob def _create_session(self): alice = Account() bob = Account() bob.generate_one_time_keys(1) one_time = list(bob.one_time_keys["curve25519"].values())[0] id_key = bob.identity_keys["curve25519"] s = OutboundSession(alice, id_key, one_time) return alice, bob, s def test_session_store(self): alice, bob, s = self._create_session() store = SessionStore() store.add(bob.identity_keys["curve25519"], s) assert s in store def test_session_store_sort(self): alice, bob, s = self._create_session() bob.generate_one_time_keys(1) one_time = list(bob.one_time_keys["curve25519"].values())[0] curve_key = bob.identity_keys["curve25519"] s2 = OutboundSession(alice, curve_key, one_time) store = SessionStore() store.add(curve_key, s) store.add(curve_key, s2) if s.use_time > s2.use_time: assert s == store.get(curve_key) else: assert s2 == store.get(curve_key) def test_device_store(self): alice = OlmDevice( "example", "DEVICEID", { "ed25519": "2MX1WOCAmE9eyywGdiMsQ4RxL2SIKVeyJXiSjVFycpA", "curve25519": "3MX1WOCAmE9eyywGdiMsQ4RxL2SIKVeyJXiSjVFycpA", }, ) store = DeviceStore() assert store.add(alice) assert store.add(alice) is False assert alice in store @ephemeral def test_olm_outbound_session_create(self): bob = Account() bob.generate_one_time_keys(1) one_time = list(bob.one_time_keys["curve25519"].values())[0] bob_device = OlmDevice(BobId, Bob_device, bob.identity_keys) olm = self.ephemeral_olm 
olm.device_store[bob_device.user_id][bob_device.id] = bob_device olm.create_session(one_time, bob_device.curve25519) assert isinstance( olm.session_store.get(bob.identity_keys["curve25519"]), OutboundSession ) def test_olm_session_load(self): olm = self._load("example", "DEVICEID", PICKLE_KEY) bob_session = olm.session_store.get( "+Qs131S/odNdWG6VJ8hiy9YZW0us24wnsDjYQbaxLk4" ) assert bob_session assert bob_session.id == "EeEiqT9LjCtECaN7WTqcBQ7D5Dwm4+/L9Uxr1IyPAts" @ephemeral def test_olm_group_session_store(self): olm = self.ephemeral_olm bob_account = Account() outbound_session = OutboundGroupSession() olm.create_group_session( bob_account.identity_keys["curve25519"], bob_account.identity_keys["ed25519"], "!test_room", outbound_session.id, outbound_session.session_key, ) del olm olm = self.ephemeral_olm bob_session = olm.inbound_group_store.get( "!test_room", bob_account.identity_keys["curve25519"], outbound_session.id ) assert bob_session assert bob_session.id == outbound_session.id @ephemeral def test_keys_query(self): olm = self.ephemeral_olm parsed_dict = TestClass._load_response("tests/data/keys_query.json") response = KeysQueryResponse.from_dict(parsed_dict) assert isinstance(response, KeysQueryResponse) olm.handle_response(response) device = olm.device_store["@alice:example.org"]["JLAFKJWSCS"] assert device.ed25519 == "nE6W2fCblxDcOFmeEtCHNl8/l8bXcu7GKyAswA4r3mM" del olm olm = self.ephemeral_olm device = olm.device_store["@alice:example.org"]["JLAFKJWSCS"] assert device.ed25519 == "nE6W2fCblxDcOFmeEtCHNl8/l8bXcu7GKyAswA4r3mM" @ephemeral def test_same_query_response_twice(self): olm = self.ephemeral_olm parsed_dict = TestClass._load_response("tests/data/keys_query.json") response = KeysQueryResponse.from_dict(parsed_dict) olm.handle_response(response) assert response.changed # TODO check out why this fails under python2 if we remove the copy() # call. 
response2 = copy.copy(response) olm.handle_response(response) assert response2.changed def test_olm_inbound_session(self, monkeypatch): def mocksave(self): return monkeypatch.setattr(KeyStore, "_save", mocksave) # create three new accounts alice = self._load(AliceId, Alice_device) bob = self._load(BobId, Bob_device) malory = self._load(BobId, Bob_device) # create olm devices for each others known devices list alice_device = OlmDevice(AliceId, Alice_device, alice.account.identity_keys) bob_device = OlmDevice(BobId, Bob_device, bob.account.identity_keys) malory_device = OlmDevice(MaloryId, Malory_device, malory.account.identity_keys) # add the devices to the device list alice.device_store.add(bob_device) alice.device_store.add(malory_device) bob.device_store.add(alice_device) # bob creates one time keys bob.account.generate_one_time_keys(1) one_time = list(bob.account.one_time_keys["curve25519"].values())[0] # Mark the keys as published bob.account.mark_keys_as_published() # alice creates an outbound olm session with bob alice.create_session(one_time, bob_device.curve25519) # alice creates an group session alice.create_outbound_group_session("!test:example.org") group_session = alice.outbound_group_sessions["!test:example.org"] # alice shares the group session with bob, but bob isn't verified with pytest.raises(OlmTrustError): sharing_with, to_device = alice.share_group_session( "!test:example.org", [BobId] ) alice.verify_device(bob_device) # alice shares the group session with bob and malory, but malory isn't # blocked with pytest.raises(OlmTrustError): sharing_with, to_device = alice.share_group_session( "!test:example.org", [BobId, MaloryId] ) alice.blacklist_device(malory_device) sharing_with, to_device = alice.share_group_session( "!test:example.org", [BobId, MaloryId] ) # check that we aren't sharing the group session with malory with pytest.raises(KeyError): to_device["messages"][MaloryId][malory_device.id]["ciphertext"] ciphertext = 
to_device["messages"][BobId][bob_device.id]["ciphertext"] olm_event_dict = { "sender": AliceId, "type": "m.room.encrypted", "content": { "algorithm": Olm._olm_algorithm, "sender_key": alice_device.curve25519, "ciphertext": ciphertext, }, } olm_event = OlmEvent.from_dict(olm_event_dict) assert isinstance(olm_event, OlmEvent) # bob decrypts the message and creates a new inbound session with alice try: # pdb.set_trace() bob.decrypt_event(olm_event) # we check that the session is there assert bob.session_store.get(alice_device.curve25519) # we check that the group session is there assert bob.inbound_group_store.get( "!test:example.org", alice_device.curve25519, group_session.id, ) # Test another round of sharing, this time with an existing session alice.create_outbound_group_session(TEST_ROOM) group_session = alice.outbound_group_sessions[TEST_ROOM] sharing_with, to_device = alice.share_group_session( TEST_ROOM, [BobId, MaloryId] ) ciphertext = to_device["messages"][BobId][bob_device.id]["ciphertext"] olm_event_dict = { "sender": AliceId, "type": "m.room.encrypted", "content": { "algorithm": Olm._olm_algorithm, "sender_key": alice_device.curve25519, "ciphertext": ciphertext, }, } olm_event = OlmEvent.from_dict(olm_event_dict) assert isinstance(olm_event, OlmEvent) event = bob.decrypt_event(olm_event) assert event assert bob.inbound_group_store.get( TEST_ROOM, alice_device.curve25519, group_session.id, ) finally: # remove the databases, the known devices store is handled by # monkeypatching os.remove(os.path.join(ephemeral_dir, f"{AliceId}_{Alice_device}.db")) os.remove(os.path.join(ephemeral_dir, f"{BobId}_{Bob_device}.db")) def test_group_session_sharing(self, monkeypatch): def mocksave(self): return monkeypatch.setattr(KeyStore, "_save", mocksave) # create three new accounts alice = self._load(AliceId, Alice_device) bob = self._load(BobId, Bob_device) malory = self._load(BobId, Bob_device) # create olm devices for each others known devices list alice_device = 
OlmDevice(AliceId, Alice_device, alice.account.identity_keys) bob_device = OlmDevice(BobId, Bob_device, bob.account.identity_keys) malory_device = OlmDevice(MaloryId, Malory_device, malory.account.identity_keys) # add the devices to the device list alice.device_store.add(bob_device) alice.device_store.add(malory_device) bob.device_store.add(alice_device) # bob creates one time keys bob.account.generate_one_time_keys(1) one_time = list(bob.account.one_time_keys["curve25519"].values())[0] # Mark the keys as published bob.account.mark_keys_as_published() # alice creates an outbound olm session with bob alice.create_session(one_time, bob_device.curve25519) alice.verify_device(bob_device) alice.verify_device(malory_device) alice._maxToDeviceMessagesPerRequest = 1 sharing_with, to_device = alice.share_group_session( "!test:example.org", [BobId, MaloryId] ) group_session = alice.outbound_group_sessions["!test:example.org"] assert group_session assert len(sharing_with) == 1 assert not group_session.users_shared_with group_session.users_shared_with.update(sharing_with) sharing_with, to_device = alice.share_group_session( "!test:example.org", [BobId, MaloryId] ) assert len(sharing_with) == 1 os.remove(os.path.join(ephemeral_dir, f"{AliceId}_{Alice_device}.db")) os.remove(os.path.join(ephemeral_dir, f"{BobId}_{Bob_device}.db")) @ephemeral def test_room_key_event(self): olm = self.ephemeral_olm session = OutboundGroupSession() payload = { "sender": BobId, "sender_device": Bob_device, "type": "m.room_key", "content": { "algorithm": "m.megolm.v1.aes-sha2", "room_id": TEST_ROOM, "session_id": session.id, "session_key": session.session_key, }, "keys": {}, } bad_event = olm._handle_room_key_event( BobId, "Xjuu9d2KjHLGIHpCOCHS7hONQahapiwI1MhVmlPlCFM", {} ) assert isinstance(bad_event, UnknownBadEvent) event = olm._handle_room_key_event( BobId, "Xjuu9d2KjHLGIHpCOCHS7hONQahapiwI1MhVmlPlCFM", payload ) assert not event payload["keys"] = {"ed25519": 
"FEfrmWlasr4tcMtbNX/BU5lbdjmpt3ptg8ApTD8YAh4"} event = olm._handle_room_key_event( BobId, "Xjuu9d2KjHLGIHpCOCHS7hONQahapiwI1MhVmlPlCFM", payload ) assert isinstance(event, RoomKeyEvent) def test_forwarded_room_key_event(self, alice_account_pair): olm, bob = alice_account_pair device = olm.device_store[bob.user_id][bob.device_id] session = OutboundGroupSession() session = InboundGroupSession( session.session_key, device.ed25519, device.curve25519, TEST_ROOM, ) payload = { "sender": device.user_id, "sender_device": device.device_id, "type": "m.forwarded_room_key", "content": { "algorithm": "m.megolm.v1.aes-sha2", "room_id": session.room_id, "session_id": session.id, "session_key": session.export_session(session.first_known_index), "sender_key": session.sender_key, "sender_claimed_ed25519_key": session.ed25519, "forwarding_curve25519_key_chain": session.forwarding_chain, }, "keys": {"ed25519": session.ed25519}, } bad_event = olm._handle_room_key_event(device.user_id, device.curve25519, {}) assert isinstance(bad_event, UnknownBadEvent) event = olm._handle_forwarded_room_key_event( device.user_id, device.curve25519, payload ) assert not event key_request = OutgoingKeyRequest( session.id, session.id, session.room_id, "m.megolm.v1.aes-sha2", ) olm.outgoing_key_requests[session.id] = key_request event = olm._handle_olm_event(device.user_id, device.curve25519, payload) assert isinstance(event, ForwardedRoomKeyEvent) def test_user_verification_status(self, monkeypatch): def mocksave(self): return monkeypatch.setattr(KeyStore, "_save", mocksave) # create three new accounts alice = self._load(AliceId, Alice_device) bob = self._load(BobId, Bob_device) # create olm devices for each others known devices list bob_device = OlmDevice(BobId, Bob_device, bob.account.identity_keys) bob2_device = OlmDevice(BobId, Malory_device, bob.account.identity_keys) alice.device_store.add(bob_device) assert not alice.user_fully_verified(BobId) alice.verify_device(bob_device) assert 
alice.user_fully_verified(BobId) alice.device_store.add(bob2_device) assert not alice.user_fully_verified(BobId) alice.verify_device(bob2_device) assert alice.user_fully_verified(BobId) os.remove(os.path.join(ephemeral_dir, f"{AliceId}_{Alice_device}.db")) os.remove(os.path.join(ephemeral_dir, f"{BobId}_{Bob_device}.db")) @ephemeral def test_group_decryption(self): olm = self.ephemeral_olm olm.create_outbound_group_session(TEST_ROOM) message = { "type": "m.room.message", "content": { "msgtype": "m.text", "body": "hello wordl", }, } with pytest.raises(GroupEncryptionError): encrypted_dict = olm.group_encrypt(TEST_ROOM, message) session = olm.outbound_group_sessions[TEST_ROOM] session.shared = True encrypted_dict = olm.group_encrypt(TEST_ROOM, message) megolm = {"type": "m.room.encrypted", "content": encrypted_dict} megolm_event = MegolmEvent.from_dict(megolm) assert isinstance(megolm_event, UnknownBadEvent) megolm["event_id"] = "1" megolm["sender"] = "@ephemeral:example.org" megolm["origin_server_ts"] = 0 megolm_event = MegolmEvent.from_dict(megolm) assert isinstance(megolm_event, MegolmEvent) with pytest.raises(EncryptionError): event = olm.decrypt_megolm_event(megolm_event) session_store = olm.inbound_group_store olm.inbound_group_store = GroupSessionStore() with pytest.raises(EncryptionError): event = olm.decrypt_megolm_event(megolm_event) olm.inbound_group_store = session_store megolm_event.room_id = TEST_ROOM event = olm.decrypt_event(megolm_event) assert isinstance(event, RoomMessageText) assert event.decrypted @ephemeral def test_key_sharing(self): olm = self.ephemeral_olm assert olm.should_upload_keys to_share = olm.share_keys() assert "device_keys" in to_share assert "one_time_keys" in to_share assert len(to_share["one_time_keys"]) == olm.account.max_one_time_keys // 2 response = KeysUploadResponse.from_dict( { "one_time_key_counts": { "curve25519": 0, "signed_curve25519": olm.account.max_one_time_keys // 2, } } ) olm.handle_response(response) assert not 
olm.should_upload_keys with pytest.raises( ValueError, match="Can't share any keys, too many keys already shared" ): olm.share_keys() olm.uploaded_key_count -= 1 assert olm.should_upload_keys to_share = olm.share_keys() assert "device_keys" not in to_share assert "one_time_keys" in to_share assert len(to_share["one_time_keys"]) == 1 def test_outbound_session_creation(self, monkeypatch): def mocksave(self): return monkeypatch.setattr(KeyStore, "_save", mocksave) alice = self._load(AliceId, Alice_device) bob = self._load(BobId, Bob_device) bob_device = OlmDevice(BobId, Bob_device, bob.account.identity_keys) assert not alice.get_missing_sessions([BobId]) alice.device_store.add(bob_device) missing = alice.get_missing_sessions([BobId]) assert not alice.session_store.get(bob_device.curve25519) assert BobId in missing assert Bob_device in missing[BobId] to_share = bob.share_keys() one_time_key = list(to_share["one_time_keys"].items())[0] key_claim_dict = { "one_time_keys": { BobId: { Bob_device: {one_time_key[0]: one_time_key[1]}, }, }, "failures": {}, } response = KeysClaimResponse.from_dict(key_claim_dict, TEST_ROOM) assert isinstance(response, KeysClaimResponse) print(response) alice.handle_response(response) assert not alice.get_missing_sessions([BobId]) assert alice.session_store.get(bob_device.curve25519) os.remove(os.path.join(ephemeral_dir, f"{AliceId}_{Alice_device}.db")) os.remove(os.path.join(ephemeral_dir, f"{BobId}_{Bob_device}.db")) def test_group_session_sharing_new(self, olm_account, bob_account): alice = olm_account bob = bob_account alice_device = OlmDevice( alice.user_id, alice.device_id, alice.account.identity_keys ) bob_device = OlmDevice(bob.user_id, bob.device_id, bob.account.identity_keys) alice.device_store.add(bob_device) bob.device_store.add(alice_device) bob.account.generate_one_time_keys(1) one_time = list(bob.account.one_time_keys["curve25519"].values())[0] bob.account.mark_keys_as_published() alice.create_session(one_time, 
bob_device.curve25519) sharing_with, to_device = alice.share_group_session( "!test:example.org", [bob.user_id], ignore_unverified_devices=True ) assert len(sharing_with) == 1 assert alice.outbound_group_sessions["!test:example.org"] assert alice.is_device_ignored(bob_device) def test_session_unwedging(self, olm_account, bob_account): alice = olm_account bob = bob_account alice_device = OlmDevice( alice.user_id, alice.device_id, alice.account.identity_keys ) bob_device = OlmDevice(bob.user_id, bob.device_id, bob.account.identity_keys) alice.device_store.add(bob_device) bob.device_store.add(alice_device) bob.account.generate_one_time_keys(1) one_time = list(bob.account.one_time_keys["curve25519"].values())[0] bob.account.mark_keys_as_published() alice.create_session(one_time, bob_device.curve25519) # Let us pickle our session with bob here so we can later unpickle it # and wedge our session. alice_pickle = alice.session_store[bob_device.curve25519][0].pickle("") # Share a initial olm encrypted message _, to_device = alice.share_group_session( TEST_ROOM, [bob.user_id], ignore_unverified_devices=True ) outbound_session = alice.outbound_group_sessions[TEST_ROOM] olm_message = self.olm_message_to_event(to_device, bob, alice) # Pass the to-device event to bob and make sure we get the right events event = ToDeviceEvent.parse_event(olm_message) assert isinstance(event, OlmEvent) decrypted_event = bob.decrypt_event(event) assert isinstance(decrypted_event, RoomKeyEvent) # Make sure bob got the room-key assert bob.inbound_group_store bob_session = bob.inbound_group_store.get( TEST_ROOM, alice_device.curve25519, outbound_session.id ) assert bob_session.id == outbound_session.id # Now bob shares a room-key with alice _, to_device = bob.share_group_session( TEST_ROOM, [alice.user_id], ignore_unverified_devices=True ) olm_message = self.olm_message_to_event(to_device, alice, bob) event = ToDeviceEvent.parse_event(olm_message) assert isinstance(event, OlmEvent) decrypted_event = 
alice.decrypt_event(event) assert isinstance(decrypted_event, RoomKeyEvent) # Let us wedge the session now session = alice.session_store[bob_device.curve25519][0] alice.session_store[bob_device.curve25519][0] = Session.from_pickle( alice_pickle, session.creation_time, "", session.use_time ) alice.rotate_outbound_group_session(TEST_ROOM) # Try to share a room-key now _, to_device = alice.share_group_session( TEST_ROOM, [bob.user_id], ignore_unverified_devices=True ) # Set the creation time to be older than an hour, otherwise we will not # be able to unwedge the session. alice_session = bob.session_store.get(alice_device.curve25519) alice_session.creation_time = datetime.now() - timedelta(hours=2) olm_message = self.olm_message_to_event(to_device, bob, alice) # Pass the to-device event to bob and make sure we get the right events event = ToDeviceEvent.parse_event(olm_message) assert isinstance(event, OlmEvent) decrypted_event = bob.decrypt_event(event) # Make sure that decryption failed assert decrypted_event is None # Make sure that we have queued a m.dummy message to be sent out as a # to-device message assert alice_device in bob.wedged_devices # Bob should now claim new keys from alice, we're simulating this over # here since the olm machine doesn't know how to do requests. to_share = alice.share_keys() one_time_key = list(to_share["one_time_keys"].items())[0] key_claim_dict = { "one_time_keys": { alice.user_id: { alice.device_id: {one_time_key[0]: one_time_key[1]}, }, }, "failures": {}, } response = KeysClaimResponse.from_dict(key_claim_dict, TEST_ROOM) assert not bob.outgoing_to_device_messages assert isinstance(response, KeysClaimResponse) bob.handle_response(response) # After we claimed the keys a new Olm session will be created and a # to-device message will be prepared for alice. 
assert bob.outgoing_to_device_messages message = bob.outgoing_to_device_messages[0] assert message.type == "m.room.encrypted" assert message.recipient == alice.user_id assert message.recipient_device == alice.device_id # Forward the message to alice. event = ToDeviceEvent.parse_event( self.olm_message_to_event(message.as_dict(), alice, bob) ) assert isinstance(event, OlmEvent) # Take out our currently used session for bob. wedged_session = alice.session_store.get(bob_device.curve25519) decrypted_event = alice.decrypt_event(event) assert isinstance(decrypted_event, DummyEvent) # Check that the dummy event created a new Olm session and that it is # the preferred one. new_session = alice.session_store.get(bob_device.curve25519) assert wedged_session.use_time < new_session.use_time assert wedged_session != new_session # Try to mark the device again to be unwedged, this should fail since # our creation time isn't old enough. alice._mark_device_for_unwedging(alice_device.user_id, alice_device.curve25519) assert alice_device not in bob.wedged_devices def test_device_renaming(self, olm_account): parsed_dict = TestClass._load_response("tests/data/keys_query.json") response = KeysQueryResponse.from_dict(parsed_dict) assert isinstance(response, KeysQueryResponse) olm_account.handle_response(response) device = olm_account.device_store["@alice:example.org"]["JLAFKJWSCS"] assert device.ed25519 == "nE6W2fCblxDcOFmeEtCHNl8/l8bXcu7GKyAswA4r3mM" assert device.display_name == "Alice's mobile phone" parsed_dict["device_keys"]["@alice:example.org"]["JLAFKJWSCS"]["unsigned"][ "device_display_name" ] = "Phoney" response = KeysQueryResponse.from_dict(parsed_dict) olm_account.handle_response(response) assert device.display_name == "Phoney" def test_replay_attack_protection(self, olm_account, bob_account): alice = olm_account bob = bob_account alice_device = OlmDevice( alice.user_id, alice.device_id, alice.account.identity_keys ) bob_device = OlmDevice(bob.user_id, bob.device_id, 
bob.account.identity_keys) alice.device_store.add(bob_device) bob.device_store.add(alice_device) bob.account.generate_one_time_keys(1) one_time = list(bob.account.one_time_keys["curve25519"].values())[0] bob.account.mark_keys_as_published() alice.create_session(one_time, bob_device.curve25519) # Share a initial olm encrypted message _, to_device = alice.share_group_session( TEST_ROOM, [bob.user_id], ignore_unverified_devices=True ) outbound_session = alice.outbound_group_sessions[TEST_ROOM] outbound_session.shared = True olm_message = self.olm_message_to_event(to_device, bob, alice) # Pass the to-device event to bob and make sure we get the right events event = ToDeviceEvent.parse_event(olm_message) assert isinstance(event, OlmEvent) decrypted_event = bob.decrypt_event(event) assert isinstance(decrypted_event, RoomKeyEvent) message = { "type": "m.room.message", "content": {"msgtype": "m.text", "body": "It's a secret to everybody."}, } encrypted_content = alice.group_encrypt(TEST_ROOM, message) encrypted_message = { "event_id": "!event_id", "type": "m.room.encrypted", "sender": alice.user_id, "origin_server_ts": int(time.time()), "content": encrypted_content, "room_id": TEST_ROOM, } event = MegolmEvent.from_dict(encrypted_message) decrypted_event = bob.decrypt_event(event) assert decrypted_event.body == message["content"]["body"] # Let us now replay the event. encrypted_message["event_id"] = "!new_event_id" event = MegolmEvent.from_dict(encrypted_message) with pytest.raises(EncryptionError): bob.decrypt_megolm_event(event) encrypted_message["event_id"] = "!event_id" old_time = encrypted_message["origin_server_ts"] encrypted_message["origin_server_ts"] += 100 event = MegolmEvent.from_dict(encrypted_message) with pytest.raises(EncryptionError): bob.decrypt_megolm_event(event) # Let us now check that normal messages from the room history decrypt # again. 
encrypted_message["origin_server_ts"] = old_time event = MegolmEvent.from_dict(encrypted_message) decrypted_event = bob.decrypt_event(event) assert decrypted_event.body == message["content"]["body"] def test_key_forwards_with_ourselves(self, alice_account_pair): alice, bob = alice_account_pair bob_device = alice.device_store[bob.user_id][bob.device_id] bob.account.generate_one_time_keys(1) one_time = list(bob.account.one_time_keys["curve25519"].values())[0] bob.account.mark_keys_as_published() alice.create_session(one_time, bob_device.curve25519) _, to_device = alice.share_group_session( TEST_ROOM, [bob.user_id], ignore_unverified_devices=True ) # Setup a working olm session by sharing a key from alice to bob olm_message = self.olm_message_to_event(to_device, bob, alice) event = ToDeviceEvent.parse_event(olm_message) bob.decrypt_event(event) # Bob shares a room session as well but alice never receives the # session. bob.share_group_session( TEST_ROOM, [alice.user_id], ignore_unverified_devices=True ) session = bob.outbound_group_sessions[TEST_ROOM] session.shared = True session.users_shared_with.add((alice.user_id, alice.device_id)) message = { "type": "m.room.message", "content": {"msgtype": "m.text", "body": "It's a secret to everybody."}, } encrypted_content = bob.group_encrypt(TEST_ROOM, message) encrypted_message = { "event_id": "!event_id", "type": "m.room.encrypted", "sender": bob.user_id, "origin_server_ts": int(time.time()), "content": encrypted_content, "room_id": TEST_ROOM, } event = MegolmEvent.from_dict(encrypted_message) # Alice tries to decrypt the event but can't. 
decrypted_event = alice.decrypt_event(event) assert decrypted_event is None key_request = event.as_key_request( bob.user_id, alice.device_id, event.session_id, ) outgoing_key_request = OutgoingKeyRequest( event.session_id, event.session_id, TEST_ROOM, event.algorithm ) alice.outgoing_key_requests[event.session_id] = outgoing_key_request key_request = { "sender": alice.user_id, "type": "m.room_key_request", "content": key_request.as_dict()["messages"][bob.user_id]["*"], } key_request_event = RoomKeyRequest.from_dict(key_request) assert isinstance(key_request_event, RoomKeyRequest) assert not bob.outgoing_to_device_messages # Bob receives the event and queues it up for collection. bob.handle_to_device_event(key_request_event) assert key_request_event in bob.received_key_requests.values() # Convert the key request event into a to-device message. bob.collect_key_requests() # Check that the message is now queued. assert bob.outgoing_to_device_messages to_device = bob.outgoing_to_device_messages[0] # Let us now share the to-device message with Alice olm_message = self.olm_message_to_event(to_device.as_dict(), alice, bob) forwarded_key_event = ToDeviceEvent.parse_event(olm_message) assert isinstance(forwarded_key_event, OlmEvent) # Decrypt the olm event and check that we received a forwarded room # key. decrypted_event = alice.handle_to_device_event(forwarded_key_event) assert isinstance(decrypted_event, ForwardedRoomKeyEvent) # Alice tries to decrypt the previous event again. decrypted_event = alice.decrypt_event(event) assert isinstance(decrypted_event, RoomMessageText) assert decrypted_event.body == "It's a secret to everybody." 
def test_key_forwards_missing_session(self, alice_account_pair): alice, bob = alice_account_pair bob.create_outbound_group_session(TEST_ROOM) alice_device = bob.device_store[alice.user_id][alice.device_id] session = bob.outbound_group_sessions[TEST_ROOM] session.shared = True message = { "type": "m.room.message", "content": {"msgtype": "m.text", "body": "It's a secret to everybody."}, } encrypted_content = bob.group_encrypt(TEST_ROOM, message) encrypted_message = { "event_id": "!event_id", "type": "m.room.encrypted", "sender": bob.user_id, "origin_server_ts": int(time.time()), "content": encrypted_content, "room_id": TEST_ROOM, } event = MegolmEvent.from_dict(encrypted_message) # Alice tries to decrypt the event but can't. decrypted_event = alice.decrypt_event(event) assert decrypted_event is None key_request = event.as_key_request( bob.user_id, alice.device_id, event.session_id, ) outgoing_key_request = OutgoingKeyRequest( event.session_id, event.session_id, TEST_ROOM, event.algorithm ) alice.outgoing_key_requests[event.session_id] = outgoing_key_request key_request = { "sender": alice.user_id, "type": "m.room_key_request", "content": key_request.as_dict()["messages"][bob.user_id]["*"], } key_request_event = RoomKeyRequest.from_dict(key_request) assert isinstance(key_request_event, RoomKeyRequest) assert not bob.outgoing_to_device_messages # Bob receives the event and queues it up for collection. bob.handle_to_device_event(key_request_event) assert key_request_event in bob.received_key_requests.values() # Convert the key request event into a to-device message. bob.collect_key_requests() # Check that the message is not queued. We are missing a Olm session. assert not bob.outgoing_to_device_messages assert alice_device in bob.key_request_devices_no_session assert ( key_request_event in bob.key_requests_waiting_for_session[ alice_device.user_id, alice_device.id ].values() ) # Let us do a key claim request. 
to_share = alice.share_keys() one_time_key = list(to_share["one_time_keys"].items())[0] key_claim_dict = { "one_time_keys": { alice.user_id: { alice.device_id: {one_time_key[0]: one_time_key[1]}, }, }, "failures": {}, } response = KeysClaimResponse.from_dict(key_claim_dict) bob.handle_response(response) # We got a session now, the device is not waiting for a session anymore assert alice_device not in bob.key_request_devices_no_session # The key request is neither waiting for a session anymore. assert ( key_request_event not in bob.key_requests_waiting_for_session[ alice_device.user_id, alice_device.id ].values() ) # The key request is now waiting to be collected again. assert key_request_event in bob.received_key_requests.values() # Let us collect it now. bob.collect_key_requests() # We found a to-device message now. to_device = bob.outgoing_to_device_messages[0] # Let us now share the to-device message with Alice olm_message = self.olm_message_to_event(to_device.as_dict(), alice, bob) forwarded_key_event = ToDeviceEvent.parse_event(olm_message) assert isinstance(forwarded_key_event, OlmEvent) # Decrypt the olm event and check that we received a forwarded room # key. decrypted_event = alice.handle_to_device_event(forwarded_key_event) assert isinstance(decrypted_event, ForwardedRoomKeyEvent) # Alice tries to decrypt the previous event again. decrypted_event = alice.decrypt_event(event) assert isinstance(decrypted_event, RoomMessageText) assert decrypted_event.body == "It's a secret to everybody." 
def test_key_forward_untrusted_device(self, alice_account_pair): alice, bob = alice_account_pair alice_device = bob.device_store[alice.user_id][alice.device_id] bob_device = alice.device_store[bob.user_id][bob.device_id] bob.unverify_device(alice_device) bob.account.generate_one_time_keys(1) one_time = list(bob.account.one_time_keys["curve25519"].values())[0] bob.account.mark_keys_as_published() alice.create_session(one_time, bob_device.curve25519) _, to_device = alice.share_group_session( TEST_ROOM, [bob.user_id], ignore_unverified_devices=True ) # Setup a working olm session by sharing a key from alice to bob olm_message = self.olm_message_to_event(to_device, bob, alice) event = ToDeviceEvent.parse_event(olm_message) bob.decrypt_event(event) # Bob shares a room session as well but alice never receives the # session. bob.share_group_session( TEST_ROOM, [alice.user_id], ignore_unverified_devices=True ) session = bob.outbound_group_sessions[TEST_ROOM] session.shared = True session.users_shared_with.add((alice.user_id, alice.device_id)) message = { "type": "m.room.message", "content": {"msgtype": "m.text", "body": "It's a secret to everybody."}, } encrypted_content = bob.group_encrypt(TEST_ROOM, message) encrypted_message = { "event_id": "!event_id", "type": "m.room.encrypted", "sender": bob.user_id, "origin_server_ts": int(time.time()), "content": encrypted_content, "room_id": TEST_ROOM, } event = MegolmEvent.from_dict(encrypted_message) # Alice tries to decrypt the event but can't. 
decrypted_event = alice.decrypt_event(event) assert decrypted_event is None key_request = event.as_key_request( bob.user_id, alice.device_id, event.session_id, ) outgoing_key_request = OutgoingKeyRequest( event.session_id, event.session_id, TEST_ROOM, event.algorithm ) alice.outgoing_key_requests[event.session_id] = outgoing_key_request key_request = { "sender": alice.user_id, "type": "m.room_key_request", "content": key_request.as_dict()["messages"][bob.user_id]["*"], } key_request_event = RoomKeyRequest.from_dict(key_request) assert isinstance(key_request_event, RoomKeyRequest) assert not bob.outgoing_to_device_messages # Bob receives the event and queues it up for collection. bob.handle_to_device_event(key_request_event) assert key_request_event in bob.received_key_requests.values() # Convert the key request event into a to-device message. collected_requests = bob.collect_key_requests() # The message could not be queued because the device is not trusted assert not bob.outgoing_to_device_messages assert key_request_event in bob.key_request_from_untrusted.values() assert key_request_event in collected_requests # Let us try to continue the key share without verifying the device. assert not bob.continue_key_share(key_request_event) # Let us now verify the device and tell our Olm machine that we should # resume. bob.verify_device(alice_device) assert bob.continue_key_share(key_request_event) assert key_request_event not in bob.key_request_from_untrusted.values() # There is now a key queued up to be sent as a to-device message. assert bob.outgoing_to_device_messages to_device = bob.outgoing_to_device_messages[0] # Let us now share the to-device message with Alice olm_message = self.olm_message_to_event(to_device.as_dict(), alice, bob) forwarded_key_event = ToDeviceEvent.parse_event(olm_message) assert isinstance(forwarded_key_event, OlmEvent) # Decrypt the olm event and check that we received a forwarded room # key. 
decrypted_event = alice.handle_to_device_event(forwarded_key_event) assert isinstance(decrypted_event, ForwardedRoomKeyEvent) # Alice tries to decrypt the previous event again. decrypted_event = alice.decrypt_event(event) assert isinstance(decrypted_event, RoomMessageText) assert decrypted_event.body == "It's a secret to everybody." def test_key_forward_cancelling(self, olm_account, bob_account): alice = olm_account bob = bob_account bob.user_id = alice.user_id alice_device = OlmDevice( alice.user_id, alice.device_id, alice.account.identity_keys ) bob_device = OlmDevice(bob.user_id, bob.device_id, bob.account.identity_keys) alice.device_store.add(bob_device) bob.device_store.add(alice_device) # bob.verify_device(alice_device) bob.create_outbound_group_session(TEST_ROOM) session = bob.outbound_group_sessions[TEST_ROOM] session.shared = True message = { "type": "m.room.message", "content": {"msgtype": "m.text", "body": "It's a secret to everybody."}, } encrypted_content = bob.group_encrypt(TEST_ROOM, message) encrypted_message = { "event_id": "!event_id", "type": "m.room.encrypted", "sender": bob.user_id, "origin_server_ts": int(time.time()), "content": encrypted_content, "room_id": TEST_ROOM, } event = MegolmEvent.from_dict(encrypted_message) # Alice tries to decrypt the event but can't. 
decrypted_event = alice.decrypt_event(event) assert decrypted_event is None key_request = event.as_key_request( bob.user_id, alice.device_id, event.session_id, ) outgoing_key_request = OutgoingKeyRequest( event.session_id, event.session_id, TEST_ROOM, event.algorithm ) alice.outgoing_key_requests[event.session_id] = outgoing_key_request key_request = { "sender": alice.user_id, "type": "m.room_key_request", "content": key_request.as_dict()["messages"][bob.user_id]["*"], } key_request_event = RoomKeyRequest.from_dict(key_request) assert isinstance(key_request_event, RoomKeyRequest) assert not bob.outgoing_to_device_messages cancellation = RoomKeyRequestCancellation( {}, key_request_event.sender, key_request_event.requesting_device_id, key_request_event.request_id, ) # Bob receives the event and queues it up for collection. bob.handle_to_device_event(key_request_event) assert key_request_event in bob.received_key_requests.values() # Cancel the request immediately. bob.handle_to_device_event(cancellation) assert key_request_event not in bob.received_key_requests.values() # Bob receives the event again bob.handle_to_device_event(key_request_event) # This time we collect the event. assert cancellation not in bob.collect_key_requests() # Check that the message is not queued. We are missing a Olm session. assert not bob.outgoing_to_device_messages assert alice_device in bob.key_request_devices_no_session assert ( key_request_event in bob.key_requests_waiting_for_session[ alice_device.user_id, alice_device.id ].values() ) # We cancel again. bob.handle_to_device_event(cancellation) assert cancellation not in bob.collect_key_requests() assert alice_device not in bob.key_request_devices_no_session assert ( key_request_event not in bob.key_requests_waiting_for_session[ alice_device.user_id, alice_device.id ].values() ) # Let us do another round bob.handle_to_device_event(key_request_event) bob.collect_key_requests() # Let us do a key claim request. 
to_share = alice.share_keys() one_time_key = list(to_share["one_time_keys"].items())[0] key_claim_dict = { "one_time_keys": { alice.user_id: { alice.device_id: {one_time_key[0]: one_time_key[1]}, }, }, "failures": {}, } response = KeysClaimResponse.from_dict(key_claim_dict) bob.handle_response(response) # We got a session now, the device is not waiting for a session anymore assert alice_device not in bob.key_request_devices_no_session # The key request is neither waiting for a session anymore. assert ( key_request_event not in bob.key_requests_waiting_for_session[ alice_device.user_id, alice_device.id ].values() ) # The key request is now waiting to be collected again. assert key_request_event in bob.received_key_requests.values() # Let us collect it now. bob.collect_key_requests() # Still no, device isn't verified. assert not bob.outgoing_to_device_messages assert key_request_event in bob.key_request_from_untrusted.values() # Cancel again, now we're going to get the cancellation event in the # collect output bob.handle_to_device_event(cancellation) assert cancellation in bob.collect_key_requests() # Let us finally check out if bob can also reject the sharing of the # key. 
bob.handle_to_device_event(key_request_event) event_for_user = bob.collect_key_requests()[0] assert not bob.outgoing_to_device_messages assert key_request_event in bob.key_request_from_untrusted.values() assert bob.cancel_key_share(event_for_user) assert key_request_event not in bob.key_request_from_untrusted.values() def test_invalid_key_requests(self, olm_account, bob_account): alice = olm_account bob = bob_account alice_device = OlmDevice( alice.user_id, alice.device_id, alice.account.identity_keys ) bob_device = OlmDevice(bob.user_id, bob.device_id, bob.account.identity_keys) alice.device_store.add(bob_device) bob.device_store.add(alice_device) # bob.verify_device(alice_device) bob.create_outbound_group_session(TEST_ROOM) session = bob.outbound_group_sessions[TEST_ROOM] session.shared = True message = { "type": "m.room.message", "content": {"msgtype": "m.text", "body": "It's a secret to everybody."}, } encrypted_content = bob.group_encrypt(TEST_ROOM, message) encrypted_message = { "event_id": "!event_id", "type": "m.room.encrypted", "sender": bob.user_id, "origin_server_ts": int(time.time()), "content": encrypted_content, "room_id": TEST_ROOM, } event = MegolmEvent.from_dict(encrypted_message) # Alice tries to decrypt the event but can't. 
decrypted_event = alice.decrypt_event(event) assert decrypted_event is None key_request = event.as_key_request( bob.user_id, alice.device_id, event.session_id, ) outgoing_key_request = OutgoingKeyRequest( event.session_id, event.session_id, TEST_ROOM, event.algorithm ) alice.outgoing_key_requests[event.session_id] = outgoing_key_request key_request = { "sender": alice.user_id, "type": "m.room_key_request", "content": key_request.as_dict()["messages"][bob.user_id]["*"], } key_request_event = RoomKeyRequest.from_dict(key_request) assert isinstance(key_request_event, RoomKeyRequest) assert not bob.outgoing_to_device_messages key_request_event.session_id = "fake_id" bob.handle_to_device_event(key_request_event) assert key_request_event in bob.received_key_requests.values() assert not bob.outgoing_to_device_messages bob.collect_key_requests() assert not bob.outgoing_to_device_messages key_request_event.session_id = session.id key_request_event.requesting_device_id = "FAKE_ID" bob.handle_to_device_event(key_request_event) assert key_request_event in bob.received_key_requests.values() assert not bob.outgoing_to_device_messages bob.collect_key_requests() assert not bob.outgoing_to_device_messages alice_device.deleted = True key_request_event.requesting_device_id = alice.device_id bob.handle_to_device_event(key_request_event) assert key_request_event in bob.received_key_requests.values() assert not bob.outgoing_to_device_messages bob.collect_key_requests() assert not bob.outgoing_to_device_messages bob.user_id = alice.user_id key_request_event.session_id = "fake_id" bob.handle_to_device_event(key_request_event) assert key_request_event in bob.received_key_requests.values() assert not bob.outgoing_to_device_messages bob.collect_key_requests() assert not bob.outgoing_to_device_messages key_request_event.session_id = session.id key_request_event.requesting_device_id = "FAKE_ID" bob.handle_to_device_event(key_request_event) assert key_request_event in 
bob.received_key_requests.values() assert not bob.outgoing_to_device_messages bob.collect_key_requests() assert not bob.outgoing_to_device_messages matrix-nio-0.24.0/tests/event_builders_test.py000066400000000000000000000042051455215747700214730ustar00rootroot00000000000000import pytest import nio.event_builders as builders class TestClass: def test_base_class(self): with pytest.raises(NotImplementedError): builders.EventBuilder().as_dict() def test_enable_encryption(self): event = builders.EnableEncryptionBuilder( algorithm="test", rotation_ms=9801, rotation_msgs=101 ).as_dict() assert event == { "type": "m.room.encryption", "state_key": "", "content": { "algorithm": "test", "rotation_period_ms": 9801, "rotation_period_msgs": 101, }, } def test_change_name(self): event = builders.ChangeNameBuilder("foo").as_dict() assert event == { "type": "m.room.name", "state_key": "", "content": {"name": "foo"}, } too_long_name = "TooLongName" * 256 with pytest.raises( ValueError, match=f"Room name exceeds 255 characters: {too_long_name}" ): builders.ChangeNameBuilder(too_long_name) def test_change_topic(self): event = builders.ChangeTopicBuilder("Lorem ipsum").as_dict() assert event == { "type": "m.room.topic", "state_key": "", "content": {"topic": "Lorem ipsum"}, } def test_change_join_rules(self): event = builders.ChangeJoinRulesBuilder("invite").as_dict() assert event == { "type": "m.room.join_rules", "state_key": "", "content": {"join_rule": "invite"}, } def test_change_guest_access(self): event = builders.ChangeGuestAccessBuilder("can_join").as_dict() assert event == { "type": "m.room.guest_access", "state_key": "", "content": {"guest_access": "can_join"}, } def test_change_history_visibility(self): event = builders.ChangeHistoryVisibilityBuilder("joined").as_dict() assert event == { "type": "m.room.history_visibility", "state_key": "", "content": {"history_visibility": "joined"}, } 
matrix-nio-0.24.0/tests/event_test.py000066400000000000000000000572131455215747700176110ustar00rootroot00000000000000import json from nio.api import PushRuleKind from nio.events import ( AccountDataEvent, BadEvent, CallAnswerEvent, CallCandidatesEvent, CallEvent, CallHangupEvent, CallInviteEvent, DummyEvent, EphemeralEvent, Event, ForwardedRoomKeyEvent, FullyReadEvent, InviteAliasEvent, InviteEvent, InviteMemberEvent, InviteNameEvent, KeyVerificationAccept, KeyVerificationCancel, KeyVerificationKey, KeyVerificationMac, KeyVerificationStart, MegolmEvent, OlmEvent, PowerLevelsEvent, PushContainsDisplayName, PushEventMatch, PushRoomMemberCount, PushRule, PushRuleset, PushRulesEvent, PushSenderNotificationPermission, PushUnknownCondition, ReactionEvent, Receipt, ReceiptEvent, RedactedEvent, RedactionEvent, RoomAliasEvent, RoomAvatarEvent, RoomCreateEvent, RoomEncryptedImage, RoomEncryptionEvent, RoomGuestAccessEvent, RoomHistoryVisibilityEvent, RoomJoinRulesEvent, RoomKeyEvent, RoomKeyRequest, RoomKeyRequestCancellation, RoomMemberEvent, RoomMessageEmote, RoomMessageNotice, RoomMessageText, RoomNameEvent, RoomTopicEvent, StickerEvent, TagEvent, ToDeviceEvent, TypingNoticeEvent, UnknownAccountDataEvent, UnknownBadEvent, UnknownEncryptedEvent, UnknownEvent, UnknownToDeviceEvent, ) from nio.responses import RoomSummary from nio.rooms import MatrixRoom class TestClass: @staticmethod def _load_response(filename): with open(filename) as f: return json.loads(f.read()) def test_redacted_event(self): parsed_dict = TestClass._load_response("tests/data/events/redacted.json") response = RedactedEvent.from_dict(parsed_dict) assert isinstance(response, RedactedEvent) def test_malformed_event(self): parsed_dict = TestClass._load_response( "tests/data/events/redacted_invalid.json" ) response = RedactedEvent.from_dict(parsed_dict) assert isinstance(response, BadEvent) def test_create_event(self): parsed_dict = TestClass._load_response("tests/data/events/create.json") event = 
RoomCreateEvent.from_dict(parsed_dict) assert isinstance(event, RoomCreateEvent) def test_create_event_typed(self): parsed_dict = TestClass._load_response("tests/data/events/create_typed.json") event = RoomCreateEvent.from_dict(parsed_dict) assert isinstance(event, RoomCreateEvent) assert event.room_type == "nio.matrix.test" def test_guest_access_event(self): parsed_dict = TestClass._load_response("tests/data/events/guest_access.json") event = RoomGuestAccessEvent.from_dict(parsed_dict) assert isinstance(event, RoomGuestAccessEvent) def test_join_rules_event(self): parsed_dict = TestClass._load_response("tests/data/events/join_rules.json") event = RoomJoinRulesEvent.from_dict(parsed_dict) assert isinstance(event, RoomJoinRulesEvent) def test_history_visibility_event(self): parsed_dict = TestClass._load_response( "tests/data/events/history_visibility.json" ) event = RoomHistoryVisibilityEvent.from_dict(parsed_dict) assert isinstance(event, RoomHistoryVisibilityEvent) def test_topic_event(self): parsed_dict = TestClass._load_response("tests/data/events/topic.json") event = RoomTopicEvent.from_dict(parsed_dict) assert isinstance(event, RoomTopicEvent) def test_room_avatar_event(self): parsed_dict = TestClass._load_response("tests/data/events/room_avatar.json") event = RoomAvatarEvent.from_dict(parsed_dict) assert isinstance(event, RoomAvatarEvent) def test_room_avatar_event_no_url(self): parsed_dict = TestClass._load_response("tests/data/events/room_avatar.json") parsed_dict["content"].pop("url") event = RoomAvatarEvent.from_dict(parsed_dict) assert isinstance(event, RoomAvatarEvent) def test_tag_event(self): parsed_dict = TestClass._load_response("tests/data/events/tag.json") event = AccountDataEvent.parse_event(parsed_dict) assert isinstance(event, TagEvent) def test_name_event(self): parsed_dict = TestClass._load_response("tests/data/events/name.json") event = RoomNameEvent.from_dict(parsed_dict) assert isinstance(event, RoomNameEvent) def test_alias_event(self): 
parsed_dict = TestClass._load_response("tests/data/events/alias.json") event = RoomAliasEvent.from_dict(parsed_dict) assert isinstance(event, RoomAliasEvent) def test_message_text(self): parsed_dict = TestClass._load_response("tests/data/events/message_text.json") event = RoomMessageText.from_dict(parsed_dict) assert isinstance(event, RoomMessageText) def test_message_emote(self): parsed_dict = TestClass._load_response("tests/data/events/message_emote.json") event = RoomMessageEmote.from_dict(parsed_dict) assert isinstance(event, RoomMessageEmote) def test_message_notice(self): parsed_dict = TestClass._load_response("tests/data/events/message_notice.json") event = RoomMessageNotice.from_dict(parsed_dict) assert isinstance(event, RoomMessageNotice) def test_power_levels(self): parsed_dict = TestClass._load_response("tests/data/events/power_levels.json") event = PowerLevelsEvent.from_dict(parsed_dict) assert isinstance(event, PowerLevelsEvent) levels = event.power_levels admin = "@example:localhost" mod = "@alice:localhost" higher_user = "@carol:localhost" user = "@bob:localhost" assert levels.get_state_event_required_level("m.room.name") == 50 assert levels.get_state_event_required_level("m.room.undefined") == 50 assert levels.get_message_event_required_level("m.room.message") == 25 assert levels.get_message_event_required_level("m.room.undefined") == 0 assert levels.get_notification_required_level("room") == 60 assert levels.get_notification_required_level("non_existent") == 50 assert levels.get_user_level(admin) == 100 assert levels.get_user_level(user) == 0 assert levels.can_user_send_state(admin, "m.room.name") is True assert levels.can_user_send_state(user, "m.room.name") is False assert levels.can_user_send_message(admin) is True assert levels.can_user_send_message(user, "m.room.message") is False assert levels.can_user_invite(admin) is True assert levels.can_user_invite(user) is True assert levels.can_user_kick(admin) is True assert levels.can_user_kick(user) 
is False assert levels.can_user_kick(admin, admin) is False assert levels.can_user_kick(admin, mod) is True assert levels.can_user_kick(mod, admin) is False assert levels.can_user_kick(mod, higher_user) is True assert levels.can_user_kick(higher_user, user) is False assert levels.can_user_ban(admin) is True assert levels.can_user_ban(user) is False assert levels.can_user_ban(admin, admin) is False assert levels.can_user_ban(admin, mod) is True assert levels.can_user_ban(mod, admin) is False assert levels.can_user_ban(mod, higher_user) is True assert levels.can_user_ban(higher_user, user) is False assert levels.can_user_redact(admin) is True assert levels.can_user_redact(user) is False assert levels.can_user_notify(admin, "room") is True assert levels.can_user_notify(mod, "room") is False def test_membership(self): parsed_dict = TestClass._load_response("tests/data/events/member.json") event = RoomMemberEvent.from_dict(parsed_dict) assert isinstance(event, RoomMemberEvent) def test_redaction(self): parsed_dict = TestClass._load_response("tests/data/events/redaction.json") event = RedactionEvent.from_dict(parsed_dict) assert isinstance(event, RedactionEvent) def test_sticker(self): parsed_dict = TestClass._load_response("tests/data/events/sticker.json") event = StickerEvent.from_dict(parsed_dict) assert isinstance(event, StickerEvent) def test_reaction(self): parsed_dict = TestClass._load_response("tests/data/events/reaction.json") event = ReactionEvent.from_dict(parsed_dict) assert isinstance(event, ReactionEvent) def test_empty_event(self): parsed_dict = {} response = RedactedEvent.from_dict(parsed_dict) assert isinstance(response, UnknownBadEvent) def test_room_encryption(self): parsed_dict = TestClass._load_response("tests/data/events/room_encryption.json") event = Event.parse_event(parsed_dict) assert isinstance(event, RoomEncryptionEvent) def test_room_key(self): parsed_dict = TestClass._load_response("tests/data/events/room_key.json") event = 
RoomKeyEvent.from_dict(parsed_dict, "@alice:example.org", "alice_key") assert isinstance(event, RoomKeyEvent) def test_forwarded_room_key(self): parsed_dict = TestClass._load_response( "tests/data/events/forwarded_room_key.json" ) event = ForwardedRoomKeyEvent.from_dict( parsed_dict, "@alice:example.org", "alice_key" ) assert isinstance(event, ForwardedRoomKeyEvent) def test_invalid_state_event(self): for event_type, event_file in [ ("m.room.create", "create.json"), ("m.room.guest_access", "guest_access.json"), ("m.room.join_rules", "join_rules.json"), ("m.room.history_visibility", "history_visibility.json"), ("m.room.member", "member.json"), ("m.room.canonical_alias", "alias.json"), ("m.room.name", "name.json"), ("m.room.topic", "topic.json"), ("m.room.avatar", "room_avatar.json"), ("m.room.power_levels", "power_levels.json"), ("m.room.encryption", "room_encryption.json"), ]: parsed_dict = TestClass._load_response(f"tests/data/events/{event_file}") parsed_dict.pop("state_key") event = Event.parse_event(parsed_dict) assert isinstance(event, BadEvent) assert event.source["type"] == event_type def test_invalid_invite_state_events(self): for event_type, event_file in [ ("m.room.member", "member.json"), ("m.room.canonical_alias", "alias.json"), ("m.room.name", "name.json"), ]: parsed_dict = TestClass._load_response(f"tests/data/events/{event_file}") parsed_dict.pop("state_key") event = InviteEvent.parse_event(parsed_dict) assert isinstance(event, BadEvent) assert event.source["type"] == event_type for event_type, event_file in [ ("m.room.member", "member.json"), ("m.room.canonical_alias", "alias.json"), ("m.room.name", "name.json"), ]: parsed_dict = TestClass._load_response(f"tests/data/events/{event_file}") parsed_dict.pop("type") event = InviteEvent.parse_event(parsed_dict) assert not event def test_invite_events(self): for event_type, event_file in [ (InviteMemberEvent, "member.json"), (InviteAliasEvent, "alias.json"), (InviteNameEvent, "name.json"), ]: parsed_dict 
= TestClass._load_response(f"tests/data/events/{event_file}") event = InviteEvent.parse_event(parsed_dict) assert isinstance(event, event_type) def test_megolm_event(self): parsed_dict = TestClass._load_response("tests/data/events/megolm.json") event = Event.parse_event(parsed_dict) assert isinstance(event, MegolmEvent) parsed_dict["content"]["algorithm"] = "m.megolm.unknown" event = Event.parse_event(parsed_dict) assert isinstance(event, UnknownEncryptedEvent) def test_olm_event(self): parsed_dict = TestClass._load_response("tests/data/events/olm.json") event = ToDeviceEvent.parse_event(parsed_dict) assert isinstance(event, OlmEvent) parsed_dict["content"]["algorithm"] = "m.megolm.unknown" event = ToDeviceEvent.parse_event(parsed_dict) assert not event def test_ephemeral_event(self): event = EphemeralEvent.parse_event({}) assert not event event = EphemeralEvent.parse_event({"type": "m.unknown", "content": {}}) assert not event def test_typing_event(self): parsed_dict = TestClass._load_response("tests/data/events/typing.json") event = EphemeralEvent.parse_event(parsed_dict) assert isinstance(event, TypingNoticeEvent) assert "@bob:example.com" in event.users def test_read_receipt_event(self): parsed_dict = TestClass._load_response("tests/data/events/receipt.json") event = EphemeralEvent.parse_event(parsed_dict) # Warning: this is directly tied to the above file; any changes below # need to be reflected there too. receipt = Receipt( "$152037280074GZeOm:localhost", "m.read", "@bob:example.com", 1520372804619 ) assert isinstance(event, ReceiptEvent) assert receipt in event.receipts def test_read_receipt_event_bad_ts(self): """Test reading an m_receipt event that has malformed data for one user. @alice:example.com is a user using Synapse pre 0.99.3 with a timestamp bug. 
We want to ignore her malformed value without losing the receipt data from @bob:example.com """ parsed_dict = TestClass._load_response("tests/data/events/receipt_invalid.json") event = EphemeralEvent.parse_event(parsed_dict) # Warning: this is directly tied to the above file; any changes below # need to be reflected there too. receipt = Receipt( "$152037280074GZeOm:localhost", "m.read", "@bob:example.com", 1520372804619 ) assert isinstance(event, ReceiptEvent) assert receipt in event.receipts def test_account_data_event(self): event = AccountDataEvent.parse_event({}) assert isinstance(event, UnknownBadEvent) event = AccountDataEvent.parse_event({"type": "m.unknown", "content": {}}) assert isinstance(event, UnknownAccountDataEvent) def test_fully_read_event(self): parsed_dict = TestClass._load_response("tests/data/events/fully_read.json") event = AccountDataEvent.parse_event(parsed_dict) assert isinstance(event, FullyReadEvent) def test_invalid_call_events(self): for _, event_file in [ (CallInviteEvent, "call_invite.json"), (CallAnswerEvent, "call_answer.json"), (CallCandidatesEvent, "call_candidates.json"), (CallHangupEvent, "call_hangup.json"), ]: parsed_dict = TestClass._load_response(f"tests/data/events/{event_file}") parsed_dict["content"].pop("call_id") event = CallEvent.parse_event(parsed_dict) assert isinstance(event, BadEvent) def test_call_events(self): for event_type, event_file in [ (CallInviteEvent, "call_invite.json"), (CallAnswerEvent, "call_answer.json"), (CallCandidatesEvent, "call_candidates.json"), (CallHangupEvent, "call_hangup.json"), ]: parsed_dict = TestClass._load_response(f"tests/data/events/{event_file}") event = CallEvent.parse_event(parsed_dict) assert isinstance(event, event_type) def test_key_verification_events(self): for event_type, event_file in [ (KeyVerificationStart, "key_start.json"), (KeyVerificationAccept, "key_accept.json"), (KeyVerificationKey, "key_key.json"), (KeyVerificationMac, "key_mac.json"), (KeyVerificationCancel, 
"key_cancel.json"), ]: parsed_dict = TestClass._load_response(f"tests/data/events/{event_file}") event = ToDeviceEvent.parse_event(parsed_dict) assert isinstance(event, event_type) def test_invalid_key_verification(self): for _, event_file in [ (KeyVerificationStart, "key_start.json"), (KeyVerificationAccept, "key_accept.json"), (KeyVerificationKey, "key_key.json"), (KeyVerificationMac, "key_mac.json"), (KeyVerificationCancel, "key_cancel.json"), ]: parsed_dict = TestClass._load_response(f"tests/data/events/{event_file}") parsed_dict["content"].pop("transaction_id") event = ToDeviceEvent.parse_event(parsed_dict) assert isinstance(event, UnknownBadEvent) def test_invalid_room_event(self): event = Event.parse_event({"type": "m.unknown"}) assert isinstance(event, UnknownBadEvent) def test_unknown_room_event(self): parsed_dict = TestClass._load_response("tests/data/events/unknown.json") event = Event.parse_event(parsed_dict) assert isinstance(event, UnknownEvent) def test_unknown_to_device_event(self): parsed_dict = TestClass._load_response( "tests/data/events/unknown_to_device.json" ) event = ToDeviceEvent.parse_event(parsed_dict) assert isinstance(event, UnknownToDeviceEvent) def test_redacted_state_event(self): parsed_dict = TestClass._load_response("tests/data/events/redacted_state.json") event = Event.parse_event(parsed_dict) assert isinstance(event, RedactedEvent) def test_dummy_event(self): parsed_dict = TestClass._load_response("tests/data/events/dummy.json") event = DummyEvent.from_dict(parsed_dict, "@alice:example.org", "alice_key") assert isinstance(event, DummyEvent) def test_room_key_request(self): parsed_dict = TestClass._load_response( "tests/data/events/room_key_request.json" ) event = ToDeviceEvent.parse_event(parsed_dict) assert isinstance(event, RoomKeyRequest) assert event.room_id is not None parsed_dict = TestClass._load_response( "tests/data/events/room_key_request_cancel.json" ) event = ToDeviceEvent.parse_event(parsed_dict) assert 
isinstance(event, RoomKeyRequestCancellation) def test_encrypted_media_thumbnails(self): parsed_dict = TestClass._load_response( "tests/data/events/room_encrypted_image.json" ) event = Event.parse_decrypted_event(parsed_dict) assert isinstance(event, RoomEncryptedImage) assert event.thumbnail_url assert event.thumbnail_key assert event.thumbnail_hashes assert event.thumbnail_iv assert event.mimetype def test_event_flattening(self): parsed_dict = TestClass._load_response( "tests/data/events/to_flatten.json", ) event = Event.from_dict(parsed_dict) assert event.flattened() == { "content.body": "foo", "content.m.dotted.key": "bar", "event_id": "!test:example.org", "origin_server_ts": 0, "sender": "@alice:example.org", "type": "m.flatten_test", } def test_pushrules_parsing(self): parsed_dict = TestClass._load_response( "tests/data/events/push_rules.json", ) parsed_rule = parsed_dict["content"]["global"]["override"][0] event = PushRulesEvent.from_dict(parsed_dict) assert isinstance(event, PushRulesEvent) assert bool(event) is True rule = event.global_rules.override[0] for i, action in enumerate(rule.actions): assert action.as_value == parsed_rule["actions"][i] for i, condition in enumerate(rule.conditions): assert condition.as_value == parsed_rule["conditions"][i] def test_pushrules_matching(self): room = MatrixRoom("!test:example.org", "@alice:example.com") name = "Alice" event = Event.from_dict( { "event_id": "!test:example.org", "room_id": room.room_id, "origin_server_ts": 0, "sender": "@alice:example.org", "type": "m.test", "words": "foo bar", "int": 0, "content": {"body": "a,here c"}, } ) args = (event, room, name) # PushEventMatch must_succeed = [ ("type", "m.test"), ("type", "M*T"), # glob + ignoring case ("content.body", "heRe"), # word boundaries + ignoring case ("content.body", "a"), # word at the start of the string ("content.body", "c"), # word at the end of the string ("content.body", "[a-z]*c"), # more glob patterns ] must_fail = [ ("int", "0"), # only 
match string values ("words", "foo"), # match words only for content.body ("content.body", "her"), # not a full word match ] for key, pattern in must_succeed: assert PushEventMatch(key, pattern).matches(*args) for key, pattern in must_fail: assert not PushEventMatch(key, pattern).matches(*args) # PushContainsDisplayName assert not PushContainsDisplayName().matches(*args) del event.source["content"]["body"] assert not PushContainsDisplayName().matches(*args) event.source["content"]["body"] = "alice!" assert PushContainsDisplayName().matches(*args) # PushRoomMemberCount room.summary = RoomSummary(100, 5) # invited members don't matter tests = [(5, "=="), (6, "<"), (4, ">"), (5, "<="), (4, ">=")] for count, operator in tests: assert PushRoomMemberCount(count, operator).matches(*args) # PushSenderNotificationPermission assert not PushSenderNotificationPermission("room").matches(*args) room.power_levels.users[event.sender] = 50 assert PushSenderNotificationPermission("room").matches(*args) # PushUnknownCondition assert not PushUnknownCondition({}).matches(*args) # PushRule rule = PushRule(PushRuleKind.override, "all", False) assert rule.matches(*args) rule.enabled = False assert not rule.matches(*args) cnds = [PushEventMatch("type", "m.test")] rule = PushRule(PushRuleKind.override, "test", False, conditions=cnds) assert rule.matches(*args) cnds.append(PushUnknownCondition({})) assert not rule.matches(*args) rule = PushRule(PushRuleKind.room, room.room_id, False) assert rule.matches(*args) rule.id += "blah" assert not rule.matches(*args) rule = PushRule(PushRuleKind.sender, event.sender, False) assert rule.matches(*args) rule.id += "blah" assert not rule.matches(*args) event.source["content"]["body"] = "a here! 
b c" rule = PushRule(PushRuleKind.content, "here", False, pattern="here") assert rule.matches(*args) rule.pattern = "her" assert not rule.matches(*args) # PushRuleset ruleset = PushRuleset( room=[ PushRule(PushRuleKind.room, "blah", False), PushRule(PushRuleKind.room, room.room_id, False), ], sender=[PushRule(PushRuleKind.sender, event.sender, False)], ) assert ruleset.matching_rule(*args) is ruleset.room[1] del ruleset.room[1] del ruleset.sender[0] assert ruleset.matching_rule(*args) is None matrix-nio-0.24.0/tests/helpers.py000066400000000000000000000127521455215747700170720ustar00rootroot00000000000000""" helpers ~~~~~~~ This module contains helpers for the nio tests. """ import os from random import choice from string import ascii_letters, ascii_uppercase from faker import Faker from faker.providers import BaseProvider from hpack.hpack import Encoder from hyperframe.frame import ( AltSvcFrame, ContinuationFrame, DataFrame, GoAwayFrame, HeadersFrame, PingFrame, PriorityFrame, PushPromiseFrame, RstStreamFrame, SettingsFrame, WindowUpdateFrame, ) from nio.crypto import OlmAccount, OlmDevice from nio.store import Ed25519Key SAMPLE_SETTINGS = { SettingsFrame.HEADER_TABLE_SIZE: 4096, SettingsFrame.ENABLE_PUSH: 1, SettingsFrame.MAX_CONCURRENT_STREAMS: 2, } faker = Faker() class Provider(BaseProvider): def mx_id(self): return f"@{faker.user_name()}:{faker.hostname()}" def avatar_url(self): return f"mxc://{faker.hostname()}/{''.join(choice(ascii_letters) for i in range(24))}#auto" def device_id(self): return "".join(choice(ascii_uppercase) for i in range(10)) def olm_key_pair(self): return OlmAccount().identity_keys def olm_device(self): user_id = faker.mx_id() device_id = faker.device_id() key_pair = faker.olm_key_pair() return OlmDevice( user_id, device_id, key_pair, ) def ed25519_key(self): return Ed25519Key( faker.mx_id(), faker.device_id(), faker.olm_key_pair()["ed25519"] ) faker.add_provider(Provider) ephemeral_dir = os.path.join(os.curdir, 
"tests/data/encryption") def ephemeral(func): def wrapper(*args, **kwargs): try: ret = func(*args, **kwargs) finally: os.remove(os.path.join(ephemeral_dir, "@ephemeral:example.org_DEVICEID.db")) return ret return wrapper class FrameFactory: """ A class containing lots of helper methods and state to build frames. This allows test cases to easily build correct HTTP/2 frames to feed to hyper-h2. """ def __init__(self): self.encoder = Encoder() def refresh_encoder(self): self.encoder = Encoder() def preamble(self): return b"PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n" def build_headers_frame(self, headers, flags=[], stream_id=1, **priority_kwargs): """ Builds a single valid headers frame out of the contained headers. """ f = HeadersFrame(stream_id) f.data = self.encoder.encode(headers) f.flags.add("END_HEADERS") for flag in flags: f.flags.add(flag) for k, v in priority_kwargs.items(): setattr(f, k, v) return f def build_continuation_frame(self, header_block, flags=[], stream_id=1): """ Builds a single continuation frame out of the binary header block. """ f = ContinuationFrame(stream_id) f.data = header_block f.flags = set(flags) return f def build_data_frame(self, data, flags=None, stream_id=1, padding_len=0): """ Builds a single data frame out of a chunk of data. """ flags = set(flags) if flags is not None else set() f = DataFrame(stream_id) f.data = data f.flags = flags if padding_len: flags.add("PADDED") f.pad_length = padding_len return f def build_settings_frame(self, settings, ack=False): """ Builds a single settings frame. """ f = SettingsFrame(0) if ack: f.flags.add("ACK") f.settings = settings return f def build_window_update_frame(self, stream_id, increment): """ Builds a single WindowUpdate frame. """ f = WindowUpdateFrame(stream_id) f.window_increment = increment return f def build_ping_frame(self, ping_data, flags=None): """ Builds a single Ping frame. 
""" f = PingFrame(0) f.opaque_data = ping_data if flags: f.flags = set(flags) return f def build_goaway_frame(self, last_stream_id, error_code=0, additional_data=b""): """ Builds a single GOAWAY frame. """ f = GoAwayFrame(0) f.error_code = error_code f.last_stream_id = last_stream_id f.additional_data = additional_data return f def build_rst_stream_frame(self, stream_id, error_code=0): """ Builds a single RST_STREAM frame. """ f = RstStreamFrame(stream_id) f.error_code = error_code return f def build_push_promise_frame( self, stream_id, promised_stream_id, headers, flags=[] ): """ Builds a single PUSH_PROMISE frame. """ f = PushPromiseFrame(stream_id) f.promised_stream_id = promised_stream_id f.data = self.encoder.encode(headers) f.flags = set(flags) f.flags.add("END_HEADERS") return f def build_priority_frame(self, stream_id, weight, depends_on=0, exclusive=False): """ Builds a single priority frame. """ f = PriorityFrame(stream_id) f.depends_on = depends_on f.stream_weight = weight f.exclusive = exclusive return f def build_alt_svc_frame(self, stream_id, origin, field): """ Builds a single ALTSVC frame. 
""" f = AltSvcFrame(stream_id) f.origin = origin f.field = field return f matrix-nio-0.24.0/tests/http2_test.py000066400000000000000000000105071455215747700175240ustar00rootroot00000000000000import h2 import pytest from nio.client import HttpClient, RequestInfo, TransportType from nio.exceptions import LocalProtocolError from nio.http import Http2Response from nio.responses import LoginResponse, SyncResponse class TestClass: example_response_headers = [(":status", "200"), ("server", "fake-serv/0.1.0")] @staticmethod def _load_response(filename): with open(filename, "rb") as f: return f.read() def login_response(self, stream_id, frame_factory): f = frame_factory.build_headers_frame( headers=self.example_response_headers, stream_id=stream_id ) login_body = self._load_response("tests/data/login_response.json") data = frame_factory.build_data_frame( data=login_body, stream_id=stream_id, flags=["END_STREAM"] ) return f.serialize() + data.serialize() def sync_response(self, stream_id, frame_factory): f = frame_factory.build_headers_frame( headers=self.example_response_headers, stream_id=stream_id ) body = self._load_response("tests/data/sync.json") data = frame_factory.build_data_frame( data=body, stream_id=stream_id, flags=["END_STREAM"] ) return f.serialize() + data.serialize() def test_client_lag(self, frame_factory): client = HttpClient("localhost", "example") client.connect(TransportType.HTTP2) response = Http2Response() response.send_time = 0 response.receive_time = 30 response.timeout = 25 * 1000 response2 = Http2Response() response2.send_time = 0 response2.receive_time = 31 response2.timeout = 25 * 1000 client.connection._responses[response.uuid] = response client.connection._responses[response2.uuid] = response2 typed_response = RequestInfo("sync", 25 * 1000) client.requests_made[response.uuid] = typed_response assert client.lag == 6 def test_client_local_error(self, frame_factory): client = HttpClient("localhost", "example") with 
pytest.raises(LocalProtocolError): uuid, request = client.login("wordpass") client.connect(TransportType.HTTP2) uuid, request = client.login("wordpass") with pytest.raises(LocalProtocolError): uuid, request = client.sync() client.receive(self.login_response(1, frame_factory)) client.next_response() uuid, request = client.sync() def test_client_receive(self, frame_factory): client = HttpClient("localhost", "example") client.connect(TransportType.HTTP2) uuid, request = client.login("wordpass") conf = h2.config.H2Configuration(client_side=True) server = h2.connection.H2Connection(conf) server.max_inbound_frame_size = 64 * 1024 server.initiate_connection() server.receive_data(frame_factory.preamble()) server.receive_data(request) # assert events[0].headers == [] client.receive(self.login_response(1, frame_factory)) response = client.next_response() assert isinstance(response, LoginResponse) assert response.uuid == uuid uuid, request = client.sync() server.receive_data(request) client.receive(self.sync_response(3, frame_factory)) response = client.next_response() assert isinstance(response, SyncResponse) assert response.uuid == uuid sync_uuid, request = client.sync() server.receive_data(request) content = {"body": "test", "msgtype": "m.text"} send_uuid, send_request = client.room_send( "!test:localhost", "m.room.message", content ) def test_frame_splitting(self, frame_factory): client = HttpClient("localhost", "example") data = client.connect(TransportType.HTTP2) client.connection._connection.outbound_flow_control_window = 5 uuid, request = client.login("wordpass") assert client.connection._data_to_send to_send = data + request while to_send: f = frame_factory.build_window_update_frame( stream_id=0, increment=5, ) client.receive(f.serialize()) to_send = client.data_to_send() assert not client.connection._data_to_send matrix-nio-0.24.0/tests/http_test.py000066400000000000000000000016441455215747700174440ustar00rootroot00000000000000from __future__ import annotations from 
typing import Any, Dict from nio.client import HttpClient class TestClass: @staticmethod def _load_response(filename: str) -> Dict[Any, Any]: with open(filename, "rb") as f: return f.read() def test_503(self): client = HttpClient("localhost", "example") client.connect() client.login("test") transport_response = self._load_response("tests/data/http_503.txt") client.receive(transport_response) response = client.next_response() assert response.status_code == 503 def test_502(self): client = HttpClient("localhost", "example") client.connect() client.login("test") transport_response = self._load_response("tests/data/http_502.txt") client.receive(transport_response) response = client.next_response() assert response.status_code == 502 matrix-nio-0.24.0/tests/key_export_test.py000066400000000000000000000065721455215747700206630ustar00rootroot00000000000000import json from os import path import pytest from hypothesis import given from hypothesis.strategies import binary from nio import EncryptionError from nio.crypto import Olm from nio.crypto.key_export import decrypt, decrypt_and_read, encrypt, encrypt_and_save from nio.store import DefaultStore TEST_ROOM = "!test:example.org" class TestClass: @given(binary()) def test_encrypt(self, data): passphrase = "A secret" ciphertext = encrypt(data, passphrase, count=10) plaintext = decrypt(ciphertext, passphrase) assert data == plaintext def test_encrypt_rounds(self, benchmark): data = b"data" passphrase = "A secret" benchmark(encrypt, data, passphrase, count=10000) def test_decrypt_failure(self): data = b"data" passphrase = "A secret" ciphertext = encrypt(data, passphrase, count=10) with pytest.raises( ValueError, match="HMAC check failed for encrypted payload." 
): decrypt(ciphertext, "Fake key") def test_encrypt_file(self, tempdir): data = b"data" passphrase = "A secret" file = path.join(tempdir, "keys_file") encrypt_and_save(data, file, passphrase, count=10) plaintext = decrypt_and_read(file, passphrase) assert plaintext == data def test_export(self, tempdir): user_id = "ephemeral" device_id = "DEVICEID" file = path.join(tempdir, "keys_file") store = DefaultStore(user_id, device_id, tempdir, "") olm = Olm(user_id, device_id, store) olm.create_outbound_group_session(TEST_ROOM) out_session = olm.outbound_group_sessions[TEST_ROOM] assert olm.inbound_group_store.get( TEST_ROOM, olm.account.identity_keys["curve25519"], out_session.id ) olm.export_keys(file, "pass") alice_store = DefaultStore("alice", device_id, tempdir, "") alice = Olm("alice", device_id, alice_store) assert not alice.inbound_group_store.get( TEST_ROOM, olm.account.identity_keys["curve25519"], out_session.id ) alice.import_keys(file, "pass") assert alice.inbound_group_store.get( TEST_ROOM, olm.account.identity_keys["curve25519"], out_session.id ) def test_unencrypted_import(self, tempdir): device_id = "DEVICEID" file = path.join(tempdir, "keys_file") with open(file, "w") as f: f.write("{}") alice_store = DefaultStore("alice", device_id, tempdir, "") alice = Olm("alice", device_id, alice_store) with pytest.raises(EncryptionError): alice.import_keys(file, "pass") def test_invalid_json(self, tempdir): device_id = "DEVICEID" file = path.join(tempdir, "keys_file") encrypt_and_save(b"{sessions: [{}]}", file, "pass", count=10) alice_store = DefaultStore("alice", device_id, tempdir, "") alice = Olm("alice", device_id, alice_store) with pytest.raises(EncryptionError): alice.import_keys(file, "pass") def test_invalid_json_schema(self, tempdir): file = path.join(tempdir, "keys_file") payload = {"sessions": [{"algorithm": "test"}]} encrypt_and_save(json.dumps(payload).encode(), file, "pass", count=10) imported = Olm.import_keys_static(file, "pass") assert len(imported) 
== 0 matrix-nio-0.24.0/tests/memory_store_test.py000066400000000000000000000054461455215747700212150ustar00rootroot00000000000000from helpers import faker from nio.crypto import ( DeviceStore, GroupSessionStore, InboundGroupSession, OlmAccount, OutboundGroupSession, OutboundSession, SessionStore, ) BOB_ID = "@bob:example.org" BOB_DEVICE = "AGMTSWVYML" BOB_CURVE = "T9tOKF+TShsn6mk1zisW2IBsBbTtzDNvw99RBFMJOgI" BOB_ONETIME = "6QlQw3mGUveS735k/JDaviuoaih5eEi6S1J65iHjfgU" TEST_ROOM = "!test:example.org" class TestClass: def test_session_store(self): account = OlmAccount() store = SessionStore() session = OutboundSession(account, BOB_CURVE, BOB_ONETIME) assert session not in store assert len(store.values()) == 0 assert not store.get(BOB_CURVE) assert store.add(BOB_CURVE, session) assert len(store.values()) == 1 assert session in store assert not store.add(BOB_CURVE, session) assert len(store.values()) == 1 assert session in store assert (BOB_CURVE, [session]) == list(store.items())[0] def test_session_store_order(self): alice = OlmAccount() bob = OlmAccount() bob_curve = bob.identity_keys["curve25519"] bob.generate_one_time_keys(2) store = SessionStore() first, second = bob.one_time_keys["curve25519"].values() session2 = OutboundSession(alice, bob_curve, second) session = OutboundSession(alice, bob_curve, first) assert session.id != session2.id assert session not in store assert store.add(bob_curve, session) assert len(store[bob_curve]) == 1 assert session in store assert store.add(bob_curve, session2) is True print(store.values()) assert len(store[bob_curve]) == 2 session_a, session_b = store[bob_curve] assert session_a.use_time > session_b.use_time def test_device_get_by_sender_key(self): store = DeviceStore() for _ in range(10): store.add(faker.olm_device()) device = faker.olm_device() store.add(device) fetched_device = store.device_from_sender_key(device.user_id, device.curve25519) assert fetched_device == device def test_group_session_store(self): store = 
GroupSessionStore() account = OlmAccount() out_group = OutboundGroupSession() session = InboundGroupSession( out_group.session_key, account.identity_keys["ed25519"], BOB_CURVE, TEST_ROOM, ) assert session not in store assert not store.get(TEST_ROOM, BOB_CURVE, session.id) assert store.add(session) assert store.get(TEST_ROOM, BOB_CURVE, session.id) assert session in store assert not store.add(session) assert store[TEST_ROOM] == {BOB_CURVE: {session.id: session}} matrix-nio-0.24.0/tests/responses_test.py000066400000000000000000000257451455215747700205160ustar00rootroot00000000000000import json from pathlib import Path from typing import Type import pytest from nio.responses import ( DeleteDevicesAuthResponse, DevicesResponse, DiskDownloadResponse, DownloadError, DownloadResponse, ErrorResponse, JoinedMembersError, JoinedMembersResponse, JoinResponse, KeysClaimResponse, KeysQueryResponse, KeysUploadResponse, LoginError, LoginInfoResponse, LoginResponse, LogoutResponse, MemoryDownloadResponse, ProfileGetAvatarResponse, ProfileGetDisplayNameResponse, ProfileGetResponse, RegisterInteractiveResponse, RegisterResponse, RoomContextError, RoomContextResponse, RoomCreateResponse, RoomForgetResponse, RoomKeyRequestError, RoomKeyRequestResponse, RoomKnockResponse, RoomLeaveResponse, RoomMessagesResponse, RoomTypingResponse, SpaceGetHierarchyResponse, SyncError, SyncResponse, ThumbnailError, ThumbnailResponse, ToDeviceError, ToDeviceResponse, UploadResponse, _ErrorWithRoomId, ) TEST_ROOM_ID = "!test:example.org" def _load_bytes(filename): with open(filename, "rb") as f: return f.read() def _load_response(filename): with open(filename) as f: return json.loads(f.read()) class TestClass: def test_login_parse(self): parsed_dict = _load_response("tests/data/login_response.json") response = LoginResponse.from_dict(parsed_dict) assert isinstance(response, LoginResponse) def test_login_failure_parse(self): parsed_dict = _load_response("tests/data/login_response_error.json") response = 
LoginResponse.from_dict(parsed_dict) assert isinstance(response, LoginError) def test_login_failure_format(self): parsed_dict = _load_response("tests/data/login_invalid_format.json") response = LoginResponse.from_dict(parsed_dict) assert isinstance(response, ErrorResponse) def test_logout_parse(self): parsed_dict = _load_response("tests/data/logout_response.json") response = LogoutResponse.from_dict(parsed_dict) assert isinstance(response, LogoutResponse) def test_room_messages(self): parsed_dict = _load_response("tests/data/room_messages.json") response = RoomMessagesResponse.from_dict(parsed_dict, TEST_ROOM_ID) assert isinstance(response, RoomMessagesResponse) def test_keys_upload(self): parsed_dict = _load_response("tests/data/keys_upload.json") response = KeysUploadResponse.from_dict(parsed_dict) assert isinstance(response, KeysUploadResponse) def test_keys_query(self): parsed_dict = _load_response("tests/data/keys_query.json") response = KeysQueryResponse.from_dict(parsed_dict) assert isinstance(response, KeysQueryResponse) def test_keys_claim(self): parsed_dict = _load_response("tests/data/keys_claim.json") response = KeysClaimResponse.from_dict(parsed_dict, "!test:example.org") assert isinstance(response, KeysClaimResponse) def test_devices(self): parsed_dict = _load_response("tests/data/devices.json") response = DevicesResponse.from_dict(parsed_dict) assert isinstance(response, DevicesResponse) assert response.devices[0].id == "QBUAZIFURK" def test_delete_devices_auth(self): parsed_dict = _load_response("tests/data/delete_devices.json") response = DeleteDevicesAuthResponse.from_dict(parsed_dict) assert isinstance(response, DeleteDevicesAuthResponse) assert response.session == "xxxxxxyz" def test_joined_parse(self): parsed_dict = _load_response("tests/data/joined_members_response.json") response = JoinedMembersResponse.from_dict(parsed_dict, "!testroom") assert isinstance(response, JoinedMembersResponse) def test_joined_fail(self): parsed_dict = {} response 
= JoinedMembersResponse.from_dict(parsed_dict, "!testroom") assert isinstance(response, JoinedMembersError) def test_upload_parse(self): parsed_dict = _load_response("tests/data/upload_response.json") response = UploadResponse.from_dict(parsed_dict) assert isinstance(response, UploadResponse) @pytest.mark.parametrize( ("data", "response_class"), [ (_load_bytes("tests/data/file_response"), MemoryDownloadResponse), (Path("tests/data/file_response"), DiskDownloadResponse), ], ) def test_download(self, data, response_class: Type[DownloadResponse]): response = response_class.from_data(data, "image/png", "example.png") assert isinstance(response, response_class) assert response.body == data assert response.content_type == "image/png" assert response.filename == "example.png" data = _load_response("tests/data/limit_exceeded_error.json") response = response_class.from_data(data, "image/png") assert isinstance(response, DownloadError) assert response.status_code == data["errcode"] response = response_class.from_data("123", "image/png") assert isinstance(response, DownloadError) def test_thumbnail(self): data = _load_bytes("tests/data/file_response") response = ThumbnailResponse.from_data(data, "image/png") assert isinstance(response, ThumbnailResponse) assert response.body == data data = _load_response("tests/data/limit_exceeded_error.json") response = ThumbnailResponse.from_data(data, "image/png") assert isinstance(response, ThumbnailError) assert response.status_code == data["errcode"] response = ThumbnailResponse.from_data("123", "image/png") assert isinstance(response, ThumbnailError) response = ThumbnailResponse.from_data(b"5xx error", "text/html") assert isinstance(response, ThumbnailError) def test_sync_fail(self): parsed_dict = {} response = SyncResponse.from_dict(parsed_dict, 0) assert isinstance(response, SyncError) def test_sync_parse(self): parsed_dict = _load_response("tests/data/sync.json") response = SyncResponse.from_dict(parsed_dict) assert type(response) 
== SyncResponse def test_keyshare_request(self): parsed_dict = { "errcode": "M_LIMIT_EXCEEDED", "error": "Too many requests", "retry_after_ms": 2000, } response = RoomKeyRequestResponse.from_dict( parsed_dict, "1", "1", TEST_ROOM_ID, "megolm.v1" ) assert isinstance(response, RoomKeyRequestError) response = RoomKeyRequestResponse.from_dict( {}, "1", "1", TEST_ROOM_ID, "megolm.v1" ) assert isinstance(response, RoomKeyRequestResponse) def test_get_profile(self): parsed_dict = _load_response("tests/data/get_profile_response.json") response = ProfileGetResponse.from_dict(parsed_dict) assert isinstance(response, ProfileGetResponse) assert response.other_info == {"something_else": 123} def test_get_displayname(self): parsed_dict = _load_response("tests/data/get_displayname_response.json") response = ProfileGetDisplayNameResponse.from_dict(parsed_dict) assert isinstance(response, ProfileGetDisplayNameResponse) def test_get_avatar(self): parsed_dict = _load_response("tests/data/get_avatar_response.json") response = ProfileGetAvatarResponse.from_dict(parsed_dict) assert isinstance(response, ProfileGetAvatarResponse) def test_to_device(self): message = "message" response = ToDeviceResponse.from_dict( {"error": "error", "errcode": "M_UNKNOWN"}, message ) assert isinstance(response, ToDeviceError) response = ToDeviceResponse.from_dict({}, message) assert isinstance(response, ToDeviceResponse) def test_context(self): response = RoomContextResponse.from_dict( {"error": "error", "errcode": "M_UNKNOWN"}, TEST_ROOM_ID ) assert isinstance(response, RoomContextError) assert response.room_id == TEST_ROOM_ID parsed_dict = _load_response("tests/data/context.json") response = RoomContextResponse.from_dict(parsed_dict, TEST_ROOM_ID) assert isinstance(response, RoomContextResponse) assert response.room_id == TEST_ROOM_ID assert not response.events_before assert len(response.events_after) == 1 assert len(response.state) == 9 def test_limit_exceeded_error(self): parsed_dict = 
_load_response("tests/data/limit_exceeded_error.json") response = ErrorResponse.from_dict(parsed_dict) assert isinstance(response, ErrorResponse) assert response.retry_after_ms == parsed_dict["retry_after_ms"] room_id = "!SVkFJHzfwvuaIEawgC:localhost" response2 = _ErrorWithRoomId.from_dict(parsed_dict, room_id) assert isinstance(response2, _ErrorWithRoomId) assert response.retry_after_ms == parsed_dict["retry_after_ms"] assert response2.room_id == room_id def test_room_create(self): parsed_dict = _load_response("tests/data/room_id.json") response = RoomCreateResponse.from_dict(parsed_dict) assert isinstance(response, RoomCreateResponse) def test_join(self): parsed_dict = _load_response("tests/data/room_id.json") response = JoinResponse.from_dict(parsed_dict) assert isinstance(response, JoinResponse) def test_knock(self): parsed_dict = _load_response("tests/data/room_id.json") response = RoomKnockResponse.from_dict(parsed_dict) assert isinstance(response, RoomKnockResponse) def test_room_leave(self): response = RoomLeaveResponse.from_dict({}) assert isinstance(response, RoomLeaveResponse) def test_room_forget(self): response = RoomForgetResponse.from_dict({}, TEST_ROOM_ID) assert isinstance(response, RoomForgetResponse) def test_room_typing(self): response = RoomTypingResponse.from_dict({}, TEST_ROOM_ID) assert isinstance(response, RoomTypingResponse) def test_login_info(self): parsed_dict = _load_response("tests/data/login_info.json") response = LoginInfoResponse.from_dict(parsed_dict) assert isinstance(response, LoginInfoResponse) def test_space_get_hierarchy(self): parsed_dict = _load_response("tests/data/get_hierarchy_response.json") response = SpaceGetHierarchyResponse.from_dict(parsed_dict) assert isinstance(response, SpaceGetHierarchyResponse) def test_register(self): parsed_dict = _load_response("tests/data/register_response.json") response = RegisterResponse.from_dict(parsed_dict) assert isinstance(response, RegisterResponse) def 
test_register_interactive(self): parsed_dict = _load_response("tests/data/register_interactive_response.json") response = RegisterInteractiveResponse.from_dict(parsed_dict) assert isinstance(response, RegisterInteractiveResponse) matrix-nio-0.24.0/tests/room_test.py000066400000000000000000000563331455215747700174460ustar00rootroot00000000000000# Copyright © 2021 Famedly GmbH # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
import pytest from helpers import faker from nio.events import ( InviteAliasEvent, InviteMemberEvent, InviteNameEvent, Receipt, ReceiptEvent, RoomAvatarEvent, RoomCreateEvent, RoomGuestAccessEvent, RoomHistoryVisibilityEvent, RoomJoinRulesEvent, RoomMemberEvent, RoomNameEvent, RoomSpaceChildEvent, RoomSpaceParentEvent, RoomUpgradeEvent, TypingNoticeEvent, ) from nio.responses import RoomSummary from nio.rooms import MatrixInvitedRoom, MatrixRoom TEST_ROOM = "!test:example.org" BOB_ID = "@bob:example.org" ALICE_ID = "@alice:example.org" class TestClass: def _create_test_data(self): pass @property def new_user(self): return faker.mx_id(), faker.name(), faker.avatar_url() @property def test_room(self): room = MatrixRoom(TEST_ROOM, BOB_ID) room.update_summary(RoomSummary(0, 0, [])) return room def test_room_creation(self): room = self.test_room assert room def test_adding_members(self): room = self.test_room assert not room.users mx_id, name, avatar = self.new_user room.add_member(mx_id, name, avatar) room.summary.heroes.append(mx_id) room.summary.joined_member_count += 1 assert room.users assert room.member_count == room.joined_count == 1 assert room.invited_count == 0 room.summary = None assert room.member_count == room.joined_count == 1 assert room.invited_count == 0 member = list(room.users.values())[0] assert member.user_id == mx_id assert member.display_name == name assert member.avatar_url == avatar def test_summary_details(self): room = self.test_room room.summary = None with pytest.raises(ValueError, match="Unusable summary"): assert room._summary_details() room.summary = RoomSummary(None, None, []) with pytest.raises(ValueError, match="Unusable summary"): assert room._summary_details() room.summary = RoomSummary(0, None, []) with pytest.raises(ValueError, match="Unusable summary"): assert room._summary_details() room.summary = RoomSummary(None, 0, []) with pytest.raises(ValueError, match="Unusable summary"): assert room._summary_details() room.summary = 
RoomSummary(0, 0, []) assert room._summary_details() == ([], 0, 0) def test_named_checks(self): room = self.test_room assert not room.is_named assert room.is_group room.name = "Test room" assert room.is_named assert not room.is_group def test_name_calculation_when_unnamed(self): room = self.test_room assert room.named_room_name() is None assert room.display_name == "Empty Room" # Members join room.add_member(BOB_ID, "Bob", None) # us room.summary.joined_member_count += 1 assert room.display_name == "Empty Room" room.add_member("@alice:example.org", "Alice", None) room.summary.heroes.append("@alice:example.org") room.summary.joined_member_count += 1 assert room.display_name == "Alice" room.add_member("@malory:example.org", "Alice", None) room.summary.heroes.append("@malory:example.org") room.summary.joined_member_count += 1 assert ( room.display_name == "Alice (@alice:example.org) and Alice (@malory:example.org)" ) room.add_member("@steve:example.org", "Steve", None) room.summary.heroes.append("@steve:example.org") room.summary.joined_member_count += 1 assert ( room.display_name == "Alice (@alice:example.org), Alice (@malory:example.org) " "and Steve" ) room.add_member("@carol:example.org", "Carol", None) room.summary.joined_member_count += 1 assert ( room.display_name == "Alice (@alice:example.org), Alice (@malory:example.org), " "Steve and 1 other" ) room.add_member("@dave:example.org", "Dave", None) room.summary.joined_member_count += 1 assert ( room.display_name == "Alice (@alice:example.org), Alice (@malory:example.org), " "Steve and 2 others" ) room.add_member("@erin:example.org", "Eirin", None) room.summary.invited_member_count += 1 assert ( room.display_name == "Alice (@alice:example.org), Alice (@malory:example.org), " "Steve and 3 others" ) # Members leave room.summary.joined_member_count = 1 room.summary.invited_member_count = 0 assert ( room.display_name == "Empty Room (had Alice (@alice:example.org), " "Alice (@malory:example.org) and Steve)" ) 
room.remove_member("@steve:example.org") room.summary.heroes.remove("@steve:example.org") assert ( room.display_name == "Empty Room (had Alice (@alice:example.org) and " "Alice (@malory:example.org))" ) room.remove_member("@malory:example.org") room.summary.heroes.remove("@malory:example.org") assert room.display_name == "Empty Room (had Alice)" room.remove_member("@alice:example.org") room.summary.heroes.remove("@alice:example.org") assert room.display_name == "Empty Room" room.remove_member("@bob:example.org") # us assert not room.summary.heroes assert room.display_name == "Empty Room" def test_name_calculation_when_unnamed_no_summary(self): room = self.test_room room.summary = RoomSummary() assert room.named_room_name() is None assert room.display_name == "Empty Room" # Members join room.add_member(BOB_ID, "Bob", None) # us assert room.display_name == "Empty Room" room.add_member("@alice:example.org", "Alice", None) assert room.display_name == "Alice" room.add_member("@malory:example.org", "Alice", None) assert ( room.display_name == "Alice (@alice:example.org) and Alice (@malory:example.org)" ) room.add_member("@steve:example.org", "Steve", None) room.add_member("@carol:example.org", "Carol", None) room.add_member("@dave:example.org", "Dave", None) assert ( room.display_name == "Alice (@alice:example.org), Alice (@malory:example.org), " "Carol, Dave and Steve" ) room.add_member("@erin:example.org", "Eirin", None) assert ( room.display_name == "Alice (@alice:example.org), Alice (@malory:example.org), " "Carol, Dave, Eirin and 1 other" ) room.add_member("@frank:example.org", "Frank", None) assert ( room.display_name == "Alice (@alice:example.org), Alice (@malory:example.org), " "Carol, Dave, Eirin and 2 others" ) room.add_member("@gregor:example.org", "Gregor", None) assert ( room.display_name == "Alice (@alice:example.org), Alice (@malory:example.org), " "Carol, Dave, Eirin and 3 others" ) # Members leave for member in room.users.copy(): 
room.remove_member(member) assert room.display_name == "Empty Room" def test_name_calculation_with_canonical_alias(self): room = self.test_room room.canonical_alias = "#test:termina.org.uk" assert room.display_name == "#test:termina.org.uk" def test_name_calculation_prefer_name_over_alias(self): room = self.test_room room.canonical_alias = "#test:termina.org.uk" room.name = "Test room" assert room.display_name == "Test room" def test_name_calculation_when_hash_already_prefixed(self): room = self.test_room room.name = "#test" assert room.display_name == "#test" def test_set_room_avatar(self): room = self.test_room room.room_avatar_url = "mxc://foo" assert room.gen_avatar_url == "mxc://foo" def test_room_avatar_calculation_when_no_set_avatar(self): room = self.test_room assert room.room_avatar_url is None assert room.summary assert room.is_group room.add_member("@bob:example.org", "Bob", "mxc://abc", True) # us room.summary.joined_member_count += 1 assert room.gen_avatar_url is None room.add_member("@carol:example.org", "Carol", "mxc://bar", True) room.summary.invited_member_count += 1 assert room.gen_avatar_url is None room.summary.heroes.append("@carol:example.org") assert room.gen_avatar_url == "mxc://bar" room.name = "Test" assert not room.is_group assert room.gen_avatar_url is None room.name = None assert room.is_group assert room.gen_avatar_url == "mxc://bar" room.add_member("@alice:example.org", "Alice", "mxc://baz") room.summary.heroes.append("@alice:matrix.org") room.summary.joined_member_count += 1 assert room.gen_avatar_url is None def test_room_avatar_calculation_when_no_set_avatar_no_summary(self): room = self.test_room room.summary = None assert room.room_avatar_url is None assert room.is_group room.add_member("@bob:example.org", "Bob", "mxc://abc", True) # us assert room.gen_avatar_url is None room.add_member("@carol:example.org", "Carol", "mxc://bar", True) assert room.gen_avatar_url == "mxc://bar" room.name = "Test" assert not room.is_group assert 
room.gen_avatar_url is None room.name = None assert room.is_group assert room.gen_avatar_url == "mxc://bar" room.add_member("@alice:example.org", "Alice", "mxc://baz") assert room.gen_avatar_url is None def test_user_name_calculation(self): room = self.test_room assert room.user_name("@not_in_the_room:example.org") is None room.add_member("@alice:example.org", "Alice", None) assert room.user_name("@alice:example.org") == "Alice" assert room.user_name_clashes("Alice") == ["@alice:example.org"] room.add_member("@bob:example.org", None, None) assert room.user_name("@bob:example.org") == "@bob:example.org" room.add_member("@malory:example.org", "Alice", None) assert room.user_name("@alice:example.org") == "Alice (@alice:example.org)" assert room.user_name("@malory:example.org") == "Alice (@malory:example.org)" assert room.user_name_clashes("Alice") == [ "@alice:example.org", "@malory:example.org", ] room.remove_member("@alice:example.org") assert room.user_name("@malory:example.org") == "Alice" room.remove_member("@malory:example.org") room.add_member("@alice:example.org", None, None) assert room.user_name("@alice:example.org") == "@alice:example.org" assert room.user_name_clashes("@alice:example.org") == ["@alice:example.org"] room.add_member("@malory:example.org", "@alice:example.org", None) assert room.user_name("@alice:example.org") == "@alice:example.org" assert ( room.user_name("@malory:example.org") == "@alice:example.org (@malory:example.org)" ) assert room.user_name_clashes("@alice:example.org") == [ "@alice:example.org", "@malory:example.org", ] def test_avatar_url(self): room = self.test_room assert room.user_name("@not_in_the_room:example.org") is None assert room.avatar_url("@not_in_the_room:example.org") is None room.add_member("@alice:example.org", "Alice", "mxc://foo") assert room.avatar_url("@alice:example.org") == "mxc://foo" def test_machine_name(self): room = self.test_room assert room.machine_name == TEST_ROOM room.canonical_alias = "Alias room" 
assert room.machine_name == "Alias room" def test_typing_notice_event(self): room = self.test_room assert not room.typing_users room.handle_ephemeral_event(TypingNoticeEvent([BOB_ID])) assert room.typing_users == [BOB_ID] def test_read_receipt_event(self): """Verify that m.read ReceiptEvents update a room's read_receipt dict. Successive m.read receipts should replace the first receipt with the second. """ room = self.test_room assert room.read_receipts == {} r1 = Receipt("event_id", "m.read", BOB_ID, 10) r2 = Receipt("event_id2", "m.read", BOB_ID, 15) r1_event = ReceiptEvent([r1]) r2_event = ReceiptEvent([r2]) room.handle_ephemeral_event(r1_event) assert room.read_receipts == {BOB_ID: r1} room.handle_ephemeral_event(r2_event) assert room.read_receipts == {BOB_ID: r2} def test_non_read_receipt_event(self): """Verify that non-m.read receipts don't leak into a room's read_receipt dict. """ room = self.test_room room.handle_ephemeral_event( ReceiptEvent([Receipt("event_id", "m.downvoted", BOB_ID, 0)]) ) assert room.read_receipts == {} def test_create_event(self): room = self.test_room assert not room.creator room.handle_event( RoomCreateEvent( {"event_id": "event_id", "sender": BOB_ID, "origin_server_ts": 0}, BOB_ID, False, ) ) assert room.creator == BOB_ID assert room.federate is False assert room.room_version == "1" def test_guest_access_event(self): room = self.test_room assert room.guest_access == "forbidden" room.handle_event( RoomGuestAccessEvent( {"event_id": "event_id", "sender": BOB_ID, "origin_server_ts": 0}, "can_join", ) ) assert room.guest_access == "can_join" def test_history_visibility_event(self): room = self.test_room assert room.history_visibility == "shared" room.handle_event( RoomHistoryVisibilityEvent( {"event_id": "event_id", "sender": BOB_ID, "origin_server_ts": 0}, "invited", ) ) assert room.history_visibility == "invited" def test_join_rules_event(self): room = self.test_room assert room.join_rule == "invite" room.handle_event( 
RoomJoinRulesEvent( {"event_id": "event_id", "sender": BOB_ID, "origin_server_ts": 0}, "public", ) ) assert room.join_rule == "public" def test_name_event(self): room = self.test_room assert not room.name room.handle_event( RoomNameEvent( {"event_id": "event_id", "sender": BOB_ID, "origin_server_ts": 0}, "test name", ) ) assert room.name == "test name" def test_space_parent(self): room = self.test_room assert room.parents == set() room.handle_event( RoomSpaceParentEvent( { "event_id": "event_id", "sender": BOB_ID, "origin_server_ts": 0, "content": {}, }, "!X:example.org", ) ) assert "!X:example.org" not in room.parents room.handle_event( RoomSpaceParentEvent( { "event_id": "event_id", "sender": BOB_ID, "origin_server_ts": 0, "content": {"via": ["!A:example.org"]}, }, "!X:example.org", ) ) assert "!X:example.org" in room.parents room.handle_event( RoomSpaceParentEvent( { "event_id": "event_id", "sender": BOB_ID, "origin_server_ts": 0, "content": {}, }, "!X:example.org", ) ) assert "!X:example.org" not in room.parents def test_space_child(self): room = self.test_room assert room.children == set() room.handle_event( RoomSpaceChildEvent( { "event_id": "event_id", "sender": BOB_ID, "origin_server_ts": 0, "content": {}, }, "!X:example.org", ) ) assert "!X:example.org" not in room.children room.handle_event( RoomSpaceChildEvent( { "event_id": "event_id", "sender": BOB_ID, "origin_server_ts": 0, "content": {"via": ["!A:example.org"]}, }, "!X:example.org", ) ) assert "!X:example.org" in room.children room.handle_event( RoomSpaceChildEvent( { "event_id": "event_id", "sender": BOB_ID, "origin_server_ts": 0, "content": {}, }, "!X:example.org", ) ) assert "!X:example.org" not in room.children def test_room_avatar_event(self): room = self.test_room assert not room.gen_avatar_url room.handle_event( RoomAvatarEvent( {"event_id": "event_id", "sender": BOB_ID, "origin_server_ts": 0}, "mxc://foo", ) ) assert room.gen_avatar_url == "mxc://foo" def test_summary_update(self): room = 
self.test_room room.summary = None room.update_summary(RoomSummary(1, 2, [])) assert room.invited_count == 1 assert room.joined_count == 2 assert room.member_count == 3 assert room.summary room.update_summary(RoomSummary(1, 3, ["@alice:example.org"])) assert room.invited_count == 1 assert room.joined_count == 3 assert room.member_count == 4 assert room.summary.heroes == ["@alice:example.org"] def test_invited_room(self): room = MatrixInvitedRoom(TEST_ROOM, BOB_ID) room.handle_event( InviteMemberEvent( {}, "@alice:example.org", BOB_ID, "invite", None, {"membership": "invite"}, ) ) assert room.inviter == "@alice:example.org" assert not room.name room.handle_event(InviteNameEvent({}, BOB_ID, "test name")) assert room.name == "test name" assert not room.canonical_alias room.handle_event(InviteAliasEvent({}, BOB_ID, "test alias")) assert room.canonical_alias == "test alias" def test_handle_member_return_value(self): room = self.test_room assert not room.users mx_id, name, avatar = self.new_user assert room.add_member(mx_id, name, avatar) assert not room.add_member(mx_id, name, avatar) assert room.remove_member(mx_id) assert not room.remove_member(mx_id) def test_user_membership_changes(self): invited_event = RoomMemberEvent( {"event_id": "event1", "sender": BOB_ID, "origin_server_ts": 1}, ALICE_ID, "invite", None, {"membership": "invite", "displayname": "Alice Margarine"}, ) joins_event = RoomMemberEvent( {"event_id": "event2", "sender": ALICE_ID, "origin_server_ts": 2}, ALICE_ID, "join", None, { "membership": "join", "displayname": "Alice Margatroid", "avatar_url": "mxc://new", }, ) leaves_event = RoomMemberEvent( {"event_id": "event3", "sender": ALICE_ID, "origin_server_ts": 3}, ALICE_ID, "leave", None, {"membership": "leave"}, ) unknown_event = RoomMemberEvent( {"event_id": "event4", "sender": ALICE_ID, "origin_server_ts": 4}, ALICE_ID, "bad_membership", None, {"membership": "bad_membership"}, ) room = self.test_room assert not room.users assert not 
room.invited_users # Alice is invited, accepts (her name and avatar changed) then leaves room.handle_membership(invited_event) assert set(room.users) == {ALICE_ID} assert set(room.invited_users) == {ALICE_ID} room.handle_membership(joins_event) assert set(room.users) == {ALICE_ID} assert not room.invited_users assert room.names["Alice Margatroid"] == [ALICE_ID] assert room.users[ALICE_ID].display_name == "Alice Margatroid" assert room.users[ALICE_ID].avatar_url == "mxc://new" room.handle_membership(leaves_event) assert not room.users assert not room.invited_users # Alice is invited and declines room.handle_membership(invited_event) assert set(room.users) == {ALICE_ID} assert set(room.invited_users) == {ALICE_ID} room.handle_membership(leaves_event) assert not room.users assert not room.invited_users # Alice joins without invite then leaves room.handle_membership(joins_event) assert set(room.users) == {ALICE_ID} assert not room.invited_users room.handle_membership(leaves_event) assert not room.users assert not room.invited_users # Ensure we get False if we handle an event that changes nothing or # has an unknown new membership assert not room.handle_membership(leaves_event) assert not room.handle_membership(unknown_event) def test_room_upgrade(self): room = self.test_room room.handle_event( RoomUpgradeEvent( { "event_id": "event5", "sender": ALICE_ID, "origin_server_ts": 4, "state_key": "", }, "This room has been replaced", "!newroom:example.org", ) ) assert room.replacement_room == "!newroom:example.org" matrix-nio-0.24.0/tests/sas_test.py000066400000000000000000001027531455215747700172560ustar00rootroot00000000000000from datetime import timedelta import pytest from helpers import faker from nio.crypto import OlmDevice, Sas, SasState from nio.events import ( KeyVerificationAccept, KeyVerificationCancel, KeyVerificationKey, KeyVerificationMac, KeyVerificationStart, ) from nio.exceptions import LocalProtocolError alice_id = "@alice:example.org" alice_device_id = 
"JLAFKJWSCS" alice_keys = faker.olm_key_pair() bob_id = "@bob:example.org" bob_device_id = "JLAFKJWSRS" bob_keys = faker.olm_key_pair() alice_device = OlmDevice(alice_id, alice_device_id, alice_keys) bob_device = OlmDevice(bob_id, bob_device_id, bob_keys) class TestClass: def test_sas_creation(self): alice = Sas(alice_id, alice_device_id, alice_keys["ed25519"], bob_device) with pytest.raises(LocalProtocolError): alice.accept_verification() def test_sas_start(self): alice = Sas( alice_id, alice_device_id, alice_keys["ed25519"], bob_device, ) assert alice.state == SasState.created start = {"sender": alice_id, "content": alice.start_verification().content} start_event = KeyVerificationStart.from_dict(start) assert isinstance(start_event, KeyVerificationStart) bob = Sas.from_key_verification_start( bob_id, bob_device_id, bob_keys["ed25519"], alice_device, start_event ) with pytest.raises(LocalProtocolError): bob.start_verification() assert bob.state == SasState.started def test_sas_accept(self): alice = Sas( alice_id, alice_device_id, alice_keys["ed25519"], bob_device, ) start = {"sender": alice_id, "content": alice.start_verification().content} start_event = KeyVerificationStart.from_dict(start) bob = Sas.from_key_verification_start( bob_id, bob_device, bob_keys["ed25519"], alice_device, start_event ) accept = {"sender": bob_id, "content": bob.accept_verification().content} accept_event = KeyVerificationAccept.from_dict(accept) assert isinstance(accept_event, KeyVerificationAccept) alice.receive_accept_event(accept_event) assert alice.state == SasState.accepted def test_sas_share_keys(self): alice = Sas( alice_id, alice_device_id, alice_keys["ed25519"], bob_device, ) start = {"sender": alice_id, "content": alice.start_verification().content} start_event = KeyVerificationStart.from_dict(start) bob = Sas.from_key_verification_start( bob_id, bob_device_id, bob_keys["ed25519"], alice_device, start_event ) accept = {"sender": bob_id, "content": 
bob.accept_verification().content} accept_event = KeyVerificationAccept.from_dict(accept) alice.receive_accept_event(accept_event) alice_key = {"sender": alice_id, "content": alice.share_key().content} key_event = KeyVerificationKey.from_dict(alice_key) assert isinstance(key_event, KeyVerificationKey) bob.receive_key_event(key_event) assert bob.state == SasState.key_received bob_key = {"sender": bob_id, "content": bob.share_key().content} key_event = KeyVerificationKey.from_dict(bob_key) assert isinstance(key_event, KeyVerificationKey) alice.receive_key_event(key_event) assert alice.state == SasState.key_received assert alice.get_emoji() == bob.get_emoji() def test_sas_decimals(self): alice = Sas( alice_id, alice_device_id, alice_keys["ed25519"], bob_device, ) start = {"sender": alice_id, "content": alice.start_verification().content} start_event = KeyVerificationStart.from_dict(start) bob = Sas.from_key_verification_start( bob_id, bob_device_id, bob_keys["ed25519"], alice_device, start_event ) accept = {"sender": bob_id, "content": bob.accept_verification().content} accept_event = KeyVerificationAccept.from_dict(accept) alice.receive_accept_event(accept_event) alice.set_their_pubkey(bob.pubkey) bob.set_their_pubkey(alice.pubkey) assert alice.get_decimals() == bob.get_decimals() def test_sas_invalid_commitment(self): alice = Sas( alice_id, alice_device_id, alice_keys["ed25519"], bob_device, ) start = {"sender": alice_id, "content": alice.start_verification().content} start_event = KeyVerificationStart.from_dict(start) bob = Sas.from_key_verification_start( bob_id, bob_device_id, bob_keys["ed25519"], alice_device, start_event ) accept = {"sender": bob_id, "content": bob.accept_verification().content} accept_event = KeyVerificationAccept.from_dict(accept) alice.receive_accept_event(accept_event) alice_key = {"sender": alice_id, "content": alice.share_key().content} key_event = KeyVerificationKey.from_dict(alice_key) assert isinstance(key_event, KeyVerificationKey) 
bob.receive_key_event(key_event) assert bob.state == SasState.key_received bob_key = {"sender": bob_id, "content": bob.share_key().content} bob_key["content"]["key"] = alice.pubkey key_event = KeyVerificationKey.from_dict(bob_key) assert isinstance(key_event, KeyVerificationKey) alice.receive_key_event(key_event) assert alice.state == SasState.canceled def test_sas_mac(self): alice = Sas( alice_id, alice_device_id, alice_keys["ed25519"], bob_device, ) start = {"sender": alice_id, "content": alice.start_verification().content} start_event = KeyVerificationStart.from_dict(start) bob = Sas.from_key_verification_start( bob_id, bob_device_id, bob_keys["ed25519"], alice_device, start_event ) with pytest.raises(LocalProtocolError): alice.accept_sas() alice.set_their_pubkey(bob.pubkey) bob.set_their_pubkey(alice.pubkey) alice.state = SasState.key_received bob.state = SasState.key_received alice.chosen_mac_method = Sas._mac_normal bob.chosen_mac_method = Sas._mac_normal with pytest.raises(LocalProtocolError): alice.get_mac() alice.accept_sas() alice_mac = {"sender": alice_id, "content": alice.get_mac().content} mac_event = KeyVerificationMac.from_dict(alice_mac) assert isinstance(mac_event, KeyVerificationMac) assert not bob.verified bob.receive_mac_event(mac_event) assert bob.state == SasState.mac_received assert not bob.verified bob.accept_sas() assert bob.verified def test_sas_old_mac_method(self): alice = Sas( alice_id, alice_device_id, alice_keys["ed25519"], bob_device, ) start = {"sender": alice_id, "content": alice.start_verification().content} start_event = KeyVerificationStart.from_dict(start) start_event.message_authentication_codes.remove(Sas._mac_normal) bob = Sas.from_key_verification_start( bob_id, bob_device_id, bob_keys["ed25519"], alice_device, start_event ) with pytest.raises(LocalProtocolError): alice.accept_sas() alice.set_their_pubkey(bob.pubkey) bob.set_their_pubkey(alice.pubkey) alice.state = SasState.key_received bob.state = SasState.key_received 
alice.chosen_mac_method = Sas._mac_normal bob.chosen_mac_method = Sas._mac_normal with pytest.raises(LocalProtocolError): alice.get_mac() alice.accept_sas() alice_mac = {"sender": alice_id, "content": alice.get_mac().content} mac_event = KeyVerificationMac.from_dict(alice_mac) assert isinstance(mac_event, KeyVerificationMac) assert not bob.verified bob.receive_mac_event(mac_event) assert bob.state == SasState.mac_received assert not bob.verified bob.accept_sas() assert bob.verified def test_sas_cancellation(self): alice = Sas( alice_id, alice_device_id, alice_keys["ed25519"], bob_device, ) assert not alice.canceled with pytest.raises(LocalProtocolError): alice.get_cancellation() alice.cancel() assert alice.canceled with pytest.raises(LocalProtocolError): alice.start_verification() cancellation = alice.get_cancellation().content assert cancellation == { "transaction_id": alice.transaction_id, "code": "m.user", "reason": "Canceled by user", } def test_sas_invalid_start(self): alice = Sas( alice_id, alice_device_id, alice_keys["ed25519"], bob_device, ) start = {"sender": alice_id, "content": alice.start_verification().content} start_event = KeyVerificationStart.from_dict(start) start_event.method = "m.sas.v0" bob = Sas.from_key_verification_start( bob_id, bob_device_id, bob_keys["ed25519"], alice_device, start_event ) assert bob.canceled def test_sas_reject(self): alice = Sas( alice_id, alice_device_id, alice_keys["ed25519"], bob_device, ) start = {"sender": alice_id, "content": alice.start_verification().content} start_event = KeyVerificationStart.from_dict(start) bob = Sas.from_key_verification_start( bob_id, bob_device_id, bob_keys["ed25519"], alice_device, start_event ) with pytest.raises(LocalProtocolError): alice.reject_sas() alice.set_their_pubkey(bob.pubkey) bob.set_their_pubkey(alice.pubkey) alice.state = SasState.key_received bob.state = SasState.key_received alice.reject_sas() assert alice.canceled def test_sas_timeout(self): alice = Sas( alice_id, 
alice_device_id, alice_keys["ed25519"], bob_device, ) assert not alice.timed_out minute = timedelta(minutes=1) alice.creation_time -= minute assert not alice.timed_out alice.creation_time -= minute * 4 assert alice.timed_out assert alice.canceled def test_sas_event_timeout(self): alice = Sas( alice_id, alice_device_id, alice_keys["ed25519"], bob_device, ) minute = timedelta(minutes=1) assert not alice.timed_out alice._last_event_time -= minute assert alice.timed_out assert alice.canceled def test_sas_local_errors(self): alice = Sas( alice_id, alice_device_id, alice_keys["ed25519"], bob_device, ) start = {"sender": alice_id, "content": alice.start_verification().content} start_event = KeyVerificationStart.from_dict(start) bob = Sas.from_key_verification_start( bob_id, bob_device_id, bob_keys["ed25519"], alice_device, start_event ) alice.set_their_pubkey(bob.pubkey) alice.state = SasState.canceled bob.state = SasState.canceled with pytest.raises(LocalProtocolError): bob.accept_verification() with pytest.raises(LocalProtocolError): alice.share_key() alice.sas_accepted = True with pytest.raises(LocalProtocolError): alice.get_mac() def test_sas_not_ok_events(self): alice = Sas( alice_id, alice_device_id, alice_keys["ed25519"], bob_device, ) start = {"sender": alice_id, "content": alice.start_verification().content} start_event = KeyVerificationStart.from_dict(start) bob = Sas.from_key_verification_start( bob_id, bob_device_id, bob_keys["ed25519"], alice_device, start_event ) accept = {"sender": bob_id, "content": bob.accept_verification().content} accept_event = KeyVerificationAccept.from_dict(accept) accept_event.sender = faker.mx_id() alice.receive_accept_event(accept_event) assert alice.canceled alice.state = SasState.created accept_event.sender = bob_id accept_event.transaction_id = "fake_id" alice.receive_accept_event(accept_event) assert alice.canceled accept_event.transaction_id = alice.transaction_id alice.receive_accept_event(accept_event) assert alice.canceled 
alice.state = SasState.created accept_event.hash = "fake_hash" alice.receive_accept_event(accept_event) assert alice.canceled alice.state = SasState.created accept_event.hash = Sas._hash_v1 alice.receive_accept_event(accept_event) alice_key = {"sender": alice_id, "content": alice.share_key().content} alice_key_event = KeyVerificationKey.from_dict(alice_key) alice_key_event.sender = faker.mx_id() bob.receive_key_event(alice_key_event) assert bob.canceled bob.set_their_pubkey(alice.pubkey) bob.state = SasState.key_received bob.chosen_mac_method = Sas._mac_normal alice.chosen_mac_method = Sas._mac_normal alice.set_their_pubkey(bob.pubkey) alice.state = SasState.key_received bob.accept_sas() bob_mac = {"sender": bob_id, "content": bob.get_mac().content} mac_event = KeyVerificationMac.from_dict(bob_mac) mac_event.sender = faker.mx_id() alice.receive_mac_event(mac_event) assert alice.canceled def test_sas_mac_before_key(self): alice = Sas( alice_id, alice_device_id, alice_keys["ed25519"], bob_device, ) start = {"sender": alice_id, "content": alice.start_verification().content} start_event = KeyVerificationStart.from_dict(start) bob = Sas.from_key_verification_start( bob_id, bob_device_id, bob_keys["ed25519"], alice_device, start_event ) bob.set_their_pubkey(alice.pubkey) bob.state = SasState.key_received bob.chosen_mac_method = Sas._mac_normal bob.accept_sas() bob_mac = {"sender": bob_id, "content": bob.get_mac().content} mac_event = KeyVerificationMac.from_dict(bob_mac) alice.receive_mac_event(mac_event) assert alice.canceled def test_sas_invalid_mac(self): alice = Sas( alice_id, alice_device_id, alice_keys["ed25519"], bob_device, ) start = {"sender": alice_id, "content": alice.start_verification().content} start_event = KeyVerificationStart.from_dict(start) bob = Sas.from_key_verification_start( bob_id, bob_device_id, bob_keys["ed25519"], alice_device, start_event ) with pytest.raises(LocalProtocolError): alice.accept_sas() alice.set_their_pubkey(bob.pubkey) 
bob.set_their_pubkey(alice.pubkey) alice.state = SasState.key_received bob.state = SasState.key_received alice.chosen_mac_method = Sas._mac_normal bob.chosen_mac_method = Sas._mac_normal alice.accept_sas() alice_mac = {"sender": alice_id, "content": alice.get_mac().content} mac_event = KeyVerificationMac.from_dict(alice_mac) mac_event.keys = "FAKEKEYS" bob.receive_mac_event(mac_event) assert bob.canceled assert not bob.verified bob.state = SasState.key_received assert not bob.canceled mac_event = KeyVerificationMac.from_dict(alice_mac) mac_event.mac[f"ed25519:{alice_device_id}"] = "FAKEKEYS" bob.receive_mac_event(mac_event) assert bob.canceled assert not bob.verified def test_client_creation(self, olm_machine): bob_sas = Sas( bob_id, bob_device_id, olm_machine.account.identity_keys["ed25519"], bob_device, ) start = {"sender": bob_id, "content": bob_sas.start_verification().content} start_event = KeyVerificationStart.from_dict(start) assert olm_machine.device_store[bob_id][bob_device_id] olm_machine.handle_key_verification(start_event) alice_sas = olm_machine.key_verifications[start_event.transaction_id] assert alice_sas def test_client_gc(self, olm_machine): bob_sas = Sas( bob_id, bob_device_id, olm_machine.account.identity_keys["ed25519"], bob_device, ) start = {"sender": bob_id, "content": bob_sas.start_verification().content} start_event = KeyVerificationStart.from_dict(start) olm_machine.handle_key_verification(start_event) alice_sas = olm_machine.key_verifications[start_event.transaction_id] alice_sas.cancel() olm_machine.clear_verifications() alice_sas = olm_machine.key_verifications[start_event.transaction_id] assert alice_sas alice_sas.creation_time -= timedelta(minutes=25) olm_machine.clear_verifications() with pytest.raises(KeyError): alice_sas = olm_machine.key_verifications[start_event.transaction_id] def test_client_full_sas(self, olm_machine): alice_device = OlmDevice( olm_machine.user_id, olm_machine.device_id, olm_machine.account.identity_keys, ) 
bob_device = olm_machine.device_store[bob_id][bob_device_id] bob_sas = Sas( bob_id, bob_device_id, bob_device.ed25519, alice_device, ) start = {"sender": bob_id, "content": bob_sas.start_verification().content} start_event = KeyVerificationStart.from_dict(start) assert olm_machine.device_store[bob_id][bob_device_id] olm_machine.handle_key_verification(start_event) alice_sas = olm_machine.key_verifications[start_event.transaction_id] accept = { "sender": olm_machine.user_id, "content": alice_sas.accept_verification().content, } accept_event = KeyVerificationAccept.from_dict(accept) bob_sas.receive_accept_event(accept_event) bob_key = {"sender": bob_id, "content": bob_sas.share_key().content} bob_key_event = KeyVerificationKey.from_dict(bob_key) assert bob_sas.chosen_key_agreement == Sas._key_agreement_v2 assert alice_sas.chosen_key_agreement == Sas._key_agreement_v2 olm_machine.handle_key_verification(bob_key_event) alice_key = {"sender": alice_id, "content": alice_sas.share_key().content} alice_key_event = KeyVerificationKey.from_dict(alice_key) bob_sas.receive_key_event(alice_key_event) assert alice_sas.other_key_set assert bob_sas.other_key_set bob_sas.accept_sas() bob_mac = {"sender": bob_id, "content": bob_sas.get_mac().content} bob_mac_event = KeyVerificationMac.from_dict(bob_mac) olm_machine.handle_key_verification(bob_mac_event) assert alice_sas.state == SasState.mac_received assert not alice_sas.verified alice_sas.accept_sas() assert alice_sas.verified bob_mac_event.keys = "fake_keys" olm_machine.handle_key_verification(bob_mac_event) assert alice_sas.verified def test_client_invalid_key(self, olm_machine): alice_device = OlmDevice( olm_machine.user_id, olm_machine.device_id, olm_machine.account.identity_keys, ) bob_sas = Sas( bob_id, bob_device_id, faker.olm_key_pair()["ed25519"], alice_device, ) start = {"sender": bob_id, "content": bob_sas.start_verification().content} start_event = KeyVerificationStart.from_dict(start) assert 
olm_machine.device_store[bob_id][bob_device_id] olm_machine.handle_key_verification(start_event) alice_sas = olm_machine.key_verifications[start_event.transaction_id] accept = { "sender": olm_machine.user_id, "content": alice_sas.accept_verification().content, } accept_event = KeyVerificationAccept.from_dict(accept) bob_sas.receive_accept_event(accept_event) bob_key = {"sender": bob_id, "content": bob_sas.share_key().content} bob_key_event = KeyVerificationKey.from_dict(bob_key) olm_machine.handle_key_verification(bob_key_event) alice_key = {"sender": alice_id, "content": alice_sas.share_key().content} alice_key_event = KeyVerificationKey.from_dict(alice_key) bob_sas.receive_key_event(alice_key_event) assert alice_sas.other_key_set assert bob_sas.other_key_set bob_sas.accept_sas() bob_mac = {"sender": bob_id, "content": bob_sas.get_mac().content} bob_mac_event = KeyVerificationMac.from_dict(bob_mac) olm_machine.handle_key_verification(bob_mac_event) assert alice_sas.state == SasState.canceled assert not alice_sas.verified with pytest.raises(LocalProtocolError): alice_sas.accept_sas() def test_client_full_we_start(self, olm_machine): alice_device = OlmDevice( olm_machine.user_id, olm_machine.device_id, olm_machine.account.identity_keys, ) bob_device = olm_machine.device_store[bob_id][bob_device_id] start = { "sender": alice_device.user_id, "content": olm_machine.create_sas(bob_device).content, } start_event = KeyVerificationStart.from_dict(start) bob_sas = Sas.from_key_verification_start( bob_device.user_id, bob_device.id, bob_device.ed25519, alice_device, start_event, ) alice_sas = olm_machine.key_verifications[start_event.transaction_id] assert alice_sas accept = {"sender": bob_id, "content": bob_sas.accept_verification().content} accept_event = KeyVerificationAccept.from_dict(accept) olm_machine.handle_key_verification(accept_event) alice_key = {"sender": alice_id, "content": alice_sas.share_key().content} alice_key_event = KeyVerificationKey.from_dict(alice_key) 
bob_sas.receive_key_event(alice_key_event) bob_key = {"sender": bob_id, "content": bob_sas.share_key().content} bob_key_event = KeyVerificationKey.from_dict(bob_key) olm_machine.handle_key_verification(bob_key_event) assert alice_sas.other_key_set assert bob_sas.other_key_set bob_sas.accept_sas() bob_mac = {"sender": bob_id, "content": bob_sas.get_mac().content} bob_mac_event = KeyVerificationMac.from_dict(bob_mac) assert not olm_machine.is_device_verified(bob_device) alice_sas.accept_sas() olm_machine.handle_key_verification(bob_mac_event) assert alice_sas.state == SasState.mac_received assert alice_sas.verified assert olm_machine.is_device_verified(bob_device) def test_client_unknown_device(self, olm_machine): alice_device = OlmDevice( olm_machine.user_id, olm_machine.device_id, olm_machine.account.identity_keys, ) bob_device = faker.olm_device() bob_sas = Sas( bob_device.user_id, bob_device.id, bob_device.ed25519, alice_device ) start = { "sender": bob_device.user_id, "content": bob_sas.start_verification().content, } start_event = KeyVerificationStart.from_dict(start) olm_machine.handle_key_verification(start_event) assert start_event.transaction_id not in olm_machine.key_verifications assert bob_device.user_id in olm_machine.users_for_key_query def test_client_unsupported_method(self, olm_machine): alice_device = OlmDevice( olm_machine.user_id, olm_machine.device_id, olm_machine.account.identity_keys, ) bob_device = olm_machine.device_store[bob_id][bob_device_id] bob_sas = Sas( bob_device.user_id, bob_device.id, bob_device.ed25519, alice_device ) start = { "sender": bob_device.user_id, "content": bob_sas.start_verification().content, } start_event = KeyVerificationStart.from_dict(start) start_event.method = "unsupported" assert not olm_machine.outgoing_to_device_messages olm_machine.handle_key_verification(start_event) assert start_event.transaction_id not in olm_machine.key_verifications assert olm_machine.outgoing_to_device_messages to_device = 
olm_machine.outgoing_to_device_messages[0] assert start_event.transaction_id == to_device.content["transaction_id"] def test_client_unknown_txid(self, olm_machine): alice_device = OlmDevice( olm_machine.user_id, olm_machine.device_id, olm_machine.account.identity_keys, ) bob_device = olm_machine.device_store[bob_id][bob_device_id] bob_sas = Sas( bob_device.user_id, bob_device.id, bob_device.ed25519, alice_device ) start = { "sender": bob_device.user_id, "content": bob_sas.start_verification().content, } start_event = KeyVerificationStart.from_dict(start) olm_machine.handle_key_verification(start_event) bob_key = {"sender": bob_id, "content": bob_sas.share_key().content} bob_key_event = KeyVerificationKey.from_dict(bob_key) bob_key_event.transaction_id = "unknown" olm_machine.handle_key_verification(bob_key_event) alice_sas = olm_machine.key_verifications[start_event.transaction_id] assert alice_sas assert not alice_sas.other_key_set assert bob_key_event.transaction_id not in olm_machine.key_verifications def test_client_accept_cancel(self, olm_machine): alice_device = OlmDevice( olm_machine.user_id, olm_machine.device_id, olm_machine.account.identity_keys, ) bob_device = olm_machine.device_store[bob_id][bob_device_id] start = { "sender": alice_device.user_id, "content": olm_machine.create_sas(bob_device).content, } start_event = KeyVerificationStart.from_dict(start) bob_sas = Sas.from_key_verification_start( bob_device.user_id, bob_device.id, bob_device.ed25519, alice_device, start_event, ) alice_sas = olm_machine.key_verifications[start_event.transaction_id] assert alice_sas accept = {"sender": bob_id, "content": bob_sas.accept_verification().content} accept_event = KeyVerificationAccept.from_dict(accept) olm_machine.handle_key_verification(accept_event) assert not alice_sas.canceled olm_machine.handle_key_verification(accept_event) assert alice_sas.canceled def test_client_cancel_event(self, olm_machine): alice_device = OlmDevice( olm_machine.user_id, 
olm_machine.device_id, olm_machine.account.identity_keys, ) bob_device = olm_machine.device_store[bob_id][bob_device_id] start = { "sender": alice_device.user_id, "content": olm_machine.create_sas(bob_device).content, } start_event = KeyVerificationStart.from_dict(start) bob_sas = Sas.from_key_verification_start( bob_device.user_id, bob_device.id, bob_device.ed25519, alice_device, start_event, ) alice_sas = olm_machine.key_verifications[start_event.transaction_id] assert alice_sas bob_sas.cancel() cancel = {"sender": bob_id, "content": bob_sas.get_cancellation().content} cancel_event = KeyVerificationCancel.from_dict(cancel) assert not alice_sas.canceled olm_machine.handle_key_verification(cancel_event) assert alice_sas.canceled assert alice_sas.transaction_id not in olm_machine.key_verifications def test_key_cancel(self, olm_machine): alice_device = OlmDevice( olm_machine.user_id, olm_machine.device_id, olm_machine.account.identity_keys, ) bob_device = olm_machine.device_store[bob_id][bob_device_id] bob_sas = Sas( bob_device.user_id, bob_device.id, bob_device.ed25519, alice_device ) start = { "sender": bob_device.user_id, "content": bob_sas.start_verification().content, } start_event = KeyVerificationStart.from_dict(start) olm_machine.handle_key_verification(start_event) bob_key = {"sender": bob_id, "content": bob_sas.share_key().content} assert not olm_machine.outgoing_to_device_messages bob_key_event = KeyVerificationKey.from_dict(bob_key) olm_machine.handle_key_verification(bob_key_event) alice_sas = olm_machine.key_verifications[start_event.transaction_id] assert alice_sas assert not alice_sas.canceled assert alice_sas.other_key_set olm_machine.handle_key_verification(bob_key_event) assert alice_sas.canceled assert olm_machine.outgoing_to_device_messages to_device = olm_machine.outgoing_to_device_messages[0] assert start_event.transaction_id == to_device.content["transaction_id"] def test_duplicate_verification(self, olm_machine): alice_device = OlmDevice( 
olm_machine.user_id, olm_machine.device_id, olm_machine.account.identity_keys, ) bob_device = olm_machine.device_store[bob_id][bob_device_id] bob_sas = Sas( bob_device.user_id, bob_device.id, bob_device.ed25519, alice_device ) start = { "sender": bob_device.user_id, "content": bob_sas.start_verification().content, } start_event = KeyVerificationStart.from_dict(start) olm_machine.handle_key_verification(start_event) alice_sas = olm_machine.key_verifications[start_event.transaction_id] assert alice_sas olm_machine.handle_key_verification(start_event) assert alice_sas.canceled new_alice_sas = olm_machine.get_active_sas(bob_id, bob_device_id) assert new_alice_sas assert not new_alice_sas.canceled def test_client_sas_expiration(self, olm_machine): bob_device = olm_machine.device_store[bob_id][bob_device_id] olm_machine.create_sas(bob_device) sas = olm_machine.get_active_sas(bob_id, bob_device_id) assert sas olm_machine.clear_verifications() assert sas in olm_machine.key_verifications.values() minute = timedelta(minutes=1) sas.creation_time -= minute * 5 olm_machine.clear_verifications() assert sas.canceled assert sas not in olm_machine.key_verifications.values() def test_full_sas_key_agreement_v1(self, olm_machine): alice_device = OlmDevice( olm_machine.user_id, olm_machine.device_id, olm_machine.account.identity_keys, ) bob_device = olm_machine.device_store[bob_id][bob_device_id] bob_sas = Sas( bob_id, bob_device_id, bob_device.ed25519, alice_device, ) start = {"sender": bob_id, "content": bob_sas.start_verification().content} start_event = KeyVerificationStart.from_dict(start) start_event.key_agreement_protocols = [Sas._key_agreement_v1] assert olm_machine.device_store[bob_id][bob_device_id] olm_machine.handle_key_verification(start_event) alice_sas = olm_machine.key_verifications[start_event.transaction_id] accept = { "sender": olm_machine.user_id, "content": alice_sas.accept_verification().content, } accept_event = KeyVerificationAccept.from_dict(accept) 
bob_sas.receive_accept_event(accept_event) assert bob_sas.chosen_key_agreement == Sas._key_agreement_v1 assert alice_sas.chosen_key_agreement == Sas._key_agreement_v1 bob_key = {"sender": bob_id, "content": bob_sas.share_key().content} bob_key_event = KeyVerificationKey.from_dict(bob_key) olm_machine.handle_key_verification(bob_key_event) alice_key = {"sender": alice_id, "content": alice_sas.share_key().content} alice_key_event = KeyVerificationKey.from_dict(alice_key) bob_sas.receive_key_event(alice_key_event) assert alice_sas.other_key_set assert bob_sas.other_key_set assert alice_sas.get_emoji() == alice_sas.get_emoji() bob_sas.accept_sas() bob_mac = {"sender": bob_id, "content": bob_sas.get_mac().content} bob_mac_event = KeyVerificationMac.from_dict(bob_mac) olm_machine.handle_key_verification(bob_mac_event) assert alice_sas.state == SasState.mac_received assert not alice_sas.verified alice_sas.accept_sas() assert alice_sas.verified bob_mac_event.keys = "fake_keys" olm_machine.handle_key_verification(bob_mac_event) assert alice_sas.verified matrix-nio-0.24.0/tests/sessions_test.py000066400000000000000000000056251455215747700203360ustar00rootroot00000000000000import pytest from nio import EncryptionError from nio.crypto import ( InboundSession, OlmAccount, OutboundGroupSession, OutboundSession, Session, ) BOB_ID = "@bob:example.org" BOB_DEVICE = "AGMTSWVYML" BOB_CURVE = "T9tOKF+TShsn6mk1zisW2IBsBbTtzDNvw99RBFMJOgI" BOB_ONETIME = "6QlQw3mGUveS735k/JDaviuoaih5eEi6S1J65iHjfgU" TEST_ROOM = "!test:example.org" class TestClass: def test_account(self): account = OlmAccount() assert ( account.identity_keys == OlmAccount.from_pickle(account.pickle()).identity_keys ) def test_session(self): account = OlmAccount() session = OutboundSession(account, BOB_CURVE, BOB_ONETIME) assert ( session.id == Session.from_pickle(session.pickle(), session.creation_time).id ) assert not session.expired def test_olm_session_encryption(self): alice = OlmAccount() bob = OlmAccount() plaintext 
= "It's a secret to everybody" bob_curve = bob.identity_keys["curve25519"] bob.generate_one_time_keys(1) bob_onetime = list(bob.one_time_keys["curve25519"].values())[0] session = OutboundSession(alice, bob_curve, bob_onetime) creation_time = session.use_time # Encrypt a message and check that the use time increased. message = session.encrypt(plaintext) assert session.use_time >= creation_time inbound = InboundSession(bob, message) creation_time = inbound.use_time # Decrypt a message and check that the use time increased. decrypted_plaintext = inbound.decrypt(message) assert inbound.use_time >= creation_time assert decrypted_plaintext == plaintext pickle = inbound.pickle("") unpickled = Session.from_pickle( pickle, inbound.creation_time, "", inbound.use_time ) use_time = unpickled.use_time message = unpickled.encrypt(plaintext) assert unpickled.use_time > use_time pickle = session.pickle("") unpickled = Session.from_pickle( pickle, session.creation_time, "", session.use_time ) use_time = unpickled.use_time decrypted_plaintext = unpickled.decrypt(message) assert unpickled.use_time >= use_time assert decrypted_plaintext == plaintext def test_outbound_group_session(self): session = OutboundGroupSession() assert not session.expired assert not session.shared assert session.message_count == 0 with pytest.raises(EncryptionError): session.encrypt("Hello") session.mark_as_shared() assert session.shared session.encrypt("Hello") assert session.message_count == 1 session.message_count = 101 assert session.expired with pytest.raises(EncryptionError): session.encrypt("Hello") matrix-nio-0.24.0/tests/store_test.py000066400000000000000000000557571455215747700176370ustar00rootroot00000000000000import copy import os from collections import defaultdict import pytest from helpers import ephemeral, ephemeral_dir, faker from nio.crypto import ( InboundGroupSession, OlmAccount, OlmDevice, OutboundGroupSession, OutboundSession, OutgoingKeyRequest, TrustState, ) from nio.exceptions import 
OlmTrustError from nio.store import ( DefaultStore, Ed25519Key, Key, KeyStore, MatrixStore, SqliteMemoryStore, SqliteStore, ) BOB_ID = "@bob:example.org" BOB_DEVICE = "AGMTSWVYML" BOB_CURVE = "T9tOKF+TShsn6mk1zisW2IBsBbTtzDNvw99RBFMJOgI" BOB_ONETIME = "6QlQw3mGUveS735k/JDaviuoaih5eEi6S1J65iHjfgU" TEST_ROOM = "!test:example.org" TEST_ROOM_2 = "!test2:example.org" TEST_FORWARDING_CHAIN = [BOB_CURVE, BOB_ONETIME] @pytest.fixture def matrix_store(tempdir): return MatrixStore("ephemeral", "DEVICEID", tempdir) @pytest.fixture def store(tempdir): store = DefaultStore("ephemeral", "DEVICEID", tempdir) account = OlmAccount() store.save_account(account) return store @pytest.fixture def sqlstore(tempdir): store = SqliteStore("ephemeral", "DEVICEID", tempdir) account = OlmAccount() store.save_account(account) return store @pytest.fixture def sqlmemorystore(): store = SqliteMemoryStore("ephemeral", "DEVICEID") account = OlmAccount() store.save_account(account) return store class TestClass: @property def ephemeral_store(self): return MatrixStore("@ephemeral:example.org", "DEVICEID", ephemeral_dir) @property def example_devices(self): devices = defaultdict(dict) for _ in range(10): device = faker.olm_device() devices[device.user_id][device.id] = device bob_device = OlmDevice( BOB_ID, BOB_DEVICE, {"ed25519": BOB_ONETIME, "curve25519": BOB_CURVE} ) devices[BOB_ID][BOB_DEVICE] = bob_device return devices def copy_store(self, old_store): return MatrixStore(old_store.user_id, old_store.device_id, old_store.store_path) def _create_ephemeral_account(self): store = self.ephemeral_store account = OlmAccount() store.save_account(account) return account def test_key(self): user_id = faker.mx_id() device_id = faker.device_id() fp_key = faker.olm_key_pair()["ed25519"] key = Ed25519Key(user_id, device_id, fp_key) assert key.to_line() == f"{user_id} {device_id} matrix-ed25519 {fp_key}\n" loaded_key = Key.from_line(key.to_line()) assert isinstance(loaded_key, Ed25519Key) assert key.user_id == 
loaded_key.user_id assert key.device_id == loaded_key.device_id assert key.key == loaded_key.key assert key == loaded_key def test_key_store(self, tempdir): store_path = os.path.join(tempdir, "test_store") store = KeyStore(os.path.join(tempdir, "test_store")) assert repr(store) == f"KeyStore object, file: {store_path}" key = faker.ed25519_key() store.add(key) assert key == store.get_key(key.user_id, key.device_id) def test_key_store_add_invalid(self, tempdir): os.path.join(tempdir, "test_store") store = KeyStore(os.path.join(tempdir, "test_store")) key = faker.ed25519_key() store.add(key) fake_key = copy.copy(key) fake_key.key = "FAKE_KEY" with pytest.raises(OlmTrustError): store.add(fake_key) def test_key_store_check_invalid(self, tempdir): os.path.join(tempdir, "test_store") store = KeyStore(os.path.join(tempdir, "test_store")) key = faker.ed25519_key() store.add(key) fake_key = copy.copy(key) fake_key.key = "FAKE_KEY" assert fake_key not in store assert key in store def test_key_store_add_many(self, tempdir): os.path.join(tempdir, "test_store") store = KeyStore(os.path.join(tempdir, "test_store")) keys = [ faker.ed25519_key(), faker.ed25519_key(), faker.ed25519_key(), faker.ed25519_key(), ] store.add_many(keys) store2 = KeyStore(os.path.join(tempdir, "test_store")) for key in keys: assert key in store2 def test_key_store_remove_many(self, tempdir): os.path.join(tempdir, "test_store") store = KeyStore(os.path.join(tempdir, "test_store")) keys = [ faker.ed25519_key(), faker.ed25519_key(), faker.ed25519_key(), faker.ed25519_key(), ] store.add_many(keys) for key in keys: assert key in store store.remove_many(keys) store2 = KeyStore(os.path.join(tempdir, "test_store")) for key in keys: assert key not in store2 @ephemeral def test_store_opening(self): store = self.ephemeral_store account = store.load_account() assert not account @ephemeral def test_store_account_saving(self): account = self._create_ephemeral_account() store2 = self.ephemeral_store loaded_account = 
store2.load_account() assert account.identity_keys == loaded_account.identity_keys @ephemeral def test_store_session(self): account = self._create_ephemeral_account() store = self.ephemeral_store session = OutboundSession(account, BOB_CURVE, BOB_ONETIME) store.save_session(BOB_CURVE, session) store2 = self.ephemeral_store session_store = store2.load_sessions() loaded_session = session_store.get(BOB_CURVE) assert loaded_session assert session.id == loaded_session.id @ephemeral def test_store_group_session(self): account = self._create_ephemeral_account() store = self.ephemeral_store out_group = OutboundGroupSession() in_group = InboundGroupSession( out_group.session_key, account.identity_keys["ed25519"], account.identity_keys["curve25519"], TEST_ROOM, TEST_FORWARDING_CHAIN, ) store.save_inbound_group_session(in_group) store2 = self.ephemeral_store session_store = store2.load_inbound_group_sessions() loaded_session = session_store.get( TEST_ROOM, account.identity_keys["curve25519"], in_group.id ) assert loaded_session assert in_group.id == loaded_session.id assert sorted(loaded_session.forwarding_chain) == sorted(TEST_FORWARDING_CHAIN) @ephemeral def test_store_device_keys(self): self._create_ephemeral_account() store = self.ephemeral_store devices = self.example_devices assert len(devices) == 11 store.save_device_keys(devices) store2 = self.ephemeral_store device_store = store2.load_device_keys() bob_device = device_store[BOB_ID][BOB_DEVICE] assert bob_device assert bob_device.user_id == BOB_ID assert bob_device.id == BOB_DEVICE assert bob_device.ed25519 == BOB_ONETIME assert bob_device.curve25519 == BOB_CURVE assert not bob_device.deleted assert len(device_store.users) == 11 @ephemeral def test_two_stores(self): try: account = self._create_ephemeral_account() store = self.ephemeral_store loaded_account = store.load_account() assert account.identity_keys == loaded_account.identity_keys store2 = MatrixStore("ephemeral2", "DEVICEID2", ephemeral_dir) assert not 
store2.load_account() loaded_account = store.load_account() assert account.identity_keys == loaded_account.identity_keys finally: os.remove(os.path.join(ephemeral_dir, "ephemeral2_DEVICEID2.db")) @ephemeral def test_empty_device_keys(self): self._create_ephemeral_account() store = self.ephemeral_store store.save_device_keys({}) @ephemeral def test_saving_account_twice(self): account = self._create_ephemeral_account() store = self.ephemeral_store session = OutboundSession(account, BOB_CURVE, BOB_ONETIME) store.save_session(BOB_CURVE, session) store.save_account(account) store2 = self.ephemeral_store session_store = store2.load_sessions() loaded_session = session_store.get(BOB_CURVE) assert loaded_session assert session.id == loaded_session.id @ephemeral def test_encrypted_room_saving(self): self._create_ephemeral_account() store = self.ephemeral_store encrypted_rooms = store.load_encrypted_rooms() assert not encrypted_rooms store.save_encrypted_rooms([TEST_ROOM]) store = self.ephemeral_store encrypted_rooms = store.load_encrypted_rooms() assert TEST_ROOM in encrypted_rooms @ephemeral def test_key_request_saving(self): self._create_ephemeral_account() store = self.ephemeral_store key_requests = store.load_outgoing_key_requests() assert not key_requests request = OutgoingKeyRequest("ABCDF", "ABCDF", TEST_ROOM, "megolm.v1") store.add_outgoing_key_request(request) store = self.ephemeral_store key_requests = store.load_outgoing_key_requests() assert "ABCDF" in key_requests.keys() assert request == key_requests["ABCDF"] def test_new_store_opening(self, matrix_store): account = matrix_store.load_account() assert not account def test_new_store_account_saving(self, matrix_store): account = OlmAccount() matrix_store.save_account(account) store2 = MatrixStore( matrix_store.user_id, matrix_store.device_id, matrix_store.store_path ) loaded_account = store2.load_account() assert account.identity_keys == loaded_account.identity_keys def test_new_store_session(self, store): account 
= store.load_account() session = OutboundSession(account, BOB_CURVE, BOB_ONETIME) store.save_session(BOB_CURVE, session) store2 = self.copy_store(store) session_store = store2.load_sessions() loaded_session = session_store.get(BOB_CURVE) assert loaded_session assert session.id == loaded_session.id def test_new_store_group_session(self, store): account = store.load_account() out_group = OutboundGroupSession() in_group = InboundGroupSession( out_group.session_key, account.identity_keys["ed25519"], account.identity_keys["curve25519"], TEST_ROOM, TEST_FORWARDING_CHAIN, ) store.save_inbound_group_session(in_group) store2 = self.copy_store(store) session_store = store2.load_inbound_group_sessions() loaded_session = session_store.get( TEST_ROOM, account.identity_keys["curve25519"], in_group.id ) assert loaded_session assert in_group.id == loaded_session.id assert sorted(loaded_session.forwarding_chain) == sorted(TEST_FORWARDING_CHAIN) def test_new_store_device_keys(self, store): store.load_account() devices = self.example_devices assert len(devices) == 11 store.save_device_keys(devices) store2 = self.copy_store(store) device_store = store2.load_device_keys() # pdb.set_trace() bob_device = device_store[BOB_ID][BOB_DEVICE] assert bob_device assert bob_device.user_id == BOB_ID assert bob_device.id == BOB_DEVICE assert bob_device.ed25519 == BOB_ONETIME assert bob_device.curve25519 == BOB_CURVE assert not bob_device.deleted assert len(device_store.users) == 11 def test_new_saving_account_twice(self, store): account = store.load_account() session = OutboundSession(account, BOB_CURVE, BOB_ONETIME) store.save_session(BOB_CURVE, session) store.save_account(account) store2 = self.copy_store(store) session_store = store2.load_sessions() loaded_session = session_store.get(BOB_CURVE) assert loaded_session assert session.id == loaded_session.id def test_new_encrypted_room_saving(self, store): encrypted_rooms = store.load_encrypted_rooms() assert not encrypted_rooms 
store.save_encrypted_rooms([TEST_ROOM]) store2 = self.copy_store(store) encrypted_rooms = store2.load_encrypted_rooms() assert TEST_ROOM in encrypted_rooms def test_new_encrypted_room_delete(self, store): encrypted_rooms = store.load_encrypted_rooms() assert not encrypted_rooms store.save_encrypted_rooms([TEST_ROOM, TEST_ROOM_2]) store2 = self.copy_store(store) encrypted_rooms = store2.load_encrypted_rooms() assert TEST_ROOM in encrypted_rooms assert TEST_ROOM_2 in encrypted_rooms store.delete_encrypted_room(TEST_ROOM_2) store3 = self.copy_store(store2) encrypted_rooms = store3.load_encrypted_rooms() assert TEST_ROOM in encrypted_rooms assert TEST_ROOM_2 not in encrypted_rooms def test_new_key_request_saving(self, store): key_requests = store.load_outgoing_key_requests() assert not key_requests request = OutgoingKeyRequest("ABCDF", "ABCDF", TEST_ROOM, "megolm.v1") store.add_outgoing_key_request(request) store2 = self.copy_store(store) key_requests = store2.load_outgoing_key_requests() assert "ABCDF" in key_requests.keys() assert request == key_requests["ABCDF"] def test_db_upgrade(self, tempdir): user = "ephemeral" device_id = "DEVICE_ID" user2 = "alice" device_id2 = "ALICE_ID" store = MatrixStore(user, device_id, tempdir, database_name="test.db") account = OlmAccount() session = OutboundSession(account, BOB_CURVE, BOB_ONETIME) out_group = OutboundGroupSession() in_group = InboundGroupSession( out_group.session_key, account.identity_keys["ed25519"], account.identity_keys["curve25519"], TEST_ROOM, TEST_FORWARDING_CHAIN, ) devices = self.example_devices assert len(devices) == 11 store.save_account(account) store.save_session(BOB_CURVE, session) store.save_inbound_group_session(in_group) store.save_device_keys(devices) store2 = MatrixStore(user2, device_id2, tempdir, database_name="test.db") account2 = OlmAccount() store2.save_account(account2) del store store = MatrixStore(user, device_id, tempdir, database_name="test.db") loaded_account = store.load_account() assert 
account.identity_keys == loaded_account.identity_keys session_store = store.load_sessions() loaded_session = session_store.get(BOB_CURVE) session_store = store.load_inbound_group_sessions() assert loaded_session assert session.id == loaded_session.id loaded_session = session_store.get( TEST_ROOM, account.identity_keys["curve25519"], in_group.id ) device_store = store.load_device_keys() # pdb.set_trace() assert loaded_session assert in_group.id == loaded_session.id assert sorted(loaded_session.forwarding_chain) == sorted(TEST_FORWARDING_CHAIN) bob_device = device_store[BOB_ID][BOB_DEVICE] assert bob_device assert bob_device.user_id == BOB_ID assert bob_device.id == BOB_DEVICE assert bob_device.ed25519 == BOB_ONETIME assert bob_device.curve25519 == BOB_CURVE assert not bob_device.deleted assert len(device_store.users) == 11 def test_store_versioning(self, store): version = store._get_store_version() assert version == 2 def test_sqlitestore_verification(self, sqlstore): devices = self.example_devices bob_device = devices[BOB_ID][BOB_DEVICE] sqlstore.save_device_keys(devices) assert not sqlstore.is_device_verified(bob_device) assert sqlstore.verify_device(bob_device) assert sqlstore.is_device_verified(bob_device) assert not sqlstore.verify_device(bob_device) assert sqlstore.is_device_verified(bob_device) assert sqlstore.unverify_device(bob_device) assert not sqlstore.is_device_verified(bob_device) assert not sqlstore.unverify_device(bob_device) def test_sqlitestore_blacklisting(self, sqlstore): devices = self.example_devices bob_device = devices[BOB_ID][BOB_DEVICE] sqlstore.save_device_keys(devices) assert not sqlstore.is_device_blacklisted(bob_device) assert sqlstore.blacklist_device(bob_device) assert sqlstore.is_device_blacklisted(bob_device) assert not sqlstore.is_device_verified(bob_device) assert not sqlstore.blacklist_device(bob_device) assert sqlstore.unblacklist_device(bob_device) assert not sqlstore.is_device_blacklisted(bob_device) assert not 
sqlstore.is_device_verified(bob_device) assert not sqlstore.unblacklist_device(bob_device) assert sqlstore.blacklist_device(bob_device) assert sqlstore.is_device_blacklisted(bob_device) assert sqlstore.verify_device(bob_device) assert not sqlstore.is_device_blacklisted(bob_device) assert sqlstore.is_device_verified(bob_device) def test_sqlitememorystore(self, sqlmemorystore): devices = self.example_devices bob_device = devices[BOB_ID][BOB_DEVICE] sqlmemorystore.save_device_keys(devices) assert not sqlmemorystore.is_device_verified(bob_device) assert sqlmemorystore.verify_device(bob_device) assert sqlmemorystore.is_device_verified(bob_device) def test_device_deletion(self, store): store.load_account() devices = self.example_devices assert len(devices) == 11 store.save_device_keys(devices) device_store = store.load_device_keys() bob_device = device_store[BOB_ID][BOB_DEVICE] assert not bob_device.deleted bob_device.deleted = True store.save_device_keys(device_store) device_store = store.load_device_keys() bob_device = device_store[BOB_ID][BOB_DEVICE] assert bob_device.deleted def test_deleting_trusted_device(self, sqlstore): devices = self.example_devices sqlstore.save_device_keys(devices) device_store = sqlstore.load_device_keys() bob_device = device_store[BOB_ID][BOB_DEVICE] sqlstore.verify_device(bob_device) bob_device.deleted = True sqlstore.save_device_keys(device_store) sqlstore.save_device_keys(devices) def test_ignoring_many(self, store): devices = self.example_devices device_list = [device for d in devices.values() for device in d.values()] store.save_device_keys(devices) store.ignore_devices(device_list) for device in device_list: assert store.is_device_ignored(device) def test_ignoring_many_sqlite(self, sqlstore): devices = self.example_devices device_list = [device for d in devices.values() for device in d.values()] sqlstore.save_device_keys(devices) sqlstore.ignore_devices(device_list) for device in device_list: assert sqlstore.is_device_ignored(device) 
def test_trust_state_updating_sqlite(self, sqlstore): devices = self.example_devices bob_device = devices[BOB_ID][BOB_DEVICE] device_list = [device for d in devices.values() for device in d.values()] sqlstore.save_device_keys(devices) assert bob_device.trust_state == TrustState.unset sqlstore.verify_device(bob_device) assert bob_device.trust_state == TrustState.verified sqlstore.unverify_device(bob_device) assert bob_device.trust_state == TrustState.unset sqlstore.blacklist_device(bob_device) assert bob_device.trust_state == TrustState.blacklisted sqlstore.unblacklist_device(bob_device) assert bob_device.trust_state == TrustState.unset sqlstore.ignore_device(bob_device) assert bob_device.trust_state == TrustState.ignored sqlstore.unignore_device(bob_device) assert bob_device.trust_state == TrustState.unset sqlstore.ignore_devices(device_list) for device in device_list: assert device.trust_state == TrustState.ignored def test_trust_state_updating_default(self, store): devices = self.example_devices bob_device = devices[BOB_ID][BOB_DEVICE] device_list = [device for d in devices.values() for device in d.values()] store.save_device_keys(devices) assert bob_device.trust_state == TrustState.unset assert not bob_device.verified store.verify_device(bob_device) assert bob_device.trust_state == TrustState.verified assert bob_device.verified store.unverify_device(bob_device) assert bob_device.trust_state == TrustState.unset assert not bob_device.verified store.blacklist_device(bob_device) assert bob_device.trust_state == TrustState.blacklisted assert bob_device.blacklisted store.unblacklist_device(bob_device) assert bob_device.trust_state == TrustState.unset assert not bob_device.blacklisted store.ignore_device(bob_device) assert bob_device.trust_state == TrustState.ignored assert bob_device.ignored store.unignore_device(bob_device) assert bob_device.trust_state == TrustState.unset assert not bob_device.ignored store.ignore_devices(device_list) for device in device_list: 
assert device.trust_state == TrustState.ignored def test_trust_state_loading(self, store): devices = self.example_devices bob_device = devices[BOB_ID][BOB_DEVICE] store.save_device_keys(devices) assert not bob_device.verified store.verify_device(bob_device) assert bob_device.verified store2 = DefaultStore(store.user_id, store.device_id, store.store_path) loaded_devices = store2.load_device_keys() bob_device = loaded_devices[BOB_ID][BOB_DEVICE] assert bob_device.verified def test_trust_state_loading_sql(self, sqlstore): devices = self.example_devices bob_device = devices[BOB_ID][BOB_DEVICE] sqlstore.save_device_keys(devices) assert not bob_device.verified sqlstore.verify_device(bob_device) assert bob_device.verified store2 = SqliteStore(sqlstore.user_id, sqlstore.device_id, sqlstore.store_path) loaded_devices = store2.load_device_keys() bob_device = loaded_devices[BOB_ID][BOB_DEVICE] assert bob_device.verified def test_sync_token_loading(self, sqlstore): token = "1234" sqlstore.save_sync_token(token) loaded_token = sqlstore.load_sync_token() assert token == loaded_token matrix-nio-0.24.0/tox.ini000066400000000000000000000007311455215747700152210ustar00rootroot00000000000000[tox] envlist = coverage isolated_build = True [testenv] deps = -r test-requirements.txt passenv = TOXENV CI commands = pytest --benchmark-disable [testenv:coverage] commands = coverage erase pytest --cov={envsitepackagesdir}/nio --cov-report term-missing --benchmark-disable coverage xml coverage report --show-missing codecov -e TOXENV deps = -rtest-requirements.txt coverage codecov>=1.4.0 setenv = COVERAGE_FILE=.coverage