pax_global_header00006660000000000000000000000064150750370250014516gustar00rootroot0000000000000052 comment=767e118ed193cd16cecb61989614b50dab432aab Python-roborock-python-roborock-32df4f3/000077500000000000000000000000001507503702500203675ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/.github/000077500000000000000000000000001507503702500217275ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/.github/dependabot.yml000066400000000000000000000005351507503702500245620ustar00rootroot00000000000000# https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file version: 2 updates: - package-ecosystem: "github-actions" directory: "/" schedule: interval: "weekly" - package-ecosystem: "pip" directory: "/" schedule: interval: "weekly" Python-roborock-python-roborock-32df4f3/.github/workflows/000077500000000000000000000000001507503702500237645ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/.github/workflows/ci.yml000066400000000000000000000045071507503702500251100ustar00rootroot00000000000000name: CI on: push: branches: - main pull_request: concurrency: group: ${{ github.head_ref || github.run_id }} cancel-in-progress: true jobs: # Make sure commit messages follow the conventional commits convention: # https://www.conventionalcommits.org commitlint: name: Lint Commit Messages runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 with: fetch-depth: 0 - uses: wagoid/commitlint-github-action@v6.2.1 lint: runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 - uses: actions/setup-python@v6 with: python-version: "3.11" - uses: pre-commit/action@v3.0.1 test: strategy: fail-fast: false matrix: python-version: - "3.11" - "3.14" runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 - name: Set up uv uses: astral-sh/setup-uv@v7 with: python-version: ${{ matrix.python-version }} activate-environment: true - run: uv pip install pip - name: Test with Pytest run: uv run pytest --log-cli-level=DEBUG -vv -s shell: bash release: runs-on: ubuntu-latest needs: - test concurrency: release if: github.ref == 'refs/heads/main' permissions: contents: write issues: write pull-requests: write id-token: write actions: write packages: write environment: name: release steps: - uses: actions/checkout@v5 with: fetch-depth: 0 persist-credentials: false - name: Python Semantic Release id: release uses: python-semantic-release/python-semantic-release@v10.4.1 with: github_token: ${{ secrets.GH_TOKEN }} - name: Publish package distributions to PyPI uses: pypa/gh-action-pypi-publish@v1.13.0 # NOTE: DO NOT wrap the conditional in ${{ }} as it will always evaluate to true. 
# See https://github.com/actions/runner/issues/1173 if: steps.release.outputs.released == 'true' - name: Publish package distributions to GitHub Releases uses: python-semantic-release/publish-action@v10.4.1 if: steps.release.outputs.released == 'true' with: github_token: ${{ secrets.GITHUB_TOKEN }} tag: ${{ steps.release.outputs.tag }} Python-roborock-python-roborock-32df4f3/.github/workflows/pages.yml000066400000000000000000000017761507503702500256210ustar00rootroot00000000000000--- name: Deploy static content to Pages on: push: branches: - main workflow_dispatch: permissions: contents: read pages: write id-token: write actions: read concurrency: group: "pages" cancel-in-progress: true jobs: deploy: environment: name: github-pages url: ${{ steps.deployment.outputs.page_url }} runs-on: ubuntu-latest strategy: fail-fast: false steps: - uses: actions/checkout@v5 - name: Set up uv uses: astral-sh/setup-uv@v7 with: python-version: ${{ matrix.python-version }} activate-environment: true - run: uv pip install . - run: uv run pdoc ./roborock -o docs/pdoc - name: Setup Pages uses: actions/configure-pages@v5 - name: Upload artifact uses: actions/upload-pages-artifact@v4 with: # Upload pdoc output path: 'docs/pdoc/' - name: Deploy to GitHub Pages id: deployment uses: actions/deploy-pages@v4 Python-roborock-python-roborock-32df4f3/.gitignore000066400000000000000000000003401507503702500223540ustar00rootroot00000000000000dist venv .venv .idea roborock/__pycache__ *.pyc .coverage # Sphinx documentation docs/_build/ # mkdocs documentation /site /docs/build/ .DS_Store # gemini-cli settings .gemini/ # GitHub App credentials gha-creds-*.json Python-roborock-python-roborock-32df4f3/.pre-commit-config.yaml000066400000000000000000000025001507503702500246450ustar00rootroot00000000000000# See https://pre-commit.com for more information # See https://pre-commit.com/hooks.html for more hooks exclude: "CHANGELOG.md" default_stages: [ pre-commit ] repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.5.0 hooks: - id: debug-statements - id: check-builtin-literals - id: check-case-conflict - id: check-docstring-first - id: check-json - id: check-toml - id: check-yaml - id: detect-private-key - id: end-of-file-fixer - id: trailing-whitespace - repo: https://github.com/astral-sh/uv-pre-commit rev: 0.9.1 hooks: - id: uv-sync args: ["--locked", "--all-packages"] - repo: https://github.com/codespell-project/codespell rev: v2.2.6 hooks: - id: codespell - repo: https://github.com/charliermarsh/ruff-pre-commit rev: v0.13.2 hooks: - id: ruff-format - id: ruff args: - --fix - repo: https://github.com/pre-commit/mirrors-mypy rev: v1.7.1 hooks: - id: mypy exclude: cli.py additional_dependencies: [ "types-paho-mqtt" ] Python-roborock-python-roborock-32df4f3/.readthedocs.yaml000066400000000000000000000002001507503702500236060ustar00rootroot00000000000000version: 2 build: os: ubuntu-22.04 tools: python: "3.10" python: install: - requirements: docs/requirements.txt Python-roborock-python-roborock-32df4f3/.vscode/000077500000000000000000000000001507503702500217305ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/.vscode/launch.json000066400000000000000000000004501507503702500240740ustar00rootroot00000000000000{ "version": "0.2.0", "configurations": [ { "name": "Python: Current File", "type": "python", "request": "launch", "program": "${file}", "console": "integratedTerminal", "justMyCode": false } ] } 
Python-roborock-python-roborock-32df4f3/.vscode/settings.json000066400000000000000000000000451507503702500244620ustar00rootroot00000000000000{ "esbonio.sphinx.confDir": "" } Python-roborock-python-roborock-32df4f3/CHANGELOG.md000066400000000000000000002736401507503702500222140ustar00rootroot00000000000000# CHANGELOG ## v2.19.0 (2025-05-13) ### Bug Fixes - Add Saros 10 dock type code ([#362](https://github.com/Python-roborock/python-roborock/pull/362), [`240bf59`](https://github.com/Python-roborock/python-roborock/commit/240bf59df1873e85e05356496e5be01f1a000199)) ### Chores - **deps**: Bump aiomqtt from 2.3.2 to 2.4.0 ([#375](https://github.com/Python-roborock/python-roborock/pull/375), [`b243a25`](https://github.com/Python-roborock/python-roborock/commit/b243a25569c2cb6b54e6c0e1eed6dadecb9ad84c)) Bumps [aiomqtt](https://github.com/empicano/aiomqtt) from 2.3.2 to 2.4.0. - [Release notes](https://github.com/empicano/aiomqtt/releases) - [Changelog](https://github.com/empicano/aiomqtt/blob/main/CHANGELOG.md) - [Commits](https://github.com/empicano/aiomqtt/compare/v2.3.2...v2.4.0) --- updated-dependencies: - dependency-name: aiomqtt dependency-version: 2.4.0 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ### Features - Add some logging for the web api ([#377](https://github.com/Python-roborock/python-roborock/pull/377), [`74c1b5f`](https://github.com/Python-roborock/python-roborock/commit/74c1b5f6e88ce410f95676de802bd04d304963b1)) ## v2.18.2 (2025-05-04) ### Bug Fixes - Add session to home_data_v3 ([#372](https://github.com/Python-roborock/python-roborock/pull/372), [`77061fe`](https://github.com/Python-roborock/python-roborock/commit/77061fe1545a3d2f9e874a3f7e4a94eedfd17706)) ## v2.18.1 (2025-05-04) ### Bug Fixes - Get home_data_v3 working ([#371](https://github.com/Python-roborock/python-roborock/pull/371), [`f9e6c54`](https://github.com/Python-roborock/python-roborock/commit/f9e6c546e68a71a321dafabd5d502abef3e89b31)) ## v2.18.0 (2025-04-06) ### Features - Rate limits for login and home data ([#361](https://github.com/Python-roborock/python-roborock/pull/361), [`93ef8ad`](https://github.com/Python-roborock/python-roborock/commit/93ef8addfd2faa6264606c9d710c46772cd52150)) * feat: rate limits for login and home data * fix: comments * fix: testing and comments ## v2.17.0 (2025-04-05) ### Features - Add support for g20s ultra ([#359](https://github.com/Python-roborock/python-roborock/pull/359), [`593c368`](https://github.com/Python-roborock/python-roborock/commit/593c3687064779ee6790e17f40411cd8129b756e)) ## v2.16.1 (2025-03-22) ### Bug Fixes - Close the session if we created it ([#356](https://github.com/Python-roborock/python-roborock/pull/356), [`96cc718`](https://github.com/Python-roborock/python-roborock/commit/96cc718dbd4106fa344172e2dbf0c3779344ba04)) ## v2.16.0 (2025-03-22) ### Features - Allow forcing of updating cache variables ([#355](https://github.com/Python-roborock/python-roborock/pull/355), [`eae7803`](https://github.com/Python-roborock/python-roborock/commit/eae7803db8973870c396ce45341e5d38cbfaf321)) ## v2.15.0 (2025-03-18) ### Chores - Fix documentation links ([#348](https://github.com/Python-roborock/python-roborock/pull/348), [`404a47c`](https://github.com/Python-roborock/python-roborock/commit/404a47c8c51891ed90093869e567d56386cdc4a2)) ### Features - Allow passing in clientsession 
([#354](https://github.com/Python-roborock/python-roborock/pull/354), [`1d31cf6`](https://github.com/Python-roborock/python-roborock/commit/1d31cf619ef38dfdd2891cd42c0acf4550b88c29)) * feat: allow passing in clientsession * fix: test ## v2.14.0 (2025-03-16) ### Features - Add load_multi_map function ([#349](https://github.com/Python-roborock/python-roborock/pull/349), [`23bae12`](https://github.com/Python-roborock/python-roborock/commit/23bae1225389b6ec88bad868b8c6d4a28f458e61)) ## v2.13.0 (2025-03-16) ### Features - Add home_data_v3 ([#347](https://github.com/Python-roborock/python-roborock/pull/347), [`1325fda`](https://github.com/Python-roborock/python-roborock/commit/1325fdaef0f9d920ab499a0550da51cdb8efc0c4)) * feat: add home_data_v3 * fix: address comments ## v2.12.2 (2025-03-11) ### Bug Fixes - Bad dock summary logic ([#345](https://github.com/Python-roborock/python-roborock/pull/345), [`eda1e98`](https://github.com/Python-roborock/python-roborock/commit/eda1e98e5ea177e2eb2390d877b383780f938fd8)) ### Chores - **deps-dev**: Bump pytest from 8.3.4 to 8.3.5 ([#342](https://github.com/Python-roborock/python-roborock/pull/342), [`53635ed`](https://github.com/Python-roborock/python-roborock/commit/53635eda2a2415fc5744f9ebdf8e80fb2df96ef0)) Bumps [pytest](https://github.com/pytest-dev/pytest) from 8.3.4 to 8.3.5. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/8.3.4...8.3.5) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> - **deps-dev**: Bump ruff from 0.9.9 to 0.9.10 ([#344](https://github.com/Python-roborock/python-roborock/pull/344), [`94b281d`](https://github.com/Python-roborock/python-roborock/commit/94b281daf5906ec572fa679869eb78fab030db59)) Bumps [ruff](https://github.com/astral-sh/ruff) from 0.9.9 to 0.9.10. - [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md) - [Commits](https://github.com/astral-sh/ruff/compare/0.9.9...0.9.10) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ## v2.12.1 (2025-03-04) ### Bug Fixes - Add error for web calls and saros dock ([#343](https://github.com/Python-roborock/python-roborock/pull/343), [`49fb137`](https://github.com/Python-roborock/python-roborock/commit/49fb1372aead96ad5b03222699ab150bf83b31f9)) ### Chores - **deps**: Bump aiohttp from 3.11.11 to 3.11.12 ([#328](https://github.com/Python-roborock/python-roborock/pull/328), [`f2d0c39`](https://github.com/Python-roborock/python-roborock/commit/f2d0c39353aff0d2f63ba5402cbfd1fd5c9f70c3)) --- updated-dependencies: - dependency-name: aiohttp dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> - **deps**: Bump aiohttp from 3.11.12 to 3.11.13 ([#340](https://github.com/Python-roborock/python-roborock/pull/340), [`7c6bb54`](https://github.com/Python-roborock/python-roborock/commit/7c6bb544fe14b0512eb4cc73f3d92f19fc56f4f7)) --- updated-dependencies: - dependency-name: aiohttp dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> - **deps**: Bump python-semantic-release/python-semantic-release ([#338](https://github.com/Python-roborock/python-roborock/pull/338), [`15f7705`](https://github.com/Python-roborock/python-roborock/commit/15f77056b8f2c4dcd2772812c6c2f9647f808bcd)) Bumps [python-semantic-release/python-semantic-release](https://github.com/python-semantic-release/python-semantic-release) from 9.17.0 to 9.21.0. - [Release notes](https://github.com/python-semantic-release/python-semantic-release/releases) - [Changelog](https://github.com/python-semantic-release/python-semantic-release/blob/master/CHANGELOG.rst) - [Commits](https://github.com/python-semantic-release/python-semantic-release/compare/v9.17.0...v9.21.0) --- updated-dependencies: - dependency-name: python-semantic-release/python-semantic-release dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> - **deps-dev**: Bump mypy from 1.14.1 to 1.15.0 ([#329](https://github.com/Python-roborock/python-roborock/pull/329), [`2105cdf`](https://github.com/Python-roborock/python-roborock/commit/2105cdf2a29a1ad1c1c9117e3dff4c4548466d4f)) Bumps [mypy](https://github.com/python/mypy) from 1.14.1 to 1.15.0. - [Changelog](https://github.com/python/mypy/blob/master/CHANGELOG.md) - [Commits](https://github.com/python/mypy/compare/v1.14.1...v1.15.0) --- updated-dependencies: - dependency-name: mypy dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> - **deps-dev**: Bump ruff from 0.9.4 to 0.9.9 ([#341](https://github.com/Python-roborock/python-roborock/pull/341), [`4e80f7a`](https://github.com/Python-roborock/python-roborock/commit/4e80f7a86764240729982de3336173231fac6a08)) Bumps [ruff](https://github.com/astral-sh/ruff) from 0.9.4 to 0.9.9. - [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md) - [Commits](https://github.com/astral-sh/ruff/compare/0.9.4...0.9.9) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ## v2.12.0 (2025-02-21) ### Features - Add cli status ([#333](https://github.com/Python-roborock/python-roborock/pull/333), [`64e77d7`](https://github.com/Python-roborock/python-roborock/commit/64e77d7150babcc78ce3698fe98594891dcb7bd4)) ## v2.11.3 (2025-02-19) ### Bug Fixes - Q revo curv mappings ([#332](https://github.com/Python-roborock/python-roborock/pull/332), [`83d010a`](https://github.com/Python-roborock/python-roborock/commit/83d010acbc100f06ae322adde1eedcfd0f78efc8)) ## v2.11.2 (2025-02-13) ### Bug Fixes - Add some extra data protocol checking ([#331](https://github.com/Python-roborock/python-roborock/pull/331), [`4af1490`](https://github.com/Python-roborock/python-roborock/commit/4af1490ea4db0dbeb5d5666019d9433af4f3d273)) ## v2.11.1 (2025-02-03) ### Bug Fixes - Typing of scene api call ([#324](https://github.com/Python-roborock/python-roborock/pull/324), [`61e27ae`](https://github.com/Python-roborock/python-roborock/commit/61e27aedfbb363913f80ace3932fa4adf61f9792)) ## v2.11.0 (2025-02-03) ### Chores - **deps**: Bump pypa/gh-action-pypi-publish from 1.12.3 to 1.12.4 ([#311](https://github.com/Python-roborock/python-roborock/pull/311), [`cb40279`](https://github.com/Python-roborock/python-roborock/commit/cb4027994e4ee0b72f25d9f51f46f8b3f9522bc5)) - **deps**: Bump python-semantic-release/python-semantic-release ([#312](https://github.com/Python-roborock/python-roborock/pull/312), [`7827af5`](https://github.com/Python-roborock/python-roborock/commit/7827af5ef7e6fb2dedd6eef0cb8c0c8439d2a8ef)) - **deps**: Bump python-semantic-release/upload-to-gh-release ([#290](https://github.com/Python-roborock/python-roborock/pull/290), [`87038e3`](https://github.com/Python-roborock/python-roborock/commit/87038e3a556a359d552775195d7640b6cdbeb1fe)) - **deps**: Bump wagoid/commitlint-github-action from 6.2.0 to 6.2.1 ([#296](https://github.com/Python-roborock/python-roborock/pull/296), [`037e28c`](https://github.com/Python-roborock/python-roborock/commit/037e28c38df282dac09bd4ff9596dc0b3a09c78f)) - **deps-dev**: Bump codespell from 2.3.0 to 2.4.1 ([#321](https://github.com/Python-roborock/python-roborock/pull/321), [`c36d46f`](https://github.com/Python-roborock/python-roborock/commit/c36d46f90780db50f2c5c2e947ada78b6ee4967c)) - **deps-dev**: Bump pytest-asyncio from 0.25.2 to 0.25.3 ([#322](https://github.com/Python-roborock/python-roborock/pull/322), [`9e40fe7`](https://github.com/Python-roborock/python-roborock/commit/9e40fe780224903c8e81c4d210ab61212582948d)) - **deps-dev**: Bump ruff from 0.9.2 to 0.9.4 ([#323](https://github.com/Python-roborock/python-roborock/pull/323), [`25d15a7`](https://github.com/Python-roborock/python-roborock/commit/25d15a78d1f5ffb069159aa652c2ef3f88d3eb03)) ### Features - Add scenes/routines support ([#317](https://github.com/Python-roborock/python-roborock/pull/317), [`090d912`](https://github.com/Python-roborock/python-roborock/commit/090d912872712e16b24597826a0b85d22b37acb3)) * add scenes support --------- Co-authored-by: Luke Lashley ## v2.10.1 (2025-02-03) ### Bug Fixes - Delete in cli ([#320](https://github.com/Python-roborock/python-roborock/pull/320), [`6704f55`](https://github.com/Python-roborock/python-roborock/commit/6704f55915005d771d698e58dcbac5ec46a385e5)) ## v2.10.0 (2025-01-31) ### Features - Add commands to add a new device ([#307](https://github.com/Python-roborock/python-roborock/pull/307), 
[`430c248`](https://github.com/Python-roborock/python-roborock/commit/430c24806fa06a5cec6c7fb3945a9b9cbfbc2f7a)) * feat: add commands to add a new device * chore: mr comments ## v2.9.8 (2025-01-30) ### Bug Fixes - Ignore ping id during id check ([#316](https://github.com/Python-roborock/python-roborock/pull/316), [`b3d74b4`](https://github.com/Python-roborock/python-roborock/commit/b3d74b4bc9fa581da0485cf68a46c23f53fdbf50)) ## v2.9.7 (2025-01-28) ### Bug Fixes - Never create a new asyncio loop ([#310](https://github.com/Python-roborock/python-roborock/pull/310), [`ed7db1f`](https://github.com/Python-roborock/python-roborock/commit/ed7db1f09f379f509a38a61a445fb2c41b384f25)) ## v2.9.6 (2025-01-26) ### Bug Fixes - Remove the __del__ warning for disconnected clients ([#308](https://github.com/Python-roborock/python-roborock/pull/308), [`235752b`](https://github.com/Python-roborock/python-roborock/commit/235752bd77e4617323366b56439bf8981b071430)) ### Refactoring - Breaking change to remove sync APIs ([#306](https://github.com/Python-roborock/python-roborock/pull/306), [`3c30d93`](https://github.com/Python-roborock/python-roborock/commit/3c30d933f680cc567b10ad6566b02289eade5b3f)) * refactor: breaking change to remove sync APIs * chore: downgrade log to a debug message ## v2.9.5 (2025-01-21) ### Bug Fixes - Fix queue timeout variable and set default in tests of 10 seconds ([#302](https://github.com/Python-roborock/python-roborock/pull/302), [`9c75e3a`](https://github.com/Python-roborock/python-roborock/commit/9c75e3a67fc8f411c5496b5864a9a0e90a573c8a)) * test: set queue timeout of 10 * test: cleanup lint errors * fix: set queue_timeout in the client leaf base classes * chore: fix test fixture after merging - Log an explicit message when intentionally resetting the connection ([#304](https://github.com/Python-roborock/python-roborock/pull/304), [`a20d2ac`](https://github.com/Python-roborock/python-roborock/commit/a20d2ac46c7553c7b7c7dffbbc86ee0da370418d)) ## v2.9.4 (2025-01-21) ### Bug Fixes - Bump paho-mqtt from 1.6.1 to 2.1.0 ([#288](https://github.com/Python-roborock/python-roborock/pull/288), [`777b736`](https://github.com/Python-roborock/python-roborock/commit/777b736440a3633c089bf09ab9d7240e54e0fb0e)) Bumps [paho-mqtt](https://github.com/eclipse/paho.mqtt.python) from 1.6.1 to 2.1.0. - [Release notes](https://github.com/eclipse/paho.mqtt.python/releases) - [Changelog](https://github.com/eclipse-paho/paho.mqtt.python/blob/master/ChangeLog.txt) - [Commits](https://github.com/eclipse/paho.mqtt.python/compare/v1.6.1...v2.1.0) --- updated-dependencies: - dependency-name: paho-mqtt dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> - Set unique sequence numbers on outgoing messages ([#300](https://github.com/Python-roborock/python-roborock/pull/300), [`14f03c7`](https://github.com/Python-roborock/python-roborock/commit/14f03c7df1c574ab87ea056227bb95f9150f4832)) ### Chores - Fix flaky tests by cleaning up threads ([#303](https://github.com/Python-roborock/python-roborock/pull/303), [`6e29e74`](https://github.com/Python-roborock/python-roborock/commit/6e29e7440f61ddde9a67b25c87864ed0cbf1a097)) * chore: set log level to debug to aid in tracking down flaky tests * test: update log format to include timestamps and dates test: update logmessage with package name chore: fix tests to use valid zeo codes * test: fix zeo test assertion * test: add logging when updating future * test: make the client read socket always available for reading to avoid getting blocked * test: revert socket changes * test: set function loop scope * test: add pytest-timeout with a 20 second hard timeout * test: explicitly disconnect threads * test: fix formatting * test: fix lint errors * fix: stop the mqtt loop on disconnect * fix: release the mqtt thread on release * test: revert log changes * chore: cleanup/revert changes * chore: revert mqtt client check * fix: always stop the event loop when disconnecting ## v2.9.3 (2025-01-21) ### Bug Fixes - Remove methods no longer available in paho-mqtt ([#298](https://github.com/Python-roborock/python-roborock/pull/298), [`685edc8`](https://github.com/Python-roborock/python-roborock/commit/685edc825fbf2062d61c3294ea82c4566442dd64)) ### Chores - Remove test that creates abstract base class ([#299](https://github.com/Python-roborock/python-roborock/pull/299), [`a55b804`](https://github.com/Python-roborock/python-roborock/commit/a55b804fddff318d704cc04e6c4190514e3e3375)) - **deps-dev**: Bump aioresponses from 0.7.7 to 0.7.8 ([#295](https://github.com/Python-roborock/python-roborock/pull/295), [`ab7ffb3`](https://github.com/Python-roborock/python-roborock/commit/ab7ffb36190090e6d5b39150da4ebe2f2e22fbd4)) Bumps [aioresponses](https://github.com/pnuckowski/aioresponses) from 0.7.7 to 0.7.8. - [Release notes](https://github.com/pnuckowski/aioresponses/releases) - [Commits](https://github.com/pnuckowski/aioresponses/compare/0.7.7...0.7.8) --- updated-dependencies: - dependency-name: aioresponses dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ## v2.9.2 (2025-01-19) ### Bug Fixes - Update local API protocol broken during refactoring and add additional tests for API calls ([#293](https://github.com/Python-roborock/python-roborock/pull/293), [`ea8e55a`](https://github.com/Python-roborock/python-roborock/commit/ea8e55a0b9c54e7c7d6235ad0e73f7b75ec4de7b)) * test: add an additional local API test and fix bug in test fixture * test: fix formatting * fix: Update local API ### Chores - Remove dacite and update readme ([#294](https://github.com/Python-roborock/python-roborock/pull/294), [`699a2c5`](https://github.com/Python-roborock/python-roborock/commit/699a2c5ed5362ee4004d2888037baf929869e98c)) - Update CI to run on one platform, but multiple python versions ([#292](https://github.com/Python-roborock/python-roborock/pull/292), [`16ab4ff`](https://github.com/Python-roborock/python-roborock/commit/16ab4ff433d25df9daa4bf102569c39bbd686420)) ## v2.9.1 (2025-01-13) ### Bug Fixes - Bump commitlint and allow caps ([#283](https://github.com/Python-roborock/python-roborock/pull/283), [`6211a81`](https://github.com/Python-roborock/python-roborock/commit/6211a8163d130c41594daf65e36be2d87788a5c6)) * fix: bump commitlint and allow caps * fix: error ### Chores - Add end-to-end tests for the MQTT client ([#278](https://github.com/Python-roborock/python-roborock/pull/278), [`0872691`](https://github.com/Python-roborock/python-roborock/commit/0872691c9eeb6e564a1ee47b8ba2bec73eb81a63)) * test: add end-to-end tests for the MQTT client * test: extract connected client to a fixture style: fix formatting of tests refactor: extract variables for mock data used in mqtt tests style: fix lint errors in tests - Add local api test coverage ([#284](https://github.com/Python-roborock/python-roborock/pull/284), [`c8dcd34`](https://github.com/Python-roborock/python-roborock/commit/c8dcd34c8197b9d47ec3c96567313d658e0f36b3)) - Allow type checking in roborock/cloud_api.py ([#280](https://github.com/Python-roborock/python-roborock/pull/280), [`9100bbf`](https://github.com/Python-roborock/python-roborock/commit/9100bbff1390a706a74dc0ec15c1bb1d7dc83d9f)) - Inheritance fixes and simplifications ([#282](https://github.com/Python-roborock/python-roborock/pull/282), [`1013cb5`](https://github.com/Python-roborock/python-roborock/commit/1013cb5f35ec6feb71e58a437395b0cdaa593937)) - Remove level of inheritance in mqtt client ([#286](https://github.com/Python-roborock/python-roborock/pull/286), [`5add0da`](https://github.com/Python-roborock/python-roborock/commit/5add0dac8d1e1e86b184ebad709034ea2a2686a3)) - Remove one level of local client inheritence ([#285](https://github.com/Python-roborock/python-roborock/pull/285), [`1f5a9ec`](https://github.com/Python-roborock/python-roborock/commit/1f5a9ecd907c0314cc156a59156b03151e9c26a8)) - Use asyncio mode in tests ([#272](https://github.com/Python-roborock/python-roborock/pull/272), [`8f779c3`](https://github.com/Python-roborock/python-roborock/commit/8f779c39b21ab429335fc5d179fe3bacc0b5d274)) - **deps**: Bump pre-commit/action from 3.0.0 to 3.0.1 ([#276](https://github.com/Python-roborock/python-roborock/pull/276), [`3f61bcc`](https://github.com/Python-roborock/python-roborock/commit/3f61bccde418c9e9e04ef059ca8a6a2dfcba8312)) - **deps**: Bump pypa/gh-action-pypi-publish from 1.12.2 to 1.12.3 ([#291](https://github.com/Python-roborock/python-roborock/pull/291), 
[`be52b3d`](https://github.com/Python-roborock/python-roborock/commit/be52b3d48dc7edeb164a006db10b7efe91a18b71)) - **deps-dev**: Bump pre-commit from 3.8.0 to 4.0.1 ([#287](https://github.com/Python-roborock/python-roborock/pull/287), [`f2f0c4c`](https://github.com/Python-roborock/python-roborock/commit/f2f0c4c8fa9f8fe85fd208daf28e5f7dfe02aba3)) - **deps-dev**: Bump pytest-asyncio from 0.25.1 to 0.25.2 ([#275](https://github.com/Python-roborock/python-roborock/pull/275), [`b0611f0`](https://github.com/Python-roborock/python-roborock/commit/b0611f0eb72b0078c10a5c03ae8415d21cc19c03)) - **deps-dev**: Bump ruff from 0.8.6 to 0.9.1 ([#277](https://github.com/Python-roborock/python-roborock/pull/277), [`eb8bbe3`](https://github.com/Python-roborock/python-roborock/commit/eb8bbe317b8d4f98e9c72151d6f9ca105e3c0db0)) ### Refactoring - Simplify future usage within the api clients ([#263](https://github.com/Python-roborock/python-roborock/pull/263), [`39a8661`](https://github.com/Python-roborock/python-roborock/commit/39a8661d4c5ade657cfc655a3ac78a66628bb755)) ## v2.9.0 (2025-01-09) ### Chores - Add example ([#269](https://github.com/Python-roborock/python-roborock/pull/269), [`d7a3af2`](https://github.com/Python-roborock/python-roborock/commit/d7a3af29c91bf2066f88a941789c0dc725eb7431)) - Add some testing and mocks for the web api ([#270](https://github.com/Python-roborock/python-roborock/pull/270), [`2356c16`](https://github.com/Python-roborock/python-roborock/commit/2356c16cd08cdf7210f605f9c890eb1c5631a792)) ### Features - Add dust collection mode name for typing ease ([#271](https://github.com/Python-roborock/python-roborock/pull/271), [`c85232a`](https://github.com/Python-roborock/python-roborock/commit/c85232a00b997dbc84a4b9b99b18ae1c714b7df7)) - Add product v4 and downloading code ([#267](https://github.com/Python-roborock/python-roborock/pull/267), [`b669117`](https://github.com/Python-roborock/python-roborock/commit/b6691174607a66959f4d9046dffb4cd4e782695d)) * feat: add product v4 and downloading code * fix: remove got message - Add support for qrevo curv ([#253](https://github.com/Python-roborock/python-roborock/pull/253), [`e42729a`](https://github.com/Python-roborock/python-roborock/commit/e42729aa5aedd2c77f68230825d6ce832a146f33)) * add support for qrevo curv * add dock support * revert unnecessary changes * fix: lint --------- Co-authored-by: Luke Lashley ## v2.8.5 (2025-01-06) ### Bug Fixes - Add additional log messages to track down concurrency errors ([#266](https://github.com/Python-roborock/python-roborock/pull/266), [`d750234`](https://github.com/Python-roborock/python-roborock/commit/d75023482e58689009c4df96cfc69b6080f5ada9)) - Update log message to include existing request id ([#264](https://github.com/Python-roborock/python-roborock/pull/264), [`ac8d23a`](https://github.com/Python-roborock/python-roborock/commit/ac8d23aa59342d9ae9f7c5d7c857de353e288ffa)) * fix: Update log message to include existing request id * fix: Add protocol to log message ### Chores - Always use time.monotonic ([#265](https://github.com/Python-roborock/python-roborock/pull/265), [`e14802c`](https://github.com/Python-roborock/python-roborock/commit/e14802cadde404d548cdff0c6b5906740a7e8c00)) ## v2.8.4 (2024-12-20) ### Bug Fixes - Update mop intensity, fan speed, and dock mappings for the QRevo Master ([#260](https://github.com/Python-roborock/python-roborock/pull/260), [`77f6d6f`](https://github.com/Python-roborock/python-roborock/commit/77f6d6fc917831f1966d2138bc7355292fa1e5e2)) * fix: update mop intensity, 
fan speed, and dock mappings for QRevo Master * Fix sorting of imports * Rerun precommit ## v2.8.3 (2024-12-19) ### Bug Fixes - Add support for QRevo Master mop mode ([#259](https://github.com/Python-roborock/python-roborock/pull/259), [`db11c0f`](https://github.com/Python-roborock/python-roborock/commit/db11c0f8ca7c08d2f795f77f7a652db4bfaa91ae)) ## v2.8.2 (2024-12-19) ### Bug Fixes - Add a mop mode to QRevoMaster ([#258](https://github.com/Python-roborock/python-roborock/pull/258), [`bf0feb7`](https://github.com/Python-roborock/python-roborock/commit/bf0feb7ee8bc9933232e8235e6efa92a451ee19e)) ## v2.8.1 (2024-12-18) ### Bug Fixes - Add config github actions ([#247](https://github.com/Python-roborock/python-roborock/pull/247), [`35f888c`](https://github.com/Python-roborock/python-roborock/commit/35f888c653ad3d41ca40d27a5ea7041df47b6bbe)) * fix: add config github actions * fix: remove placeholders - Add gh_token to checkout ([#245](https://github.com/Python-roborock/python-roborock/pull/245), [`ab9fcfe`](https://github.com/Python-roborock/python-roborock/commit/ab9fcfe4526314b09c8fd382527c5b9d9b011315)) - Bad indentation ([#248](https://github.com/Python-roborock/python-roborock/pull/248), [`190f66e`](https://github.com/Python-roborock/python-roborock/commit/190f66e53fca6938b927fd587ebcdb249c908505)) - Bump semantic release ([#236](https://github.com/Python-roborock/python-roborock/pull/236), [`cf067d4`](https://github.com/Python-roborock/python-roborock/commit/cf067d4e4fa4680e766719dc22295afb2a526323)) * fix: bump semantic release * fix: bump versioning and add environment * fix: move if check * fix: some other version bumps - Change to deploy_key ([#254](https://github.com/Python-roborock/python-roborock/pull/254), [`de0a0c7`](https://github.com/Python-roborock/python-roborock/commit/de0a0c73f1f9b415f67412170a754d6685f0c969)) - Change to persist credentials ([#246](https://github.com/Python-roborock/python-roborock/pull/246), [`5b4b769`](https://github.com/Python-roborock/python-roborock/commit/5b4b7694743d96ca7acb57ed28271220791f9802)) - Container issue from api change and ci update ([#257](https://github.com/Python-roborock/python-roborock/pull/257), [`b1e645d`](https://github.com/Python-roborock/python-roborock/commit/b1e645d6acb8de776f5361e2a5a2be59c730237b)) - Give ci more permissions ([#240](https://github.com/Python-roborock/python-roborock/pull/240), [`641a40c`](https://github.com/Python-roborock/python-roborock/commit/641a40c12f38f3dcdca36aa61f17663440f0ba8e)) - Hopefully finalize semantic release ([#244](https://github.com/Python-roborock/python-roborock/pull/244), [`481f01d`](https://github.com/Python-roborock/python-roborock/commit/481f01dc039f27037e269a7234c97006dae91969)) - Move github token to env for semantic release ([#241](https://github.com/Python-roborock/python-roborock/pull/241), [`c61d8de`](https://github.com/Python-roborock/python-roborock/commit/c61d8de1bbf0705d0d7a2699822e6bfef49c3db4)) - Repair semantic release ([#251](https://github.com/Python-roborock/python-roborock/pull/251), [`431bc20`](https://github.com/Python-roborock/python-roborock/commit/431bc2033340267340f4740cef14ec0e4c5e7331)) - Semantic release versioning tag ([#237](https://github.com/Python-roborock/python-roborock/pull/237), [`fcc58ee`](https://github.com/Python-roborock/python-roborock/commit/fcc58ee6de75a61642e73c63cf614d8953318c29)) - Semantic release versioning tag ([#238](https://github.com/Python-roborock/python-roborock/pull/238), 
[`33a1e72`](https://github.com/Python-roborock/python-roborock/commit/33a1e72d97881aac867119eddca39c4366a549e3)) * fix: semantic release versioning tag * fix: set version back - Set python version in ci ([#239](https://github.com/Python-roborock/python-roborock/pull/239), [`dcad510`](https://github.com/Python-roborock/python-roborock/commit/dcad510ec232380f5bed7646c4455f656b7ca6ae)) - Specify x-access-token ([#249](https://github.com/Python-roborock/python-roborock/pull/249), [`e9f319b`](https://github.com/Python-roborock/python-roborock/commit/e9f319b0ee22cd90e9437d20f279a24228ee62c1)) - Update_gh_token ([#242](https://github.com/Python-roborock/python-roborock/pull/242), [`8a9866c`](https://github.com/Python-roborock/python-roborock/commit/8a9866cce2f6d868ab5f87b13a6b0151034d7a22)) - Update_gh_token ([#243](https://github.com/Python-roborock/python-roborock/pull/243), [`e100ab3`](https://github.com/Python-roborock/python-roborock/commit/e100ab3e8557ed97a5917cadb40968bbf7686b76)) ### Chores - Update README.md ([`5a982b7`](https://github.com/Python-roborock/python-roborock/commit/5a982b723528e67c6d8d664dd8b3eee64436a0c8)) ## v2.8.0 (2024-11-12) ### Chores - Call to super in docs ([#235](https://github.com/Python-roborock/python-roborock/pull/235), [`df331ea`](https://github.com/Python-roborock/python-roborock/commit/df331ea0165d05b093f170fb9107918aaaac03e6)) ### Features - Add some new roborock codes and add custom command ([#234](https://github.com/Python-roborock/python-roborock/pull/234), [`c8507ef`](https://github.com/Python-roborock/python-roborock/commit/c8507eff9cdc24654034fbe4fd63ac89b6de6f99)) * fix: add some new roborock codes and add custom command * fix: lint ## v2.7.2 (2024-11-08) ### Bug Fixes - Add some new roborock codes ([#233](https://github.com/Python-roborock/python-roborock/pull/233), [`59546dd`](https://github.com/Python-roborock/python-roborock/commit/59546dd68f7b40ad368d58fd502680ff9c03c81b)) ## v2.7.1 (2024-10-28) ### Bug Fixes - Check that clean area is not a str ([#230](https://github.com/Python-roborock/python-roborock/pull/230), [`e66a91e`](https://github.com/Python-roborock/python-roborock/commit/e66a91edaf6fedf5d4b2ab9117b7759295add492)) ### Chores - Add some async improvements ([#229](https://github.com/Python-roborock/python-roborock/pull/229), [`e987c17`](https://github.com/Python-roborock/python-roborock/commit/e987c17ee65982c7179f4d94a84e1863aa4830da)) * chore: add some async improvements * chore: improve get_rand_int ## v2.7.0 (2024-10-28) ### Features - Remove dacite ([#227](https://github.com/Python-roborock/python-roborock/pull/227), [`86878a7`](https://github.com/Python-roborock/python-roborock/commit/86878a71d82c2cc707daa16dec109fc07360e3f6)) ## v2.6.1 (2024-10-22) ### Bug Fixes - Add a warning for wrong type of clean area and add new dock ([#224](https://github.com/Python-roborock/python-roborock/pull/224), [`c334eb2`](https://github.com/Python-roborock/python-roborock/commit/c334eb2193091dccd23db0d3ee4863e838733e30)) ## v2.6.0 (2024-06-29) ### Features - Add q revo pro/p10 pro support ([#220](https://github.com/Python-roborock/python-roborock/pull/220), [`5e6a2d6`](https://github.com/Python-roborock/python-roborock/commit/5e6a2d6a7171da146efb3e59ddb3215c2a573507)) ## v2.5.0 (2024-06-25) ### Features - Add some typing ([#219](https://github.com/Python-roborock/python-roborock/pull/219), [`35d0900`](https://github.com/Python-roborock/python-roborock/commit/35d09000b8d144cbaf935069952ea135950d0e78)) ## v2.4.0 (2024-06-25) ### Features - Add some 
missing codes and make warnings only message once ([#218](https://github.com/Python-roborock/python-roborock/pull/218), [`12361b5`](https://github.com/Python-roborock/python-roborock/commit/12361b58e7a4d368281c4ffd9ac3d8e9d8155e62)) ## v2.3.0 (2024-06-07) ### Features - Add warning in web requests if it fails to decode ([#215](https://github.com/Python-roborock/python-roborock/pull/215), [`6ae69e9`](https://github.com/Python-roborock/python-roborock/commit/6ae69e9bcba6a98736f2f480114922186f6ca458)) ## v2.2.3 (2024-06-04) ### Bug Fixes - S8 maxv has a wash and fill dock ([#213](https://github.com/Python-roborock/python-roborock/pull/213), [`018fd05`](https://github.com/Python-roborock/python-roborock/commit/018fd052360dffd238919e336943809720457c4e)) ### Chores - Add load multi map parameter to docs(#209) ([`2cee5d7`](https://github.com/Python-roborock/python-roborock/commit/2cee5d7e065473232caacf1531c38e83506f0c5b)) - Update documentation for reset_consumable ([#207](https://github.com/Python-roborock/python-roborock/pull/207), [`4071538`](https://github.com/Python-roborock/python-roborock/commit/40715387f5eac6788d198ffefad0c1d25b7c7138)) Document parameter for API function reset_consumable ## v2.2.2 (2024-05-16) ### Bug Fixes - Handle weird clean record response ([#206](https://github.com/Python-roborock/python-roborock/pull/206), [`07ce71a`](https://github.com/Python-roborock/python-roborock/commit/07ce71a2cd8085136952bd7639f6f4a2e273faf9)) ## v2.2.1 (2024-05-11) ### Bug Fixes - Add missing value "high = 203" to RoborockMopIntensityS8MaxVUltra ([#205](https://github.com/Python-roborock/python-roborock/pull/205), [`886b0e6`](https://github.com/Python-roborock/python-roborock/commit/886b0e6a8a4b98ff74964d59f4c8c0fbbf569688)) ## v2.2.0 (2024-05-09) ### Features - Improve some typing ([#204](https://github.com/Python-roborock/python-roborock/pull/204), [`7752db9`](https://github.com/Python-roborock/python-roborock/commit/7752db9066fa49bb93a6268a491e2a0baa608cfc)) ## v2.1.1 (2024-05-08) ### Bug Fixes - Set roommapping when it is only one room ([#203](https://github.com/Python-roborock/python-roborock/pull/203), [`26af66b`](https://github.com/Python-roborock/python-roborock/commit/26af66bd5d8dbfa4c94a9add317ccc9ca9161510)) * fix: set roommapping when it is only one room * fix: add len check ## v2.1.0 (2024-05-08) ### Features - Add s8_maxv_ultra info ([#202](https://github.com/Python-roborock/python-roborock/pull/202), [`aaaf0f0`](https://github.com/Python-roborock/python-roborock/commit/aaaf0f0c381924524a079f600de14db1cd61ed45)) ## v2.0.0 (2024-04-11) ### Features - Add zeo support and fix some a01 weirdness ([#200](https://github.com/Python-roborock/python-roborock/pull/200), [`e825ff5`](https://github.com/Python-roborock/python-roborock/commit/e825ff5811516b4034e9b41769e5912c99cf0166)) * major: add A01 * chore: add init * chore: fix commitlint? * chore: fix commitlint * chore: change refactor to be major tag * refactor: add A01 * feat: add a01 BREAKING CHANGE: You must now specify what version api you want to use with clients. * feat: add initial zeo support * fix: fix A01 support * fix: allow messages to fail * fix: lint * feat: add more zeo things ### Breaking Changes - You must now specify what version api you want to use with clients. 
## v1.0.0 (2024-04-09) ### Chores - Move more things around in version 1 api ([#198](https://github.com/Python-roborock/python-roborock/pull/198), [`30d2577`](https://github.com/Python-roborock/python-roborock/commit/30d257756f35b9fc71d64d0479b872661b9176a6)) * chore: move more things around in version 1 api * fix: tests ### Refactoring - Add A01 ([#199](https://github.com/Python-roborock/python-roborock/pull/199), [`16b9e3e`](https://github.com/Python-roborock/python-roborock/commit/16b9e3e8261db3ec38d6bc24661ecf40c6bb0870)) * major: add A01 * chore: add init * chore: fix commitlint? * chore: fix commitlint * chore: change refactor to be major tag * refactor: add A01 * feat: add a01 BREAKING CHANGE: You must now specify what version api you want to use with clients. ### Breaking Changes - You must now specify what version api you want to use with clients. ## v0.41.0 (2024-03-06) ### Features - Add v1 api ([#194](https://github.com/Python-roborock/python-roborock/pull/194), [`9fb124e`](https://github.com/Python-roborock/python-roborock/commit/9fb124ecdd0a979ff8f2c742eb4dd625b7e9292f)) * feat: add v1 api * fix: change some imports * fix: bug and versioning * chore: move location of v1 * fix: random exception ## v0.40.0 (2024-03-03) ### Features - Add nonce to diagnostic data ([#195](https://github.com/Python-roborock/python-roborock/pull/195), [`ceafcb6`](https://github.com/Python-roborock/python-roborock/commit/ceafcb6e30c60f6f6ad3833ab73861c18413b806)) ## v0.39.2 (2024-02-26) ### Bug Fixes - Bump construct and add wm category ([#192](https://github.com/Python-roborock/python-roborock/pull/192), [`2f18b35`](https://github.com/Python-roborock/python-roborock/commit/2f18b35755776844e266c893b126a830622afd43)) ## v0.39.1 (2024-01-24) ### Bug Fixes - Remove problematic code ([#189](https://github.com/Python-roborock/python-roborock/pull/189), [`a9e12ca`](https://github.com/Python-roborock/python-roborock/commit/a9e12ca122b467d74e9cd29dc031802cf0f551bc)) ## v0.39.0 (2024-01-03) ### Chores - Added code from decompiled react and refactoring web api ([#176](https://github.com/Python-roborock/python-roborock/pull/176), [`dab105c`](https://github.com/Python-roborock/python-roborock/commit/dab105c58d11f7789b5f11dd962dd916d5436ced)) * chore: added code from decompiled react and refactoring web api * fix: patches * fix: patch * chore: add info from new_feature_info - Update api_commands.rst app_goto_target ([#163](https://github.com/Python-roborock/python-roborock/pull/163), [`9c83c77`](https://github.com/Python-roborock/python-roborock/commit/9c83c77c732943b2cb9481442afddc3b1ba241c3)) ### Features - Add async_release ([#179](https://github.com/Python-roborock/python-roborock/pull/179), [`ae58627`](https://github.com/Python-roborock/python-roborock/commit/ae58627bda324c29090b7c4ab78776288a30a64d)) ## v0.38.0 (2023-12-11) ### Features - Add information from product api ([#158](https://github.com/Python-roborock/python-roborock/pull/158), [`22720ae`](https://github.com/Python-roborock/python-roborock/commit/22720aee79e582328ae642e61d57dc2e3a92ec1c)) * fix: add information from product api * feat: add dyad protocol ## v0.37.0 (2023-12-10) ### Features - House keeping, version bumping, doc fixes, doc improvements, v2 home data api ([#157](https://github.com/Python-roborock/python-roborock/pull/157), [`f3ca9b4`](https://github.com/Python-roborock/python-roborock/commit/f3ca9b45d3de3a15c57e134421d3abc11095bc22)) * feat: version bumping, docs improvements, mypy fixes, doc fixes * fix: ci steps * feat: convert 
to v2 of the api * chore: linting, include docs, poetry lock * fix: tests * fix: add ability to remove listener ## v0.36.2 (2023-11-22) ### Bug Fixes - Typing and error checking ([#149](https://github.com/Python-roborock/python-roborock/pull/149), [`d94aa48`](https://github.com/Python-roborock/python-roborock/commit/d94aa48c1e594f7f6cd1cff16da66169368fb86c)) * fix: typing and error checking * chore: lint * fix: merge weirdness ## v0.36.1 (2023-11-08) ### Bug Fixes - Typing for map ([#141](https://github.com/Python-roborock/python-roborock/pull/141), [`64121ee`](https://github.com/Python-roborock/python-roborock/commit/64121eee14e4f0ca24db664b0664aaac5c7332af)) ## v0.36.0 (2023-11-07) ### Features - Update listeners ([#140](https://github.com/Python-roborock/python-roborock/pull/140), [`5498596`](https://github.com/Python-roborock/python-roborock/commit/549859669941e71c8d7ee09a0d4eea9564b4a12f)) * fix: change some typing * fix: include poetry lock * fix: linting * fix: add typing * fix: bugs * fix: none typing * fix: weird merge things * fix: rework listeners and cache a bit more * chore: linting * chore: typo * chore: self listener model * fix: override missing for data protocol ## v0.35.4 (2023-11-03) ### Bug Fixes - Mypy complaints ([#137](https://github.com/Python-roborock/python-roborock/pull/137), [`752e320`](https://github.com/Python-roborock/python-roborock/commit/752e320644449a83a724590628c4011b9d8bacb2)) * fix: change some typing * fix: include poetry lock * fix: linting * fix: add typing * fix: bugs * fix: none typing * Update api.py ## v0.35.3 (2023-10-29) ### Bug Fixes - Typing and versioning ([#134](https://github.com/Python-roborock/python-roborock/pull/134), [`e1dc545`](https://github.com/Python-roborock/python-roborock/commit/e1dc545f20f2a163240eb72d831025cb2ff3ec7c)) * fix: change some typing * fix: include poetry lock * fix: linting ### Chores - **deps**: Bump snok/install-poetry from 1.3.3 to 1.3.4 ([#106](https://github.com/Python-roborock/python-roborock/pull/106), [`1fc0265`](https://github.com/Python-roborock/python-roborock/commit/1fc02658e9d5934c5b5a2e173d7bcba8d8c55c2f)) Bumps [snok/install-poetry](https://github.com/snok/install-poetry) from 1.3.3 to 1.3.4. - [Release notes](https://github.com/snok/install-poetry/releases) - [Commits](https://github.com/snok/install-poetry/compare/v1.3.3...v1.3.4) --- updated-dependencies: - dependency-name: snok/install-poetry dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ## v0.35.2 (2023-10-29) ### Bug Fixes - Error catch and typing ([#133](https://github.com/Python-roborock/python-roborock/pull/133), [`171c302`](https://github.com/Python-roborock/python-roborock/commit/171c30265664b0161db75695d2d30d8b45bbf5b3)) ### Chores - Add some initial documentation ([#94](https://github.com/Python-roborock/python-roborock/pull/94), [`316fc0d`](https://github.com/Python-roborock/python-roborock/commit/316fc0d95f83948da25df0515622913173117ee0)) ## v0.35.1 (2023-10-28) ### Bug Fixes - Add s5 max mop code 207 ([#132](https://github.com/Python-roborock/python-roborock/pull/132), [`adc7ae0`](https://github.com/Python-roborock/python-roborock/commit/adc7ae0bbb75eb5be452efb62ca93de6a5211eef)) ## v0.35.0 (2023-10-18) ### Features - **code_mappings**: Add error n53 cleaning tank full or blocked ([#130](https://github.com/Python-roborock/python-roborock/pull/130), [`ebd57a0`](https://github.com/Python-roborock/python-roborock/commit/ebd57a0b559c0dee605e30eaead58b8433347a84)) Co-authored-by: jalcaras ## v0.34.6 (2023-10-02) ### Bug Fixes - Add missing 207 code ([#127](https://github.com/Python-roborock/python-roborock/pull/127), [`87431a1`](https://github.com/Python-roborock/python-roborock/commit/87431a1f155059a51b1b3e2c8867fe18cc476e16)) ## v0.34.5 (2023-09-29) ### Bug Fixes - Remove alexapy ([#126](https://github.com/Python-roborock/python-roborock/pull/126), [`38ff4eb`](https://github.com/Python-roborock/python-roborock/commit/38ff4eb90a1805ad599f61322d7c3547f465868b)) ## v0.34.4 (2023-09-28) ### Bug Fixes - Parsing potential list of clean record ([#125](https://github.com/Python-roborock/python-roborock/pull/125), [`df7a920`](https://github.com/Python-roborock/python-roborock/commit/df7a920a94a632d9653637e0111b3a955db49356)) ## v0.34.3 (2023-09-24) ### Bug Fixes - Add custom code for p10 ([#123](https://github.com/Python-roborock/python-roborock/pull/123), [`8b57d50`](https://github.com/Python-roborock/python-roborock/commit/8b57d50b0c898ca7d3df7cbdfe3682fd03cf649e)) ## v0.34.2 (2023-09-21) ### Bug Fixes - Make cache not global ([#122](https://github.com/Python-roborock/python-roborock/pull/122), [`e119201`](https://github.com/Python-roborock/python-roborock/commit/e119201f1c700d98e3322653440097c91ef4e14c)) * feat: add datetime parsing in cleanrecord * chore: lint * fix: timezone for non-3.11 * feat: add is_available for ha and here in future * fix: add timeout as a variable and set a longer default timeout for cloud * fix: is_available true by default * fix: status type as class variable * fix: don't update status when it was none before listener * fix: reduce info logs * fix: don't cache device cache * fix: double keepalive * fix: don't continue calling unsupported functions * fix: revert keepalive for now ## v0.34.1 (2023-09-19) ### Bug Fixes - Status reworking ([#121](https://github.com/Python-roborock/python-roborock/pull/121), [`8f4b7d3`](https://github.com/Python-roborock/python-roborock/commit/8f4b7d376d5a475798782496ea52ac9674cb9ae7)) * fix: is_available true by default * fix: status type as class variable * fix: don't update status when it was none before listener * fix: reduce info logs ## v0.34.0 (2023-09-12) ### Chores - Add pyupgrade to ruff ([#118](https://github.com/Python-roborock/python-roborock/pull/118), [`360b240`](https://github.com/Python-roborock/python-roborock/commit/360b240ab89862f8003ece11833e50846b279259)) * chore: add pyupgrade to 
ruff * chore: make ruff and isort play nice ### Features - Add datetime parsing in cleanrecord ([#119](https://github.com/Python-roborock/python-roborock/pull/119), [`5e67fa6`](https://github.com/Python-roborock/python-roborock/commit/5e67fa648478e573239c2f1dfc4b58c01cae1797)) * feat: add datetime parsing in cleanrecord * fix: timezone for non-3.11 * feat: add is_available for ha and here in future * fix: add timeout as a variable and set a longer default timeout for cloud ## v0.33.2 (2023-09-06) ### Bug Fixes - Add missing s5 codes ([#116](https://github.com/Python-roborock/python-roborock/pull/116), [`4d56021`](https://github.com/Python-roborock/python-roborock/commit/4d560216354fab4ab8b1d452dd6b29008b20d50a)) * fix: add missing codes for s5 max * chore: lint ## v0.33.1 (2023-09-06) ### Bug Fixes - Unknow values on HA component ([#117](https://github.com/Python-roborock/python-roborock/pull/117), [`1323618`](https://github.com/Python-roborock/python-roborock/commit/1323618c6c58bb6dcef5c7f5f2ca12e32969ba0f)) * feat add Q REVO support (RoborockFanSpeedP10 + RoborockMopModeP10) * feat add Q REVO support (model ROBOROCK_P10/roborock.vacuum.a75) * feat add Q REVO support (P10Status) * feat add Q REVO support (status data) * fix(P10Status): Change RoborockMopModeP10 by RoborockMopModeS8ProUltra * fix(RoborockMopModeP10): Remove * fix: change ordering of imports * fix: change q_revo->p10 to be consistent with entire code * fix: for HA component(items: dock_mop_wash_mode_interval, dock_washing_mode) stuck at "unknow" value when using P10 --------- Co-authored-by: jalcaras Co-authored-by: jalcaras Co-authored-by: Luke ## v0.33.0 (2023-09-04) ### Features - Add q revo/p10 support ([#114](https://github.com/Python-roborock/python-roborock/pull/114), [`b2237d9`](https://github.com/Python-roborock/python-roborock/commit/b2237d97384d819cbcc62902bbcbb2c7dbe0072e)) * feat add Q REVO support (RoborockFanSpeedP10 + RoborockMopModeP10) * feat add Q REVO support (model ROBOROCK_P10/roborock.vacuum.a75) * feat add Q REVO support (P10Status) * feat add Q REVO support (status data) * fix(P10Status): Change RoborockMopModeP10 by RoborockMopModeS8ProUltra * fix(RoborockMopModeP10): Remove * fix: change ordering of imports --------- Co-authored-by: jalcaras Co-authored-by: jalcaras Co-authored-by: Luke ## v0.32.4 (2023-08-30) ### Bug Fixes - Refactor cache and call get_status after changing mop mode ([#105](https://github.com/Python-roborock/python-roborock/pull/105), [`8bf70f4`](https://github.com/Python-roborock/python-roborock/commit/8bf70f4f8b3cabe846bffdc3dd3300f9f621ae97)) ### Chores - **deps**: Bump wagoid/commitlint-github-action from 5.4.1 to 5.4.3 ([#96](https://github.com/Python-roborock/python-roborock/pull/96), [`2da7b38`](https://github.com/Python-roborock/python-roborock/commit/2da7b3865bb1693b7ce655bf0d44090753aa5a52)) ## v0.32.3 (2023-08-05) ### Bug Fixes - Resolve unawaited task errors on connect/disconnect ([#103](https://github.com/Python-roborock/python-roborock/pull/103), [`1ad03be`](https://github.com/Python-roborock/python-roborock/commit/1ad03befa84f9b729a0cc7553b794fe5344a22ce)) * fix: resolve unawaited task errors on connect/disconnect * chore: make lint happy ## v0.32.2 (2023-08-04) ### Bug Fixes - Waiting queue ([`ff5376b`](https://github.com/Python-roborock/python-roborock/commit/ff5376be3a4ff4eb90e33118db89214ef699dc6f)) ## v0.32.1 (2023-08-04) ### Bug Fixes - Remove coroutine warning 
([`da83078`](https://github.com/Python-roborock/python-roborock/commit/da83078f7ef8f333fa46b75603ce8a88bb97914d)) ## v0.32.0 (2023-08-03) ### Chores - Lint ([`d158dcc`](https://github.com/Python-roborock/python-roborock/commit/d158dcc2c44d2d529e762d95815dc854b5ed674e)) ### Features - Adding device_id to listeners and fixing race condition on connection, disconnection and messages ([`2bee8a1`](https://github.com/Python-roborock/python-roborock/commit/2bee8a11ad30cd4a3c186a4c0a619838adc83a53)) ## v0.31.1 (2023-08-02) ### Bug Fixes - Add error code for invalid credentials ([#101](https://github.com/Python-roborock/python-roborock/pull/101), [`703f48b`](https://github.com/Python-roborock/python-roborock/commit/703f48b66cfd32d20e74eaa959a66cd736ca38c8)) ## v0.31.0 (2023-07-31) ### Features - Add device name to logs ([#100](https://github.com/Python-roborock/python-roborock/pull/100), [`7690d56`](https://github.com/Python-roborock/python-roborock/commit/7690d5644181abb5fb7681d6c1764e2f8750c4b5)) ## v0.30.3 (2023-07-31) ### Bug Fixes - Adding no dustbin to docker errors ([`0e28628`](https://github.com/Python-roborock/python-roborock/commit/0e286280edda21a3b95c656d5bc358cd4229d075)) ## v0.30.2 (2023-07-21) ### Bug Fixes - Possible solution for future invalid state ([`8ac4e72`](https://github.com/Python-roborock/python-roborock/commit/8ac4e72372f26105423213bb85d4c33d7951af4d)) ## v0.30.1 (2023-07-18) ### Bug Fixes - Add missing s8 pro mop code and q revo dock ([#92](https://github.com/Python-roborock/python-roborock/pull/92), [`5d75c3b`](https://github.com/Python-roborock/python-roborock/commit/5d75c3b794db231e07f8b6693f2a96b132f737ce)) ### Chores - **deps**: Bump relekang/python-semantic-release from 7.34.6 to 8.0.0 ([#89](https://github.com/Python-roborock/python-roborock/pull/89), [`9677018`](https://github.com/Python-roborock/python-roborock/commit/96770184e953598e6232dbed4e6d39466f7d7465)) ## v0.30.0 (2023-07-10) ### Bug Fixes - Add missing dock for s7 max ultra ([#88](https://github.com/Python-roborock/python-roborock/pull/88), [`10aff22`](https://github.com/Python-roborock/python-roborock/commit/10aff22bc1e6d17b1b6c2587ebefcfd1d9fb7be7)) - Listeners getting protocol data before it exists. 
([#87](https://github.com/Python-roborock/python-roborock/pull/87), [`3d68ea4`](https://github.com/Python-roborock/python-roborock/commit/3d68ea4326da827f17a32b2b5645f1e1e43f3eca)) * fix: listeners getting protocol data before it exists * fix: optimize code ### Features - Created strong foundation for docs ([#86](https://github.com/Python-roborock/python-roborock/pull/86), [`ef88edd`](https://github.com/Python-roborock/python-roborock/commit/ef88eddb8b582f5ad958d8135964e39ba6a05c91)) ## v0.29.2 (2023-06-28) ### Bug Fixes - Downgrade construct ([#84](https://github.com/Python-roborock/python-roborock/pull/84), [`920f59f`](https://github.com/Python-roborock/python-roborock/commit/920f59f1fad2790084ee001225bbaff2e21b3f91)) ## v0.29.1 (2023-06-27) ### Bug Fixes - Adding scene commands ([`fddbe50`](https://github.com/Python-roborock/python-roborock/commit/fddbe508f177dc6bc336223007018f501709c995)) ## v0.29.0 (2023-06-26) ### Features - Adding server timer and retry command compatibility ([`1a1565b`](https://github.com/Python-roborock/python-roborock/commit/1a1565b1f2eb57fa373c9298dd2501a13914bb0a)) ## v0.28.0 (2023-06-26) ### Features - Adding status and consumable listeners ([#83](https://github.com/Python-roborock/python-roborock/pull/83), [`ebdbc90`](https://github.com/Python-roborock/python-roborock/commit/ebdbc907f1f1a2a91ad10953ca6e70b91b9664dd)) * feat: adding status and consumable listeners * fix: api tests * chore: linting ## v0.27.2 (2023-06-22) ### Bug Fixes - Cache concurrency ([`7dd3aa4`](https://github.com/Python-roborock/python-roborock/commit/7dd3aa4933248ede6230a82e6d14e30e8009e27c)) ## v0.27.1 (2023-06-22) ### Bug Fixes - Improving cache and refactoring ([`e88854d`](https://github.com/Python-roborock/python-roborock/commit/e88854d3c6c9109e9fbb4e8ecd3d0ee4ad5d53ff)) ## v0.27.0 (2023-06-22) ### Features - Improving cache and refactoring ([#82](https://github.com/Python-roborock/python-roborock/pull/82), [`e6d48af`](https://github.com/Python-roborock/python-roborock/commit/e6d48af4e1c83fe79104d368918613ac0b332cbb)) ## v0.26.2 (2023-06-21) ### Bug Fixes - #81 - cli raising exception for diagnostic data ([`690b316`](https://github.com/Python-roborock/python-roborock/commit/690b316de35c970454a45418682c82d752b81201)) ## v0.26.1 (2023-06-20) ### Bug Fixes - Changelog ([#80](https://github.com/Python-roborock/python-roborock/pull/80), [`5c4928b`](https://github.com/Python-roborock/python-roborock/commit/5c4928b2d414b9decc1a454348e38d29aeb505fa)) ## v0.26.0 (2023-06-20) ### Chores - Update pyproject ([#79](https://github.com/Python-roborock/python-roborock/pull/79), [`cad97da`](https://github.com/Python-roborock/python-roborock/commit/cad97da7924288524993b32f2d2cd7d71abccee6)) - **deps**: Bump relekang/python-semantic-release from 7.34.4 to 7.34.6 ([#78](https://github.com/Python-roborock/python-roborock/pull/78), [`cebc9d2`](https://github.com/Python-roborock/python-roborock/commit/cebc9d28aa5222e78670bab5e19e162774a9a73f)) ### Features - Adding command cache ([#77](https://github.com/Python-roborock/python-roborock/pull/77), [`505f5e4`](https://github.com/Python-roborock/python-roborock/commit/505f5e45a56e98c248a38236ae3f02908583de12)) * feat: adding command cache * chore: typo * fix: dependencies * feat: adding cache evict time ## v0.25.2 (2023-06-17) ### Bug Fixes - Downgrading construct version ([`d5148ce`](https://github.com/Python-roborock/python-roborock/commit/d5148ce8fc553f73819a9f03c7688d53100bdcd9)) - Moving back to python 3.10 due to python-semantic-release incompatibility 
([`8ab9352`](https://github.com/Python-roborock/python-roborock/commit/8ab9352adb2cb82c24057bef3107b28d3a157087)) - Removing python 10 tests ([`46e258b`](https://github.com/Python-roborock/python-roborock/commit/46e258bc495123c8e8325a731e353f3bc5ce3e0c)) ## v0.25.1 (2023-06-16) ### Bug Fixes - Python-semantic-release python version ([`845da45`](https://github.com/Python-roborock/python-roborock/commit/845da456a0d59765d08962fee007b63c8d0c50eb)) ## v0.25.0 (2023-06-16) ### Bug Fixes - Remove dnd timer and valley electricity from props ([#75](https://github.com/Python-roborock/python-roborock/pull/75), [`2035af5`](https://github.com/Python-roborock/python-roborock/commit/2035af5d524605fcbd0b87e20f256c1c61ca9c68)) * fix: remove dnd timer and valley electricity from props * fix: linting * fix: clear out old keep alive before adding new one * chore: remove keep_alive_task * fix: add storing of dnd and valley in api - Remove python 10 from tests ([`31fc34c`](https://github.com/Python-roborock/python-roborock/commit/31fc34c22ad9e5f06b588e6b283412902bd2959d)) - Semantic release ([#76](https://github.com/Python-roborock/python-roborock/pull/76), [`224a566`](https://github.com/Python-roborock/python-roborock/commit/224a5662d2dbdf47d5141554733a9b4aeaf8d4f2)) * fix: remove dnd timer and valley electricity from props * fix: linting * fix: clear out old keep alive before adding new one * chore: remove keep_alive_task * fix: add storing of dnd and valley in api * 0.24.2 Automatically generated by python-semantic-release * fix: add dirty tank latch error ### Chores - Add dependabot ([#70](https://github.com/Python-roborock/python-roborock/pull/70), [`cff6871`](https://github.com/Python-roborock/python-roborock/commit/cff6871012370bc8c1aaeefbea32f08c3a8d21f6)) * add dependabot * chore: update dependabot ignore - Manually releasing 0.24.1 ([`0ab69b3`](https://github.com/Python-roborock/python-roborock/commit/0ab69b3cdfb1697fdd7edb9a644f296f1dfa10a2)) - Updating ci.yml ([`d4c2714`](https://github.com/Python-roborock/python-roborock/commit/d4c2714a5800c38333d292f1bef0c17a38326e40)) - **deps**: Bump wagoid/commitlint-github-action from 5.3.0 to 5.4.1 ([#71](https://github.com/Python-roborock/python-roborock/pull/71), [`951dd5c`](https://github.com/Python-roborock/python-roborock/commit/951dd5c13030e0bc15256d414ed8e11235ff192b)) Bumps [wagoid/commitlint-github-action](https://github.com/wagoid/commitlint-github-action) from 5.3.0 to 5.4.1. - [Changelog](https://github.com/wagoid/commitlint-github-action/blob/master/CHANGELOG.md) - [Commits](https://github.com/wagoid/commitlint-github-action/compare/v5.3.0...v5.4.1) --- updated-dependencies: - dependency-name: wagoid/commitlint-github-action dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> - **deps**: Update pycryptodome requirement ([#73](https://github.com/Python-roborock/python-roborock/pull/73), [`52dd451`](https://github.com/Python-roborock/python-roborock/commit/52dd451b57e7d292c6f8f01f1777f7a5cb88918b)) Updates the requirements on [pycryptodome](https://github.com/Legrandin/pycryptodome) to permit the latest version. 
- [Release notes](https://github.com/Legrandin/pycryptodome/releases) - [Changelog](https://github.com/Legrandin/pycryptodome/blob/master/Changelog.rst) - [Commits](https://github.com/Legrandin/pycryptodome/compare/v3.17.0...v3.18.0) --- updated-dependencies: - dependency-name: pycryptodome dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ### Features - Bump python version ([`aae48b1`](https://github.com/Python-roborock/python-roborock/commit/aae48b1395698136ca90b7fe7386a1b6ea8aaa9c)) ## v0.24.1 (2023-06-14) ### Bug Fixes - Device_prop update ([`b6d1ccc`](https://github.com/Python-roborock/python-roborock/commit/b6d1ccc913cff1a7e25745867435146e9f748df7)) - Python-semantic-release ([`80e9c24`](https://github.com/Python-roborock/python-roborock/commit/80e9c24a39f3147b0fbc0a5437631777ab52b027)) ### Chores - Manually releasing 0.24.0 ([`0a08c97`](https://github.com/Python-roborock/python-roborock/commit/0a08c972dae32a8d5670fd049b8220a4af1d3307)) ## v0.24.0 (2023-06-14) ### Features - Adding valley_electricity_timer to props ([`0844067`](https://github.com/Python-roborock/python-roborock/commit/08440670a7fb098f5f3954e2ad09f9a32e64a54e)) ## v0.23.6 (2023-06-08) ### Bug Fixes - Add datetime_time back ([#68](https://github.com/Python-roborock/python-roborock/pull/68), [`a3461dd`](https://github.com/Python-roborock/python-roborock/commit/a3461dd0a08702add2625df8616ba20d239805ce)) ### Chores - Linting ([`90f905d`](https://github.com/Python-roborock/python-roborock/commit/90f905d331125c8536ab1db29444685fcf8bf196)) ## v0.23.5 (2023-06-08) ### Bug Fixes - Issue building roborock message ([`89e1f28`](https://github.com/Python-roborock/python-roborock/commit/89e1f28461baaf03029679aed5f91200bb7dac4e)) ## v0.23.4 (2023-06-06) ### Bug Fixes - Adding method parse_datetime_to_roborock_datetime ([`64c8159`](https://github.com/Python-roborock/python-roborock/commit/64c8159a9695374a4b0599a317418949bdd8f3fe)) ### Chores - Fix mypy ([`c0e7997`](https://github.com/Python-roborock/python-roborock/commit/c0e7997c61f9878436ae65aa8530b1c08b503ed9)) ## v0.23.3 (2023-06-05) ### Bug Fixes - Parse_time_to_datetime method ([`d0fc149`](https://github.com/Python-roborock/python-roborock/commit/d0fc1498e20217d28703455937f760ba45053c61)) ## v0.23.2 (2023-06-05) ### Bug Fixes - Parse_time_to_datetime method ([`bcbc211`](https://github.com/Python-roborock/python-roborock/commit/bcbc2117dd306c21495c1f3364aa3205b3c5cfce)) ## v0.23.1 (2023-06-05) ### Bug Fixes - Parse_time_to_datetime method ([`1c39216`](https://github.com/Python-roborock/python-roborock/commit/1c39216c0ee6a29d350d08adc5d662d8669f85cf)) ## v0.23.0 (2023-06-05) ### Bug Fixes - Merging timer entities ([`22ff7f4`](https://github.com/Python-roborock/python-roborock/commit/22ff7f451166bcfda360552e92d661d0520886ae)) ### Chores - Linting ([`9e2a3c5`](https://github.com/Python-roborock/python-roborock/commit/9e2a3c5f2908c3e69e14bda239112cc6d8bbca15)) ### Features - Add diagnostic data and extra containers ([#67](https://github.com/Python-roborock/python-roborock/pull/67), [`59ef6f4`](https://github.com/Python-roborock/python-roborock/commit/59ef6f4d5366859ba5d02ba66ec1aa2288564179)) * feat: add diagnostic data and extra containers * fix: lint * fix: dock summary as roborockbase * fix: make deviceprop RoborockBase * merge in changes ## v0.22.0 (2023-06-05) ### Features - Adding type cast for send_command 
([`4a0b709`](https://github.com/Python-roborock/python-roborock/commit/4a0b70997080012e3059150da2b12fb47f6ef43a)) ## v0.21.1 (2023-06-05) ### Bug Fixes - Cli json serializing ([#66](https://github.com/Python-roborock/python-roborock/pull/66), [`ab13b53`](https://github.com/Python-roborock/python-roborock/commit/ab13b53a15822067112edda285c6feddf389a8b8)) ## v0.21.0 (2023-06-04) ### Features - Add time datetime for valley ([#65](https://github.com/Python-roborock/python-roborock/pull/65), [`c965862`](https://github.com/Python-roborock/python-roborock/commit/c965862f5b8b1f4dfbc83738cdebc1e11122c387)) ## v0.20.2 (2023-06-02) ### Bug Fixes - S6maxvstatus and minor changes ([`01f84ae`](https://github.com/Python-roborock/python-roborock/commit/01f84ae741dd3c9fa3bc5932b718abebcc8e3f0f)) ## v0.20.1 (2023-06-01) ### Bug Fixes - S8 model name and adding api methods get_child_lock_status and get_sound_volume ([`a3b7cee`](https://github.com/Python-roborock/python-roborock/commit/a3b7cee63a70746ac3db5e5cee37c5b507b99478)) ## v0.20.0 (2023-05-31) ### Features - Adds code for duct blockage ([#64](https://github.com/Python-roborock/python-roborock/pull/64), [`84dd5fb`](https://github.com/Python-roborock/python-roborock/commit/84dd5fbdefebe4b33c6bae6879137847522b1bfb)) ## v0.19.0 (2023-05-31) ### Features - Moving clean area to api ([#63](https://github.com/Python-roborock/python-roborock/pull/63), [`7ade218`](https://github.com/Python-roborock/python-roborock/commit/7ade218e3efd44159c6ad40cd88933385bbd1496)) ## v0.18.10 (2023-05-30) ### Bug Fixes - Dict with enum instead of value ([`9653c50`](https://github.com/Python-roborock/python-roborock/commit/9653c50f31b03ce2d3d21e2042d5c194924f4aca)) ## v0.18.9 (2023-05-28) ### Bug Fixes - Mqtt reconnections ([`462d4e4`](https://github.com/Python-roborock/python-roborock/commit/462d4e4a30372c143c9198c7008808ca11800af5)) ### Chores - Linting ([`f850cd1`](https://github.com/Python-roborock/python-roborock/commit/f850cd1f7d10b774516e76f3dac1ba2fec254ad7)) ## v0.18.8 (2023-05-28) ### Bug Fixes - Improve device ping ([`56e4469`](https://github.com/Python-roborock/python-roborock/commit/56e4469c95ac9255604025df99f0d6ac1940dd19)) ## v0.18.7 (2023-05-27) ### Bug Fixes - Change e2 fan codes ([#62](https://github.com/Python-roborock/python-roborock/pull/62), [`7231f1e`](https://github.com/Python-roborock/python-roborock/commit/7231f1efc412f93bfb5719091337536bcb6185d6)) * fix: change e2 fan codes * fix: linting * fix: incorrect balanced code ## v0.18.6 (2023-05-19) ### Bug Fixes - Consumables with time equals 0 ([`ccab5f0`](https://github.com/Python-roborock/python-roborock/commit/ccab5f0724854ae27bbc51b9ee33f2a96ce709f1)) ## v0.18.5 (2023-05-16) ### Bug Fixes - Connection_lost ([`c2ba673`](https://github.com/Python-roborock/python-roborock/commit/c2ba673f2c198bc78e75e1cf6fc9844e385e85bb)) ## v0.18.4 (2023-05-16) ### Bug Fixes - Minor fixes ([`e4a291d`](https://github.com/Python-roborock/python-roborock/commit/e4a291dd2b011e5852c992dbb23068ef5dde0e52)) ## v0.18.3 (2023-05-15) ### Bug Fixes - Keep_alive_func ([`e4aeebc`](https://github.com/Python-roborock/python-roborock/commit/e4aeebc16317a5c9fe3ffcd3bff89be1f2070dbb)) ### Chores - Linting ([`dbffaab`](https://github.com/Python-roborock/python-roborock/commit/dbffaaba59214015a9b721347331b37ff38fb941)) ## v0.18.2 (2023-05-15) ### Bug Fixes - Adding hello command ([`dfa44ff`](https://github.com/Python-roborock/python-roborock/commit/dfa44ff56a794f30e7c93d0a9a270f2a02da7e65)) - Improving new protocols 
([`08c6f95`](https://github.com/Python-roborock/python-roborock/commit/08c6f9530b202d17ef80047c2d60836f9f9b8422)) ## v0.18.1 (2023-05-15) ### Bug Fixes - Type checks ([`58b3322`](https://github.com/Python-roborock/python-roborock/commit/58b33225b50a221a5f3100055fe28461f5cff884)) ## v0.18.0 (2023-05-15) ### Features - Keep connection alive ([`691b04b`](https://github.com/Python-roborock/python-roborock/commit/691b04b0135a38cc6b150e284d96e217f18f7f46)) ## v0.17.8 (2023-05-15) ### Bug Fixes - Trying to fix connection leaks ([`a66482a`](https://github.com/Python-roborock/python-roborock/commit/a66482a22cba9a6e7cc449c3f35acc1f230cd211)) ## v0.17.7 (2023-05-15) ### Bug Fixes - Ignoring get_room_mapping for int list response ([`c71d3b5`](https://github.com/Python-roborock/python-roborock/commit/c71d3b549a8dd09d08d1d27cde6882298875269c)) ## v0.17.6 (2023-05-13) ### Bug Fixes - Using cache only a single time ([`1ebfb35`](https://github.com/Python-roborock/python-roborock/commit/1ebfb35b9fe9ec50d4abeb60c695d33a37818768)) ## v0.17.5 (2023-05-12) ### Bug Fixes - Adding log for local disconnection ([`3001798`](https://github.com/Python-roborock/python-roborock/commit/300179839ec6a25e4ab8172f2c11e8beb0ff17ce)) ## v0.17.4 (2023-05-12) ### Bug Fixes - Pycharm typing ([`12d7c0b`](https://github.com/Python-roborock/python-roborock/commit/12d7c0b71bdeae90e9abbc6a16de3e07ebaa82da)) ## v0.17.3 (2023-05-12) ### Bug Fixes - Trigger new release ([`270a65c`](https://github.com/Python-roborock/python-roborock/commit/270a65c24a847cdc58a630e6d6c8e296910de8ea)) ## v0.17.2 (2023-05-11) ### Bug Fixes - Adding fallback cache (to be tested) ([`0e214cd`](https://github.com/Python-roborock/python-roborock/commit/0e214cd0633e9b9baca3323cc505a4f787aa08fb)) - Fallback_cache func ([`8048d84`](https://github.com/Python-roborock/python-roborock/commit/8048d843f669b06960967918570201498e4ae051)) ### Chores - Linting ([`2263190`](https://github.com/Python-roborock/python-roborock/commit/226319078162796c186bcd0bef46b961153e0435)) ## v0.17.1 (2023-05-11) ### Bug Fixes - Improving logs ([`cdd0ea7`](https://github.com/Python-roborock/python-roborock/commit/cdd0ea75d4e336c8f918a79574fd7b642eaffeec)) ## v0.17.0 (2023-05-11) ### Features - Dynamic calculated prefixes ([`d57a0a7`](https://github.com/Python-roborock/python-roborock/commit/d57a0a7d31f851b6bf4381233a84187d19e5782f)) ## v0.16.1 (2023-05-10) ### Bug Fixes - Connection timeouts ([`36a7295`](https://github.com/Python-roborock/python-roborock/commit/36a7295ce878dd0649505dd4a5b5ad662f0655fd)) ## v0.16.0 (2023-05-10) ### Chores - Adding package_parser.py ([`c6cc29b`](https://github.com/Python-roborock/python-roborock/commit/c6cc29b86418c7ed62f30a5684f5a95a6a712834)) - Fix readthedocs ([#59](https://github.com/Python-roborock/python-roborock/pull/59), [`b747ad8`](https://github.com/Python-roborock/python-roborock/commit/b747ad89ec1180ceffc4130d1be1ce9dee203f98)) - Linting ([`3eaed1d`](https://github.com/Python-roborock/python-roborock/commit/3eaed1d48293f474e65914c17c93ea54b7c0a9a5)) ### Features - Adding pcap file parser to cli ([`798287a`](https://github.com/Python-roborock/python-roborock/commit/798287a5100a3e973524aae6dd9404c0af354c11)) ## v0.15.0 (2023-05-09) ### Bug Fixes - Add int for clean summary ([#57](https://github.com/Python-roborock/python-roborock/pull/57), [`4257aa7`](https://github.com/Python-roborock/python-roborock/commit/4257aa7888178703d1b38ed00c12ef932ca1e862)) ### Features - Add docs ([#58](https://github.com/Python-roborock/python-roborock/pull/58), 
[`959abe1`](https://github.com/Python-roborock/python-roborock/commit/959abe1f3b2be0bfb8705d1bc1f9cbe966577540)) ## v0.14.1 (2023-05-09) ### Bug Fixes - Add types for S8 ([#56](https://github.com/Python-roborock/python-roborock/pull/56), [`125b6e7`](https://github.com/Python-roborock/python-roborock/commit/125b6e728145fde39f49fa6b80168bb985f2cc43)) * fix: add types for S8 * fix: lint ## v0.14.0 (2023-05-08) ### Features - Add more codes for status ([#55](https://github.com/Python-roborock/python-roborock/pull/55), [`cddd765`](https://github.com/Python-roborock/python-roborock/commit/cddd765aa15e31ae50db5a6b29ff6988050aa5cc)) ## v0.13.4 (2023-05-05) ### Bug Fixes - Command prefixes ([`65c5db8`](https://github.com/Python-roborock/python-roborock/commit/65c5db834baadc4c1a61704bd2279c48dd0f6074)) ## v0.13.3 (2023-05-05) ### Bug Fixes - Roborock enum ([`ae0b93e`](https://github.com/Python-roborock/python-roborock/commit/ae0b93ee0f0fc9c62c3f40b436ece209938e9e6c)) ### Chores - Linting ([`250d5fc`](https://github.com/Python-roborock/python-roborock/commit/250d5fcc0a320604ee25519764bd7ac1872dbd0b)) - Linting ([`fea34d6`](https://github.com/Python-roborock/python-roborock/commit/fea34d63400a94447834ab355d0a023b53e77d7d)) ## v0.13.2 (2023-05-05) ### Bug Fixes - Minor changes ([`522734a`](https://github.com/Python-roborock/python-roborock/commit/522734a4bdcf6555feede24e3e97c6a3a98fa760)) ## v0.13.1 (2023-05-05) ### Bug Fixes - Adding app_start_collect_dust prefix ([`3124d7e`](https://github.com/Python-roborock/python-roborock/commit/3124d7ea6277ec08d8e592448b2a4f8cb60fb7db)) ## v0.13.0 (2023-05-05) ### Features - Add s4_max ([#54](https://github.com/Python-roborock/python-roborock/pull/54), [`e7cfd15`](https://github.com/Python-roborock/python-roborock/commit/e7cfd153b3c41215fd1c85d4968a14d1862c91b5)) ## v0.12.1 (2023-05-05) ### Bug Fixes - Changed incorrect s8 pro ultra string ([`c6a37a9`](https://github.com/Python-roborock/python-roborock/commit/c6a37a97da9279af3a6a24dc0fd01770cdd9b3b1)) fixes #52 ## v0.12.0 (2023-05-05) ### Features - Extending device status by device model ([#51](https://github.com/Python-roborock/python-roborock/pull/51), [`8092b67`](https://github.com/Python-roborock/python-roborock/commit/8092b67b8c9a380cca5178217fde3a61746fcf75)) * feat: extending device status by device model * chore: linting ## v0.11.0 (2023-05-04) ### Features - Add error check for invalid user agreement ([#49](https://github.com/Python-roborock/python-roborock/pull/49), [`0374449`](https://github.com/Python-roborock/python-roborock/commit/0374449d7280c93ceb772b7fbe009c6d19d0c462)) * minor: add error check for invalid user agreement * fix: lint * feat: add no user agreement error * fix: version issue * fix: added account to str ## v0.10.3 (2023-05-04) ### Bug Fixes - Port already in use ([`e5d71d8`](https://github.com/Python-roborock/python-roborock/commit/e5d71d88f5144c172482cd6ee71d9a5b01dbbe3f)) ## v0.10.2 (2023-05-03) ### Bug Fixes - Change devices fan speed enum to lower case ([`c559d40`](https://github.com/Python-roborock/python-roborock/commit/c559d40183e47ef8698651281ae8946a99cb897e)) - Test errors ([`6a46515`](https://github.com/Python-roborock/python-roborock/commit/6a465157bbf6fa15bc578a1c4b1dffa17a694a92)) ## v0.10.1 (2023-05-03) ### Bug Fixes - Allow discovering multiple devices ([`ada9e07`](https://github.com/Python-roborock/python-roborock/commit/ada9e0723728b1d7e3ccd6dc37cbbe06a3c6a2cc)) ### Chores - Using python construct for data parsing 
([#48](https://github.com/Python-roborock/python-roborock/pull/48), [`71f7f22`](https://github.com/Python-roborock/python-roborock/commit/71f7f2207986cb22c2990ae6d67fd38c2d04b472)) * chore: using python construct for data parsing * chore: linting * fix: roborock message protocol * fix: change local api constructor ## v0.10.0 (2023-05-03) ### Chores - Linting ([`e3f2541`](https://github.com/Python-roborock/python-roborock/commit/e3f25419fcfe00f18e0cca9214c4d50cd5254c80)) ### Features - Add specific device functionality ([#46](https://github.com/Python-roborock/python-roborock/pull/46), [`32abce5`](https://github.com/Python-roborock/python-roborock/commit/32abce5d51d14aab9adef5b9560ceee534186b1a)) * feat: add support for old mop and vacuum codes * fix: linting * feat: using api for single device and adding new commands * fix: using single device api (cherry picked from commit e689e8d141acff998fd524ace923621fc0f91d0c) * chore: linting (cherry picked from commit 2ed367cba5e9b4199fdea935305fb47f85a8c1e7) (cherry picked from commit 58b46835d609794210f8c49daddbc7d25cee011d) * chore: init work * feat: added more device specific * fix: merge issues * feat: finalize specific device work * feat: finished specific device with current info * fix: add fast for S8 * fix: add s8 dock --------- Co-authored-by: humbertogontijo ## v0.9.0 (2023-05-01) ### Chores - Linting ([`a6a55ac`](https://github.com/Python-roborock/python-roborock/commit/a6a55ac4d11d230a0599aeec3d5254895fbaa684)) ### Features - Single device api and discovery method ([`5fef26d`](https://github.com/Python-roborock/python-roborock/commit/5fef26d257433c12d38f6b19731018e54884a150)) ## v0.8.3 (2023-04-28) ### Bug Fixes - Add functionality for missing enum values ([#43](https://github.com/Python-roborock/python-roborock/pull/43), [`49d77f8`](https://github.com/Python-roborock/python-roborock/commit/49d77f8208a65cb0fb86ab7948138df0bf447e45)) * fix: add functionality for missing enum values * fix: temp removed 207 * Revert "chore: linting" This reverts commit 58b46835d609794210f8c49daddbc7d25cee011d. This reverts commit 2ed367cba5e9b4199fdea935305fb47f85a8c1e7. * Revert "fix: using single device api" This reverts commit e689e8d141acff998fd524ace923621fc0f91d0c. 
### Chores - Linting ([`58b4683`](https://github.com/Python-roborock/python-roborock/commit/58b46835d609794210f8c49daddbc7d25cee011d)) - Linting ([`2ed367c`](https://github.com/Python-roborock/python-roborock/commit/2ed367cba5e9b4199fdea935305fb47f85a8c1e7)) ## v0.8.2 (2023-04-27) ### Bug Fixes - Using single device api ([`e689e8d`](https://github.com/Python-roborock/python-roborock/commit/e689e8d141acff998fd524ace923621fc0f91d0c)) ### Chores - Linting ([`2e8e307`](https://github.com/Python-roborock/python-roborock/commit/2e8e307e6d82e045856d2a4ae731feba25005fe4)) ## v0.8.1 (2023-04-27) ### Bug Fixes - Adding keepalive to local connection ([`8ff8d2f`](https://github.com/Python-roborock/python-roborock/commit/8ff8d2f13fd85df96b3b334456799244ac878fbe)) ## v0.8.0 (2023-04-27) ### Features - Added error check and deviceprop functionality for core ([#42](https://github.com/Python-roborock/python-roborock/pull/42), [`746eec9`](https://github.com/Python-roborock/python-roborock/commit/746eec99ae0b6115fea6277f51b546036f7b3f18)) * feat: added update to deviceprop * feat: added time remaining to consumable * feat: added more exception checking * fix: linting * feat: add consumable const ## v0.7.8 (2023-04-26) ### Bug Fixes - Local api failing to send message ([`4cc38fe`](https://github.com/Python-roborock/python-roborock/commit/4cc38fe13df487296efda2a1e962c238e3d69168)) ### Chores - Linting ([`c378036`](https://github.com/Python-roborock/python-roborock/commit/c3780369a2ea237f7ed6f5114d68d55fff6b1386)) ## v0.7.7 (2023-04-26) ### Bug Fixes - Local api recover after command fail ([`cb11f14`](https://github.com/Python-roborock/python-roborock/commit/cb11f14d7b771b31c77dafe6435bcd52527c16a8)) ## v0.7.6 (2023-04-26) ### Bug Fixes - Reset_consumable command prefix ([`a1a8c06`](https://github.com/Python-roborock/python-roborock/commit/a1a8c06d369e33e4ebd42cf6f563b9727d0ce24e)) ### Chores - Linting ([`ac7e15a`](https://github.com/Python-roborock/python-roborock/commit/ac7e15a349aa7a6f438339109189d9d715dfa71d)) - Linting ([`4907044`](https://github.com/Python-roborock/python-roborock/commit/4907044e1933ab8afc30f2289df0ca1130cadb28)) ## v0.7.5 (2023-04-25) ### Bug Fixes - Adding missing prefixes ([`66b1833`](https://github.com/Python-roborock/python-roborock/commit/66b183385c96dd7ee395bff143f2d64ef8fb927a)) ### Chores - Linting ([`41af0e2`](https://github.com/Python-roborock/python-roborock/commit/41af0e2469cb2d9786ceab8fbcfdb4701714db69)) - Linting ([`6d6dff5`](https://github.com/Python-roborock/python-roborock/commit/6d6dff5a0131b9a6735023ce0ac47bc9a0622bc9)) ## v0.7.4 (2023-04-25) ### Bug Fixes - Get_room_mapping ([`459119b`](https://github.com/Python-roborock/python-roborock/commit/459119bee90513451bf10a1abeeccb75f3daa539)) ## v0.7.3 (2023-04-25) ### Bug Fixes - Added missing docks ([#40](https://github.com/Python-roborock/python-roborock/pull/40), [`65a6cc4`](https://github.com/Python-roborock/python-roborock/commit/65a6cc4fd19a30bc78f2c34b407d3d88e3aac2b1)) ## v0.7.2 (2023-04-25) ### Bug Fixes - Command prefixes ([`e792728`](https://github.com/Python-roborock/python-roborock/commit/e7927288cc3059a1eced1a65b31f84190718aaf2)) ## v0.7.1 (2023-04-25) ### Bug Fixes - Command prefixes ([`156ac51`](https://github.com/Python-roborock/python-roborock/commit/156ac5182d1a97c93ab16696099c8c099a19155d)) ## v0.7.0 (2023-04-25) ### Features - Add room mapping ([#41](https://github.com/Python-roborock/python-roborock/pull/41), 
[`aa3e6e4`](https://github.com/Python-roborock/python-roborock/commit/aa3e6e442fbbb679c4eca68840c4d19f9c659fde)) * feat: add room mapping * fix: lint * chore: move room mapping to super class client * chore: linting * Update roborock/api.py Co-authored-by: Humberto Gontijo --------- ## v0.6.17 (2023-04-25) ### Bug Fixes - Adding multi_maps_list to device props ([`7ac0485`](https://github.com/Python-roborock/python-roborock/commit/7ac0485c4a5bb43350c51331323c6773ff1c54fc)) - Removing non-needed classes ([`6ceedad`](https://github.com/Python-roborock/python-roborock/commit/6ceedadf09c20c743c994b07489887e344cd3061)) ## v0.6.16 (2023-04-22) ### Bug Fixes - Improving local integration ([`7657617`](https://github.com/Python-roborock/python-roborock/commit/7657617901d807908e5fd5c364700851b5108ab4)) ## v0.6.15 (2023-04-21) ### Bug Fixes - Get_clean_summary ([`ee81538`](https://github.com/Python-roborock/python-roborock/commit/ee815380a8b70efbac65627fdd69fdf0bb75420e)) ### Chores - Linting ([`0d3b000`](https://github.com/Python-roborock/python-roborock/commit/0d3b00093395a706ec202c5a55639ed9ece54281)) - Linting ([`124fa11`](https://github.com/Python-roborock/python-roborock/commit/124fa115b14430b2a9680d4b1da36f1b70ae85b5)) ## v0.6.14 (2023-04-21) ### Bug Fixes - Get_multi_map_list ([`cfaeb41`](https://github.com/Python-roborock/python-roborock/commit/cfaeb419e188510ade5bc1506214c9b3d2afeb18)) - Linting ([`fdb4484`](https://github.com/Python-roborock/python-roborock/commit/fdb44840741cd6872f7defea70e8f118a9803099)) ## v0.6.13 (2023-04-20) ### Bug Fixes - Check dock_type is not none ([#38](https://github.com/Python-roborock/python-roborock/pull/38), [`84c95e3`](https://github.com/Python-roborock/python-roborock/commit/84c95e3b3bebd940b9cc6cc06b73c1770605c765)) ## v0.6.12 (2023-04-19) ### Bug Fixes - Removed enum type check ([#37](https://github.com/Python-roborock/python-roborock/pull/37), [`585238e`](https://github.com/Python-roborock/python-roborock/commit/585238e505e685e14d867b19819815e7c3e19634)) ## v0.6.11 (2023-04-18) ### Bug Fixes - Lint ([`b0d8996`](https://github.com/Python-roborock/python-roborock/commit/b0d8996d46c2a52f87a8c01eb50fd6aa7bd98ed8)) ## v0.6.10 (2023-04-18) ### Bug Fixes - Lint ([`5ae44e2`](https://github.com/Python-roborock/python-roborock/commit/5ae44e247efca5e9b7958b887f6049f09ae2ced8)) ## v0.6.9 (2023-04-18) ### Bug Fixes - Lint ([`8499522`](https://github.com/Python-roborock/python-roborock/commit/8499522e5fb44abad20af1cfb7a677ca4e03639f)) ## v0.6.8 (2023-04-18) ### Bug Fixes - Lint ([`20bf54b`](https://github.com/Python-roborock/python-roborock/commit/20bf54b0a1834065584bdcb469a3123700c68f1d)) ## v0.6.7 (2023-04-18) ## v0.6.6 (2023-04-17) ### Bug Fixes - Using asyncio future instead of queue ([`1ea5430`](https://github.com/Python-roborock/python-roborock/commit/1ea5430197620dbd2dc87949e4326f24601f4ba8)) ## v0.6.5 (2023-04-13) ### Bug Fixes - Clean_summary for older devices ([`0a0c9e7`](https://github.com/Python-roborock/python-roborock/commit/0a0c9e7c965c183df971e11bd597319c68c8f646)) - Exclude changelog.md from pre-commit ([#36](https://github.com/Python-roborock/python-roborock/pull/36), [`b12c7a2`](https://github.com/Python-roborock/python-roborock/commit/b12c7a229dfdbe0af182d6a120548100b0ca4140)) ### Chores - Fix mypy errors ([#34](https://github.com/Python-roborock/python-roborock/pull/34), [`16bd2d1`](https://github.com/Python-roborock/python-roborock/commit/16bd2d1fab65760670252120fafa4b8e87e968be)) * chore: fix mypy errors * fix: run mypy through pre-commit * fix: 
spacing for ci * fix: tests changes * fix: cli exclusion * fix: add typing for roborockenum * fix: ignore warnings with mqtt.client * fix: more mypy changes * fix: limit cli mypy * fix: ignore type for containers * fix: add pre-commit information to dev poetry dependencies - New styling ([#35](https://github.com/Python-roborock/python-roborock/pull/35), [`55e6426`](https://github.com/Python-roborock/python-roborock/commit/55e6426129ec70f41a019fd9408b227fb8a03b5a)) ## v0.6.4 (2023-04-11) ### Bug Fixes - Disconnect on timeout so next command can work ([`5ad397b`](https://github.com/Python-roborock/python-roborock/commit/5ad397b3bbb4bc600888baba6c0cc15be9d17ef7)) ## v0.6.3 (2023-04-11) ### Bug Fixes - Semantic_release ([`63b249d`](https://github.com/Python-roborock/python-roborock/commit/63b249d65d3fc40b048320e6596aedc40f588bf9)) ## v0.6.2 (2023-04-11) ### Bug Fixes - Error code nogo_zone_detected ([`722e4b5`](https://github.com/Python-roborock/python-roborock/commit/722e4b5cfd0c4891adc506e9fe99740860027670)) ## v0.6.1 (2023-04-10) ### Bug Fixes - Lowercase true ([`774c3cc`](https://github.com/Python-roborock/python-roborock/commit/774c3cc9765ee76a3a553ca6911751124ae7164c)) - Semantic release not updating changelog ([`eaf6e90`](https://github.com/Python-roborock/python-roborock/commit/eaf6e90264b6ab69549da0e5bc3d17c4c0a2c07c)) - Trigger release ([`f1ce0ed`](https://github.com/Python-roborock/python-roborock/commit/f1ce0ed55a254bccd8567b48974ff74dd9ec8b25)) - Trigger release ([`9a4462c`](https://github.com/Python-roborock/python-roborock/commit/9a4462c800762393cc047085156acbe119cd0fe4)) - Trigger release ([`b7a664b`](https://github.com/Python-roborock/python-roborock/commit/b7a664b15b7c5180d816de325537693f47c24860)) - Trigger release ([`9256849`](https://github.com/Python-roborock/python-roborock/commit/9256849252f019f4fea2f59384bc0ea7c57adb5c)) ### Chores - Update gh token ([`f13690d`](https://github.com/Python-roborock/python-roborock/commit/f13690de8c4b5eb3d72809dff66a0caf275476dc)) ## v0.6.0 (2023-04-08) ### Bug Fixes - Changed prefixes for debugged commands ([`0db6b6d`](https://github.com/Python-roborock/python-roborock/commit/0db6b6dc3b7ef1b7721b8a9536affdd08380d916)) ### Features - Add more commands and prefixes ([`fe85dea`](https://github.com/Python-roborock/python-roborock/commit/fe85deaa1acc053c9c18f2b313ff5b812ba0e2c3)) ## v0.5.9 (2023-04-07) ### Bug Fixes - Assume device prop attr can be none ([`573db33`](https://github.com/Python-roborock/python-roborock/commit/573db337664be1f768254e384e3eef6c957955ba)) - Change to dataclass ([`111d762`](https://github.com/Python-roborock/python-roborock/commit/111d7627aa5999fc82cde650326857e51c4dc4a2)) ## v0.5.8 (2023-04-07) ### Bug Fixes - Changed prefix for set_custom_mode ([`d187eb4`](https://github.com/Python-roborock/python-roborock/commit/d187eb467e6c5c969fcaa48dcc7881d75784663d)) ## v0.5.7 (2023-04-07) ## v0.5.6 (2023-04-06) ### Bug Fixes - Create function for creating roborock code ([`2cf00fe`](https://github.com/Python-roborock/python-roborock/commit/2cf00fe607c7b5b544ea9671dabf87454cdb2322)) - Roborockbase.as_dict ([`bf52b44`](https://github.com/Python-roborock/python-roborock/commit/bf52b44b01e93000268c9fa274a3449ac3f82e36)) ## v0.5.5 (2023-04-06) ### Bug Fixes - Fix cloud_api ([`6159412`](https://github.com/Python-roborock/python-roborock/commit/6159412b577efa3544add18982d6a9859ad8225d)) ## v0.5.4 (2023-04-06) ### Bug Fixes - Minor fixes 
([`7579ad5`](https://github.com/Python-roborock/python-roborock/commit/7579ad5266f46102b90be0a7676e5c116f5daefa)) ## v0.5.3 (2023-04-06) ### Bug Fixes - Roborock enum ([`df1262e`](https://github.com/Python-roborock/python-roborock/commit/df1262ef41b2b1cb4fd866cda1527b82723d38cd)) ## v0.5.2 (2023-04-06) ### Bug Fixes - Changing code mappings ([`493ed4b`](https://github.com/Python-roborock/python-roborock/commit/493ed4b9a1fb8f62918ecc4899b9ce716801b4be)) - Code mappings ([`115dad2`](https://github.com/Python-roborock/python-roborock/commit/115dad22c0280edf1853de43ae86ff1169707f5b)) - Roborockdeviceinfo ([`1ced9e9`](https://github.com/Python-roborock/python-roborock/commit/1ced9e95a6d2effb359008c2c5ef340db3243d6e)) - Using dataclass for containers ([`ad25a44`](https://github.com/Python-roborock/python-roborock/commit/ad25a443fb697f90b10a9c42c93bccbf4204c383)) ## v0.5.1 (2023-04-05) ## v0.5.0 (2023-04-05) ### Bug Fixes - Change device info class to dataclass ([`158766f`](https://github.com/Python-roborock/python-roborock/commit/158766fcb70b92aba87e8b7fe2255528fa72f123)) ### Features - Add networking function ([`19746aa`](https://github.com/Python-roborock/python-roborock/commit/19746aa7739da295c4e7c7316596af9f8ff6b0a0)) ## v0.4.16 (2023-04-05) ### Bug Fixes - Mapping prefix for all known commands ([`ad3afc0`](https://github.com/Python-roborock/python-roborock/commit/ad3afc04dfec31a20a4a2635b4c6b52cf236ce17)) ## v0.4.15 (2023-04-04) ### Bug Fixes - Test_get_washing_mode ([`17e72c3`](https://github.com/Python-roborock/python-roborock/commit/17e72c34c6ac133025450eab68f4be7025ab138b)) - **local_api**: Receiving multiple messages ([`e3c419c`](https://github.com/Python-roborock/python-roborock/commit/e3c419c98f64bc3adada4cc78ce4de366b5267cb)) ## v0.4.14 (2023-04-03) ### Bug Fixes - Adding is_valid function to RoborockBase ([`7575aee`](https://github.com/Python-roborock/python-roborock/commit/7575aeea3b1ca4cfe4a1fb0cb3cea29e964f52b7)) ## v0.4.13 (2023-04-03) ### Bug Fixes - Adding broken pipe exception log ([`7e73eb2`](https://github.com/Python-roborock/python-roborock/commit/7e73eb2ac7b93f6d0d7331515cf9db5da2c92dc5)) ## v0.4.12 (2023-04-03) ### Bug Fixes - Add containers for dock information ([`77dc414`](https://github.com/Python-roborock/python-roborock/commit/77dc4146b16906807d8a5fbc5025c4a8344c62f0)) ### Chores - Add changelog ([`cc3f378`](https://github.com/Python-roborock/python-roborock/commit/cc3f378d9427c95a66ecdd5c1277a7415e322850)) - Pypi cleanup ([`1878e8e`](https://github.com/Python-roborock/python-roborock/commit/1878e8e42692a2f56679fbdd667da29dfcf759e3)) ## v0.4.11 (2023-04-01) ### Bug Fixes - Changing RoborockDeviceInfo to serializable ([`6dd8ff8`](https://github.com/Python-roborock/python-roborock/commit/6dd8ff8e622d5021e20caf19d36812e34e6c435f)) ## v0.4.10 (2023-04-01) ### Bug Fixes - Using entire object for roborock device info ([`599d461`](https://github.com/Python-roborock/python-roborock/commit/599d461af69c7d6b220973c5d905decc5657ce0f)) ## v0.4.9 (2023-04-01) ### Bug Fixes - Cloud_api.py ([`39fd964`](https://github.com/Python-roborock/python-roborock/commit/39fd964a9ccd0a33310747d6f7d764db1b7c3c23)) ## v0.4.8 (2023-04-01) ### Bug Fixes - Refactor roborock device info ([`291a6b2`](https://github.com/Python-roborock/python-roborock/commit/291a6b295943d6635116e79f7f56c97a553a7c62)) ## v0.4.7 (2023-04-01) ### Bug Fixes - Local_api should receive ip for each device ([`b2f2f15`](https://github.com/Python-roborock/python-roborock/commit/b2f2f1566a27505ebf456aef360b76d001a1351c)) ## 
v0.4.6 (2023-04-01) ### Bug Fixes - Adding local_api disconnection ([`a010304`](https://github.com/Python-roborock/python-roborock/commit/a01030480353b8d6524c71e463455802082f4066)) - Move add_status_listener from cloud_api to base_api ([`dcad915`](https://github.com/Python-roborock/python-roborock/commit/dcad91545ba18e163ba4ceca887065817b0a4e0c)) ## v0.4.5 (2023-04-01) ### Bug Fixes - Close socket on broken pipe ([`bf8c8d5`](https://github.com/Python-roborock/python-roborock/commit/bf8c8d52b390b27b442a3b7dd046f8ece483bc2e)) ### Chores - Fix cloud_api.py ([`b954c9c`](https://github.com/Python-roborock/python-roborock/commit/b954c9c22977b8239b034e346292a23afe5acbfb)) ## v0.4.4 (2023-04-01) ### Bug Fixes - Removing local_api.py nonworking commands from api.py ([`12bf756`](https://github.com/Python-roborock/python-roborock/commit/12bf756d8d5193bd4cfd9b59d85f11ec3ad4f6e0)) ### Chores - Add new commands ([`e0869cf`](https://github.com/Python-roborock/python-roborock/commit/e0869cf83e87d4c35986acdddf25f650acbd92ee)) - Removing local_api.py nonworking commands from api.py ([`70c04a3`](https://github.com/Python-roborock/python-roborock/commit/70c04a32878cb98c1e009860f2b6d8ede83a6e47)) ## v0.4.3 (2023-04-01) ### Bug Fixes - Minor fixes ([`29bdb45`](https://github.com/Python-roborock/python-roborock/commit/29bdb4542e1c32b956ea8b739f9a610b92e27259)) ## v0.4.2 (2023-04-01) ### Bug Fixes - Refactoring api ([`aa66e1d`](https://github.com/Python-roborock/python-roborock/commit/aa66e1d31ed635690104f9b30b62421e8a2ba663)) ## v0.4.1 (2023-03-31) ### Bug Fixes - Code cleaning ([`d6e3b34`](https://github.com/Python-roborock/python-roborock/commit/d6e3b34bfa5e1803b5e5e494711e56b7d909f1ea)) ## v0.4.0 (2023-03-31) ### Features - Splitting clients into local and cloud ([`8019313`](https://github.com/Python-roborock/python-roborock/commit/8019313ccb50233610b74d2626ae87e79f55204e)) ## v0.3.1 (2023-03-30) ### Bug Fixes - Minor fixes to offline integration ([`1b4926e`](https://github.com/Python-roborock/python-roborock/commit/1b4926e1d79401f21bee68e4676235426e253191)) ## v0.3.0 (2023-03-30) ### Features - Adding offline.py for others to test local api ([`22680bf`](https://github.com/Python-roborock/python-roborock/commit/22680bfd7929d77b12c27c270478c3253d0cfada)) ## v0.2.3 (2023-03-29) ### Bug Fixes - Bug with dock commands ([`2f2cfb6`](https://github.com/Python-roborock/python-roborock/commit/2f2cfb6b702b6a6f9500e3b272761962ed15ed09)) ## v0.2.2 (2023-03-28) ### Bug Fixes - Change semantic_release from tag_only to tag ([`cad8973`](https://github.com/Python-roborock/python-roborock/commit/cad897381515530ba221b2f92a75ebb3fde876bd)) ## v0.2.1 (2023-03-28) ### Bug Fixes - Repository variable for python-semantic-release ([`b9e21a3`](https://github.com/Python-roborock/python-roborock/commit/b9e21a3d2f5db0a426b96031e154a2a001bc3242)) ## v0.2.0 (2023-03-28) ### Bug Fixes - Add version source ([`c46e503`](https://github.com/Python-roborock/python-roborock/commit/c46e503b91159468e7cf4afb9549c720c1d3dee0)) - Change github token from user defined secret to default secret ([`5886535`](https://github.com/Python-roborock/python-roborock/commit/58865350d583ffa1c4e00a2c22c12b8cf60d3c5f)) - Change to timeout from wait_for ([`eaa4dee`](https://github.com/Python-roborock/python-roborock/commit/eaa4dee1dca696a5817205cd4387b92ce93df0bf)) wait_for creates a task, async_timeout does the same work and avoids the task creation - Removed unneeded line 
([`f2b4c89`](https://github.com/Python-roborock/python-roborock/commit/f2b4c89500ac169e9dc021de6e250474f6f75b15)) - Rename github_token to gh_token ([`012cd9d`](https://github.com/Python-roborock/python-roborock/commit/012cd9d0ec065d78063472dc66e60e9545547e24)) - Version source from pyproject.toml ([`20d3c59`](https://github.com/Python-roborock/python-roborock/commit/20d3c59bab6fee2093b892cdc062f929a2b83304)) ### Chores - Add typing to user_data property ([`16f1d5d`](https://github.com/Python-roborock/python-roborock/commit/16f1d5dc10123987ee480bc4696a9a80a5bbe376)) - Added some typing ([`3a72b58`](https://github.com/Python-roborock/python-roborock/commit/3a72b58273d80f0a5d8d8da473e2b0e16aeea722)) - Added typing for containers ([`be20ae1`](https://github.com/Python-roborock/python-roborock/commit/be20ae1fb8c3055b54de083b542cee86874ba9f7)) - Bump pycryptodome to 3.17 ([`1931073`](https://github.com/Python-roborock/python-roborock/commit/193107361f81706e2a67b9558b9e0ad56607166b)) - Bump version ([`33ab4d1`](https://github.com/Python-roborock/python-roborock/commit/33ab4d1523aa21dc692685cd109f878888ee4d78)) - Fix tests with new code mapping ([`4dac8f5`](https://github.com/Python-roborock/python-roborock/commit/4dac8f5ced0dbe0c948a8e8ca335d05f39b27634)) - Moved code mappings to api ([`81bf2e2`](https://github.com/Python-roborock/python-roborock/commit/81bf2e24342dd0b5c1fee3d0c32c38cf4791f7d0)) ### Features - Add dock error mapping ([`4694c66`](https://github.com/Python-roborock/python-roborock/commit/4694c661edaa09a2f637a4ad2191a3b587613ffb)) - Added semantic release ([`2bb2279`](https://github.com/Python-roborock/python-roborock/commit/2bb2279187609a7a7cf4c1a854ede54e8a671860)) - Adding more options to commands ([`9b20345`](https://github.com/Python-roborock/python-roborock/commit/9b203456c3bd5e075e2945be24e1aa65620af12f)) Python-roborock-python-roborock-32df4f3/LICENSE000066400000000000000000001045151507503702500214020ustar00rootroot00000000000000 GNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007 Copyright (C) 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The GNU General Public License is a free, copyleft license for software and other kinds of works. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. 
For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. "This License" refers to version 3 of the GNU General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. 
An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. 
You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. 
A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. 
For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. 
Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. 
Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. 
If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. "Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. 
The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. 
It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see . Also add information on how to contact you by electronic and paper mail. If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: Copyright (C) This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an "about box". You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see . The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read . Python-roborock-python-roborock-32df4f3/README.md000066400000000000000000000055451507503702500216570ustar00rootroot00000000000000# Roborock


Roborock library for online and offline control of your vacuums.

## Installation

Install this via pip (or your favourite package manager): `pip install python-roborock`

## Functionality

You can see all of the supported commands [here](https://python-roborock.readthedocs.io/en/latest/api_commands.html)

## Sending Commands

Here is an example that requires no manual intervention and runs completely automatically. You can skip some steps by caching values, or by looking them up once and entering them manually.

```python
import asyncio

from roborock import HomeDataProduct, DeviceData, RoborockCommand
from roborock.version_1_apis import RoborockMqttClientV1, RoborockLocalClientV1
from roborock.web_api import RoborockApiClient

async def main():
    web_api = RoborockApiClient(username="youremailhere")
    # Login via your password
    user_data = await web_api.pass_login(password="pass_here")
    # Or login via a code
    await web_api.request_code()
    code = input("What is the code?")
    user_data = await web_api.code_login(code)

    # Get home data
    home_data = await web_api.get_home_data_v2(user_data)

    # Get the device you want
    device = home_data.devices[0]

    # Get product ids:
    product_info: dict[str, HomeDataProduct] = {
        product.id: product for product in home_data.products
    }

    # Create the Mqtt (aka cloud required) client
    device_data = DeviceData(device, product_info[device.product_id].model)
    mqtt_client = RoborockMqttClientV1(user_data, device_data)
    networking = await mqtt_client.get_networking()

    # Create the local client using the IP discovered over the cloud connection
    local_device_data = DeviceData(device, product_info[device.product_id].model, networking.ip)
    local_client = RoborockLocalClientV1(local_device_data)

    # You can use send_command to send any command to the device
    status = await local_client.send_command(RoborockCommand.GET_STATUS)
    # Or use existing functions that will give you data classes
    status = await local_client.get_status()

asyncio.run(main())
```

## Supported devices

You can find what devices are supported [here](https://python-roborock.readthedocs.io/en/latest/supported_devices.html). Please note this list may not immediately contain the latest devices.
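If you want to check which model identifiers your account exposes (to compare them against that list), one option is to print them from the home data. This is a minimal sketch that reuses the `RoborockApiClient` flow from the example above; the `device.name` attribute and the output formatting are illustrative assumptions, not taken from official documentation.

```python
import asyncio

from roborock.web_api import RoborockApiClient


async def list_models():
    # Assumes the same login flow as the example above
    web_api = RoborockApiClient(username="youremailhere")
    user_data = await web_api.pass_login(password="pass_here")
    home_data = await web_api.get_home_data_v2(user_data)

    # Map product ids to products, exactly as in the example above
    products = {product.id: product for product in home_data.products}
    for device in home_data.devices:
        # Prints e.g. "My vacuum roborock.vacuum.a15"; the model string is what
        # the supported-devices page lists
        print(device.name, products[device.product_id].model)


asyncio.run(list_models())
```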
## Credits Thanks @rovo89 for https://gist.github.com/rovo89/dff47ed19fca0dfdda77503e66c2b7c7 And thanks @PiotrMachowski for https://github.com/PiotrMachowski/Home-Assistant-custom-components-Xiaomi-Cloud-Map-Extractor Python-roborock-python-roborock-32df4f3/SUPPORTED_FEATURES.md000066400000000000000000000150011507503702500235710ustar00rootroot00000000000000| Feature | roborock.vacuum.a15 | roborock.vacuum.a87 | |---|---|---| | Product Nickname | TANOSS | PEARLPLUS | | Protocol Version | 1.0 | 1.0 | | New Feature Info | 636084721975295 | 4499197267967999 | | New Feature Info Str | 0000000000002000 | 508A977F7EFEFFFF | | `111` | X | X | | `112` | X | X | | `113` | X | X | | `114` | X | X | | `115` | X | X | | `116` | X | X | | `117` | X | X | | `118` | X | X | | `119` | X | X | | `120` | X | X | | `121` | | X | | `122` | X | X | | `123` | X | X | | `124` | X | X | | `125` | X | X | | `is_activate_video_charging_and_standby_supported` | | | | `is_analysis_supported` | X | X | | `is_any_state_transit_goto_supported` | X | X | | `is_auto_collection_2_supported` | | | | `is_auto_delivery_field_in_global_status_supported` | | X | | `is_auto_tear_down_mop_supported` | | | | `is_avoid_collision_mode_supported` | | X | | `is_avoid_collision_supported` | | X | | `is_back_charge_auto_wash_supported` | | X | | `is_careful_slow_mop_supported` | X | X | | `is_carpet_custom_clean_supported` | | X | | `is_carpet_deep_clean_supported` | | X | | `is_carpet_long_haired_supported` | | | | `is_carpet_pressure_use_origin_paras_supported` | | | | `is_carpet_show_on_map` | | X | | `is_carpet_supported` | X | X | | `is_ces2022_supported` | | | | `is_clean_count_setting_supported` | | X | | `is_clean_direct_status_supported` | | | | `is_clean_history_time_line_supported` | | | | `is_clean_route_deep_slow_plus_supported` | | | | `is_clean_route_fast_mode_supported` | | X | | `is_clean_route_setting_supported` | | | | `is_collect_dust_mode_supported` | X | X | | `is_corner_clean_mode_supported` | | | | `is_corner_mop_stretch_supported` | | X | | `is_current_map_restore_enabled` | X | X | | `is_custom_clean_mode_count_supported` | | X | | `is_custom_mode_supported` | X | X | | `is_custom_water_box_distance_supported` | | X | | `is_dirty_object_detect_supported` | | | | `is_dirty_replenish_clean_supported` | | X | | `is_dry_interval_timer_supported` | | | | `is_dss_believable` | | X | | `is_dust_collection_setting_supported` | X | X | | `is_dynamically_add_clean_zones_supported` | | X | | `is_dynamically_skip_clean_zone_supported` | | X | | `is_egg_dance_mode_supported` | | | | `is_egg_mode_supported_from_new_features` | | | | `is_exact_custom_mode_supported` | | X | | `is_exhibition_function_supported` | | | | `is_floor_dir_clean_any_time_supported` | | X | | `is_flow_led_setting_supported` | X | | | `is_fw_filter_obstacle_supported` | X | X | | `is_gap_deep_clean_supported` | | | | `is_goto_pure_clean_path_supported` | | X | | `is_hot_wash_towel_supported` | | X | | `is_identify_room_supported` | | | | `is_ignore_unknown_map_object_supported` | X | X | | `is_lds_lifting_supported` | | | | `is_led_status_switch_supported` | X | X | | `is_left_water_drain_supported` | | X | | `is_main_brush_up_down_supported_from_str` | | X | | `is_map_beautify_internal_debug_supported` | X | X | | `is_map_carpet_add_support` | | X | | `is_map_eraser_supported` | | | | `is_matter_supported` | | | | `is_max_plus_mode_supported` | | | | `is_max_zone_opened_supported` | | | | `is_midway_back_to_dock_supported` | | | | 
`is_min_battery_15_to_clean_task_supported` | | X | | `is_mop_forbidden_supported` | | | | `is_mop_path_supported` | X | X | | `is_mop_shake_water_max_supported` | | | | `is_multi_floor_supported` | X | X | | `is_multi_map_segment_timer_supported` | X | X | | `is_new_ai_recognition_supported` | | | | `is_new_data_for_clean_history` | X | X | | `is_new_data_for_clean_history_detail` | X | X | | `is_new_endpoint_supported` | | X | | `is_new_remote_view_supported` | | | | `is_no_need_carpet_press_set_supported` | | | | `is_none_pure_clean_mop_with_max_plus` | | | | `is_object_detect_check_supported` | | | | `is_offline_map_supported` | | X | | `is_optimize_battery_supported` | | | | `is_order_clean_supported` | X | X | | `is_pet_snapshot_supported` | | | | `is_pet_supplies_deep_clean_supported` | | | | `is_pumping_water_supported` | | | | `is_pure_clean_mop_supported` | | | | `is_re_segment_supported` | X | X | | `is_record_allowed` | X | X | | `is_remote_supported` | X | X | | `is_right_brush_stretch_supported` | | | | `is_room_name_supported` | X | X | | `is_rpc_retry_supported` | | X | | `is_rubber_brush_carpet_supported` | | | | `is_set_child_supported` | X | X | | `is_setting_carpet_first_supported` | | X | | `is_shake_mop_set_supported` | X | X | | `is_show_clean_finish_reason_supported` | X | X | | `is_show_general_obstacle_supported` | | | | `is_show_obstacle_photo_supported` | | | | `is_small_side_mop_supported` | | | | `is_smart_clean_mode_set_supported` | | X | | `is_soft_clean_mode_supported` | | | | `is_super_deep_wash_supported` | | X | | `is_support_backup_map` | X | X | | `is_support_clean_estimate` | | X | | `is_support_cliff_zone` | | X | | `is_support_custom_carpet` | | | | `is_support_custom_dnd` | | X | | `is_support_custom_door_sill` | | X | | `is_support_custom_mode_in_cleaning` | | X | | `is_support_fetch_timer_summary` | X | X | | `is_support_floor_direction` | | X | | `is_support_floor_edit` | | X | | `is_support_furniture` | | X | | `is_support_incremental_map` | | X | | `is_support_main_brush_up_down_supported` | | | | `is_support_mop_back_pwm_set` | | | | `is_support_quick_map_builder` | X | X | | `is_support_remote_control_in_call` | | X | | `is_support_room_tag` | | X | | `is_support_set_switch_map_mode` | | X | | `is_support_set_volume_in_call` | | X | | `is_support_side_brush_up_down_supported` | | | | `is_support_smart_door_sill` | | X | | `is_support_smart_global_clean_with_custom_mode` | | X | | `is_support_smart_scene` | | X | | `is_support_stuck_zone` | | X | | `is_support_voice_control_debug` | | | | `is_support_water_mode` | | | | `is_supported_download_test_voice` | | X | | `is_supported_drying` | | X | | `is_supported_valley_electricity` | | X | | `is_two_key_real_time_video_supported` | | X | | `is_two_key_rtv_in_charging_supported` | | X | | `is_unsave_map_reason_supported` | X | X | | `is_uvc_sterilize_supported` | | | | `is_video_monitor_supported` | X | X | | `is_video_patrol_supported` | | | | `is_video_setting_supported` | X | X | | `is_voice_control_led_supported` | | | | `is_voice_control_supported` | | X | | `is_wash_then_charge_cmd_supported` | | X | | `is_water_leak_check_supported` | | X | | `is_water_up_down_drain_supported` | | X | | `is_wifi_manage_supported` | | X | | `is_workday_holiday_supported` | | | Python-roborock-python-roborock-32df4f3/commitlint.config.mjs000066400000000000000000000004411507503702500245240ustar00rootroot00000000000000export default { extends: ["@commitlint/config-conventional"], ignores: [(msg) => 
/Signed-off-by: dependabot\[bot]/m.test(msg)], rules: { // Disable the rule that enforces lowercase in subject "subject-case": [0], // 0 = disable, 1 = warn, 2 = error }, }; Python-roborock-python-roborock-32df4f3/device_info.yaml000066400000000000000000000010741507503702500235270ustar00rootroot00000000000000roborock.vacuum.a15: Protocol Version: '1.0' Product Nickname: TANOSS New Feature Info: 636084721975295 New Feature Info Str: '0000000000002000' Feature Info: - 111 - 112 - 113 - 114 - 115 - 116 - 117 - 118 - 119 - 120 - 122 - 123 - 124 - 125 roborock.vacuum.a87: Protocol Version: '1.0' Product Nickname: PEARLPLUS New Feature Info: 4499197267967999 New Feature Info Str: 508A977F7EFEFFFF Feature Info: - 111 - 112 - 113 - 114 - 115 - 116 - 117 - 118 - 119 - 120 - 121 - 122 - 123 - 124 - 125 Python-roborock-python-roborock-32df4f3/docs/000077500000000000000000000000001507503702500213175ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/docs/Makefile000066400000000000000000000011761507503702500227640ustar00rootroot00000000000000# Minimal makefile for Sphinx documentation # # You can set these variables from the command line, and also # from the environment for the first two. SPHINXOPTS ?= SPHINXBUILD ?= sphinx-build SOURCEDIR = source BUILDDIR = build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) Python-roborock-python-roborock-32df4f3/docs/make.bat000066400000000000000000000013741507503702500227310ustar00rootroot00000000000000@ECHO OFF pushd %~dp0 REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set SOURCEDIR=source set BUILDDIR=build if "%1" == "" goto help %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% goto end :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% :end popd Python-roborock-python-roborock-32df4f3/docs/requirements.txt000066400000000000000000000000311507503702500245750ustar00rootroot00000000000000sphinx sphinx_rtd_theme Python-roborock-python-roborock-32df4f3/docs/source/000077500000000000000000000000001507503702500226175ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/docs/source/_templates/000077500000000000000000000000001507503702500247545ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/docs/source/_templates/footer.html000066400000000000000000000004051507503702500271370ustar00rootroot00000000000000{% extends "!footer.html" %} {%- block contentinfo %} {{ super() }}

We are looking for contributors to help with our documentation, if you are interested please contribute here. {% endblock %} Python-roborock-python-roborock-32df4f3/docs/source/api_commands.rst000066400000000000000000001041111507503702500260010ustar00rootroot00000000000000Api commands ============ This page is still under construction. All of the following are the commands we have reverse engineered. It is not an exhaustive list of all the possible commands. Commands do not immediately make it to this page. You can find more commands [here](https://github.com/humbertogontijo/python-roborock/blob/main/roborock/roborock_typing.py#L18) Commands can have multiple parameters that can change from one model to another. * :ref:`app_charge` * :ref:`app_get_dryer_setting` * :ref:`app_get_init_status` * :ref:`app_pause` * :ref:`app_rc_end` * :ref:`app_rc_move` * :ref:`app_rc_start` * :ref:`app_rc_stop` * :ref:`app_segment_clean` * :ref:`app_set_dryer_setting` * :ref:`app_start_collect_dust` * :ref:`app_start_wash` * :ref:`app_start` * :ref:`app_stop_collect_dust` * :ref:`app_stop_wash` * :ref:`app_stop` * :ref:`change_sound_volume` * :ref:`close_dnd_timer` * :ref:`del_server_timer` * :ref:`dnld_install_sound` * :ref:`get_clean_sequence` * :ref:`get_consumable` * :ref:`get_custom_mode` * :ref:`get_customize_clean_mode` * :ref:`get_dnd_timer` * :ref:`get_dust_collection_mode` * :ref:`get_clean_follow_ground_material_status` * :ref:`get_identify_furniture_status` * :ref:`get_identify_ground_material_status` * :ref:`get_led_status` * :ref:`get_map_v1` * :ref:`get_multi_map` * :ref:`get_multi_maps_list` * :ref:`get_network_info` * :ref:`get_prop` * :ref:`get_room_mapping` * :ref:`get_scenes_valid_tids` * :ref:`get_serial_number` * :ref:`get_smart_wash_params` * :ref:`get_sound_progress` * :ref:`get_status` * :ref:`get_timezone` * :ref:`get_turn_server` * :ref:`get_valley_electricity_timer` * :ref:`get_wash_towel_mode` * :ref:`load_multi_map` * :ref:`name_segment` * :ref:`reset_consumable` * :ref:`resume_segment_clean` * :ref:`resume_zoned_clean` * :ref:`retry_request` * :ref:`reunion_scenes` * :ref:`save_map` * :ref:`send_ice_to_robot` * :ref:`send_sdp_to_robot` * :ref:`set_server_timer` * :ref:`set_clean_motor_mode` * :ref:`set_customize_clean_mode` * :ref:`set_dnd_timer` * :ref:`set_dust_collection_mode` * :ref:`set_fds_endpoint` * :ref:`set_identify_furniture_status` * :ref:`set_identify_ground_material_status` * :ref:`set_led_status` * :ref:`set_mop_mode` * :ref:`set_scenes_segments` * :ref:`set_scenes_zones` * :ref:`set_segment_ground_material` * :ref:`set_smart_wash_params` * :ref:`set_timezone` * :ref:`set_valley_electricity_timer` * :ref:`set_wash_towel_mode` * :ref:`set_water_box_custom_mode` * :ref:`start_camera_preview` * :ref:`start_edit_map` * :ref:`start_voice_chat` * :ref:`start_wash_then_charge` * :ref:`stop_camera_preview` * :ref:`stop_segment_clean` * :ref:`test_sound_volume` * :ref:`upd_server_timer` Robot status ------------ get_status ~~~~~~~~~~ Description: Returns the current status of the vacuum Parameters: None Returns: msg_ver: msg_seq: state: battery: Battery level of your device. clean_time: Total clean time in hours. clean_area: Total clean area in meters. 
error_code: map_reset: in_cleaning: in_returning: in_fresh_state: lab_status: water_box_status: back_type: wash_phase: wash_ready: fan_power: dnd_enabled: map_status: is_locating: lock_status: water_box_mode: water_box_carriage_status: mop_forbidden_enable: camera_status: is_exploring: home_sec_status: home_sec_enable_password: adbumper_status: water_shortage_status: dock_type: dust_collection_status: auto_dust_collection: avoid_count: mop_mode: debug_mode: collision_avoid_status: switch_map_mode: dock_error_status: charge_status: unsave_map_reason: unsave_map_flag: ====================== ========= Vacuum Model Supported ====================== ========= Roborock S7 MaxV Ultra Yes Roborock S8 Pro Ultra Yes ====================== ========= App vacuum control ------------------ app_start ~~~~~~~~~ Description: Parameters: app_pause ~~~~~~~~~ Description: This pauses the vacuum's current task Parameters: None Returns ok or error ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= app_stop ~~~~~~~~ Description: Parameters: app_start_collect_dust ~~~~~~~~~~~~~~~~~~~~~~ Description: This empties the bin while docked Parameters: None ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= app_stop_collect_dust ~~~~~~~~~~~~~~~~~~~~~~ Description: This stops the emptying of the dust bin while docked Parameters: None ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= app_start_wash ~~~~~~~~~~~~~~ Description: This washes the mop while docked Parameters: None ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= app_stop_wash ~~~~~~~~~~~~~ Description: This stops washing the mop while docked Parameters: None ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= app_goto_target ~~~~~~~~~~~~~~~ Description: Go to target Parameters: - X coordinate as integer (e.g.: 23450) - Y coordinate as integer (e.g.: 16450) Returns ok or error ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= app_charge ~~~~~~~~~~ Description: This tells your vacuum to go back to the dock and charge. Parameters: None Returns: ok or error ====================== ========= Vacuum Model Supported ====================== ========= Roborock S7 MaxV Ultra Yes Roborock S8 Pro Ultra Yes ====================== ========= App status ---------- app_get_init_status ~~~~~~~~~~~~~~~~~~~ Description: Returns details on the app being used to interact with the Roborock servers; in this case the app appears to be the backend supporting the Home Assistant integration.
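As with the other methods on this page, this command can be issued from Python through the V1 clients shown in the project README. The snippet below is only a rough sketch: it assumes an already-connected mqtt_client (a RoborockMqttClientV1 set up as in the README) and that APP_GET_INIT_STATUS is present on the RoborockCommand enum in roborock_typing.py; if it is not, passing the raw method string to send_command may work instead::

    from roborock import RoborockCommand

    # Hypothetical usage; mqtt_client is set up as in the README example
    init_status = await mqtt_client.send_command(RoborockCommand.APP_GET_INIT_STATUS)
    print(init_status)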
Parameters: None Returns: local_info: name: Name of the app bom: Version of the app location: Location of the app language: Language of the app wifiplan: Wifi plan of the app timezone: Timezone of the app logserver: Log server of the app featureset: Featureset of the app feature_info: List of features new_feature_info: New feature info Return example:: {'local_info': {'name': 'custom_A.03.0342_CE', 'bom': 'A.03.0342', 'location': 'de', 'language': 'en', 'wifiplan': '', 'timezone': 'Europe/Berlin', 'logserver': 'awsde0.fds.api.xiaomi.com', 'featureset': 3}, 'feature_info': [111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125], 'new_feature_info': 2247395306799103, 'new_feature_info_str': '00000008009EFFFE'} ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= App dryer settings ------------------ app_get_dryer_setting ~~~~~~~~~~~~~~~~~~~~~ Description: Get dock dryer settings. Parameters: None Returns: status: on: cliff_on: cliff_off count: dry_time: Duration dryer remains on in seconds. off: cliff_on: cliff_off: count: Return example:: {'status': 1, 'on': {'cliff_on': 1, 'cliff_off': 1, 'count': 10, 'dry_time': 7200}, 'off': {'cliff_on': 2, 'cliff_off': 1, 'count': 10}} Source: Roborock S7 MaxV Ultra ====================== ========= Vacuum Model Supported ====================== ========= Roborock S7 MaxV Ultra Yes Roborock S8 Pro Ultra Yes ====================== ========= app_set_dryer_setting ~~~~~~~~~~~~~~~~~~~~~ Description: Set the time for the dryer to run Parameters: '{"status":1,"on":{"dry_time":14400}}' dry_time is the time in seconds the dryer will run for Returns ok or error ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= App remote control ------------------ app_rc_start ~~~~~~~~~~~~ Description: Starts remote control. Parameters: None Returns ok or error ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= app_rc_move ~~~~~~~~~~~ Description: Moves the robot in the direction specified Parameters: To be documented Returns ok or error .. Need to document the parameters - will need to explore the app to find out what they are app_rc_stop ~~~~~~~~~~~ Description: Stops the remote control Parameters: None Returns ok or error .. Assume stop stops a move ?? Need to check app_rc_end ~~~~~~~~~~ Description: Ends the remote control task Parameters: Returns ok or error ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= App other --------- app_set_smart_cliff_forbidden ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Description: Parameters: app_spot ~~~~~~~~ Description: Parameters: app_stat ~~~~~~~~ Description: This returns the current status of the vacuum Parameters: None Returns: ok or error ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= app_wakeup_robot ~~~~~~~~~~~~~~~~ Description: Parameters: app_zoned_clean ~~~~~~~~~~~~~~~ Description: Starts a zone clean Parameters: .. 
Is this the last known zone? Returns: ok or error ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= Segments and Zones ------------------ app_segment_clean ~~~~~~~~~~~~~~~~~ Description: This starts a segment clean and repeats it the number of times specified. Parameters: An array describing the segments to clean. Each entry contains the segment ids and the number of times to clean them. For example, to clean segment 18 twice, the parameter would be [{'segments': [18], 'repeat': 2}] .. Comment: The segment id can be obtained from the initial data returned on login Command: roborock -d command --device_id deviceIdRedacted --cmd app_segment_clean --params '[{"segments": [17,19], "repeat": 2}]' Returns ok or error ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= set_segment_ground_material ~~~~~~~~~~~~~~~~~~~~~~~~~~~ Description: Sets the ground material for the segment Parameters: "{'data':[[22,3,0]]}" Returns ok or error name_segment ~~~~~~~~~~~~ Description: Parameters: To be determined .. Need to work out parameter format Does this allow us to name a segment? resume_segment_clean ~~~~~~~~~~~~~~~~~~~~ Description: Parameters: ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= stop_segment_clean ~~~~~~~~~~~~~~~~~~ Description: Parameters: ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= set_scenes_zones ~~~~~~~~~~~~~~~~ Description: Parameters: set_scenes_segments ~~~~~~~~~~~~~~~~~~~ Description: Parameters: get_scenes_valid_tids ~~~~~~~~~~~~~~~~~~~~~ Description: To be confirmed Parameters: None .. Appears to be associated with rooms ?? Returns:: [{'tid': '1699679077347', 'map_flag': 0, 'segs': [{'sid': 24}, {'sid': 20}, {'sid': 22}, {'sid': 18}]}, {'tid': '1699679236553', 'map_flag': 0, 'segs': [{'sid': 24}, {'sid': 20}, {'sid': 22}]}, {'tid': '1699679386045', 'map_flag': 0, 'segs': [{'sid': 16}, {'sid': 19}, {'sid': 17}]}, {'tid': '1699679335823', 'map_flag': 0, 'segs': [{'sid': 19}, {'sid': 16}, {'sid': 17}]}] ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= resume_zoned_clean ~~~~~~~~~~~~~~~~~~ Description: Parameters: reunion_scenes ~~~~~~~~~~~~~~ Description: Parameters: Camera ------ start_camera_preview ~~~~~~~~~~~~~~~~~~~~ Description: Parameters: ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra No ====================== ========= stop_camera_preview ~~~~~~~~~~~~~~~~~~~ Description: Parameters: ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra No ====================== ========= get_camera_status ~~~~~~~~~~~~~~~~~ Description: Get camera status.
Parameters: None Returns: 3457  387 Roborock S8 Pro Ultra Source: Roborock S7 MaxV Ultra set_camera_status ~~~~~~~~~~~~~~~~~ Description: Parameters: start_voice_chat ~~~~~~~~~~~~~~~~ Description: Parameters: ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra No ====================== ========= Clean modes ----------------- get_carpet_clean_mode ~~~~~~~~~~~~~~~~~~~~~ Description: Get carpet clean mode. Parameters: Returns: carpet_clean_mode: Enumeration for carpet clean mode. Return example:: {'carpet_clean_mode': 3} Source: Roborock S7 MaxV Ultra set_carpet_clean_mode ~~~~~~~~~~~~~~~~~~~~~~~~~~ Description: Parameters: get_carpet_mode ~~~~~~~~~~~~~~~ Description: Parameters: None Returns: enable: current_integral: current_high: current_low: stall_time: Return example:: {'enable': 1, 'current_integral': 450, 'current_high': 500, 'current_low': 400, 'stall_time': 10} ====================== ========= Vacuum Model Supported ====================== ========= Roborock S7 MaxV Ultra Yes Roborock S8 Pro Ultra Yes ====================== ========= set_carpet_mode ~~~~~~~~~~~~~~~~~~~~ Description: Parameters: get_smart_wash_params ~~~~~~~~~~~~~~~~~~~~~ Description: Returns the smartwash parameters Parameters: None .. Not clear what this does Returns: smart_wash: 0 is off, 1 is on wash_interval: The interval in seconds between washes Example:: {'smart_wash': 0, 'wash_interval': 1200} ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= set_smart_wash_params ~~~~~~~~~~~~~~~~~~~~~ Description: Sets the smartwash parameters Parameters: smart_wash: 0 is off, 1 is on wash_interval: The interval in seconds between washes {'smart_wash': 0, 'wash_interval': 1200} ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= Cleaning history ---------------- get_clean_record ~~~~~~~~~~~~~~~~ Description: Parameters: To be determined get_clean_record_map ~~~~~~~~~~~~~~~~~~~~ Description: Parameters: get_clean_sequence ~~~~~~~~~~~~~~~~~~ Description: Parameters: get_clean_summary ~~~~~~~~~~~~~~~~~ Description: Get a summary of cleaning history. Parameters: None Returns: clean_time: clean_area: clean_count: dust_collection_count: records: Return example:: {'clean_time': 568146, 'clean_area': 8816865000, 'clean_count': 178, 'dust_collection_count': 172, 'records': [1689740211, 1689555788, 1689259450, 1688999113, 1688852350, 1688693213, 1688692357, 1688614354, 1688613280, 1688606676, 1688325265, 1688174717, 1688149381, 1688092832, 1688001593, 1687921414, 1687890618, 1687743256, 1687655018, 1687631444]} Source: Roborock S7 MaxV Ultra ====================== ========= Vacuum Model Supported ====================== ========= Roborock S7 MaxV Ultra Yes Roborock S8 Pro Ultra Yes ====================== ========= get_mop_template_params_summary ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Description: Parameters: ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra No ====================== ========= Child lock ---------- get_child_lock_status ~~~~~~~~~~~~~~~~~~~~~~~~~~ Description: This gets the child lock status of the device. 0 is off, 1 is on. 
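A rough usage sketch (not from the official docs): it assumes the local_client from the README example and that GET_CHILD_LOCK_STATUS and SET_CHILD_LOCK_STATUS exist on the RoborockCommand enum; the parameter shape for the set call simply mirrors the CLI example in the set_child_lock_status entry below::

    from roborock import RoborockCommand

    # Read the current state, e.g. {'lock_status': 0}
    lock = await local_client.send_command(RoborockCommand.GET_CHILD_LOCK_STATUS)
    # Turn the child lock on, mirroring the documented parameter format (an assumption)
    await local_client.send_command(RoborockCommand.SET_CHILD_LOCK_STATUS, {"lock_status": 1})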
Parameters: None Returns: lock_status: Return example:: {'lock_status': 0} ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= set_child_lock_status ~~~~~~~~~~~~~~~~~~~~~~~~~~ Description: This sets the child lock status of the device. Parameters: '{"lock_status" :0}' Returns: ok ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= Consumables ----------- get_consumable ~~~~~~~~~~~~~~ Description: This gets the status of all of the consumables for your device. Parameters: None Returns: main_brush_work_time: This is the amount of time the main brush has been used in seconds since it was last replaced side_brush_work_time: This is the amount of time the side brush has been used in seconds since it was last replaced filter_work_time: This is the amount of time the air filter inside the vacuum has been used in seconds since it was last replaced filter_element_work_time: sensor_dirty_time: This is the amount of time since you have cleaned the sensors on the bottom of your vacuum. strainer_work_times: dust_collection_work_times: cleaning_brush_work_times: Return examples:: {'main_brush_work_time': 14151, 'side_brush_work_time': 41638, 'filter_work_time': 14151, 'filter_element_work_time': 0, 'sensor_dirty_time': 41522, 'strainer_work_times': 44, 'dust_collection_work_times': 19, 'cleaning_brush_work_times': 44} reset_consumable ~~~~~~~~~~~~~~~~ Description: Parameters: List of consumables to reset. For example, to reset consumables 'strainer_work_times' and 'sensor_dirty_time' the parameter would be ['strainer_work_times', 'sensor_dirty_time'] ====================== ========= Vacuum Model Supported ====================== ========= Roborock S7 MaxV Ultra Yes Roborock S8 Pro Ultra Yes ====================== ========= Custom modes ------------ get_custom_mode ~~~~~~~~~~~~~~~~~~~~ Description: It returns the current custom mode. Parameters: None Returns: integer value of the current custom mode Return example:: 102 .. Not clear what a custom mode is - will explore ====================== ========= Vacuum Model Supported ====================== ========= Roborock S7 MaxV Ultra Yes Roborock S8 Pro Ultra Yes ====================== ========= set_custom_mode ~~~~~~~~~~~~~~~~~~~~ Description: Parameters: get_customize_clean_mode ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Description: Parameters: set_customize_clean_mode ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Description: Parameters: Furniture and ground material ----------------------------- get_identify_furniture_status ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Description: Parameters: .. Does not return anything for S8 Pro Ultra when docked; may require the vacuum to be cleaning set_identify_furniture_status ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Description: Parameters: .. Method not known for S8 Pro Ultra get_identify_ground_material_status ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Description: Parameters: .. Does not return anything for S8 Pro Ultra when docked; may require the vacuum to be cleaning set_identify_ground_material_status ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Description: Parameters: .. Method not known for S8 Pro Ultra LEDs ---- get_flow_led_status ~~~~~~~~~~~~~~~~~~~ Description: Parameters: set_flow_led_status ~~~~~~~~~~~~~~~~~~~ Description: Parameters: get_led_status ~~~~~~~~~~~~~~~~~~~ Description: Returns the LED status.
If disabled, the indicator light will turn off 1 minute after the device is fully charged Parameters: Returns: led_status: 0 is off, 1 is on ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= set_led_status ~~~~~~~~~~~~~~ Description: Sets the LED status. If disabled, the indicator light will turn off 1 minute after the device is fully charged Parameters: ???? .. Need to work out parameter format Maps ---- get_multi_map ~~~~~~~~~~~~~ Description: Parameters: Comment: Response timed out for S8 Pro Ultra .. times out after 4 secs get_multi_maps_list ~~~~~~~~~~~~~~~~~~~ Description: Returns a list of map information stored on the device. Parameters: None required Returns: max_multi_map: max_bak_map: multi_map_count: map_info:: mapFlag: add_time: length: name: bak_maps:: mapFlag: add_time: Return example:: {'max_multi_map': 4, 'max_bak_map': 1, 'multi_map_count': 2, 'map_info': [{'mapFlag': 0, 'add_time': 1699919699, 'length': 4, 'name': 'Home', 'bak_maps': [{'mapFlag': 4, 'add_time': 1699823921}]}, {'mapFlag': 1, 'add_time': 1699828035, 'length': 13, 'name': 'Boys bathroom', 'bak_maps': [{'mapFlag': 5, 'add_time': 1699828035}]}]} Source: S8 Pro Ultra ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= get_map_v1 ~~~~~~~~~~ Description: Returns the map Parameters: Unknown Comment: Returns a map in a format that is not yet understood by me .. Explore what parameters it may take Extend code to return byte stream ? start_edit_map ~~~~~~~~~~~~~~ Description: Parameters: get_room_mapping ~~~~~~~~~~~~~~~~ Description: Returns a list of room id mappings as discovered by the vacuum Parameters: None Returns: room_id Return example:: [[16, '14731399', 12], [17, '2220009', 2], [18, '2219688', 12], [19, '2219685', 9], [20, '2219691', 12], [21, '2431758', 12], [22, '2219677', 13], [23, '2312548', 12], [24, '2219678', 14], [25, '2219686', 15], [26, '2219772', 12], [27, '14768755', 12]] ====================== ========= Vacuum Model Supported ====================== ========= Roborock S7 MaxV Ultra Yes Roborock S8 Pro Ultra Yes ====================== ========= load_multi_map ~~~~~~~~~~~~~~ Description: Parameters: number (the floor/map index) .. Need to work out parameter format save_map ~~~~~~~~ Description: Parameters: Operating modes --------------- get_mop_mode ~~~~~~~~~~~~ Description: Get mop mode. Parameters: None Returns: Enumeration for mop mode. 300 Example for S8 Pro Ultra:: standard = 300 deep = 301 deep_plus = 303 fast = 304 custom = 302 ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= set_mop_mode ~~~~~~~~~~~~ Description: Set mop mode.
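A rough sketch of setting the mop mode from Python, using the mop mode values listed under get_mop_mode above. It assumes the local_client from the README example and that SET_MOP_MODE exists on the RoborockCommand enum; wrapping the value in a list follows the general pattern of the CLI examples on this page and is an assumption, not confirmed documentation::

    from roborock import RoborockCommand

    # 301 corresponds to the "deep" mop route on the S8 Pro Ultra (see get_mop_mode above)
    await local_client.send_command(RoborockCommand.SET_MOP_MODE, [301])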
Parameters: mop_mode 300 ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= set_clean_motor_mode ~~~~~~~~~~~~~~~~~~~~ Description: Parameters: get_dust_collection_mode ~~~~~~~~~~~~~~~~~~~~~~~~ Description: Parameters: None Returns: mode: Return example:: {'mode': 0} Source: Roborock S7 MaxV Ultra ====================== ========= Vacuum Model Supported ====================== ========= Roborock S7 MaxV Ultra Yes Roborock S8 Pro Ultra Yes ====================== ========= set_dust_collection_mode ~~~~~~~~~~~~~~~~~~~~~~~~ Description: Parameters: get_wash_towel_mode ~~~~~~~~~~~~~~~~~~~~~~~~ Description: Parameters: None Returns: wash_mode: Return example:: {'wash_mode': 1} Source: Roborock S7 MaxV Ultra unknown = -9999 light = 0 balanced = 1 deep = 2 ====================== ========= Vacuum Model Supported ====================== ========= Roborock S7 MaxV Ultra Yes Roborock S8 Pro Ultra Yes ====================== ========= set_wash_towel_mode ~~~~~~~~~~~~~~~~~~~~~~~~ Description: Sets the wash_towel_mode Parameters: {'wash_mode': 2} Returns: ok or error Source: S8 Pro Ultra ====================== ========= Vacuum Model Supported ====================== ========= Roborock S7 MaxV Ultra Yes Roborock S8 Pro Ultra Yes ====================== ========= get_collision_avoid_status ~~~~~~~~~~~~~~~~~~~~~~~~~~ Description: Parameters: None Returns: status: Return example:: {'status': 1} ====================== ========= Vacuum Model Supported ====================== ========= Roborock S7 MaxV Ultra Yes Roborock S8 Pro Ultra Yes ====================== ========= set_collision_avoid_status ~~~~~~~~~~~~~~~~~~~~~~~~~~ Description: Update collision avoid status. Parameters: '{"status" :1}' Returns: ok ====================== ========= Vacuum Model Supported ====================== ========= Roborock S7 MaxV Ultra Yes Roborock S8 Pro Ultra Yes ====================== ========= start_wash_then_charge ~~~~~~~~~~~~~~~~~~~~~~ Description: Parameters: .. While this returns ok on the S8 Pro Ultra it does not appear to do anything switch_water_mark ~~~~~~~~~~~~~~~~~ Description: Parameters: ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra No ====================== ========= .. Not found for S8 Pro Ultra System information ------------------ get_network_info ~~~~~~~~~~~~~~~~ Description: Get the device's network information. Parameters: None Returns: ssid: SSID of the wireless network the device is connected to. ip: IP address of the device. mac: MAC address of the device. bssid: BSSID of the device. rssi: RSSI of the device. Return example:: {'ssid': 'My WiFi Network', 'ip': '192.168.1.29', 'mac': 'a0:2b:47:3d:24:51', 'bssid': '18:3b:1a:23:41:3c', 'rssi': -32} Source: Roborock S7 MaxV Ultra ====================== ========= Vacuum Model Supported ====================== ========= Roborock S7 MaxV Ultra Yes Roborock S8 Pro Ultra Yes ====================== ========= get_serial_number ~~~~~~~~~~~~~~~~~ Description: Get serial number of the vacuum. Parameters: None Returns:: serial_number: Serial number of the vacuum.
Return example:: {'serial_number': 'B16EVD12345678'} Source: Roborock S7 MaxV Ultra ====================== ========= Vacuum Model Supported ====================== ========= Roborock S7 MaxV Ultra Yes Roborock S8 Pro Ultra Yes ====================== ========= get_prop ~~~~~~~~ Description: Generic get property command Parameters: The property to get Example:: roborock -d command --device_id aHiddenDeviceId --cmd get_prop --params '["battery"]' Comment: This example returns the same as get_status. Initial testing has shown that not all get commands are supported by this method. get_turn_server ~~~~~~~~~~~~~~~ Description: Parameters: .. Not found for S8 Pro Ultra ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra No ====================== ========= enable_log_upload ~~~~~~~~~~~~~~~~~ Description: Parameters: find_me ~~~~~~~ Description: This makes your vacuum speak so you can find it. Parameters: None upd_server_timer ~~~~~~~~~~~~~~~~ Description: Parameters: get_homesec_connect_status ~~~~~~~~~~~~~~~~~~~~~~~~~~ Description: Parameters: ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra No ====================== ========= set_fds_endpoint ~~~~~~~~~~~~~~~~ Description: Parameters: send_ice_to_robot ~~~~~~~~~~~~~~~~~ Description: Parameters: send_sdp_to_robot ~~~~~~~~~~~~~~~~~ Description: Parameters: get_device_ice ~~~~~~~~~~~~~~ .. This does not appear to be supported on S8 Pro Ultra Description: Parameters: ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra No ====================== ========= get_device_sdp ~~~~~~~~~~~~~~ Description: Parameters: ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra No ====================== ========= retry_request ~~~~~~~~~~~~~ Description: Parameters: Timers ------ del_server_timer ~~~~~~~~~~~~~~~~ Description: Parameters: dnd_timer ~~~~~~~~~ get_dnd_timer ~~~~~~~~~~~~~ Description: Gets the do not disturb timer. Returns: start_hour: The hour you want dnd to start start_minute: The minute you want dnd to start end_hour: The hour you want dnd to be turned off end_minute: The minute you want dnd to be turned off enabled: If the switch is currently turned on in the app for DnD Parameters: None set_dnd_timer ~~~~~~~~~~~~~ Description: Parameters: close_dnd_timer ~~~~~~~~~~~~~~~ Description: This disables the dnd timer Parameters: None get_server_timer ~~~~~~~~~~~~~~~~ Description: Parameters: set_server_timer ~~~~~~~~~~~~~~~~ Description: Parameters: get_timezone ~~~~~~~~~~~~~~~~~ Description: Get the device's time zone.
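Example (a hedged sketch, following the same CLI form as the get_prop example above)::

    roborock -d command --device_id aHiddenDeviceId --cmd get_timezone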
Parameters: None Returns: Time zone by the TZ identifier (e.g., America/Los_Angeles) ====================== ========= Vacuum Model Supported ====================== ========= Roborock S7 MaxV Ultra Yes Roborock S8 Pro Ultra Yes ====================== ========= set_timezone ~~~~~~~~~~~~~~~~~ Description: Sets the device's time zone Parameters: Sound ------------ get_sound_volume ~~~~~~~~~~~~~~~~ Description: Returns the volume of the sound played by the vacuum Parameters: None Returns: volume: The volume of the sound played by the vacuum Example:: 72 ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= change_sound_volume ~~~~~~~~~~~~~~~~~~~ Description: Sets the volume of the sound played by the vacuum Parameters: volume Returns: ok or error Example:: roborock -d command --device_id aHiddenDeviceId --cmd change_sound_volume --params 72 ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= test_sound_volume ~~~~~~~~~~~~~~~~~ Description: Plays a sound on the vacuum to identify the volume level Parameters: None ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= get_sound_progress ~~~~~~~~~~~~~~~~~~ Description: Parameters: Returns:: {'sid_in_progress': 0, 'progress': 0, 'state': 0, 'error': 0} .. Is this where the vacuum is currently located ? get_current_sound ~~~~~~~~~~~~~~~~~ .. Is this an app setting ? Description: Parameters: Return example:: {'sid_in_use': 122, 'sid_version': 1, 'sid_in_progress': 0, 'location': 'de', 'bom': 'A.03.0342', 'language': 'en', 'msg_ver': 2} ====================== ========= Vacuum Model Supported ====================== ========= Roborock S7 MaxV Ultra Yes Roborock S8 Pro Ultra Yes ====================== ========= dnld_install_sound ~~~~~~~~~~~~~~~~~~ Description: Parameters: Off peak charging ----------------- get_valley_electricity_timer ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Description: Get valley electricity timer. Parameters: None Returns: start_hour: The hour you want valley electricity to start start_minute: The minute you want valley electricity to start end_hour: The hour you want valley electricity to be turned off end_minute: The minute you want valley electricity to be turned off enabled: If the switch is currently turned on in the app for valley electricity Return example:: {'start_hour': 0, 'start_minute': 0, 'end_hour': 0, 'end_minute': 0, 'enabled': 0} ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= set_valley_electricity_timer ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Description: Sets the valley electricity timer Parameters: start_hour: The hour you want valley electricity to start start_minute: The minute you want valley electricity to start end_hour: The hour you want valley electricity to be turned off end_minute: The minute you want valley electricity to be turned off enabled: If the switch is currently turned on in the app for valley electricity Example:: {'start_hour': 0, 'start_minute': 0, 'end_hour': 0, 'end_minute': 0, 'enabled': 0} .. This does not appear to have any effect on the S8 Pro Ultra - Params accepted however no effect ?? ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra ???
====================== ========= Water box mode -------------- get_water_box_custom_mode ~~~~~~~~~~~~~~~~~~~~~~~~~ Description: Get water box mode. Parameters: None Returns: Enumeration for water box mode. 203 .. Not clear what this does - require Enumeration get_clean_follow_ground_material_status ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Description: Parameters: None ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= set_water_box_custom_mode ~~~~~~~~~~~~~~~~~~~~~~~~~ Description: Set the water box mode. Parameters: {'water_box_mode': 203} Returns: ok or error .. Not clear what this does - require Enumeration ====================== ========= Vacuum Model Supported ====================== ========= Roborock S8 Pro Ultra Yes ====================== ========= Python-roborock-python-roborock-32df4f3/docs/source/conf.py000066400000000000000000000013751507503702500241240ustar00rootroot00000000000000# Configuration file for the Sphinx documentation builder. # -- Project information project = "Python Roborock" author = "Humberto gontijo & Lash-L" release = "0.1" version = "0.1.0" # -- General configuration extensions = [ "sphinx.ext.duration", "sphinx.ext.doctest", "sphinx.ext.autodoc", "sphinx.ext.autosummary", "sphinx.ext.intersphinx", "sphinx.ext.autosectionlabel", "sphinx_rtd_theme", ] intersphinx_mapping = { "python": ("https://docs.python.org/3/", None), "sphinx": ("https://www.sphinx-doc.org/en/master/", None), } intersphinx_disabled_domains = ["std"] templates_path = ["_templates"] # -- Options for HTML output html_theme = "sphinx_rtd_theme" # -- Options for EPUB output epub_show_urls = "footnote" Python-roborock-python-roborock-32df4f3/docs/source/error.rst000066400000000000000000000021031507503702500244760ustar00rootroot00000000000000Error ===== Dock Errors ----------- These are the potential errors your dock can have and their corresponding number: ok = 0 duct_blockage = 34 water_empty = 38 waste_water_tank_full = 39 dirty_tank_latch_open = 44 no_dustbin = 46 cleaning_tank_full_or_blocked = 53 Vacuum Errors ------------- These are the potential errors your vacuum can have and their corresponding code lidar_blocked = 1 bumper_stuck = 2 wheels_suspended = 3 cliff_sensor_error = 4 main_brush_jammed = 5 side_brush_jammed = 6 wheels_jammed = 7 robot_trapped = 8 no_dustbin = 9 low_battery = 12 charging_error = 13 battery_error = 14 wall_sensor_dirty = 15 robot_tilted = 16 side_brush_error = 17 fan_error = 18 vertical_bumper_pressed = 21 dock_locator_error = 22 return_to_dock_fail = 23 nogo_zone_detected = 24 vibrarise_jammed = 27 robot_on_carpet = 28 filter_blocked = 29 invisible_wall_detected = 30 cannot_cross_carpet = 31 internal_error = 32 Python-roborock-python-roborock-32df4f3/docs/source/index.rst000066400000000000000000000010141507503702500244540ustar00rootroot00000000000000Welcome to Roborock's documentation! ==================================== **Roborock** is a Python library for controlling your Roborock vacuum .. note:: This project is under active development. You can get a Home Assistant integration for Roborock in core `here `__ or as a custom integration `here `__ Contents -------- .. toctree:: usage status error api_commands supported_devices Python-roborock-python-roborock-32df4f3/docs/source/status.rst000066400000000000000000000030601507503702500246730ustar00rootroot00000000000000Status ====== Status is a core piece of information for our system. 
It is used to get a wide variety of data about the vacuum and is broadcast. msg_ver: msg_seq: state: battery: The battery percentage of the vacuum clean_time: How long (total) this vacuum has cleaned for clean_area: How much area (total) this vacuum has cleaned in micrometers error_code: The error code of the vacuum map_present: in_cleaning: If the vacuum is currently cleaning in_returning: If the vacuum is currently returning to the dock. in_fresh_state: lab_status: water_box_status: back_type: wash_phase: wash_ready: fan_power: The strength of the fan suction. Listed as an integer that corresponds to an enum value. dnd_enabled: 0 or 1 that states if there is a dnd time enabled (does not mean that dnd is on now) map_status: is_locating: lock_status: water_box_mode: water_box_carriage_status: mop_forbidden_enable: camera_status: is_exploring: home_sec_status: home_sec_enable_password: adbumper_status: water_shortage_status: dock_type: dust_collection_status: auto_dust_collection: avoid_count: mop_mode: debug_mode: collision_avoid_status: switch_map_mode: dock_error_status: charge_status: unsave_map_reason: unsave_map_flag: wash_status: distance_off: in_warmup: dry_status: rdt: clean_percent: rss: dss: common_status: corner_clean_mode: Python-roborock-python-roborock-32df4f3/docs/source/supported_devices.rst000066400000000000000000000043261507503702500271050ustar00rootroot00000000000000Supported Devices ================== Note: These links are tracking links with Amazon or Roborock. This allows us to get some analytics and helps us get 'negotiation' power with Roborock. We would like to be able to open a channel of communication with Roborock, and getting information like this is a great first step. Note: links are only included for the newer devices; older devices are no longer sold directly by Roborock, so to buy them you have to find them used. .. list-table:: Robot Vacuums :widths: 30 20 20 :header-rows: 1 * - Vacuum Model - Amazon - Roborock * - Roborock S4 - - * - Roborock S4 Max - - * - Roborock S5 Max - - * - Roborock S6 - - * - Roborock S6 Pure - - * - Roborock S6 Max - - * - Roborock S6 MaxV - - * - Roborock S7 - - * - Roborock S7 MaxV - - * - Roborock S7 Max Ultra - `Link `__ - `Link `__ * - Roborock S8 - `Link `__ - `Link `__ * - Roborock S8 Pro Ultra - `Link `__ - `Link `__ * - Roborock Q5 - `Link `__ - `Link `__ * - Roborock Q5 Pro - `Link `__ - `Link `__ * - Roborock Q7 - `Link `__ - `Link `__ * - Roborock Q7 Max - `Link `__ - `Link `__ * - Roborock Q8 Max - `Link `__ - `Link `__ * - Roborock Q Revo - `Link `__ - `Link `__ Roborock has recently added two other categories of devices: handheld vacuums and washing machines. Neither is supported at this time. There are plans to support the handheld ones, but they use a newer version of the API that I am still trying to reverse engineer. Python-roborock-python-roborock-32df4f3/docs/source/usage.rst000077500000000000000000000012751507503702500244650ustar00rootroot00000000000000Usage ===== Installation ------------ To use Python-Roborock, first install it using pip: .. code-block:: console (.venv) $ pip install python-roborock Login ----- .. code-block:: console (.venv) $ roborock login --email username --password password List devices ------------ This will list all devices associated with the account: .. code-block:: console (.venv) $ roborock list-devices Known devices MyRobot: 7kI9d66UoPXd6sd9gfd75W The deviceId 7kI9d66UoPXd6sd9gfd75W can be used to run commands on the device. Run a command ------------- To run a command: ..
code-block:: console (.venv) $ roborock -d command --device_id 7kI9d66UoPXd6sd9gfd75W --cmd get_status Python-roborock-python-roborock-32df4f3/mypy.ini000066400000000000000000000001211507503702500220600ustar00rootroot00000000000000[mypy] check_untyped_defs = True [mypy-construct] ignore_missing_imports = True Python-roborock-python-roborock-32df4f3/pyproject.toml000066400000000000000000000044761507503702500233160ustar00rootroot00000000000000[project] name = "python-roborock" version = "2.59.0" description = "A package to control Roborock vacuums." authors = [{ name = "humbertogontijo", email = "humbertogontijo@users.noreply.github.com" }, {name="Lash-L"}, {name="allenporter"}] requires-python = ">=3.11, <4" readme = "README.md" license = "GPL-3.0-only" keywords = [ "roborock", "vacuum", "homeassistant", ] classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Natural Language :: English", "Operating System :: OS Independent", "Topic :: Software Development :: Libraries", ] dependencies = [ "click>=8", "aiohttp>=3.8.2,<4", "pycryptodome~=3.18", "pycryptodomex~=3.18 ; sys_platform == 'darwin'", "paho-mqtt>=1.6.1,<3.0.0", "construct>=2.10.57,<3", "vacuum-map-parser-roborock", "pyrate-limiter>=3.7.0,<4", "aiomqtt>=2.3.2,<3", "click-shell~=2.1", ] [project.urls] Repository = "https://github.com/humbertogontijo/python-roborock" Documentation = "https://python-roborock.readthedocs.io/" [project.scripts] roborock = "roborock.cli:main" [dependency-groups] dev = [ "pytest-asyncio>=1.1.0", "pytest", "pre-commit>=3.5,<5.0", "mypy", "ruff==0.14.0", "codespell", "pyshark>=0.6,<0.7", "aioresponses>=0.7.7,<0.8", "freezegun>=1.5.1,<2", "pytest-timeout>=2.3.1,<3", "syrupy>=4.9.1,<5", "pdoc>=15.0.4,<16", ] [tool.hatch.build.targets.sdist] include = ["roborock"] [tool.hatch.build.targets.wheel] include = ["roborock"] [build-system] requires = ["hatchling"] build-backend = "hatchling.build" [tool.semantic_release] branch = "main" version_toml = ["pyproject.toml:tool.poetry.version"] build_command = "pip install poetry && poetry build" [tool.semantic_release.commit_parser_options] allowed_tags = [ "chore", "docs", "feat", "fix", "refactor" ] major_tags= ["refactor"] [tool.ruff] lint.ignore = ["F403", "E741"] lint.select=["E", "F", "UP", "I"] line-length = 120 [tool.ruff.lint.per-file-ignores] "*/__init__.py" = ["F401"] [tool.pytest.ini_options] asyncio_mode = "auto" asyncio_default_fixture_loop_scope = "function" timeout = 30 log_format = "%(asctime)s.%(msecs)03d %(levelname)s (%(threadName)s) [%(name)s] %(message)s" [tool.uv] dev-dependencies = [ "pyyaml>=6.0.3", "pyshark>=0.6", ] Python-roborock-python-roborock-32df4f3/roborock/000077500000000000000000000000001507503702500222075ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/roborock/__init__.py000066400000000000000000000013441507503702500243220ustar00rootroot00000000000000"""Roborock API. .. include:: ../README.md """ from roborock.b01_containers import * from roborock.code_mappings import * from roborock.containers import * from roborock.exceptions import * from roborock.roborock_typing import * from . import ( b01_containers, clean_modes, cloud_api, code_mappings, const, containers, exceptions, roborock_typing, version_1_apis, version_a01_apis, web_api, ) __all__ = [ "web_api", "version_1_apis", "version_a01_apis", "containers", "b01_containers", "const", "cloud_api", "clean_modes", "code_mappings", "roborock_typing", "exceptions", # Add new APIs here in the future when they are public e.g. 
devices/ ] Python-roborock-python-roborock-32df4f3/roborock/api.py000066400000000000000000000065141507503702500233400ustar00rootroot00000000000000"""The Roborock api.""" from __future__ import annotations import asyncio import logging import time from abc import ABC, abstractmethod from typing import Any from .containers import ( DeviceData, ) from .exceptions import ( RoborockTimeout, UnknownMethodError, ) from .roborock_future import RoborockFuture from .roborock_message import ( RoborockMessage, ) from .util import get_next_int _LOGGER = logging.getLogger(__name__) KEEPALIVE = 70 class RoborockClient(ABC): """Roborock client base class.""" _logger: logging.LoggerAdapter queue_timeout: int def __init__(self, device_info: DeviceData) -> None: """Initialize RoborockClient.""" self.device_info = device_info self._waiting_queue: dict[int, RoborockFuture] = {} self._last_device_msg_in = time.monotonic() self._last_disconnection = time.monotonic() self.keep_alive = KEEPALIVE self._diagnostic_data: dict[str, dict[str, Any]] = {} self.is_available: bool = True async def async_release(self) -> None: await self.async_disconnect() @property def diagnostic_data(self) -> dict: return self._diagnostic_data @abstractmethod async def async_connect(self): """Connect to the Roborock device.""" @abstractmethod async def async_disconnect(self) -> Any: """Disconnect from the Roborock device.""" @abstractmethod def is_connected(self) -> bool: """Return True if the client is connected to the device.""" @abstractmethod def on_message_received(self, messages: list[RoborockMessage]) -> None: """Handle received incoming messages from the device.""" def on_connection_lost(self, exc: Exception | None) -> None: self._last_disconnection = time.monotonic() self._logger.info("Roborock client disconnected") if exc is not None: self._logger.warning(exc) def should_keepalive(self) -> bool: now = time.monotonic() # noinspection PyUnresolvedReferences if now - self._last_disconnection > self.keep_alive**2 and now - self._last_device_msg_in > self.keep_alive: return False return True async def _wait_response(self, request_id: int, queue: RoborockFuture) -> Any: try: response = await queue.async_get(self.queue_timeout) if response == "unknown_method": raise UnknownMethodError("Unknown method") return response except (TimeoutError, asyncio.CancelledError): raise RoborockTimeout(f"id={request_id} Timeout after {self.queue_timeout} seconds") from None finally: self._waiting_queue.pop(request_id, None) def _async_response(self, request_id: int, protocol_id: int = 0) -> Any: queue = RoborockFuture(protocol_id) if request_id in self._waiting_queue: new_id = get_next_int(10000, 32767) self._logger.warning( "Attempting to create a future with an existing id %s (%s)... New id is %s. 
" "Code may not function properly.", request_id, protocol_id, new_id, ) request_id = new_id self._waiting_queue[request_id] = queue return asyncio.ensure_future(self._wait_response(request_id, queue)) Python-roborock-python-roborock-32df4f3/roborock/b01_containers.py000066400000000000000000000312471507503702500253770ustar00rootroot00000000000000from dataclasses import dataclass, field from .code_mappings import RoborockModeEnum from .containers import RoborockBase class WorkStatusMapping(RoborockModeEnum): """Maps the general status of the robot.""" SLEEPING = ("sleeping", 0) WAITING_FOR_ORDERS = ("waiting_for_orders", 1) PAUSED = ("paused", 2) DOCKING = ("docking", 3) CHARGING = ("charging", 4) SWEEP_MOPING = ("sweep_moping", 5) SWEEP_MOPING_2 = ("sweep_moping_2", 6) MOPING = ("moping", 7) UPDATING = ("updating", 8) MOP_CLEANING = ("mop_cleaning", 9) MOP_AIRDRYING = ("mop_airdrying", 10) class SCWindMapping(RoborockModeEnum): """Maps suction power levels.""" SILENCE = ("quiet", 0) STANDARD = ("balanced", 1) STRONG = ("turbo", 2) SUPER_STRONG = ("max", 3) MAX = ("max_plus", 4) class WaterLevelMapping(RoborockModeEnum): """Maps water flow levels.""" LOW = ("low", 0) MEDIUM = ("medium", 1) HIGH = ("high", 2) class CleanTypeMapping(RoborockModeEnum): """Maps the type of cleaning (Vacuum, Mop, or both).""" VACUUM = ("vacuum", 0) VAC_AND_MOP = ("vac_and_mop", 1) MOP = ("mop", 2) class CleanRepeatMapping(RoborockModeEnum): """Maps the cleaning repeat parameter.""" ONCE = ("once", 0) TWICE = ("twice", 1) class WorkModeMapping(RoborockModeEnum): """Maps the detailed work modes of the robot.""" IDLE = ("idle", 0) AUTO = ("auto", 1) MANUAL = ("manual", 2) AREA = ("area", 3) AUTO_PAUSE = ("auto_pause", 4) BACK_CHARGE = ("back_charge", 5) POINT = ("point", 6) NAVI = ("navi", 7) AREA_PAUSE = ("area_pause", 8) NAVI_PAUSE = ("navi_pause", 9) GLOBAL_GO_HOME = ("global_go_home", 10) GLOBAL_BROKEN = ("global_broken", 11) NAVI_GO_HOME = ("navi_go_home", 12) POINT_GO_HOME = ("point_go_home", 13) NAVI_IDLE = ("navi_idle", 14) SCREW = ("screw", 20) SCREW_GO_HOME = ("screw_go_home", 21) POINT_IDLE = ("point_idle", 22) SCREW_IDLE = ("screw_idle", 23) BORDER = ("border", 25) BORDER_GO_HOME = ("border_go_home", 26) BORDER_PAUSE = ("border_pause", 27) BORDER_BROKEN = ("border_broken", 28) BORDER_IDLE = ("border_idle", 29) PLAN_AREA = ("plan_area", 30) PLAN_AREA_PAUSE = ("plan_area_pause", 31) PLAN_AREA_GO_HOME = ("plan_area_go_home", 32) PLAN_AREA_BROKEN = ("plan_area_broken", 33) PLAN_AREA_IDLE = ("plan_area_idle", 35) MOPPING = ("mopping", 36) MOPPING_PAUSE = ("mopping_pause", 37) MOPPING_GO_HOME = ("mopping_go_home", 38) MOPPING_BROKEN = ("mopping_broken", 39) MOPPING_IDLE = ("mopping_idle", 40) EXPLORING = ("exploring", 45) EXPLORE_PAUSE = ("explore_pause", 46) EXPLORE_GO_HOME = ("explore_go_home", 47) EXPLORE_BROKEN = ("explore_broken", 48) EXPLORE_IDLE = ("explore_idle", 49) class StationActionMapping(RoborockModeEnum): """Maps actions for the cleaning/drying station.""" STOP_CLEAN_OR_AIRDRY = ("stop_clean_or_airdry", 0) MOP_CLEAN = ("mop_clean", 1) MOP_AIRDRY = ("mop_airdry", 2) class CleanTaskTypeMapping(RoborockModeEnum): """Maps the high-level type of cleaning task selected.""" ALL = ("full", 0) ROOM = ("room", 1) AREA = ("zones", 4) ROOM_NORMAL = ("room_normal", 5) CUSTOM_MODE = ("customize", 6) ALL_CUSTOM = ("all_custom", 11) AREA_CUSTOM = ("area_custom", 99) class CarpetModeMapping(RoborockModeEnum): """Maps carpet handling parameters.""" FOLLOW_GLOBAL = ("follow_global", 0) ON = ("on", 1) OFF = ("off", 
2) @dataclass class NetStatus(RoborockBase): """Represents the network status of the device.""" rssi: str loss: int ping: int ip: str mac: str ssid: str frequency: int bssid: str @dataclass class OrderTotal(RoborockBase): """Represents the order total information.""" total: int enable: int @dataclass class Privacy(RoborockBase): """Represents the privacy settings of the device.""" ai_recognize: int dirt_recognize: int pet_recognize: int carpet_turbo: int carpet_avoid: int carpet_show: int map_uploads: int ai_agent: int ai_avoidance: int record_uploads: int along_floor: int auto_upgrade: int @dataclass class PvCharging(RoborockBase): """Represents the photovoltaic charging status.""" status: int begin_time: int end_time: int @dataclass class Recommend(RoborockBase): """Represents cleaning recommendations.""" sill: int wall: int room_id: list[int] = field(default_factory=list) class B01Fault(RoborockModeEnum): """B01 fault codes and their descriptions.""" F_0 = ("fault_0", 0) F_407 = ("cleaning_in_progress", 407) # Cleaning in progress. Scheduled cleanup ignored. F_500 = ( "lidar_blocked", 500, ) # LiDAR turret or laser blocked. Check for obstruction and retry. LiDAR sensor obstructed or stuck. # Remove foreign objects if any. If the problem persists, move the robot away and restart. F_501 = ( "robot_suspended", 501, ) # Robot suspended. Move the robot away and restart. Cliff sensors dirty. Wipe them clean. F_502 = ( "low_battery", 502, ) # Low battery. Recharge now. Battery low. Put the robot on the dock to charge it to 20% before starting. F_503 = ( "dustbin_not_installed", 503, ) # Check that the dustbin and filter are installed properly. Reinstall the dustbin and filter in place. # If the problem persists, replace the filter. F_504 = ("fault_504", 504) F_505 = ("fault_505", 505) F_506 = ("fault_506", 506) F_507 = ("fault_507", 507) F_508 = ("fault_508", 508) F_509 = ("cliff_sensor_error", 509) # Cliff sensors error. Clean them, move the robot away from drops, and restart. F_510 = ( "bumper_stuck", 510, ) # Bumper stuck. Clean it and lightly tap to release it. Tap it repeatedly to release it. If no foreign object # exists, move the robot away and restart. F_511 = ( "docking_error", 511, ) # Docking error. Put the robot on the dock. Clear obstacles around the dock, clean charging contacts, and put # the robot on the dock. F_512 = ( "docking_error", 512, ) # Docking error. Put the robot on the dock. Clear obstacles around the dock, clean charging contacts, and put # the robot on the dock. F_513 = ( "robot_trapped", 513, ) # Robot trapped. Move the robot away and restart. Clear obstacles around robot or move robot away and restart. F_514 = ( "robot_trapped", 514, ) # Robot trapped. Move the robot away and restart. Clear obstacles around robot or move robot away and restart. F_515 = ("fault_515", 515) F_517 = ("fault_517", 517) F_518 = ( "low_battery", 518, ) # Low battery. Recharge now. Battery low. Put the robot on the dock to charge it to 20% before starting. F_519 = ("fault_519", 519) F_520 = ("fault_520", 520) F_521 = ("fault_521", 521) F_522 = ("mop_not_installed", 522) # Check that the mop is properly installed. Mop not installed. Reinstall it. F_523 = ("fault_523", 523) F_525 = ("fault_525", 525) F_526 = ("fault_526", 526) F_527 = ("fault_527", 527) F_528 = ("fault_528", 528) F_529 = ("fault_529", 529) F_530 = ("fault_530", 530) F_531 = ("fault_531", 531) F_532 = ("fault_532", 532) F_533 = ("long_sleep", 533) # About to shut down after a long time of sleep. Charge the robot. 
F_534 = ( "low_battery_shutdown", 534, ) # Low battery. Turning off. About to shut down due to low battery. Charge the robot. F_535 = ("fault_535", 535) F_536 = ("fault_536", 536) F_540 = ("fault_540", 540) F_541 = ("fault_541", 541) F_542 = ("fault_542", 542) F_550 = ("fault_550", 550) F_551 = ("fault_551", 551) F_559 = ("fault_559", 559) F_560 = ("side_brush_entangled", 560) # Side brush entangled. Remove and clean it. F_561 = ("fault_561", 561) F_562 = ("fault_562", 562) F_563 = ("fault_563", 563) F_564 = ("fault_564", 564) F_565 = ("fault_565", 565) F_566 = ("fault_566", 566) F_567 = ("fault_567", 567) F_568 = ("main_wheels_entangled", 568) # Clean main wheels, move the robot away and restart. F_569 = ("main_wheels_entangled", 569) # Clean main wheels, move the robot away and restart. F_570 = ("main_brush_entangled", 570) # Main brush entangled. Remove and clean it and its bearing. F_571 = ("fault_571", 571) F_572 = ("main_brush_entangled", 572) # Main brush entangled. Remove and clean it and its bearing. F_573 = ("fault_573", 573) F_574 = ("fault_574", 574) F_580 = ("fault_580", 580) F_581 = ("fault_581", 581) F_582 = ("fault_582", 582) F_583 = ("fault_583", 583) F_584 = ("fault_584", 584) F_585 = ("fault_585", 585) F_586 = ("fault_586", 586) F_587 = ("fault_587", 587) F_588 = ("fault_588", 588) F_589 = ("fault_589", 589) F_590 = ("fault_590", 590) F_591 = ("fault_591", 591) F_592 = ("fault_592", 592) F_593 = ("fault_593", 593) F_594 = ( "dust_bag_not_installed", 594, ) # Make sure the dust bag is properly installed. Dust bag not installed. Check that it is installed properly. F_601 = ("fault_601", 601) F_602 = ("fault_602", 602) F_603 = ("fault_603", 603) F_604 = ("fault_604", 604) F_605 = ("fault_605", 605) F_611 = ("positioning_failed", 611) # Positioning failed. Move the robot back to the dock and remap. F_612 = ( "map_changed", 612, ) # Map changed. Positioning failed. Try again. New environment detected. Map changed. Positioning failed. # Try again after remapping. F_629 = ("mop_mount_fell_off", 629) # Mop cloth mount fell off. Reinstall it to resume working. F_668 = ( "system_error", 668, ) # Robot error. Reset the system. Fan error. Reset the system. If the problem persists, contact customer service. F_2000 = ("fault_2000", 2000) F_2003 = ("low_battery_schedule_canceled", 2003) # Battery level below 20%. Scheduled task canceled. F_2007 = ( "cannot_reach_target", 2007, ) # Unable to reach the target. Cleaning ended. Ensure the door to the target area is open or unobstructed. F_2012 = ( "cannot_reach_target", 2012, ) # Unable to reach the target. Cleaning ended. Ensure the door to the target area is open or unobstructed. F_2013 = ("fault_2013", 2013) F_2015 = ("fault_2015", 2015) F_2017 = ("fault_2017", 2017) F_2100 = ( "low_battery_resume_later", 2100, ) # Low battery. Resume cleaning after recharging. Low battery. Starting to recharge. Resume cleaning after # charging. F_2101 = ("fault_2101", 2101) F_2102 = ("cleaning_complete", 2102) # Cleaning completed. Returning to the dock. F_2103 = ("fault_2103", 2103) F_2104 = ("fault_2104", 2104) F_2105 = ("fault_2105", 2105) F_2108 = ("fault_2108", 2108) F_2109 = ("fault_2109", 2109) F_2110 = ("fault_2110", 2110) F_2111 = ("fault_2111", 2111) F_2112 = ("fault_2112", 2112) F_2113 = ("fault_2113", 2113) F_2114 = ("fault_2114", 2114) F_2115 = ("fault_2115", 2115) @dataclass class B01Props(RoborockBase): """ Represents the complete properties and status for a Roborock B01 model. 
This dataclass is generated based on the device's status JSON object. """ status: WorkStatusMapping fault: B01Fault wind: SCWindMapping water: int mode: int quantity: int alarm: int volume: int hypa: int main_brush: int side_brush: int mop_life: int main_sensor: int net_status: NetStatus repeat_state: int tank_state: int sweep_type: int clean_path_preference: int cloth_state: int time_zone: int time_zone_info: str language: int cleaning_time: int real_clean_time: int cleaning_area: int custom_type: int sound: int work_mode: WorkModeMapping station_act: int charge_state: int current_map_id: int map_num: int dust_action: int quiet_is_open: int quiet_begin_time: int quiet_end_time: int clean_finish: int voice_type: int voice_type_version: int order_total: OrderTotal build_map: int privacy: Privacy dust_auto_state: int dust_frequency: int child_lock: int multi_floor: int map_save: int light_mode: int green_laser: int dust_bag_used: int order_save_mode: int manufacturer: str back_to_wash: int charge_station_type: int pv_cut_charge: int pv_charging: PvCharging serial_number: str recommend: Recommend add_sweep_status: int Python-roborock-python-roborock-32df4f3/roborock/broadcast_protocol.py000066400000000000000000000077311507503702500264540ustar00rootroot00000000000000from __future__ import annotations import asyncio import hashlib import json import logging from asyncio import BaseTransport, Lock from construct import ( # type: ignore Bytes, Checksum, GreedyBytes, Int16ub, Int32ub, Prefixed, RawCopy, Struct, ) from Crypto.Cipher import AES from roborock import RoborockException from roborock.containers import BroadcastMessage from roborock.protocol import EncryptionAdapter, Utils, _Parser _LOGGER = logging.getLogger(__name__) BROADCAST_TOKEN = b"qWKYcdQWrbm9hPqe" class RoborockProtocol(asyncio.DatagramProtocol): def __init__(self, timeout: int = 5): self.timeout = timeout self.transport: BaseTransport | None = None self.devices_found: list[BroadcastMessage] = [] self._mutex = Lock() def datagram_received(self, data: bytes, _): """Handle incoming broadcast datagrams.""" try: version = data[:3] if version == b"L01": [parsed_msg], _ = L01Parser.parse(data) encrypted_payload = parsed_msg.payload if encrypted_payload is None: raise RoborockException("No encrypted payload found in broadcast message") ciphertext = encrypted_payload[:-16] tag = encrypted_payload[-16:] key = hashlib.sha256(BROADCAST_TOKEN).digest() iv_digest_input = data[:9] digest = hashlib.sha256(iv_digest_input).digest() iv = digest[:12] cipher = AES.new(key, AES.MODE_GCM, nonce=iv) decrypted_payload_bytes = cipher.decrypt_and_verify(ciphertext, tag) json_payload = json.loads(decrypted_payload_bytes) parsed_message = BroadcastMessage(duid=json_payload["duid"], ip=json_payload["ip"], version=version) _LOGGER.debug(f"Received L01 broadcast: {parsed_message}") self.devices_found.append(parsed_message) else: # Fallback to the original protocol parser for other versions [broadcast_message], _ = BroadcastParser.parse(data) if broadcast_message.payload: json_payload = json.loads(broadcast_message.payload) parsed_message = BroadcastMessage(duid=json_payload["duid"], ip=json_payload["ip"], version=version) _LOGGER.debug(f"Received broadcast: {parsed_message}") self.devices_found.append(parsed_message) except Exception as e: _LOGGER.warning(f"Failed to decode message: {data!r}. 
Error: {e}") async def discover(self) -> list[BroadcastMessage]: async with self._mutex: try: loop = asyncio.get_event_loop() self.transport, _ = await loop.create_datagram_endpoint(lambda: self, local_addr=("0.0.0.0", 58866)) await asyncio.sleep(self.timeout) return self.devices_found finally: self.close() self.devices_found = [] def close(self): self.transport.close() if self.transport else None _BroadcastMessage = Struct( "message" / RawCopy( Struct( "version" / Bytes(3), "seq" / Int32ub, "protocol" / Int16ub, "payload" / EncryptionAdapter(lambda ctx: BROADCAST_TOKEN), ) ), "checksum" / Checksum(Int32ub, Utils.crc, lambda ctx: ctx.message.data), ) _L01BroadcastMessage = Struct( "message" / RawCopy( Struct( "version" / Bytes(3), "field1" / Bytes(4), # Unknown field "field2" / Bytes(2), # Unknown field "payload" / Prefixed(Int16ub, GreedyBytes), # Encrypted payload with length prefix ) ), "checksum" / Checksum(Int32ub, Utils.crc, lambda ctx: ctx.message.data), ) BroadcastParser: _Parser = _Parser(_BroadcastMessage, False) L01Parser: _Parser = _Parser(_L01BroadcastMessage, False) Python-roborock-python-roborock-32df4f3/roborock/callbacks.py000066400000000000000000000101111507503702500244720ustar00rootroot00000000000000"""Module for managing callback utility functions.""" import logging from collections.abc import Callable from typing import Generic, TypeVar _LOGGER = logging.getLogger(__name__) K = TypeVar("K") V = TypeVar("V") def safe_callback(callback: Callable[[V], None], logger: logging.Logger | None = None) -> Callable[[V], None]: """Wrap a callback to catch and log exceptions. This is useful for ensuring that errors in callbacks do not propagate and cause unexpected behavior. Any failures during callback execution will be logged. """ if logger is None: logger = _LOGGER def wrapper(value: V) -> None: try: callback(value) except Exception as ex: # noqa: BLE001 logger.error("Uncaught error in callback '%s': %s", callback.__name__, ex) return wrapper class CallbackMap(Generic[K, V]): """A mapping of callbacks for specific keys. This allows for registering multiple callbacks for different keys and invoking them when a value is received for a specific key. """ def __init__(self, logger: logging.Logger | None = None) -> None: self._callbacks: dict[K, list[Callable[[V], None]]] = {} self._logger = logger or _LOGGER def keys(self) -> list[K]: """Get all keys in the callback map.""" return list(self._callbacks.keys()) def add_callback(self, key: K, callback: Callable[[V], None]) -> Callable[[], None]: """Add a callback for a specific key. Any failures during callback execution will be logged. Returns a callable that can be used to remove the callback. """ self._callbacks.setdefault(key, []).append(callback) def remove_callback() -> None: """Remove the callback for the specific key.""" if cb_list := self._callbacks.get(key): cb_list.remove(callback) if not cb_list: del self._callbacks[key] return remove_callback def get_callbacks(self, key: K) -> list[Callable[[V], None]]: """Get all callbacks for a specific key.""" return self._callbacks.get(key, []) def __call__(self, key: K, value: V) -> None: """Invoke all callbacks for a specific key.""" for callback in self.get_callbacks(key): safe_callback(callback, self._logger)(value) class CallbackList(Generic[V]): """A list of callbacks that can be invoked. This combines a list of callbacks into a single callable. Callers can add additional callbacks to the list at any time. 
""" def __init__(self, logger: logging.Logger | None = None) -> None: self._callbacks: list[Callable[[V], None]] = [] self._logger = logger or _LOGGER def add_callback(self, callback: Callable[[V], None]) -> Callable[[], None]: """Add a callback to the list. Any failures during callback execution will be logged. Returns a callable that can be used to remove the callback. """ self._callbacks.append(callback) return lambda: self._callbacks.remove(callback) def __call__(self, value: V) -> None: """Invoke all callbacks in the list.""" for callback in self._callbacks: safe_callback(callback, self._logger)(value) def decoder_callback( decoder: Callable[[K], list[V]], callback: Callable[[V], None], logger: logging.Logger | None = None ) -> Callable[[K], None]: """Create a callback that decodes messages using a decoder and invokes a callback. The decoder converts a value into a list of values. The callback is then invoked for each value in the list. Any failures during decoding or invoking the callbacks will be logged. """ if logger is None: logger = _LOGGER safe_cb = safe_callback(callback, logger) def wrapper(data: K) -> None: if not (messages := decoder(data)): logger.warning("Failed to decode message: %s", data) return for message in messages: _LOGGER.debug("Decoded message: %s", message) safe_cb(message) return wrapper Python-roborock-python-roborock-32df4f3/roborock/clean_modes.py000066400000000000000000000143471507503702500250430ustar00rootroot00000000000000from __future__ import annotations from .code_mappings import RoborockModeEnum from .device_features import DeviceFeatures class VacuumModes(RoborockModeEnum): GENTLE = ("gentle", 105) OFF = ("off", 105) QUIET = ("quiet", 101) BALANCED = ("balanced", 102) TURBO = ("turbo", 103) MAX = ("max", 104) MAX_PLUS = ("max_plus", 108) CUSTOMIZED = ("custom", 106) SMART_MODE = ("smart_mode", 110) class CleanRoutes(RoborockModeEnum): STANDARD = ("standard", 300) DEEP = ("deep", 301) DEEP_PLUS = ("deep_plus", 303) FAST = ("fast", 304) DEEP_PLUS_CN = ("deep_plus", 305) SMART_MODE = ("smart_mode", 306) CUSTOMIZED = ("custom", 302) class VacuumModesOld(RoborockModeEnum): QUIET = ("quiet", 38) BALANCED = ("balanced", 60) TURBO = ("turbo", 75) MAX = ("max", 100) class WaterModes(RoborockModeEnum): OFF = ("off", 200) LOW = ("low", 201) MILD = ("mild", 201) MEDIUM = ("medium", 202) STANDARD = ("standard", 202) HIGH = ("high", 203) INTENSE = ("intense", 203) CUSTOMIZED = ("custom", 204) CUSTOM = ("custom_water_flow", 207) EXTREME = ("extreme", 208) SMART_MODE = ("smart_mode", 209) PURE_WATER_FLOW_START = ("slight", 221) PURE_WATER_FLOW_SMALL = ("low", 225) PURE_WATER_FLOW_MIDDLE = ("medium", 235) PURE_WATER_FLOW_LARGE = ("moderate", 245) PURE_WATER_SUPER_BEGIN = ("high", 248) PURE_WATER_FLOW_END = ("extreme", 250) class WashTowelModes(RoborockModeEnum): SMART = ("smart", 10) LIGHT = ("light", 0) BALANCED = ("balanced", 1) DEEP = ("deep", 2) SUPER_DEEP = ("super_deep", 8) def get_wash_towel_modes(features: DeviceFeatures) -> list[WashTowelModes]: """Get the valid wash towel modes for the device""" modes = [WashTowelModes.LIGHT, WashTowelModes.BALANCED, WashTowelModes.DEEP] if features.is_super_deep_wash_supported and not features.is_dirty_replenish_clean_supported: modes.append(WashTowelModes.SUPER_DEEP) elif features.is_dirty_replenish_clean_supported: modes.append(WashTowelModes.SMART) return modes def get_clean_modes(features: DeviceFeatures) -> list[VacuumModes]: """Get the valid clean modes for the device - also known as 'fan power' or 'suction mode'""" 
modes = [VacuumModes.QUIET, VacuumModes.BALANCED, VacuumModes.TURBO, VacuumModes.MAX] if features.is_max_plus_mode_supported or features.is_none_pure_clean_mop_with_max_plus: # If the vacuum has max plus mode supported modes.append(VacuumModes.MAX_PLUS) if features.is_pure_clean_mop_supported: # If the vacuum is capable of 'pure mop clean' aka no vacuum modes.append(VacuumModes.OFF) else: # If not, we can add gentle modes.append(VacuumModes.GENTLE) if features.is_smart_clean_mode_set_supported: modes.append(VacuumModes.SMART_MODE) if features.is_customized_clean_supported: modes.append(VacuumModes.CUSTOMIZED) return modes def get_clean_routes(features: DeviceFeatures, region: str) -> list[CleanRoutes]: """The routes that the vacuum will take while mopping""" if features.is_none_pure_clean_mop_with_max_plus: return [CleanRoutes.FAST, CleanRoutes.STANDARD] supported = [CleanRoutes.STANDARD, CleanRoutes.DEEP] if features.is_careful_slow_mop_supported: if not ( features.is_corner_clean_mode_supported and features.is_clean_route_deep_slow_plus_supported and region == "cn" ): # for some reason there is a china specific deep plus mode supported.append(CleanRoutes.DEEP_PLUS_CN) else: supported.append(CleanRoutes.DEEP_PLUS) if features.is_clean_route_fast_mode_supported: supported.append(CleanRoutes.FAST) if features.is_smart_clean_mode_set_supported: supported.append(CleanRoutes.SMART_MODE) if features.is_customized_clean_supported: supported.append(CleanRoutes.CUSTOMIZED) return supported def get_water_modes(features: DeviceFeatures) -> list[WaterModes]: """Get the valid water modes for the device - also known as 'water flow' or 'water level'""" # If the device supports water slide mode, it uses a completely different set of modes. Technically, it can even # support values in between. But for now we will just support the main values. 
if features.is_water_slide_mode_supported: return [ WaterModes.PURE_WATER_FLOW_START, WaterModes.PURE_WATER_FLOW_SMALL, WaterModes.PURE_WATER_FLOW_MIDDLE, WaterModes.PURE_WATER_FLOW_LARGE, WaterModes.PURE_WATER_SUPER_BEGIN, WaterModes.PURE_WATER_FLOW_END, ] supported_modes = [WaterModes.OFF] if features.is_mop_shake_module_supported: # For mops that have the vibrating mop pad, they do mild standard intense supported_modes.extend([WaterModes.MILD, WaterModes.STANDARD, WaterModes.INTENSE]) else: supported_modes.extend([WaterModes.LOW, WaterModes.MEDIUM, WaterModes.HIGH]) if features.is_custom_water_box_distance_supported: # This is for devices that allow you to set a custom water flow from 0-100 supported_modes.append(WaterModes.CUSTOM) if features.is_mop_shake_module_supported and features.is_mop_shake_water_max_supported: supported_modes.append(WaterModes.EXTREME) if features.is_smart_clean_mode_set_supported: supported_modes.append(WaterModes.SMART_MODE) if features.is_customized_clean_supported: supported_modes.append(WaterModes.CUSTOMIZED) return supported_modes def is_mode_customized(clean_mode: VacuumModes, water_mode: WaterModes, mop_mode: CleanRoutes) -> bool: """Check if any of the cleaning modes are set to a custom value.""" return ( clean_mode == VacuumModes.CUSTOMIZED or water_mode == WaterModes.CUSTOMIZED or mop_mode == CleanRoutes.CUSTOMIZED ) def is_smart_mode_set(water_mode: WaterModes, clean_mode: VacuumModes, mop_mode: CleanRoutes) -> bool: """Check if the smart mode is set for the given water mode and clean mode""" return ( water_mode == WaterModes.SMART_MODE or clean_mode == VacuumModes.SMART_MODE or mop_mode == CleanRoutes.SMART_MODE ) Python-roborock-python-roborock-32df4f3/roborock/cli.py000066400000000000000000001036561507503702500233430ustar00rootroot00000000000000"""Command line interface for python-roborock. The CLI supports both one-off commands and an interactive session mode. In session mode, an asyncio event loop is created in a separate thread, allowing users to interactively run commands that require async operations. Typical CLI usage: ``` $ roborock login --email [--password ] $ roborock discover $ roborock list-devices $ roborock status --device_id ``` ... Session mode usage: ``` $ roborock session roborock> list-devices ... 
roborock> status --device_id ``` """ import asyncio import datetime import functools import json import logging import sys import threading from collections.abc import Callable from dataclasses import asdict, dataclass from pathlib import Path from typing import Any, cast import click import click_shell import yaml from pyshark import FileCapture # type: ignore from pyshark.capture.live_capture import LiveCapture, UnknownInterfaceException # type: ignore from pyshark.packet.packet import Packet # type: ignore from roborock import SHORT_MODEL_TO_ENUM, RoborockCommand from roborock.containers import CombinedMapInfo, DeviceData, HomeData, NetworkInfo, RoborockBase, UserData from roborock.device_features import DeviceFeatures from roborock.devices.cache import Cache, CacheData from roborock.devices.device import RoborockDevice from roborock.devices.device_manager import DeviceManager, create_device_manager, create_home_data_api from roborock.devices.traits import Trait from roborock.devices.traits.v1 import V1TraitMixin from roborock.devices.traits.v1.consumeable import ConsumableAttribute from roborock.devices.traits.v1.map_content import MapContentTrait from roborock.exceptions import RoborockException, RoborockUnsupportedFeature from roborock.protocol import MessageParser from roborock.version_1_apis.roborock_mqtt_client_v1 import RoborockMqttClientV1 from roborock.web_api import RoborockApiClient _LOGGER = logging.getLogger(__name__) if sys.platform == "win32": asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) def dump_json(obj: Any) -> Any: """Dump an object as JSON.""" def custom_json_serializer(obj): if isinstance(obj, datetime.time): return obj.isoformat() raise TypeError(f"Object of type {obj.__class__.__name__} is not JSON serializable") return json.dumps(obj, default=custom_json_serializer) def async_command(func): """Decorator for async commands that work in both CLI and session modes. The CLI supports two execution modes: 1. CLI mode: One-off commands that create their own event loop 2. Session mode: Interactive shell with a persistent background event loop This decorator ensures async commands work correctly in both modes: - CLI mode: Uses asyncio.run() to create a new event loop - Session mode: Uses the existing session event loop via run_in_session() """ @functools.wraps(func) def wrapper(*args, **kwargs): ctx = args[0] context: RoborockContext = ctx.obj async def run(): return await func(*args, **kwargs) if context.is_session_mode(): # Session mode - run in the persistent loop return context.run_in_session(run()) else: # CLI mode - just run normally (asyncio.run handles loop creation) return asyncio.run(run()) return wrapper @dataclass class ConnectionCache(RoborockBase): """Cache for Roborock data. This is used to store data retrieved from the Roborock API, such as user data and home data to avoid repeated API calls. This cache is superset of `LoginData` since we used to directly store that dataclass, but now we also store additional data. 
""" user_data: UserData email: str home_data: HomeData | None = None network_info: dict[str, NetworkInfo] | None = None home_cache: dict[int, CombinedMapInfo] | None = None class DeviceConnectionManager: """Manages device connections for both CLI and session modes.""" def __init__(self, context: "RoborockContext", loop: asyncio.AbstractEventLoop | None = None): self.context = context self.loop = loop self.device_manager: DeviceManager | None = None self._devices: dict[str, RoborockDevice] = {} async def ensure_device_manager(self) -> DeviceManager: """Ensure device manager is initialized.""" if self.device_manager is None: cache_data = self.context.cache_data() home_data_api = create_home_data_api(cache_data.email, cache_data.user_data) self.device_manager = await create_device_manager(cache_data.user_data, home_data_api, self.context) # Cache devices for quick lookup devices = await self.device_manager.get_devices() self._devices = {device.duid: device for device in devices} return self.device_manager async def get_device(self, device_id: str) -> RoborockDevice: """Get a device by ID, creating connections if needed.""" await self.ensure_device_manager() if device_id not in self._devices: raise RoborockException(f"Device {device_id} not found") return self._devices[device_id] async def close(self): """Close device manager connections.""" if self.device_manager: await self.device_manager.close() self.device_manager = None self._devices = {} class RoborockContext(Cache): """Context that handles both CLI and session modes internally.""" roborock_file = Path("~/.roborock").expanduser() _cache_data: ConnectionCache | None = None def __init__(self): self.reload() self._session_loop: asyncio.AbstractEventLoop | None = None self._session_thread: threading.Thread | None = None self._device_manager: DeviceConnectionManager | None = None def reload(self): if self.roborock_file.is_file(): with open(self.roborock_file) as f: data = json.load(f) if data: self._cache_data = ConnectionCache.from_dict(data) def update(self, cache_data: ConnectionCache): data = json.dumps(cache_data.as_dict(), default=vars, indent=4) with open(self.roborock_file, "w") as f: f.write(data) self.reload() def validate(self): if self._cache_data is None: raise RoborockException("You must login first") def cache_data(self) -> ConnectionCache: """Get the cache data.""" self.validate() return cast(ConnectionCache, self._cache_data) def start_session_mode(self): """Start session mode with a background event loop.""" if self._session_loop is not None: return # Already started self._session_loop = asyncio.new_event_loop() self._session_thread = threading.Thread(target=self._run_session_loop) self._session_thread.daemon = True self._session_thread.start() def _run_session_loop(self): """Run the session event loop in a background thread.""" assert self._session_loop is not None # guaranteed by start_session_mode asyncio.set_event_loop(self._session_loop) self._session_loop.run_forever() def is_session_mode(self) -> bool: return self._session_loop is not None def run_in_session(self, coro): """Run a coroutine in the session loop (session mode only).""" if not self._session_loop: raise RoborockException("Not in session mode") future = asyncio.run_coroutine_threadsafe(coro, self._session_loop) return future.result() async def get_device_manager(self) -> DeviceConnectionManager: """Get device manager, creating if needed.""" await self.get_devices() if self._device_manager is None: self._device_manager = DeviceConnectionManager(self, 
self._session_loop) return self._device_manager async def refresh_devices(self) -> ConnectionCache: """Refresh device data from server (always fetches fresh data).""" cache_data = self.cache_data() client = RoborockApiClient(cache_data.email) home_data = await client.get_home_data_v3(cache_data.user_data) cache_data.home_data = home_data self.update(cache_data) return cache_data async def get_devices(self) -> ConnectionCache: """Get device data (uses cache if available, fetches if needed).""" cache_data = self.cache_data() if not cache_data.home_data: cache_data = await self.refresh_devices() return cache_data async def cleanup(self): """Clean up resources (mainly for session mode).""" if self._device_manager: await self._device_manager.close() self._device_manager = None # Stop session loop if running if self._session_loop: self._session_loop.call_soon_threadsafe(self._session_loop.stop) if self._session_thread: self._session_thread.join(timeout=5.0) self._session_loop = None self._session_thread = None def finish_session(self) -> None: """Finish the session and clean up resources.""" if self._session_loop: future = asyncio.run_coroutine_threadsafe(self.cleanup(), self._session_loop) future.result(timeout=5.0) async def get(self) -> CacheData: """Get cached value.""" _LOGGER.debug("Getting cache data") connection_cache = self.cache_data() return CacheData( home_data=connection_cache.home_data, network_info=connection_cache.network_info or {}, home_cache=connection_cache.home_cache, ) async def set(self, value: CacheData) -> None: """Set value in the cache.""" _LOGGER.debug("Setting cache data") connection_cache = self.cache_data() connection_cache.home_data = value.home_data connection_cache.network_info = value.network_info connection_cache.home_cache = value.home_cache self.update(connection_cache) @click.option("-d", "--debug", default=False, count=True) @click.version_option(package_name="python-roborock") @click.group() @click.pass_context def cli(ctx, debug: int): logging_config: dict[str, Any] = {"level": logging.DEBUG if debug > 0 else logging.INFO} logging.basicConfig(**logging_config) # type: ignore ctx.obj = RoborockContext() @click.command() @click.option("--email", required=True) @click.option( "--password", required=False, help="Password for the Roborock account. 
If not provided, an email code will be requested.", ) @click.pass_context @async_command async def login(ctx, email, password): """Login to Roborock account.""" context: RoborockContext = ctx.obj try: context.validate() _LOGGER.info("Already logged in") return except RoborockException: pass client = RoborockApiClient(email) if password is not None: user_data = await client.pass_login(password) else: print(f"Requesting code for {email}") await client.request_code() code = click.prompt("A code has been sent to your email, please enter the code", type=str) user_data = await client.code_login(code) print("Login successful") context.update(ConnectionCache(user_data=user_data, email=email)) def _shell_session_finished(ctx): """Callback for when shell session finishes.""" context: RoborockContext = ctx.obj try: context.finish_session() except Exception as e: click.echo(f"Error during cleanup: {e}", err=True) click.echo("Session finished") @click_shell.shell( prompt="roborock> ", on_finished=_shell_session_finished, ) @click.pass_context def session(ctx): """Start an interactive session.""" context: RoborockContext = ctx.obj # Start session mode with background loop context.start_session_mode() context.run_in_session(context.get_device_manager()) click.echo("OK") @session.command() @click.pass_context @async_command async def discover(ctx): """Discover devices.""" context: RoborockContext = ctx.obj # Use the explicit refresh method for the discover command cache_data = await context.refresh_devices() home_data = cache_data.home_data click.echo(f"Discovered devices {', '.join([device.name for device in home_data.get_all_devices()])}") @session.command() @click.pass_context @async_command async def list_devices(ctx): context: RoborockContext = ctx.obj cache_data = await context.get_devices() home_data = cache_data.home_data device_name_id = {device.name: device.duid for device in home_data.get_all_devices()} click.echo(json.dumps(device_name_id, indent=4)) @click.command() @click.option("--device_id", required=True) @click.pass_context @async_command async def list_scenes(ctx, device_id): context: RoborockContext = ctx.obj cache_data = await context.get_devices() client = RoborockApiClient(cache_data.email) scenes = await client.get_scenes(cache_data.user_data, device_id) output_list = [] for scene in scenes: output_list.append(scene.as_dict()) click.echo(json.dumps(output_list, indent=4)) @click.command() @click.option("--scene_id", required=True) @click.pass_context @async_command async def execute_scene(ctx, scene_id): context: RoborockContext = ctx.obj cache_data = await context.get_devices() client = RoborockApiClient(cache_data.email) await client.execute_scene(cache_data.user_data, scene_id) async def _v1_trait(context: RoborockContext, device_id: str, display_func: Callable[[], V1TraitMixin]) -> Trait: device_manager = await context.get_device_manager() device = await device_manager.get_device(device_id) if device.v1_properties is None: raise RoborockException(f"Device {device.name} does not support V1 protocol") await device.v1_properties.discover_features() trait = display_func(device.v1_properties) await trait.refresh() return trait async def _display_v1_trait(context: RoborockContext, device_id: str, display_func: Callable[[], Trait]) -> None: try: trait = await _v1_trait(context, device_id, display_func) except RoborockUnsupportedFeature: click.echo("Feature not supported by device") return except RoborockException as e: click.echo(f"Error: {e}") return 
click.echo(dump_json(trait.as_dict())) @session.command() @click.option("--device_id", required=True) @click.pass_context @async_command async def status(ctx, device_id: str): """Get device status.""" context: RoborockContext = ctx.obj await _display_v1_trait(context, device_id, lambda v1: v1.status) @session.command() @click.option("--device_id", required=True) @click.pass_context @async_command async def clean_summary(ctx, device_id: str): """Get device clean summary.""" context: RoborockContext = ctx.obj await _display_v1_trait(context, device_id, lambda v1: v1.clean_summary) @session.command() @click.option("--device_id", required=True) @click.pass_context @async_command async def volume(ctx, device_id: str): """Get device volume.""" context: RoborockContext = ctx.obj await _display_v1_trait(context, device_id, lambda v1: v1.sound_volume) @session.command() @click.option("--device_id", required=True) @click.option("--volume", required=True, type=int) @click.pass_context @async_command async def set_volume(ctx, device_id: str, volume: int): """Set the device volume.""" context: RoborockContext = ctx.obj volume_trait = await _v1_trait(context, device_id, lambda v1: v1.sound_volume) await volume_trait.set_volume(volume) click.echo(f"Set Device {device_id} volume to {volume}") @session.command() @click.option("--device_id", required=True) @click.pass_context @async_command async def maps(ctx, device_id: str): """Get device maps info.""" context: RoborockContext = ctx.obj await _display_v1_trait(context, device_id, lambda v1: v1.maps) @session.command() @click.option("--device_id", required=True) @click.option("--output-file", required=True, help="Path to save the map image.") @click.pass_context @async_command async def map_image(ctx, device_id: str, output_file: str): """Get device map image and save it to a file.""" context: RoborockContext = ctx.obj trait: MapContentTrait = await _v1_trait(context, device_id, lambda v1: v1.map_content) if trait.image_content: with open(output_file, "wb") as f: f.write(trait.image_content) click.echo(f"Map image saved to {output_file}") else: click.echo("No map image content available.") @session.command() @click.option("--device_id", required=True) @click.option("--include_path", is_flag=True, default=False, help="Include path data in the output.") @click.pass_context @async_command async def map_data(ctx, device_id: str, include_path: bool): """Get parsed map data as JSON.""" context: RoborockContext = ctx.obj trait: MapContentTrait = await _v1_trait(context, device_id, lambda v1: v1.map_content) if not trait.map_data: click.echo("No parsed map data available.") return # Pick some parts of the map data to display.
data_summary = { "charger": trait.map_data.charger.as_dict() if trait.map_data.charger else None, "image_size": trait.map_data.image.data.size if trait.map_data.image else None, "vacuum_position": trait.map_data.vacuum_position.as_dict() if trait.map_data.vacuum_position else None, "calibration": trait.map_data.calibration(), "zones": [z.as_dict() for z in trait.map_data.zones or ()], } if include_path and trait.map_data.path: data_summary["path"] = trait.map_data.path.as_dict() click.echo(dump_json(data_summary)) @session.command() @click.option("--device_id", required=True) @click.pass_context @async_command async def consumables(ctx, device_id: str): """Get device consumables.""" context: RoborockContext = ctx.obj await _display_v1_trait(context, device_id, lambda v1: v1.consumables) @session.command() @click.option("--device_id", required=True) @click.option("--consumable", required=True, type=click.Choice([e.value for e in ConsumableAttribute])) @click.pass_context @async_command async def reset_consumable(ctx, device_id: str, consumable: str): """Reset a specific consumable attribute.""" context: RoborockContext = ctx.obj trait = await _v1_trait(context, device_id, lambda v1: v1.consumables) attribute = ConsumableAttribute.from_str(consumable) await trait.reset_consumable(attribute) click.echo(f"Reset {consumable} for device {device_id}") @session.command() @click.option("--device_id", required=True) @click.option("--enabled", type=bool, help="Enable (True) or disable (False) the child lock.") @click.pass_context @async_command async def child_lock(ctx, device_id: str, enabled: bool | None): """Get device child lock status.""" context: RoborockContext = ctx.obj try: trait = await _v1_trait(context, device_id, lambda v1: v1.child_lock) except RoborockUnsupportedFeature: click.echo("Feature not supported by device") return if enabled is not None: if enabled: await trait.enable() else: await trait.disable() click.echo(f"Set child lock to {enabled} for device {device_id}") await trait.refresh() click.echo(dump_json(trait.as_dict())) @session.command() @click.option("--device_id", required=True) @click.option("--enabled", type=bool, help="Enable (True) or disable (False) the DND status.") @click.pass_context @async_command async def dnd(ctx, device_id: str, enabled: bool | None): """Get Do Not Disturb Timer status.""" context: RoborockContext = ctx.obj try: trait = await _v1_trait(context, device_id, lambda v1: v1.dnd) except RoborockUnsupportedFeature: click.echo("Feature not supported by device") return if enabled is not None: if enabled: await trait.enable() else: await trait.disable() click.echo(f"Set DND to {enabled} for device {device_id}") await trait.refresh() click.echo(dump_json(trait.as_dict())) @session.command() @click.option("--device_id", required=True) @click.option("--enabled", required=False, type=bool, help="Enable (True) or disable (False) the Flow LED.") @click.pass_context @async_command async def flow_led_status(ctx, device_id: str, enabled: bool | None): """Get device Flow LED status.""" context: RoborockContext = ctx.obj try: trait = await _v1_trait(context, device_id, lambda v1: v1.flow_led_status) except RoborockUnsupportedFeature: click.echo("Feature not supported by device") return if enabled is not None: if enabled: await trait.enable() else: await trait.disable() click.echo(f"Set Flow LED to {enabled} for device {device_id}") await trait.refresh() click.echo(dump_json(trait.as_dict())) @session.command() @click.option("--device_id", required=True) 
@click.option("--enabled", required=False, type=bool, help="Enable (True) or disable (False) the LED.") @click.pass_context @async_command async def led_status(ctx, device_id: str, enabled: bool | None): """Get device LED status.""" context: RoborockContext = ctx.obj try: trait = await _v1_trait(context, device_id, lambda v1: v1.led_status) except RoborockUnsupportedFeature: click.echo("Feature not supported by device") return if enabled is not None: if enabled: await trait.enable() else: await trait.disable() click.echo(f"Set LED Status to {enabled} for device {device_id}") await trait.refresh() click.echo(dump_json(trait.as_dict())) @session.command() @click.option("--device_id", required=True) @click.option("--enabled", required=True, type=bool, help="Enable (True) or disable (False) the child lock.") @click.pass_context @async_command async def set_child_lock(ctx, device_id: str, enabled: bool): """Set the child lock status.""" context: RoborockContext = ctx.obj trait = await _v1_trait(context, device_id, lambda v1: v1.child_lock) await trait.set_child_lock(enabled) status = "enabled" if enabled else "disabled" click.echo(f"Child lock {status} for device {device_id}") @session.command() @click.option("--device_id", required=True) @click.pass_context @async_command async def rooms(ctx, device_id: str): """Get device room mapping info.""" context: RoborockContext = ctx.obj await _display_v1_trait(context, device_id, lambda v1: v1.rooms) @session.command() @click.option("--device_id", required=True) @click.pass_context @async_command async def features(ctx, device_id: str): """Get device room mapping info.""" context: RoborockContext = ctx.obj await _display_v1_trait(context, device_id, lambda v1: v1.device_features) @session.command() @click.option("--device_id", required=True) @click.option("--refresh", is_flag=True, default=False, help="Refresh status before discovery.") @click.pass_context @async_command async def home(ctx, device_id: str, refresh: bool): """Discover and cache home layout (maps and rooms).""" context: RoborockContext = ctx.obj device_manager = await context.get_device_manager() device = await device_manager.get_device(device_id) if device.v1_properties is None: raise RoborockException(f"Device {device.name} does not support V1 protocol") # Ensure we have the latest status before discovery await device.v1_properties.status.refresh() home_trait = device.v1_properties.home await home_trait.discover_home() if refresh: await home_trait.refresh() # Display the discovered home cache if home_trait.home_cache: cache_summary = { map_flag: { "name": map_data.name, "room_count": len(map_data.rooms), "rooms": [{"segment_id": room.segment_id, "name": room.name} for room in map_data.rooms], } for map_flag, map_data in home_trait.home_cache.items() } click.echo(dump_json(cache_summary)) else: click.echo("No maps discovered") @click.command() @click.option("--device_id", required=True) @click.option("--cmd", required=True) @click.option("--params", required=False) @click.pass_context @async_command async def command(ctx, cmd, device_id, params): context: RoborockContext = ctx.obj device_manager = await context.get_device_manager() device = await device_manager.get_device(device_id) if device.v1_properties is None: raise RoborockException(f"Device {device.name} does not support V1 protocol") command_trait: Trait = device.v1_properties.command result = await command_trait.send(cmd, json.loads(params) if params is not None else None) if result: click.echo(dump_json(result)) 
@click.command() @click.option("--local_key", required=True) @click.option("--device_ip", required=True) @click.option("--file", required=False) @click.pass_context @async_command async def parser(_, local_key, device_ip, file): file_provided = file is not None if file_provided: capture = FileCapture(file) else: _LOGGER.info("Listen for interface rvi0 since no file was provided") capture = LiveCapture(interface="rvi0") buffer = {"data": b""} def on_package(packet: Packet): if hasattr(packet, "ip"): if packet.transport_layer == "TCP" and (packet.ip.dst == device_ip or packet.ip.src == device_ip): if hasattr(packet, "DATA"): if hasattr(packet.DATA, "data"): if packet.ip.dst == device_ip: try: f, buffer["data"] = MessageParser.parse( buffer["data"] + bytes.fromhex(packet.DATA.data), local_key, ) print(f"Received request: {f}") except BaseException as e: print(e) pass elif packet.ip.src == device_ip: try: f, buffer["data"] = MessageParser.parse( buffer["data"] + bytes.fromhex(packet.DATA.data), local_key, ) print(f"Received response: {f}") except BaseException as e: print(e) pass try: await capture.packets_from_tshark(on_package, close_tshark=not file_provided) except UnknownInterfaceException: raise RoborockException( "You need to run 'rvictl -s XXXXXXXX-XXXXXXXXXXXXXXXX' first, with an iPhone connected to usb port" ) @click.command() @click.pass_context @async_command async def get_device_info(ctx: click.Context): """ Connects to devices and prints their feature information in YAML format. """ click.echo("Discovering devices...") context: RoborockContext = ctx.obj cache_data = await context.get_devices() home_data = cache_data.home_data all_devices = home_data.get_all_devices() if not all_devices: click.echo("No devices found.") return click.echo(f"Found {len(all_devices)} devices. Fetching data...") all_products_data = {} for device in all_devices: click.echo(f" - Processing {device.name} ({device.duid})") product_info = home_data.product_map[device.product_id] device_data = DeviceData(device, product_info.model) mqtt_client = RoborockMqttClientV1(cache_data.user_data, device_data) try: init_status_result = await mqtt_client.send_command( RoborockCommand.APP_GET_INIT_STATUS, ) product_nickname = SHORT_MODEL_TO_ENUM.get(product_info.model.split(".")[-1]).name current_product_data = { "Protocol Version": device.pv, "Product Nickname": product_nickname, "New Feature Info": init_status_result.get("new_feature_info"), "New Feature Info Str": init_status_result.get("new_feature_info_str"), "Feature Info": init_status_result.get("feature_info"), } all_products_data[product_info.model] = current_product_data except Exception as e: click.echo(f" - Error processing device {device.name}: {e}", err=True) finally: await mqtt_client.async_release() if all_products_data: click.echo("\n--- Device Information (copy to your YAML file) ---\n") # Use yaml.dump to print in a clean, copy-paste friendly format click.echo(yaml.dump(all_products_data, sort_keys=False)) @click.command() @click.option("--data-file", default="../device_info.yaml", help="Path to the YAML file with device feature data.") @click.option("--output-file", default="../SUPPORTED_FEATURES.md", help="Path to the output markdown file.") def update_docs(data_file: str, output_file: str): """ Generates a markdown file by processing raw feature data from a YAML file. 
""" data_path = Path(data_file) output_path = Path(output_file) if not data_path.exists(): click.echo(f"Error: Data file not found at '{data_path}'", err=True) return click.echo(f"Loading data from {data_path}...") with open(data_path, encoding="utf-8") as f: product_data_from_yaml = yaml.safe_load(f) if not product_data_from_yaml: click.echo("No data found in YAML file. Exiting.", err=True) return product_features_map = {} all_feature_names = set() # Process the raw data from YAML to build the feature map for model, data in product_data_from_yaml.items(): # Reconstruct the DeviceFeatures object from the raw data in the YAML file device_features = DeviceFeatures.from_feature_flags( new_feature_info=data.get("New Feature Info"), new_feature_info_str=data.get("New Feature Info Str"), feature_info=data.get("Feature Info"), product_nickname=data.get("Product Nickname"), ) features_dict = asdict(device_features) # This dictionary will hold the final data for the markdown table row current_product_data = { "Product Nickname": data.get("Product Nickname", ""), "Protocol Version": data.get("Protocol Version", ""), "New Feature Info": data.get("New Feature Info", ""), "New Feature Info Str": data.get("New Feature Info Str", ""), } # Populate features from the calculated DeviceFeatures object for feature, is_supported in features_dict.items(): all_feature_names.add(feature) if is_supported: current_product_data[feature] = "X" supported_codes = data.get("Feature Info", []) if isinstance(supported_codes, list): for code in supported_codes: feature_name = str(code) all_feature_names.add(feature_name) current_product_data[feature_name] = "X" product_features_map[model] = current_product_data # --- Helper function to write the markdown table --- def write_markdown_table(product_features: dict[str, dict[str, any]], all_features: set[str]): """Writes the data into a markdown table (products as columns).""" sorted_products = sorted(product_features.keys()) special_rows = [ "Product Nickname", "Protocol Version", "New Feature Info", "New Feature Info Str", ] # Regular features are the remaining keys, sorted alphabetically # We filter out the special rows to avoid duplicating them. 
sorted_features = sorted(list(all_features - set(special_rows))) header = ["Feature"] + sorted_products click.echo(f"Writing documentation to {output_path}...") with open(output_path, "w", encoding="utf-8") as f: f.write("| " + " | ".join(header) + " |\n") f.write("|" + "---|" * len(header) + "\n") # Write the special metadata rows first for row_name in special_rows: row_values = [str(product_features[p].get(row_name, "")) for p in sorted_products] f.write("| " + " | ".join([row_name] + row_values) + " |\n") # Write the feature rows for feature in sorted_features: # Use backticks for feature names that are just numbers (from the list) display_feature = f"`{feature}`" feature_row = [display_feature] for product in sorted_products: # Use .get() to place an 'X' or an empty string feature_row.append(product_features[product].get(feature, "")) f.write("| " + " | ".join(feature_row) + " |\n") write_markdown_table(product_features_map, all_feature_names) click.echo("Done.") cli.add_command(login) cli.add_command(discover) cli.add_command(list_devices) cli.add_command(list_scenes) cli.add_command(execute_scene) cli.add_command(status) cli.add_command(command) cli.add_command(parser) cli.add_command(session) cli.add_command(get_device_info) cli.add_command(update_docs) cli.add_command(clean_summary) cli.add_command(volume) cli.add_command(set_volume) cli.add_command(maps) cli.add_command(map_image) cli.add_command(map_data) cli.add_command(consumables) cli.add_command(reset_consumable) cli.add_command(rooms) cli.add_command(home) cli.add_command(features) cli.add_command(child_lock) cli.add_command(dnd) cli.add_command(flow_led_status) cli.add_command(led_status) def main(): return cli() if __name__ == "__main__": main() Python-roborock-python-roborock-32df4f3/roborock/cloud_api.py000066400000000000000000000241331507503702500245230ustar00rootroot00000000000000from __future__ import annotations import asyncio import logging import threading from abc import ABC from asyncio import Lock from typing import Any import paho.mqtt.client as mqtt from paho.mqtt.enums import MQTTErrorCode # Mypy is not seeing this for some reason. It wants me to use the depreciated ReasonCodes from paho.mqtt.reasoncodes import ReasonCode # type: ignore from .api import KEEPALIVE, RoborockClient from .containers import DeviceData, UserData from .exceptions import RoborockException, VacuumError from .protocol import ( Decoder, Encoder, create_mqtt_decoder, create_mqtt_encoder, create_mqtt_params, ) from .roborock_future import RoborockFuture _LOGGER = logging.getLogger(__name__) CONNECT_REQUEST_ID = 0 DISCONNECT_REQUEST_ID = 1 class _Mqtt(mqtt.Client): """Internal MQTT client. This is a subclass of the Paho MQTT client that adds some additional functionality for error cases where things get stuck. 
""" _thread: threading.Thread def __init__(self) -> None: """Initialize the MQTT client.""" super().__init__(protocol=mqtt.MQTTv5) def maybe_restart_loop(self) -> None: """Ensure that the MQTT loop is running in case it previously exited.""" if not self._thread or not self._thread.is_alive(): if self._thread: _LOGGER.info("Stopping mqtt loop") super().loop_stop() _LOGGER.info("Starting mqtt loop") super().loop_start() class RoborockMqttClient(RoborockClient, ABC): """Roborock MQTT client base class.""" def __init__(self, user_data: UserData, device_info: DeviceData) -> None: """Initialize the Roborock MQTT client.""" rriot = user_data.rriot if rriot is None: raise RoborockException("Got no rriot data from user_data") RoborockClient.__init__(self, device_info) mqtt_params = create_mqtt_params(rriot) self._mqtt_user = rriot.u self._hashed_user = mqtt_params.username self._mqtt_host = mqtt_params.host self._mqtt_port = mqtt_params.port self._mqtt_client = _Mqtt() self._mqtt_client.on_connect = self._mqtt_on_connect self._mqtt_client.on_message = self._mqtt_on_message # Due to the incorrect ReasonCode, it is confused by typing self._mqtt_client.on_disconnect = self._mqtt_on_disconnect # type: ignore if mqtt_params.tls: self._mqtt_client.tls_set() self._mqtt_client.username_pw_set(mqtt_params.username, mqtt_params.password) self._waiting_queue: dict[int, RoborockFuture] = {} self._mutex = Lock() self._decoder: Decoder = create_mqtt_decoder(device_info.device.local_key) self._encoder: Encoder = create_mqtt_encoder(device_info.device.local_key) self.received_message_since_last_disconnect = False self._topic = f"rr/m/o/{self._mqtt_user}/{self._hashed_user}/{self.device_info.device.duid}" def _mqtt_on_connect( self, client: mqtt.Client, userdata: object, flags: dict[str, int], rc: ReasonCode, properties: mqtt.Properties | None = None, ): connection_queue = self._waiting_queue.get(CONNECT_REQUEST_ID) if rc.is_failure: message = f"Failed to connect ({rc})" self._logger.error(message) if connection_queue: connection_queue.set_exception(VacuumError(message)) else: self._logger.debug("Failed to notify connect future, not in queue") return self._logger.info(f"Connected to mqtt {self._mqtt_host}:{self._mqtt_port}") (result, mid) = self._mqtt_client.subscribe(self._topic) if result != 0: message = f"Failed to subscribe ({str(rc)})" self._logger.error(message) if connection_queue: connection_queue.set_exception(VacuumError(message)) return self._logger.info(f"Subscribed to topic {self._topic}") if connection_queue: connection_queue.set_result(True) def _mqtt_on_message(self, *args, **kwargs): self.received_message_since_last_disconnect = True client, __, msg = args try: messages = self._decoder(msg.payload) super().on_message_received(messages) except Exception as ex: self._logger.exception(ex) def _mqtt_on_disconnect( self, client: mqtt.Client, data: object, flags: dict[str, int], rc: ReasonCode | None, properties: mqtt.Properties | None = None, ): try: exc = RoborockException(str(rc)) if rc is not None and rc.is_failure else None super().on_connection_lost(exc) connection_queue = self._waiting_queue.get(DISCONNECT_REQUEST_ID) if connection_queue: connection_queue.set_result(True) except Exception as ex: self._logger.exception(ex) def is_connected(self) -> bool: """Check if the mqtt client is connected.""" return self._mqtt_client.is_connected() def _sync_disconnect(self) -> Any: if not self.is_connected(): return None self._logger.info("Disconnecting from mqtt") disconnected_future = 
self._async_response(DISCONNECT_REQUEST_ID) rc = self._mqtt_client.disconnect() if rc == mqtt.MQTT_ERR_NO_CONN: disconnected_future.cancel() return None if rc != mqtt.MQTT_ERR_SUCCESS: disconnected_future.cancel() raise RoborockException(f"Failed to disconnect ({str(rc)})") return disconnected_future def _sync_connect(self) -> Any: if self.is_connected(): self._mqtt_client.maybe_restart_loop() return None if self._mqtt_port is None or self._mqtt_host is None: raise RoborockException("Mqtt information was not entered. Cannot connect.") self._logger.debug("Connecting to mqtt") connected_future = self._async_response(CONNECT_REQUEST_ID) self._mqtt_client.connect(host=self._mqtt_host, port=self._mqtt_port, keepalive=KEEPALIVE) self._mqtt_client.maybe_restart_loop() return connected_future async def async_disconnect(self) -> None: async with self._mutex: if disconnected_future := self._sync_disconnect(): # There are no errors set on this future await disconnected_future loop = asyncio.get_running_loop() await loop.run_in_executor(None, self._mqtt_client.loop_stop) async def async_connect(self) -> None: async with self._mutex: if connected_future := self._sync_connect(): try: await connected_future except VacuumError as err: raise RoborockException(err) from err def _send_msg_raw(self, msg: bytes) -> None: info = self._mqtt_client.publish( f"rr/m/i/{self._mqtt_user}/{self._hashed_user}/{self.device_info.device.duid}", msg ) if info.rc != mqtt.MQTT_ERR_SUCCESS: raise RoborockException(f"Failed to publish ({mqtt.error_string(info.rc)})") async def _unsubscribe(self) -> MQTTErrorCode: """Unsubscribe from the topic.""" loop = asyncio.get_running_loop() (result, mid) = await loop.run_in_executor(None, self._mqtt_client.unsubscribe, self._topic) if result != 0: message = f"Failed to unsubscribe ({mqtt.error_string(result)})" self._logger.error(message) else: self._logger.info(f"Unsubscribed from topic {self._topic}") return result async def _subscribe(self) -> MQTTErrorCode: """Subscribe to the topic.""" loop = asyncio.get_running_loop() (result, mid) = await loop.run_in_executor(None, self._mqtt_client.subscribe, self._topic) if result != 0: message = f"Failed to subscribe ({mqtt.error_string(result)})" self._logger.error(message) else: self._logger.info(f"Subscribed to topic {self._topic}") return result async def _reconnect(self) -> None: """Reconnect to the MQTT broker.""" await self.async_disconnect() await self.async_connect() async def _validate_connection(self) -> None: """Override the default validate connection to try to re-subscribe rather than disconnect. When something seems to be wrong with our connection, we should follow the following steps: 1. Try to unsubscribe and resubscribe from the topic. 2. If we don't end up getting a message, we should completely disconnect and reconnect to the MQTT broker. 3. We will continue to try to disconnect and reconnect until we get a message. 4. If we get a message, the next time connection is lost, We will go back to step 1. """ # If we should no longer keep the current connection alive... if not self.should_keepalive(): self._logger.info("Resetting Roborock connection due to keepalive timeout") if not self.received_message_since_last_disconnect: # If we have already tried to unsub and resub, and we are still in this state, # we should try to reconnect. 
return await self._reconnect() try: # Mark that we have tried to unsubscribe and resubscribe self.received_message_since_last_disconnect = False if await self._unsubscribe() != 0: # If we fail to unsubscribe, reconnect to the broker return await self._reconnect() if await self._subscribe() != 0: # If we fail to subscribe, reconnected to the broker. return await self._reconnect() except Exception: # noqa # If we get any errors at all, we should just reconnect. return await self._reconnect() # Call connect to make sure everything is still in a good state. await self.async_connect() Python-roborock-python-roborock-32df4f3/roborock/code_mappings.py000066400000000000000000000611131507503702500253730ustar00rootroot00000000000000from __future__ import annotations import logging from collections import namedtuple from enum import Enum, IntEnum, StrEnum _LOGGER = logging.getLogger(__name__) completed_warnings = set() class RoborockEnum(IntEnum): """Roborock Enum for codes with int values""" @property def name(self) -> str: return super().name.lower() @classmethod def _missing_(cls: type[RoborockEnum], key) -> RoborockEnum: if hasattr(cls, "unknown"): warning = f"Missing {cls.__name__} code: {key} - defaulting to 'unknown'" if warning not in completed_warnings: completed_warnings.add(warning) _LOGGER.warning(warning) return cls.unknown # type: ignore default_value = next(item for item in cls) warning = f"Missing {cls.__name__} code: {key} - defaulting to {default_value}" if warning not in completed_warnings: completed_warnings.add(warning) _LOGGER.warning(warning) return default_value @classmethod def as_dict(cls: type[RoborockEnum]): return {i.name: i.value for i in cls if i.name != "missing"} @classmethod def as_enum_dict(cls: type[RoborockEnum]): return {i.value: i for i in cls if i.name != "missing"} @classmethod def values(cls: type[RoborockEnum]) -> list[int]: return list(cls.as_dict().values()) @classmethod def keys(cls: type[RoborockEnum]) -> list[str]: return list(cls.as_dict().keys()) @classmethod def items(cls: type[RoborockEnum]): return cls.as_dict().items() class RoborockModeEnum(StrEnum): """A custom StrEnum that also stores an integer code for each member.""" code: int def __new__(cls, value: str, code: int) -> RoborockModeEnum: """Creates a new enum member.""" member = str.__new__(cls, value) member._value_ = value member.code = code return member @classmethod def from_code(cls, code: int): for member in cls: if member.code == code: return member raise ValueError(f"{code} is not a valid code for {cls.__name__}") ProductInfo = namedtuple("ProductInfo", ["nickname", "short_models"]) class RoborockProductNickname(Enum): # Coral Series CORAL = ProductInfo(nickname="Coral", short_models=("a20", "a21")) CORALPRO = ProductInfo(nickname="CoralPro", short_models=("a143", "a144")) # Pearl Series PEARL = ProductInfo(nickname="Pearl", short_models=("a74", "a75")) PEARLC = ProductInfo(nickname="PearlC", short_models=("a103", "a104")) PEARLE = ProductInfo(nickname="PearlE", short_models=("a167", "a168")) PEARLELITE = ProductInfo(nickname="PearlELite", short_models=("a169", "a170")) PEARLPLUS = ProductInfo(nickname="PearlPlus", short_models=("a86", "a87")) PEARLPLUSS = ProductInfo(nickname="PearlPlusS", short_models=("a116", "a117", "a136")) PEARLS = ProductInfo(nickname="PearlS", short_models=("a100", "a101")) PEARLSLITE = ProductInfo(nickname="PearlSLite", short_models=("a122", "a123")) # Ruby Series RUBYPLUS = ProductInfo(nickname="RubyPlus", short_models=("t4", "s4")) RUBYSC = 
ProductInfo(nickname="RubySC", short_models=("p5", "a08")) RUBYSE = ProductInfo(nickname="RubySE", short_models=("a19",)) RUBYSLITE = ProductInfo(nickname="RubySLite", short_models=("p6", "s5e", "a05")) # Tanos Series TANOS = ProductInfo(nickname="Tanos", short_models=("t6", "s6")) TANOSE = ProductInfo(nickname="TanosE", short_models=("t7", "a11")) TANOSS = ProductInfo(nickname="TanosS", short_models=("a14", "a15")) TANOSSC = ProductInfo(nickname="TanosSC", short_models=("a39", "a40")) TANOSSE = ProductInfo(nickname="TanosSE", short_models=("a33", "a34")) TANOSSMAX = ProductInfo(nickname="TanosSMax", short_models=("a52",)) TANOSSLITE = ProductInfo(nickname="TanosSLite", short_models=("a37", "a38")) TANOSSPLUS = ProductInfo(nickname="TanosSPlus", short_models=("a23", "a24")) TANOSV = ProductInfo(nickname="TanosV", short_models=("t7p", "a09", "a10")) # Topaz Series TOPAZS = ProductInfo(nickname="TopazS", short_models=("a29", "a30", "a76")) TOPAZSC = ProductInfo(nickname="TopazSC", short_models=("a64", "a65")) TOPAZSPLUS = ProductInfo(nickname="TopazSPlus", short_models=("a46", "a47", "a66")) TOPAZSPOWER = ProductInfo(nickname="TopazSPower", short_models=("a62",)) TOPAZSV = ProductInfo(nickname="TopazSV", short_models=("a26", "a27")) # Ultron Series ULTRON = ProductInfo(nickname="Ultron", short_models=("a50", "a51")) ULTRONE = ProductInfo(nickname="UltronE", short_models=("a72", "a84")) ULTRONLITE = ProductInfo(nickname="UltronLite", short_models=("a73", "a85")) ULTRONSC = ProductInfo(nickname="UltronSC", short_models=("a94", "a95")) ULTRONSE = ProductInfo(nickname="UltronSE", short_models=("a124", "a125", "a139", "a140")) ULTRONSPLUS = ProductInfo(nickname="UltronSPlus", short_models=("a68", "a69", "a70")) ULTRONSV = ProductInfo(nickname="UltronSV", short_models=("a96", "a97")) # Verdelite Series VERDELITE = ProductInfo(nickname="Verdelite", short_models=("a146", "a147")) # Vivian Series VIVIAN = ProductInfo(nickname="Vivian", short_models=("a134", "a135", "a155", "a156")) VIVIANC = ProductInfo(nickname="VivianC", short_models=("a158", "a159")) SHORT_MODEL_TO_ENUM = {model: product for product in RoborockProductNickname for model in product.value.short_models} class RoborockStateCode(RoborockEnum): unknown = 0 starting = 1 charger_disconnected = 2 idle = 3 remote_control_active = 4 cleaning = 5 returning_home = 6 manual_mode = 7 charging = 8 charging_problem = 9 paused = 10 spot_cleaning = 11 error = 12 shutting_down = 13 updating = 14 docking = 15 going_to_target = 16 zoned_cleaning = 17 segment_cleaning = 18 emptying_the_bin = 22 # on s7+ washing_the_mop = 23 # on a46 washing_the_mop_2 = 25 going_to_wash_the_mop = 26 # on a46 in_call = 28 mapping = 29 egg_attack = 30 patrol = 32 attaching_the_mop = 33 # on g20s ultra detaching_the_mop = 34 # on g20s ultra charging_complete = 100 device_offline = 101 locked = 103 air_drying_stopping = 202 robot_status_mopping = 6301 clean_mop_cleaning = 6302 clean_mop_mopping = 6303 segment_mopping = 6304 segment_clean_mop_cleaning = 6305 segment_clean_mop_mopping = 6306 zoned_mopping = 6307 zoned_clean_mop_cleaning = 6308 zoned_clean_mop_mopping = 6309 back_to_dock_washing_duster = 6310 class RoborockDyadStateCode(RoborockEnum): unknown = -999 fetching = -998 # Obtaining Status fetch_failed = -997 # Failed to obtain device status. Try again later. 
updating = -996 washing = 1 ready = 2 charging = 3 mop_washing = 4 self_clean_cleaning = 5 self_clean_deep_cleaning = 6 self_clean_rinsing = 7 self_clean_dehydrating = 8 drying = 9 ventilating = 10 # drying reserving = 12 mop_washing_paused = 13 dusting_mode = 14 class RoborockErrorCode(RoborockEnum): none = 0 lidar_blocked = 1 bumper_stuck = 2 wheels_suspended = 3 cliff_sensor_error = 4 main_brush_jammed = 5 side_brush_jammed = 6 wheels_jammed = 7 robot_trapped = 8 no_dustbin = 9 strainer_error = 10 # Filter is wet or blocked compass_error = 11 # Strong magnetic field detected low_battery = 12 charging_error = 13 battery_error = 14 wall_sensor_dirty = 15 robot_tilted = 16 side_brush_error = 17 fan_error = 18 dock = 19 # Dock not connected to power optical_flow_sensor_dirt = 20 vertical_bumper_pressed = 21 dock_locator_error = 22 return_to_dock_fail = 23 nogo_zone_detected = 24 visual_sensor = 25 # Camera error light_touch = 26 # Wall sensor error vibrarise_jammed = 27 robot_on_carpet = 28 filter_blocked = 29 invisible_wall_detected = 30 cannot_cross_carpet = 31 internal_error = 32 collect_dust_error_3 = 34 # Clean auto-empty dock collect_dust_error_4 = 35 # Auto empty dock voltage error mopping_roller_1 = 36 # Wash roller may be jammed mopping_roller_error_2 = 37 # wash roller not lowered properly clear_water_box_hoare = 38 # Check the clean water tank dirty_water_box_hoare = 39 # Check the dirty water tank sink_strainer_hoare = 40 # Reinstall the water filter clear_water_box_exception = 41 # Clean water tank empty clear_brush_exception = 42 # Check that the water filter has been correctly installed clear_brush_exception_2 = 43 # Positioning button error filter_screen_exception = 44 # Clean the dock water filter mopping_roller_2 = 45 # Wash roller may be jammed up_water_exception = 48 drain_water_exception = 49 temperature_protection = 51 # Unit temperature protection clean_carousel_exception = 52 clean_carousel_water_full = 53 water_carriage_drop = 54 check_clean_carouse = 55 audio_error = 56 class RoborockFanPowerCode(RoborockEnum): """Describes the fan power of the vacuum cleaner.""" # Fan speeds should have the first letter capitalized - as there is no way to change the name in translations as # far as I am aware class RoborockFanSpeedV1(RoborockFanPowerCode): silent = 38 standard = 60 medium = 77 turbo = 90 class RoborockFanSpeedV2(RoborockFanPowerCode): silent = 101 balanced = 102 turbo = 103 max = 104 gentle = 105 auto = 106 class RoborockFanSpeedV3(RoborockFanPowerCode): silent = 38 standard = 60 medium = 75 turbo = 100 class RoborockFanSpeedE2(RoborockFanPowerCode): gentle = 41 silent = 50 standard = 68 medium = 79 turbo = 100 class RoborockFanSpeedS7(RoborockFanPowerCode): off = 105 quiet = 101 balanced = 102 turbo = 103 max = 104 custom = 106 class RoborockFanSpeedS7MaxV(RoborockFanPowerCode): off = 105 quiet = 101 balanced = 102 turbo = 103 max = 104 custom = 106 max_plus = 108 class RoborockFanSpeedS6Pure(RoborockFanPowerCode): gentle = 105 quiet = 101 balanced = 102 turbo = 103 max = 104 custom = 106 class RoborockFanSpeedQ7Max(RoborockFanPowerCode): quiet = 101 balanced = 102 turbo = 103 max = 104 class RoborockFanSpeedQRevoMaster(RoborockFanPowerCode): off = 105 quiet = 101 balanced = 102 turbo = 103 max = 104 custom = 106 max_plus = 108 smart_mode = 110 class RoborockFanSpeedQRevoCurv(RoborockFanPowerCode): quiet = 101 balanced = 102 turbo = 103 max = 104 off = 105 custom = 106 max_plus = 108 smart_mode = 110 class RoborockFanSpeedQRevoMaxV(RoborockFanPowerCode): off = 
105 quiet = 101 balanced = 102 turbo = 103 max = 104 custom = 106 max_plus = 108 smart_mode = 110 class RoborockFanSpeedP10(RoborockFanPowerCode): off = 105 quiet = 101 balanced = 102 turbo = 103 max = 104 custom = 106 max_plus = 108 smart_mode = 110 class RoborockFanSpeedS8MaxVUltra(RoborockFanPowerCode): off = 105 quiet = 101 balanced = 102 turbo = 103 max = 104 custom = 106 max_plus = 108 smart_mode = 110 class RoborockFanSpeedSaros10(RoborockFanPowerCode): off = 105 quiet = 101 balanced = 102 turbo = 103 max = 104 custom = 106 max_plus = 108 smart_mode = 110 class RoborockFanSpeedSaros10R(RoborockFanPowerCode): off = 105 quiet = 101 balanced = 102 turbo = 103 max = 104 custom = 106 max_plus = 108 smart_mode = 110 class RoborockMopModeCode(RoborockEnum): """Describes the mop mode of the vacuum cleaner.""" class RoborockMopModeQRevoCurv(RoborockMopModeCode): standard = 300 deep = 301 custom = 302 deep_plus = 303 fast = 304 smart_mode = 306 class RoborockMopModeS7(RoborockMopModeCode): """Describes the mop mode of the vacuum cleaner.""" standard = 300 deep = 301 custom = 302 deep_plus = 303 class RoborockMopModeS8ProUltra(RoborockMopModeCode): standard = 300 deep = 301 deep_plus = 303 fast = 304 custom = 302 smart_mode = 306 class RoborockMopModeS8MaxVUltra(RoborockMopModeCode): standard = 300 deep = 301 custom = 302 deep_plus = 303 fast = 304 deep_plus_pearl = 305 smart_mode = 306 class RoborockMopModeSaros10R(RoborockMopModeCode): standard = 300 deep = 301 custom = 302 deep_plus = 303 fast = 304 smart_mode = 306 class RoborockMopModeQRevoMaster(RoborockMopModeCode): standard = 300 deep = 301 custom = 302 deep_plus = 303 fast = 304 smart_mode = 306 class RoborockMopModeQRevoMaxV(RoborockMopModeCode): standard = 300 deep = 301 custom = 302 deep_plus = 303 fast = 304 smart_mode = 306 class RoborockMopModeSaros10(RoborockMopModeCode): standard = 300 deep = 301 custom = 302 deep_plus = 303 fast = 304 smart_mode = 306 class RoborockMopIntensityCode(RoborockEnum): """Describes the mop intensity of the vacuum cleaner.""" class RoborockMopIntensityS7(RoborockMopIntensityCode): """Describes the mop intensity of the vacuum cleaner.""" off = 200 mild = 201 moderate = 202 intense = 203 custom = 204 class RoborockMopIntensityV2(RoborockMopIntensityCode): """Describes the mop intensity of the vacuum cleaner.""" off = 200 low = 201 medium = 202 high = 203 custom = 207 class RoborockMopIntensityQRevoMaster(RoborockMopIntensityCode): """Describes the mop intensity of the vacuum cleaner.""" off = 200 low = 201 medium = 202 high = 203 custom = 204 custom_water_flow = 207 smart_mode = 209 class RoborockMopIntensityQRevoCurv(RoborockMopIntensityCode): off = 200 low = 201 medium = 202 high = 203 custom = 204 custom_water_flow = 207 smart_mode = 209 class RoborockMopIntensityQRevoMaxV(RoborockMopIntensityCode): off = 200 low = 201 medium = 202 high = 203 custom = 204 custom_water_flow = 207 smart_mode = 209 class RoborockMopIntensityP10(RoborockMopIntensityCode): """Describes the mop intensity of the vacuum cleaner.""" off = 200 low = 201 medium = 202 high = 203 custom = 204 custom_water_flow = 207 smart_mode = 209 class RoborockMopIntensityS8MaxVUltra(RoborockMopIntensityCode): off = 200 low = 201 medium = 202 high = 203 custom = 204 max = 208 smart_mode = 209 custom_water_flow = 207 class RoborockMopIntensitySaros10(RoborockMopIntensityCode): off = 200 mild = 201 standard = 202 intense = 203 extreme = 208 custom = 204 smart_mode = 209 class RoborockMopIntensitySaros10R(RoborockMopIntensityCode): off = 200 
low = 201 medium = 202 high = 203 custom = 204 extreme = 250 vac_followed_by_mop = 235 smart_mode = 209 class RoborockMopIntensityS5Max(RoborockMopIntensityCode): """Describes the mop intensity of the vacuum cleaner.""" off = 200 low = 201 medium = 202 high = 203 custom = 204 custom_water_flow = 207 class RoborockMopIntensityS6MaxV(RoborockMopIntensityCode): """Describes the mop intensity of the vacuum cleaner.""" off = 200 low = 201 medium = 202 high = 203 custom = 204 custom_water_flow = 207 class RoborockMopIntensityQ7Max(RoborockMopIntensityCode): """Describes the mop intensity of the vacuum cleaner.""" off = 200 low = 201 medium = 202 high = 203 custom_water_flow = 207 class RoborockDockErrorCode(RoborockEnum): """Describes the error code of the dock.""" ok = 0 duct_blockage = 34 water_empty = 38 waste_water_tank_full = 39 maintenance_brush_jammed = 42 dirty_tank_latch_open = 44 no_dustbin = 46 cleaning_tank_full_or_blocked = 53 class RoborockDockTypeCode(RoborockEnum): unknown = -9999 no_dock = 0 auto_empty_dock = 1 empty_wash_fill_dock = 3 auto_empty_dock_pure = 5 s7_max_ultra_dock = 6 s8_dock = 7 p10_dock = 8 p10_pro_dock = 9 s8_maxv_ultra_dock = 10 qrevo_master_dock = 14 qrevo_s_dock = 15 saros_r10_dock = 16 qrevo_curv_dock = 17 saros_10_dock = 18 class RoborockDockDustCollectionModeCode(RoborockEnum): """Describes the dust collection mode of the vacuum cleaner.""" # TODO: Get the correct values for various different docks unknown = -9999 smart = 0 light = 1 balanced = 2 max = 4 class RoborockDockWashTowelModeCode(RoborockEnum): """Describes the wash towel mode of the vacuum cleaner.""" # TODO: Get the correct values for various different docks unknown = -9999 light = 0 balanced = 1 deep = 2 smart = 10 class RoborockCategory(Enum): """Describes the category of the device.""" WET_DRY_VAC = "roborock.wetdryvac" VACUUM = "robot.vacuum.cleaner" WASHING_MACHINE = "roborock.wm" UNKNOWN = "UNKNOWN" def __missing__(self, key): _LOGGER.warning("Missing key %s from category", key) return RoborockCategory.UNKNOWN class RoborockFinishReason(RoborockEnum): manual_interrupt = 21 # Cleaning interrupted by user cleanup_interrupted = 24 # Cleanup interrupted manual_interrupt_2 = 21 manual_interrupt_12 = 29 breakpoint = 32 # Could not continue cleaning breakpoint_2 = 33 cleanup_interrupted_2 = 34 manual_interrupt_3 = 35 manual_interrupt_4 = 36 manual_interrupt_5 = 37 manual_interrupt_6 = 43 locate_fail = 45 # Positioning Failed cleanup_interrupted_3 = 64 locate_fail_2 = 65 manual_interrupt_7 = 48 manual_interrupt_8 = 49 manual_interrupt_9 = 50 cleanup_interrupted_4 = 51 finished_cleaning = 52 # Finished cleaning finished_cleaning_2 = 54 finished_cleaning_3 = 55 finished_cleaning_4 = 56 finished_clenaing_5 = 57 manual_interrupt_10 = 60 area_unreachable = 61 # Area unreachable area_unreachable_2 = 62 washing_error = 67 # Washing error back_to_wash_failure = 68 # Failed to return to the dock cleanup_interrupted_5 = 101 breakpoint_4 = 102 manual_interrupt_11 = 103 cleanup_interrupted_6 = 104 cleanup_interrupted_7 = 105 cleanup_interrupted_8 = 106 cleanup_interrupted_9 = 107 cleanup_interrupted_10 = 109 cleanup_interrupted_11 = 110 patrol_success = 114 # Cruise completed patrol_fail = 115 # Cruise failed pet_patrol_success = 116 # Pet found pet_patrol_fail = 117 # Pet found failed class RoborockInCleaning(RoborockEnum): complete = 0 global_clean_not_complete = 1 zone_clean_not_complete = 2 segment_clean_not_complete = 3 class RoborockCleanType(RoborockEnum): all_zone = 1 draw_zone = 2 select_zone = 3 
quick_build = 4 video_patrol = 5 pet_patrol = 6 class RoborockStartType(RoborockEnum): button = 1 app = 2 schedule = 3 mi_home = 4 quick_start = 5 voice_control = 13 routines = 101 alexa = 801 google = 802 ifttt = 803 yandex = 804 homekit = 805 xiaoai = 806 tmall_genie = 807 duer = 808 dingdong = 809 siri = 810 clova = 811 wechat = 901 alipay = 902 aqara = 903 hisense = 904 huawei = 905 widget_launch = 820 smart_watch = 821 class RoborockDssCodes(RoborockEnum): @classmethod def _missing_(cls: type[RoborockEnum], key) -> RoborockEnum: # If the calculated value is not provided, then it should be viewed as okay. # As the math will sometimes result in you getting numbers that don't matter. return cls.okay # type: ignore class ClearWaterBoxStatus(RoborockDssCodes): """Status of the clear water box.""" okay = 0 out_of_water = 1 out_of_water_2 = 38 refill_error = 48 class DirtyWaterBoxStatus(RoborockDssCodes): """Status of the dirty water box.""" okay = 0 full_not_installed = 1 full_not_installed_2 = 39 drain_error = 49 class DustBagStatus(RoborockDssCodes): """Status of the dust bag.""" okay = 0 not_installed = 1 full = 34 class DyadSelfCleanMode(RoborockEnum): self_clean = 1 self_clean_and_dry = 2 dry = 3 ventilation = 4 class DyadSelfCleanLevel(RoborockEnum): normal = 1 deep = 2 class DyadWarmLevel(RoborockEnum): normal = 1 deep = 2 class DyadMode(RoborockEnum): wash = 1 wash_and_dry = 2 dry = 3 class DyadCleanMode(RoborockEnum): auto = 1 max = 2 dehydration = 3 power_saving = 4 class DyadSuction(RoborockEnum): l1 = 1 l2 = 2 l3 = 3 l4 = 4 l5 = 5 l6 = 6 class DyadWaterLevel(RoborockEnum): l1 = 1 l2 = 2 l3 = 3 l4 = 4 class DyadBrushSpeed(RoborockEnum): l1 = 1 l2 = 2 class DyadCleanser(RoborockEnum): none = 0 normal = 1 deep = 2 max = 3 class DyadError(RoborockEnum): none = 0 dirty_tank_full = 20000 # Dirty tank full. Empty it water_level_sensor_stuck = 20001 # Water level sensor is stuck. Clean it. clean_tank_empty = 20002 # Clean tank empty. Refill now clean_head_entangled = 20003 # Check if the cleaning head is entangled with foreign objects. clean_head_too_hot = 20004 # Cleaning head temperature protection. Wait for the temperature to return to normal. fan_protection_e5 = 10005 # Fan protection (E5). Restart the vacuum cleaner. cleaning_head_blocked = 20005 # Remove blockages from the cleaning head and pipes. temperature_protection = 20006 # Temperature protection. Wait for the temperature to return to normal fan_protection_e4 = 10004 # Fan protection (E4). Restart the vacuum cleaner. fan_protection_e9 = 10009 # Fan protection (E9). Restart the vacuum cleaner. battery_temperature_protection_e0 = 10000 battery_temperature_protection = ( 20007 # Battery temperature protection. Wait for the temperature to return to a normal range. ) battery_temperature_protection_2 = 20008 power_adapter_error = 20009 # Check if the power adapter is working properly. dirty_charging_contacts = 10007 # Disconnection between the device and dock. Wipe charging contacts. low_battery = 20017 # Low battery level. Charge before starting self-cleaning. battery_under_10 = 20018 # Charge until the battery level exceeds 10% before manually starting self-cleaning. 
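# --- Illustrative sketch (not used by the library) ------------------------------
# The RoborockEnum subclasses above are tolerant of unknown codes: `_missing_`
# logs a one-time warning and falls back to the `unknown` member when the enum
# defines one (otherwise to its first member), and SHORT_MODEL_TO_ENUM maps a
# short model suffix to its product nickname. The function below is a minimal
# sketch of those lookups; the concrete codes are taken from the definitions above.
def _example_code_lookups() -> None:
    # A known code resolves to its member.
    assert RoborockStateCode(5) is RoborockStateCode.cleaning
    # An unrecognised code does not raise ValueError; it falls back to `unknown`.
    assert RoborockStateCode(99999) is RoborockStateCode.unknown
    # Short model identifiers (e.g. "a27") resolve to a product nickname.
    assert SHORT_MODEL_TO_ENUM["a27"] is RoborockProductNickname.TOPAZSV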
class ZeoMode(RoborockEnum): wash = 1 wash_and_dry = 2 dry = 3 class ZeoState(RoborockEnum): standby = 1 weighing = 2 soaking = 3 washing = 4 rinsing = 5 spinning = 6 drying = 7 cooling = 8 under_delay_start = 9 done = 10 class ZeoProgram(RoborockEnum): standard = 1 quick = 2 sanitize = 3 wool = 4 air_refresh = 5 custom = 6 bedding = 7 down = 8 silk = 9 rinse_and_spin = 10 spin = 11 down_clean = 12 baby_care = 13 anti_allergen = 14 sportswear = 15 night = 16 new_clothes = 17 shirts = 18 synthetics = 19 underwear = 20 gentle = 21 intensive = 22 cotton_linen = 23 season = 24 warming = 25 bra = 26 panties = 27 boiling_wash = 28 socks = 30 towels = 31 anti_mite = 32 exo_40_60 = 33 twenty_c = 34 t_shirts = 35 stain_removal = 36 class ZeoSoak(RoborockEnum): normal = 0 low = 1 medium = 2 high = 3 max = 4 class ZeoTemperature(RoborockEnum): normal = 1 low = 2 medium = 3 high = 4 max = 5 twenty_c = 6 class ZeoRinse(RoborockEnum): none = 0 min = 1 low = 2 mid = 3 high = 4 max = 5 class ZeoSpin(RoborockEnum): none = 1 very_low = 2 low = 3 mid = 4 high = 5 very_high = 6 max = 7 class ZeoDryingMode(RoborockEnum): none = 0 quick = 1 iron = 2 store = 3 class ZeoDetergentType(RoborockEnum): empty = 0 low = 1 medium = 2 high = 3 class ZeoSoftenerType(RoborockEnum): empty = 0 low = 1 medium = 2 high = 3 class ZeoError(RoborockEnum): none = 0 refill_error = 1 drain_error = 2 door_lock_error = 3 water_level_error = 4 inverter_error = 5 heating_error = 6 temperature_error = 7 communication_error = 10 drying_error = 11 drying_error_e_12 = 12 drying_error_e_13 = 13 drying_error_e_14 = 14 drying_error_e_15 = 15 drying_error_e_16 = 16 drying_error_water_flow = 17 # Check for normal water flow drying_error_restart = 18 # Restart the washer and try again spin_error = 19 # re-arrange clothes Python-roborock-python-roborock-32df4f3/roborock/command_cache.py000066400000000000000000000173501507503702500253300ustar00rootroot00000000000000from __future__ import annotations from collections.abc import Mapping from dataclasses import dataclass, field from enum import Enum from roborock import RoborockCommand GET_PREFIX = "get_" SET_PREFIX = ("set_", "change_", "close_") class CacheableAttribute(str, Enum): status = "status" consumable = "consumable" sound_volume = "sound_volume" camera_status = "camera_status" carpet_clean_mode = "carpet_clean_mode" carpet_mode = "carpet_mode" child_lock_status = "child_lock_status" collision_avoid_status = "collision_avoid_status" customize_clean_mode = "customize_clean_mode" custom_mode = "custom_mode" dnd_timer = "dnd_timer" dust_collection_mode = "dust_collection_mode" flow_led_status = "flow_led_status" identify_furniture_status = "identify_furniture_status" identify_ground_material_status = "identify_ground_material_status" led_status = "led_status" server_timer = "server_timer" smart_wash_params = "smart_wash_params" timezone = "timezone" valley_electricity_timer = "valley_electricity_timer" wash_towel_mode = "wash_towel_mode" @dataclass class RoborockAttribute: attribute: str get_command: RoborockCommand add_command: RoborockCommand | None = None set_command: RoborockCommand | None = None close_command: RoborockCommand | None = None additional_change_commands: list[RoborockCommand] = field(default_factory=list) cache_map: Mapping[CacheableAttribute, RoborockAttribute] = { CacheableAttribute.status: RoborockAttribute( attribute="status", get_command=RoborockCommand.GET_STATUS, additional_change_commands=[ RoborockCommand.SET_WATER_BOX_CUSTOM_MODE, RoborockCommand.SET_MOP_MODE, ], ), 
CacheableAttribute.consumable: RoborockAttribute( attribute="consumable", get_command=RoborockCommand.GET_CONSUMABLE, ), CacheableAttribute.sound_volume: RoborockAttribute( attribute="sound_volume", get_command=RoborockCommand.GET_SOUND_VOLUME, set_command=RoborockCommand.CHANGE_SOUND_VOLUME, ), CacheableAttribute.camera_status: RoborockAttribute( attribute="camera_status", get_command=RoborockCommand.GET_CAMERA_STATUS, set_command=RoborockCommand.SET_CAMERA_STATUS, ), CacheableAttribute.carpet_clean_mode: RoborockAttribute( attribute="carpet_clean_mode", get_command=RoborockCommand.GET_CARPET_CLEAN_MODE, set_command=RoborockCommand.SET_CARPET_CLEAN_MODE, ), CacheableAttribute.carpet_mode: RoborockAttribute( attribute="carpet_mode", get_command=RoborockCommand.GET_CARPET_MODE, set_command=RoborockCommand.SET_CARPET_MODE, ), CacheableAttribute.child_lock_status: RoborockAttribute( attribute="child_lock_status", get_command=RoborockCommand.GET_CHILD_LOCK_STATUS, set_command=RoborockCommand.SET_CHILD_LOCK_STATUS, ), CacheableAttribute.collision_avoid_status: RoborockAttribute( attribute="collision_avoid_status", get_command=RoborockCommand.GET_COLLISION_AVOID_STATUS, set_command=RoborockCommand.SET_COLLISION_AVOID_STATUS, ), CacheableAttribute.customize_clean_mode: RoborockAttribute( attribute="customize_clean_mode", get_command=RoborockCommand.GET_CUSTOMIZE_CLEAN_MODE, set_command=RoborockCommand.SET_CUSTOMIZE_CLEAN_MODE, ), CacheableAttribute.custom_mode: RoborockAttribute( attribute="custom_mode", get_command=RoborockCommand.GET_CUSTOM_MODE, set_command=RoborockCommand.SET_CUSTOM_MODE, ), CacheableAttribute.dnd_timer: RoborockAttribute( attribute="dnd_timer", get_command=RoborockCommand.GET_DND_TIMER, set_command=RoborockCommand.SET_DND_TIMER, close_command=RoborockCommand.CLOSE_DND_TIMER, ), CacheableAttribute.dust_collection_mode: RoborockAttribute( attribute="dust_collection_mode", get_command=RoborockCommand.GET_DUST_COLLECTION_MODE, set_command=RoborockCommand.SET_DUST_COLLECTION_MODE, ), CacheableAttribute.flow_led_status: RoborockAttribute( attribute="flow_led_status", get_command=RoborockCommand.GET_FLOW_LED_STATUS, set_command=RoborockCommand.SET_FLOW_LED_STATUS, ), CacheableAttribute.identify_furniture_status: RoborockAttribute( attribute="identify_furniture_status", get_command=RoborockCommand.GET_IDENTIFY_FURNITURE_STATUS, set_command=RoborockCommand.SET_IDENTIFY_FURNITURE_STATUS, ), CacheableAttribute.identify_ground_material_status: RoborockAttribute( attribute="identify_ground_material_status", get_command=RoborockCommand.GET_IDENTIFY_GROUND_MATERIAL_STATUS, set_command=RoborockCommand.SET_IDENTIFY_GROUND_MATERIAL_STATUS, ), CacheableAttribute.led_status: RoborockAttribute( attribute="led_status", get_command=RoborockCommand.GET_LED_STATUS, set_command=RoborockCommand.SET_LED_STATUS, ), CacheableAttribute.server_timer: RoborockAttribute( attribute="server_timer", get_command=RoborockCommand.GET_SERVER_TIMER, add_command=RoborockCommand.SET_SERVER_TIMER, set_command=RoborockCommand.UPD_SERVER_TIMER, close_command=RoborockCommand.DEL_SERVER_TIMER, ), CacheableAttribute.smart_wash_params: RoborockAttribute( attribute="smart_wash_params", get_command=RoborockCommand.GET_SMART_WASH_PARAMS, set_command=RoborockCommand.SET_SMART_WASH_PARAMS, ), CacheableAttribute.timezone: RoborockAttribute( attribute="timezone", get_command=RoborockCommand.GET_TIMEZONE, set_command=RoborockCommand.SET_TIMEZONE ), CacheableAttribute.valley_electricity_timer: RoborockAttribute( 
attribute="valley_electricity_timer", get_command=RoborockCommand.GET_VALLEY_ELECTRICITY_TIMER, set_command=RoborockCommand.SET_VALLEY_ELECTRICITY_TIMER, close_command=RoborockCommand.CLOSE_VALLEY_ELECTRICITY_TIMER, ), CacheableAttribute.wash_towel_mode: RoborockAttribute( attribute="wash_towel_mode", get_command=RoborockCommand.GET_WASH_TOWEL_MODE, set_command=RoborockCommand.SET_WASH_TOWEL_MODE, ), } def get_change_commands(attr: RoborockAttribute) -> list[RoborockCommand]: commands = [ attr.add_command, attr.set_command, attr.close_command, *attr.additional_change_commands, ] return [command for command in commands if command is not None] cache_map_by_get_command: dict[RoborockCommand | str, CacheableAttribute] = { attribute.get_command: cacheable_attribute for cacheable_attribute, attribute in cache_map.items() } cache_map_by_change_command: dict[RoborockCommand | str, CacheableAttribute] = { command: cacheable_attribute for cacheable_attribute, attribute in cache_map.items() for command in get_change_commands(attribute) } def get_cache_map(): return cache_map class CommandType(Enum): OTHER = -1 GET = 0 CHANGE = 1 @dataclass class CacheableAttributeResult: attribute: CacheableAttribute type: CommandType def find_cacheable_attribute(method: RoborockCommand | str) -> CacheableAttributeResult | None: if method is None: return None cacheable_attribute = None command_type = CommandType.OTHER if cacheable_attribute := cache_map_by_get_command.get(method, None): command_type = CommandType.GET elif cacheable_attribute := cache_map_by_change_command.get(method, None): command_type = CommandType.CHANGE if cacheable_attribute: return CacheableAttributeResult(attribute=CacheableAttribute(cacheable_attribute), type=command_type) else: return None Python-roborock-python-roborock-32df4f3/roborock/const.py000066400000000000000000000053641507503702500237170ustar00rootroot00000000000000# Total time in seconds consumables have before Roborock recommends replacing MAIN_BRUSH_REPLACE_TIME = 1080000 SIDE_BRUSH_REPLACE_TIME = 720000 FILTER_REPLACE_TIME = 540000 SENSOR_DIRTY_REPLACE_TIME = 108000 MOP_ROLLER_REPLACE_TIME = 1080000 STRAINER_REPLACE_TIME = 150 CLEANING_BRUSH_REPLACE_TIME = 300 DUST_COLLECTION_REPLACE_TIME = 90 FLOOR_CLEANER_REPLACE_TIME = 300 ROBOROCK_V1 = "ROBOROCK.vacuum.v1" ROBOROCK_S4 = "roborock.vacuum.s4" ROBOROCK_S4_MAX = "roborock.vacuum.a19" ROBOROCK_S5 = "roborock.vacuum.s5" ROBOROCK_S5_MAX = "roborock.vacuum.s5e" ROBOROCK_S6 = "roborock.vacuum.s6" ROBOROCK_T6 = "roborock.vacuum.t6" # cn s6 ROBOROCK_E4 = "roborock.vacuum.a01" ROBOROCK_S6_PURE = "roborock.vacuum.a08" ROBOROCK_T7 = "roborock.vacuum.a11" # cn s7 ROBOROCK_T7S = "roborock.vacuum.a14" ROBOROCK_T7SPLUS = "roborock.vacuum.a23" ROBOROCK_S7_MAXV = "roborock.vacuum.a27" ROBOROCK_S7_MAXV_ULTRA = "roborock.vacuum.a65" ROBOROCK_S7_PRO_ULTRA = "roborock.vacuum.a62" ROBOROCK_Q5 = "roborock.vacuum.a34" ROBOROCK_Q5_PRO = "roborock.vacuum.a72" ROBOROCK_Q7 = "roborock.vacuum.a40" ROBOROCK_Q7_MAX = "roborock.vacuum.a38" ROBOROCK_Q7PLUS = "roborock.vacuum.a40" ROBOROCK_QREVO_MASTER = "roborock.vacuum.a117" ROBOROCK_QREVO_CURV = "roborock.vacuum.a135" ROBOROCK_Q8_MAX = "roborock.vacuum.a73" ROBOROCK_G10S_PRO = "roborock.vacuum.a26" ROBOROCK_G20S_Ultra = "roborock.vacuum.a143" # cn saros_r10 ROBOROCK_G10S = "roborock.vacuum.a46" ROBOROCK_G10 = "roborock.vacuum.a29" ROCKROBO_G10_SG = "roborock.vacuum.a30" # Variant of the G10, has similar features as S7 ROBOROCK_S7 = "roborock.vacuum.a15" ROBOROCK_S6_MAXV = "roborock.vacuum.a10" ROBOROCK_E2 = 
"roborock.vacuum.e2" ROBOROCK_1S = "roborock.vacuum.m1s" ROBOROCK_C1 = "roborock.vacuum.c1" ROBOROCK_S8_PRO_ULTRA = "roborock.vacuum.a70" ROBOROCK_S8 = "roborock.vacuum.a51" ROBOROCK_P10 = "roborock.vacuum.a75" # also known as q_revo ROBOROCK_S8_MAXV_ULTRA = "roborock.vacuum.a97" ROBOROCK_QREVO_S = "roborock.vacuum.a104" ROBOROCK_QREVO_PRO = "roborock.vacuum.a101" ROBOROCK_QREVO_MAXV = "roborock.vacuum.a87" ROBOROCK_SAROS_10R = "roborock.vacuum.a144" ROBOROCK_SAROS_10 = "roborock.vacuum.a147" ROBOROCK_DYAD_AIR = "roborock.wetdryvac.a107" ROBOROCK_DYAD_PRO_COMBO = "roborock.wetdryvac.a83" ROBOROCK_DYAD_PRO = "roborock.wetdryvac.a56" # These are the devices that show up when you add a device - more could be supported and just not show up SUPPORTED_VACUUMS = [ ROBOROCK_G10, ROBOROCK_G10S_PRO, ROBOROCK_G20S_Ultra, ROBOROCK_Q5, ROBOROCK_Q7, ROBOROCK_Q7_MAX, ROBOROCK_S4, ROBOROCK_S5_MAX, ROBOROCK_S6, ROBOROCK_S6_MAXV, ROBOROCK_S6_PURE, ROBOROCK_S7_MAXV, ROBOROCK_S8_PRO_ULTRA, ROBOROCK_S8, ROBOROCK_S4_MAX, ROBOROCK_S7, ROBOROCK_P10, ROCKROBO_G10_SG, ] NO_MAP = 63 Python-roborock-python-roborock-32df4f3/roborock/containers.py000066400000000000000000000727761507503702500247510ustar00rootroot00000000000000import dataclasses import datetime import json import logging import re import types from dataclasses import asdict, dataclass, field from enum import Enum from functools import cached_property from typing import Any, NamedTuple, get_args, get_origin from .code_mappings import ( SHORT_MODEL_TO_ENUM, ClearWaterBoxStatus, DirtyWaterBoxStatus, DustBagStatus, RoborockCategory, RoborockCleanType, RoborockDockDustCollectionModeCode, RoborockDockErrorCode, RoborockDockTypeCode, RoborockDockWashTowelModeCode, RoborockErrorCode, RoborockFanPowerCode, RoborockFanSpeedP10, RoborockFanSpeedQ7Max, RoborockFanSpeedQRevoCurv, RoborockFanSpeedQRevoMaster, RoborockFanSpeedQRevoMaxV, RoborockFanSpeedS6Pure, RoborockFanSpeedS7, RoborockFanSpeedS7MaxV, RoborockFanSpeedS8MaxVUltra, RoborockFanSpeedSaros10, RoborockFanSpeedSaros10R, RoborockFinishReason, RoborockInCleaning, RoborockModeEnum, RoborockMopIntensityCode, RoborockMopIntensityP10, RoborockMopIntensityQ7Max, RoborockMopIntensityQRevoCurv, RoborockMopIntensityQRevoMaster, RoborockMopIntensityQRevoMaxV, RoborockMopIntensityS5Max, RoborockMopIntensityS6MaxV, RoborockMopIntensityS7, RoborockMopIntensityS8MaxVUltra, RoborockMopIntensitySaros10, RoborockMopIntensitySaros10R, RoborockMopModeCode, RoborockMopModeQRevoCurv, RoborockMopModeQRevoMaster, RoborockMopModeQRevoMaxV, RoborockMopModeS7, RoborockMopModeS8MaxVUltra, RoborockMopModeS8ProUltra, RoborockMopModeSaros10, RoborockMopModeSaros10R, RoborockProductNickname, RoborockStartType, RoborockStateCode, ) from .const import ( CLEANING_BRUSH_REPLACE_TIME, DUST_COLLECTION_REPLACE_TIME, FILTER_REPLACE_TIME, MAIN_BRUSH_REPLACE_TIME, MOP_ROLLER_REPLACE_TIME, NO_MAP, ROBOROCK_G10S_PRO, ROBOROCK_P10, ROBOROCK_Q7_MAX, ROBOROCK_QREVO_CURV, ROBOROCK_QREVO_MASTER, ROBOROCK_QREVO_MAXV, ROBOROCK_QREVO_PRO, ROBOROCK_QREVO_S, ROBOROCK_S4_MAX, ROBOROCK_S5_MAX, ROBOROCK_S6, ROBOROCK_S6_MAXV, ROBOROCK_S6_PURE, ROBOROCK_S7, ROBOROCK_S7_MAXV, ROBOROCK_S8, ROBOROCK_S8_MAXV_ULTRA, ROBOROCK_S8_PRO_ULTRA, ROBOROCK_SAROS_10, ROBOROCK_SAROS_10R, SENSOR_DIRTY_REPLACE_TIME, SIDE_BRUSH_REPLACE_TIME, STRAINER_REPLACE_TIME, ROBOROCK_G20S_Ultra, ) from .exceptions import RoborockException _LOGGER = logging.getLogger(__name__) def _camelize(s: str): first, *others = s.split("_") if len(others) == 0: return s return "".join([first.lower(), 
*map(str.title, others)]) def _decamelize(s: str): return re.sub("([A-Z]+)", "_\\1", s).lower() def _attr_repr(obj: Any) -> str: """Return a string representation of the object including specified attributes. This reproduces the default repr behavior of dataclasses, but also includes properties. This must be called by the child class's __repr__ method since the parent RoborockBase class does not know about the child class's attributes. """ # Reproduce default repr behavior parts = [] for k in dir(obj): if k.startswith("_"): continue try: v = getattr(obj, k) except (RuntimeError, Exception): continue if callable(v): continue parts.append(f"{k}={v!r}") return f"{type(obj).__name__}({', '.join(parts)})" @dataclass(repr=False) class RoborockBase: """Base class for all Roborock data classes.""" @staticmethod def _convert_to_class_obj(class_type: type, value): if get_origin(class_type) is list: sub_type = get_args(class_type)[0] return [RoborockBase._convert_to_class_obj(sub_type, obj) for obj in value] if get_origin(class_type) is dict: _, value_type = get_args(class_type) # assume keys are only basic types return {k: RoborockBase._convert_to_class_obj(value_type, v) for k, v in value.items()} if issubclass(class_type, RoborockBase): return class_type.from_dict(value) if issubclass(class_type, RoborockModeEnum): return class_type.from_code(value) if class_type is Any: return value return class_type(value) # type: ignore[call-arg] @classmethod def from_dict(cls, data: dict[str, Any]): """Create an instance of the class from a dictionary.""" if not isinstance(data, dict): return None field_types = {field.name: field.type for field in dataclasses.fields(cls)} result: dict[str, Any] = {} for orig_key, value in data.items(): key = _decamelize(orig_key) if (field_type := field_types.get(key)) is None: continue if value == "None" or value is None: result[key] = None continue if isinstance(field_type, types.UnionType): for subtype in get_args(field_type): if subtype is types.NoneType: continue try: result[key] = RoborockBase._convert_to_class_obj(subtype, value) break except Exception: _LOGGER.exception(f"Failed to convert {key} with value {value} to type {subtype}") continue else: try: result[key] = RoborockBase._convert_to_class_obj(field_type, value) except Exception: _LOGGER.exception(f"Failed to convert {key} with value {value} to type {field_type}") continue return cls(**result) def as_dict(self) -> dict: return asdict( self, dict_factory=lambda _fields: { _camelize(key): value.value if isinstance(value, Enum) else value for (key, value) in _fields if value is not None }, ) @dataclass class RoborockBaseTimer(RoborockBase): start_hour: int | None = None start_minute: int | None = None end_hour: int | None = None end_minute: int | None = None enabled: int | None = None @property def start_time(self) -> datetime.time | None: return ( datetime.time(hour=self.start_hour, minute=self.start_minute) if self.start_hour is not None and self.start_minute is not None else None ) @property def end_time(self) -> datetime.time | None: return ( datetime.time(hour=self.end_hour, minute=self.end_minute) if self.end_hour is not None and self.end_minute is not None else None ) def __repr__(self) -> str: return _attr_repr(self) @dataclass class Reference(RoborockBase): r: str | None = None a: str | None = None m: str | None = None l: str | None = None @dataclass class RRiot(RoborockBase): u: str s: str h: str k: str r: Reference @dataclass class UserData(RoborockBase): rriot: RRiot uid: int | None = None tokentype: 
str | None = None token: str | None = None rruid: str | None = None region: str | None = None countrycode: str | None = None country: str | None = None nickname: str | None = None tuya_device_state: int | None = None avatarurl: str | None = None @dataclass class HomeDataProductSchema(RoborockBase): id: Any | None = None name: Any | None = None code: Any | None = None mode: Any | None = None type: Any | None = None product_property: Any | None = None property: Any | None = None desc: Any | None = None @dataclass class HomeDataProduct(RoborockBase): id: str name: str model: str category: RoborockCategory code: str | None = None icon_url: str | None = None attribute: Any | None = None capability: int | None = None schema: list[HomeDataProductSchema] | None = None @property def product_nickname(self) -> RoborockProductNickname: return SHORT_MODEL_TO_ENUM.get(self.model.split(".")[-1], RoborockProductNickname.PEARLPLUS) @dataclass class HomeDataDevice(RoborockBase): duid: str name: str local_key: str fv: str product_id: str attribute: Any | None = None active_time: int | None = None runtime_env: Any | None = None time_zone_id: str | None = None icon_url: str | None = None lon: Any | None = None lat: Any | None = None share: Any | None = None share_time: Any | None = None online: bool | None = None pv: str | None = None room_id: Any | None = None tuya_uuid: Any | None = None tuya_migrated: bool | None = None extra: Any | None = None sn: str | None = None feature_set: str | None = None new_feature_set: str | None = None device_status: dict | None = None silent_ota_switch: bool | None = None setting: Any | None = None f: bool | None = None @dataclass class HomeDataRoom(RoborockBase): id: int name: str @dataclass class HomeDataScene(RoborockBase): id: int name: str @dataclass class HomeData(RoborockBase): id: int name: str products: list[HomeDataProduct] = field(default_factory=lambda: []) devices: list[HomeDataDevice] = field(default_factory=lambda: []) received_devices: list[HomeDataDevice] = field(default_factory=lambda: []) lon: Any | None = None lat: Any | None = None geo_name: Any | None = None rooms: list[HomeDataRoom] = field(default_factory=list) def get_all_devices(self) -> list[HomeDataDevice]: devices = [] if self.devices is not None: devices += self.devices if self.received_devices is not None: devices += self.received_devices return devices @cached_property def product_map(self) -> dict[str, HomeDataProduct]: """Returns a dictionary of product IDs to HomeDataProduct objects.""" return {product.id: product for product in self.products} @cached_property def device_products(self) -> dict[str, tuple[HomeDataDevice, HomeDataProduct]]: """Returns a dictionary of device DUIDs to HomeDataDeviceProduct objects.""" product_map = self.product_map return { device.duid: (device, product) for device in self.get_all_devices() if (product := product_map.get(device.product_id)) is not None } @dataclass class LoginData(RoborockBase): user_data: UserData email: str home_data: HomeData | None = None @dataclass class Status(RoborockBase): msg_ver: int | None = None msg_seq: int | None = None state: RoborockStateCode | None = None battery: int | None = None clean_time: int | None = None clean_area: int | None = None error_code: RoborockErrorCode | None = None map_present: int | None = None in_cleaning: RoborockInCleaning | None = None in_returning: int | None = None in_fresh_state: int | None = None lab_status: int | None = None water_box_status: int | None = None back_type: int | None = None wash_phase: 
int | None = None wash_ready: int | None = None fan_power: RoborockFanPowerCode | None = None dnd_enabled: int | None = None map_status: int | None = None is_locating: int | None = None lock_status: int | None = None water_box_mode: RoborockMopIntensityCode | None = None water_box_carriage_status: int | None = None mop_forbidden_enable: int | None = None camera_status: int | None = None is_exploring: int | None = None home_sec_status: int | None = None home_sec_enable_password: int | None = None adbumper_status: list[int] | None = None water_shortage_status: int | None = None dock_type: RoborockDockTypeCode | None = None dust_collection_status: int | None = None auto_dust_collection: int | None = None avoid_count: int | None = None mop_mode: RoborockMopModeCode | None = None debug_mode: int | None = None collision_avoid_status: int | None = None switch_map_mode: int | None = None dock_error_status: RoborockDockErrorCode | None = None charge_status: int | None = None unsave_map_reason: int | None = None unsave_map_flag: int | None = None wash_status: int | None = None distance_off: int | None = None in_warmup: int | None = None dry_status: int | None = None rdt: int | None = None clean_percent: int | None = None rss: int | None = None dss: int | None = None common_status: int | None = None corner_clean_mode: int | None = None last_clean_t: int | None = None replenish_mode: int | None = None repeat: int | None = None kct: int | None = None subdivision_sets: int | None = None @property def square_meter_clean_area(self) -> float | None: return round(self.clean_area / 1000000, 1) if self.clean_area is not None else None @property def error_code_name(self) -> str | None: return self.error_code.name if self.error_code else None @property def state_name(self) -> str | None: return self.state.name if self.state else None @property def water_box_mode_name(self) -> str | None: return self.water_box_mode.name if self.water_box_mode else None @property def fan_power_options(self) -> list[str]: if self.fan_power is None: return [] return list(self.fan_power.keys()) @property def fan_power_name(self) -> str | None: return self.fan_power.name if self.fan_power else None @property def mop_mode_name(self) -> str | None: return self.mop_mode.name if self.mop_mode else None def get_fan_speed_code(self, fan_speed: str) -> int: if self.fan_power is None: raise RoborockException("Attempted to get fan speed before status has been updated.") return self.fan_power.as_dict().get(fan_speed) def get_mop_intensity_code(self, mop_intensity: str) -> int: if self.water_box_mode is None: raise RoborockException("Attempted to get mop_intensity before status has been updated.") return self.water_box_mode.as_dict().get(mop_intensity) def get_mop_mode_code(self, mop_mode: str) -> int: if self.mop_mode is None: raise RoborockException("Attempted to get mop_mode before status has been updated.") return self.mop_mode.as_dict().get(mop_mode) @property def current_map(self) -> int | None: """Returns the current map ID if the map is present.""" if self.map_status is not None: map_flag = self.map_status >> 2 if map_flag != NO_MAP: return map_flag return None @property def clear_water_box_status(self) -> ClearWaterBoxStatus | None: if self.dss: return ClearWaterBoxStatus((self.dss >> 2) & 3) return None @property def dirty_water_box_status(self) -> DirtyWaterBoxStatus | None: if self.dss: return DirtyWaterBoxStatus((self.dss >> 4) & 3) return None @property def dust_bag_status(self) -> DustBagStatus | None: if self.dss: return 
DustBagStatus((self.dss >> 6) & 3) return None @property def water_box_filter_status(self) -> int | None: if self.dss: return (self.dss >> 8) & 3 return None @property def clean_fluid_status(self) -> int | None: if self.dss: return (self.dss >> 10) & 3 return None @property def hatch_door_status(self) -> int | None: if self.dss: return (self.dss >> 12) & 7 return None @property def dock_cool_fan_status(self) -> int | None: if self.dss: return (self.dss >> 15) & 3 return None def __repr__(self) -> str: return _attr_repr(self) @dataclass class S4MaxStatus(Status): fan_power: RoborockFanSpeedS6Pure | None = None water_box_mode: RoborockMopIntensityS7 | None = None mop_mode: RoborockMopModeS7 | None = None @dataclass class S5MaxStatus(Status): fan_power: RoborockFanSpeedS6Pure | None = None water_box_mode: RoborockMopIntensityS5Max | None = None @dataclass class Q7MaxStatus(Status): fan_power: RoborockFanSpeedQ7Max | None = None water_box_mode: RoborockMopIntensityQ7Max | None = None @dataclass class QRevoMasterStatus(Status): fan_power: RoborockFanSpeedQRevoMaster | None = None water_box_mode: RoborockMopIntensityQRevoMaster | None = None mop_mode: RoborockMopModeQRevoMaster | None = None @dataclass class QRevoCurvStatus(Status): fan_power: RoborockFanSpeedQRevoCurv | None = None water_box_mode: RoborockMopIntensityQRevoCurv | None = None mop_mode: RoborockMopModeQRevoCurv | None = None @dataclass class QRevoMaxVStatus(Status): fan_power: RoborockFanSpeedQRevoMaxV | None = None water_box_mode: RoborockMopIntensityQRevoMaxV | None = None mop_mode: RoborockMopModeQRevoMaxV | None = None @dataclass class S6MaxVStatus(Status): fan_power: RoborockFanSpeedS7MaxV | None = None water_box_mode: RoborockMopIntensityS6MaxV | None = None @dataclass class S6PureStatus(Status): fan_power: RoborockFanSpeedS6Pure | None = None @dataclass class S7MaxVStatus(Status): fan_power: RoborockFanSpeedS7MaxV | None = None water_box_mode: RoborockMopIntensityS7 | None = None mop_mode: RoborockMopModeS7 | None = None @dataclass class S7Status(Status): fan_power: RoborockFanSpeedS7 | None = None water_box_mode: RoborockMopIntensityS7 | None = None mop_mode: RoborockMopModeS7 | None = None @dataclass class S8ProUltraStatus(Status): fan_power: RoborockFanSpeedS7MaxV | None = None water_box_mode: RoborockMopIntensityS7 | None = None mop_mode: RoborockMopModeS8ProUltra | None = None @dataclass class S8Status(Status): fan_power: RoborockFanSpeedS7MaxV | None = None water_box_mode: RoborockMopIntensityS7 | None = None mop_mode: RoborockMopModeS8ProUltra | None = None @dataclass class P10Status(Status): fan_power: RoborockFanSpeedP10 | None = None water_box_mode: RoborockMopIntensityP10 | None = None mop_mode: RoborockMopModeS8ProUltra | None = None @dataclass class S8MaxvUltraStatus(Status): fan_power: RoborockFanSpeedS8MaxVUltra | None = None water_box_mode: RoborockMopIntensityS8MaxVUltra | None = None mop_mode: RoborockMopModeS8MaxVUltra | None = None @dataclass class Saros10RStatus(Status): fan_power: RoborockFanSpeedSaros10R | None = None water_box_mode: RoborockMopIntensitySaros10R | None = None mop_mode: RoborockMopModeSaros10R | None = None @dataclass class Saros10Status(Status): fan_power: RoborockFanSpeedSaros10 | None = None water_box_mode: RoborockMopIntensitySaros10 | None = None mop_mode: RoborockMopModeSaros10 | None = None ModelStatus: dict[str, type[Status]] = { ROBOROCK_S4_MAX: S4MaxStatus, ROBOROCK_S5_MAX: S5MaxStatus, ROBOROCK_Q7_MAX: Q7MaxStatus, ROBOROCK_QREVO_MASTER: QRevoMasterStatus, 
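    # Usage sketch (hypothetical caller code, not part of this module): resolve the
    # per-model Status subclass before parsing a raw status payload, e.g.
    #   status_cls = ModelStatus.get(device_data.model, Status)
    #   status = status_cls.from_dict(raw_status)
    # so fields such as fan_power / water_box_mode / mop_mode are coerced to the
    # model-specific enums declared on that subclass.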
ROBOROCK_QREVO_CURV: QRevoCurvStatus, ROBOROCK_S6: S6PureStatus, ROBOROCK_S6_MAXV: S6MaxVStatus, ROBOROCK_S6_PURE: S6PureStatus, ROBOROCK_S7_MAXV: S7MaxVStatus, ROBOROCK_S7: S7Status, ROBOROCK_S8: S8Status, ROBOROCK_S8_PRO_ULTRA: S8ProUltraStatus, ROBOROCK_G10S_PRO: S7MaxVStatus, ROBOROCK_G20S_Ultra: QRevoMasterStatus, ROBOROCK_P10: P10Status, # These likely are not correct, # but i am currently unable to do my typical reverse engineering/ get any data from users on this, # so this will be here in the mean time. ROBOROCK_QREVO_S: P10Status, ROBOROCK_QREVO_MAXV: QRevoMaxVStatus, ROBOROCK_QREVO_PRO: P10Status, ROBOROCK_S8_MAXV_ULTRA: S8MaxvUltraStatus, ROBOROCK_SAROS_10R: Saros10RStatus, ROBOROCK_SAROS_10: Saros10Status, } @dataclass class DnDTimer(RoborockBaseTimer): """DnDTimer""" @dataclass class ValleyElectricityTimer(RoborockBaseTimer): """ValleyElectricityTimer""" @dataclass class CleanSummary(RoborockBase): clean_time: int | None = None clean_area: int | None = None clean_count: int | None = None dust_collection_count: int | None = None records: list[int] | None = None last_clean_t: int | None = None @property def square_meter_clean_area(self) -> float | None: """Returns the clean area in square meters.""" if isinstance(self.clean_area, list | str): _LOGGER.warning(f"Clean area is a unexpected type! Please give the following in a issue: {self.clean_area}") return None return round(self.clean_area / 1000000, 1) if self.clean_area is not None else None def __repr__(self) -> str: """Return a string representation of the object including all attributes.""" return _attr_repr(self) @dataclass class CleanRecord(RoborockBase): begin: int | None = None end: int | None = None duration: int | None = None area: int | None = None error: int | None = None complete: int | None = None start_type: RoborockStartType | None = None clean_type: RoborockCleanType | None = None finish_reason: RoborockFinishReason | None = None dust_collection_status: int | None = None avoid_count: int | None = None wash_count: int | None = None map_flag: int | None = None @property def square_meter_area(self) -> float | None: return round(self.area / 1000000, 1) if self.area is not None else None @property def begin_datetime(self) -> datetime.datetime | None: return datetime.datetime.fromtimestamp(self.begin).astimezone(datetime.UTC) if self.begin else None @property def end_datetime(self) -> datetime.datetime | None: return datetime.datetime.fromtimestamp(self.end).astimezone(datetime.UTC) if self.end else None def __repr__(self) -> str: return _attr_repr(self) @dataclass class Consumable(RoborockBase): main_brush_work_time: int | None = None side_brush_work_time: int | None = None filter_work_time: int | None = None filter_element_work_time: int | None = None sensor_dirty_time: int | None = None strainer_work_times: int | None = None dust_collection_work_times: int | None = None cleaning_brush_work_times: int | None = None moproller_work_time: int | None = None @property def main_brush_time_left(self) -> int | None: return MAIN_BRUSH_REPLACE_TIME - self.main_brush_work_time if self.main_brush_work_time is not None else None @property def side_brush_time_left(self) -> int | None: return SIDE_BRUSH_REPLACE_TIME - self.side_brush_work_time if self.side_brush_work_time is not None else None @property def filter_time_left(self) -> int | None: return FILTER_REPLACE_TIME - self.filter_work_time if self.filter_work_time is not None else None @property def sensor_time_left(self) -> int | None: return SENSOR_DIRTY_REPLACE_TIME - 
self.sensor_dirty_time if self.sensor_dirty_time is not None else None @property def strainer_time_left(self) -> int | None: return STRAINER_REPLACE_TIME - self.strainer_work_times if self.strainer_work_times is not None else None @property def dust_collection_time_left(self) -> int | None: return ( DUST_COLLECTION_REPLACE_TIME - self.dust_collection_work_times if self.dust_collection_work_times is not None else None ) @property def cleaning_brush_time_left(self) -> int | None: return ( CLEANING_BRUSH_REPLACE_TIME - self.cleaning_brush_work_times if self.cleaning_brush_work_times is not None else None ) @property def mop_roller_time_left(self) -> int | None: return MOP_ROLLER_REPLACE_TIME - self.moproller_work_time if self.moproller_work_time is not None else None def __repr__(self) -> str: return _attr_repr(self) @dataclass class MultiMapsListMapInfoBakMaps(RoborockBase): mapflag: Any | None = None add_time: Any | None = None @dataclass class MultiMapsListMapInfo(RoborockBase): map_flag: int name: str add_time: Any | None = None length: Any | None = None bak_maps: list[MultiMapsListMapInfoBakMaps] | None = None @property def mapFlag(self) -> int: """Alias for map_flag, returns the map flag as an integer.""" return self.map_flag @dataclass class MultiMapsList(RoborockBase): max_multi_map: int | None = None max_bak_map: int | None = None multi_map_count: int | None = None map_info: list[MultiMapsListMapInfo] | None = None @dataclass class SmartWashParams(RoborockBase): smart_wash: int | None = None wash_interval: int | None = None @dataclass class DustCollectionMode(RoborockBase): mode: RoborockDockDustCollectionModeCode | None = None @dataclass class WashTowelMode(RoborockBase): wash_mode: RoborockDockWashTowelModeCode | None = None @dataclass class NetworkInfo(RoborockBase): ip: str ssid: str | None = None mac: str | None = None bssid: str | None = None rssi: int | None = None @dataclass class AppInitStatusLocalInfo(RoborockBase): location: str bom: str | None = None featureset: int | None = None language: str | None = None logserver: str | None = None wifiplan: str | None = None timezone: str | None = None name: str | None = None @dataclass class AppInitStatus(RoborockBase): local_info: AppInitStatusLocalInfo feature_info: list[int] new_feature_info: int new_feature_info_str: str new_feature_info_2: int | None = None carriage_type: int | None = None dsp_version: int | None = None @dataclass class DeviceData(RoborockBase): device: HomeDataDevice model: str host: str | None = None @property def product_nickname(self) -> RoborockProductNickname: return SHORT_MODEL_TO_ENUM.get(self.model.split(".")[-1], RoborockProductNickname.PEARLPLUS) def __repr__(self) -> str: return _attr_repr(self) @dataclass class RoomMapping(RoborockBase): segment_id: int iot_id: str @dataclass class NamedRoomMapping(RoomMapping): """Dataclass representing a mapping of a room segment to a name. The name information is not provided by the device directly, but is provided from the HomeData based on the iot_id from the room. """ name: str """The human-readable name of the room, if available.""" @dataclass class CombinedMapInfo(RoborockBase): """Data structure for caching home information. This is not provided directly by the API, but is a combination of map data and room data to provide a more useful structure. 
""" map_flag: int """The map identifier.""" name: str """The name of the map from MultiMapsListMapInfo.""" rooms: list[NamedRoomMapping] """The list of rooms in the map.""" @dataclass class ChildLockStatus(RoborockBase): lock_status: int = 0 @dataclass class FlowLedStatus(RoborockBase): status: int = 0 @dataclass class LedStatus(RoborockBase): status: int = 0 @dataclass class BroadcastMessage(RoborockBase): duid: str ip: str version: bytes class ServerTimer(NamedTuple): id: str status: str dontknow: int @dataclass class RoborockProductStateValue(RoborockBase): value: list desc: dict @dataclass class RoborockProductState(RoborockBase): dps: int desc: dict value: list[RoborockProductStateValue] @dataclass class RoborockProductSpec(RoborockBase): state: RoborockProductState battery: dict | None = None dry_countdown: dict | None = None extra: dict | None = None offpeak: dict | None = None countdown: dict | None = None mode: dict | None = None ota_nfo: dict | None = None pause: dict | None = None program: dict | None = None shutdown: dict | None = None washing_left: dict | None = None @dataclass class RoborockProduct(RoborockBase): id: int | None = None name: str | None = None model: str | None = None packagename: str | None = None ssid: str | None = None picurl: str | None = None cardpicurl: str | None = None mediumCardpicurl: str | None = None resetwifipicurl: str | None = None configPicUrl: str | None = None pluginPicUrl: str | None = None resetwifitext: dict | None = None tuyaid: str | None = None status: int | None = None rriotid: str | None = None pictures: list | None = None ncMode: str | None = None scope: str | None = None product_tags: list | None = None agreements: list | None = None cardspec: str | None = None plugin_pic_url: str | None = None @property def product_nickname(self) -> RoborockProductNickname | None: if self.cardspec: return RoborockProductSpec.from_dict(json.loads(self.cardspec).get("data")) return None def __repr__(self) -> str: return _attr_repr(self) @dataclass class RoborockProductCategory(RoborockBase): id: int display_name: str icon_url: str @dataclass class RoborockCategoryDetail(RoborockBase): category: RoborockProductCategory product_list: list[RoborockProduct] @dataclass class ProductResponse(RoborockBase): category_detail_list: list[RoborockCategoryDetail] @dataclass class DyadProductInfo(RoborockBase): sn: str ssid: str timezone: str posix_timezone: str ip: str mac: str oba: dict @dataclass class DyadSndState(RoborockBase): sid_in_use: int sid_version: int location: str bom: str language: str @dataclass class DyadOtaNfo(RoborockBase): mqttOtaData: dict Python-roborock-python-roborock-32df4f3/roborock/device_features.py000066400000000000000000001033221507503702500257170ustar00rootroot00000000000000from __future__ import annotations from dataclasses import dataclass, field, fields from enum import IntEnum, StrEnum from typing import Any from .code_mappings import RoborockProductNickname from .containers import RoborockBase class NewFeatureStrBit(IntEnum): TWO_KEY_REAL_TIME_VIDEO = 32 TWO_KEY_RTV_IN_CHARGING = 33 DIRTY_REPLENISH_CLEAN = 34 AUTO_DELIVERY_FIELD_IN_GLOBAL_STATUS = 35 AVOID_COLLISION_MODE = 36 VOICE_CONTROL = 37 NEW_ENDPOINT = 38 PUMPING_WATER = 39 CORNER_MOP_STRETCH = 40 HOT_WASH_TOWEL = 41 FLOOR_DIR_CLEAN_ANY_TIME = 42 PET_SUPPLIES_DEEP_CLEAN = 43 MOP_SHAKE_WATER_MAX = 45 EXACT_CUSTOM_MODE = 47 VIDEO_PATROL = 48 CARPET_CUSTOM_CLEAN = 49 PET_SNAPSHOT = 50 CUSTOM_CLEAN_MODE_COUNT = 51 NEW_AI_RECOGNITION = 52 AUTO_COLLECTION_2 = 53 
RIGHT_BRUSH_STRETCH = 54 SMART_CLEAN_MODE_SET = 55 DIRTY_OBJECT_DETECT = 56 NO_NEED_CARPET_PRESS_SET = 57 VOICE_CONTROL_LED = 58 WATER_LEAK_CHECK = 60 MIN_BATTERY_15_TO_CLEAN_TASK = 62 GAP_DEEP_CLEAN = 63 OBJECT_DETECT_CHECK = 64 IDENTIFY_ROOM = 66 MATTER = 67 WORKDAY_HOLIDAY = 69 CLEAN_DIRECT_STATUS = 70 MAP_ERASER = 71 OPTIMIZE_BATTERY = 72 ACTIVATE_VIDEO_CHARGING_AND_STANDBY = 73 CARPET_LONG_HAIRED = 75 CLEAN_HISTORY_TIME_LINE = 76 MAX_ZONE_OPENED = 77 EXHIBITION_FUNCTION = 78 LDS_LIFTING = 79 AUTO_TEAR_DOWN_MOP = 80 SMALL_SIDE_MOP = 81 SUPPORT_SIDE_BRUSH_UP_DOWN = 82 DRY_INTERVAL_TIMER = 83 UVC_STERILIZE = 84 MIDWAY_BACK_TO_DOCK = 85 SUPPORT_MAIN_BRUSH_UP_DOWN = 86 EGG_DANCE_MODE = 87 MECHANICAL_ARM_MODE = 89 TIDYUP_ZONES = MECHANICAL_ARM_MODE CLEAN_TIME_LINE = 91 CLEAN_THEN_MOP_MODE = 93 TYPE_IDENTIFY = 94 SUPPORT_GET_PARTICULAR_STATUS = 96 THREE_D_MAPPING_INNER_TEST = 97 SYNC_SERVER_NAME = 98 SHOULD_SHOW_ARM_OVER_LOAD = 99 COLLECT_DUST_COUNT_SHOW = 100 SUPPORT_API_APP_STOP_GRASP = 101 CTM_WITH_REPEAT = 102 SIDE_BRUSH_LIFT_CARPET = 104 DETECT_WIRE_CARPET = 105 WATER_SLIDE_MODE = 106 SOAK_AND_WASH = 107 CLEAN_EFFICIENCY = 108 BACK_WASH_NEW_SMART = 109 DUAL_BAND_WI_FI = 110 PROGRAM_MODE = 111 CLEAN_FLUID_DELIVERY = 112 CARPET_LONG_HAIRED_EX = 113 OVER_SEA_CTM = 114 FULL_DUPLES_SWITCH = 115 LOW_AREA_ACCESS = 116 FOLLOW_LOW_OBS = 117 TWO_GEARS_NO_COLLISION = 118 CARPET_SHAPE_TYPE = 119 SR_MAP = 120 class ProductFeatures(StrEnum): REMOTE_BACK = "remote_back" CLEANMODE_MAXPLUS = "cleanmode_maxplus" CLEANMODE_PURECLEANMOP = "cleanmode_purecleanmop" CLEANMODE_NONE_PURECLEANMOP_WITH_MAXPLUS = "cleanmode_none_purecleanmop_with_maxplus" MOP_ELECTRONIC_MODULE = "mop_electronic_module" MOP_SHAKE_MODULE = "mop_shake_module" MOP_SPIN_MODULE = "mop_spin_module" DEFAULT_MAP3D = "map3d" DEFAULT_CLEANMODECUSTOM = "custom_cleanmode" REALTIMEVIDEO = "realtimevideo" REALTIMEVIDEO_LIVECALL = "realtimevideo_livecall" REALTIMEVIDEO_RECORDANDSHORTCUT = "realtimevideo_livecall" CAMERA_SINGLELINE = "camera_singleline" CAMERA_DUALLINE = "camera_dualline" CAMERA_RGB = "camera_rgb" CAMERA_DOUBLERGB = "camera_doublergb" AIRECOGNITION_SETTING = "airecognition_setting" AIRECOGNITION_SCENE = "airecognition_scene" AIRECOGNITION_PET = "airecognition_pet" AIRECOGNITION_OBSTACLE = "airecognition_obstacle" # The following combinations are pulled directly from decompiled source code. 
AIRECOGNITION_OBSTACLE = [ProductFeatures.AIRECOGNITION_OBSTACLE] RGB_CAMERA_FEATURES = [ ProductFeatures.CAMERA_RGB, ProductFeatures.AIRECOGNITION_SETTING, ProductFeatures.AIRECOGNITION_SCENE, ProductFeatures.AIRECOGNITION_PET, ProductFeatures.AIRECOGNITION_OBSTACLE, ProductFeatures.REALTIMEVIDEO, ProductFeatures.REALTIMEVIDEO_LIVECALL, ProductFeatures.REALTIMEVIDEO_RECORDANDSHORTCUT, ] DOUBLE_RGB_CAMERA_FEATURES = [ ProductFeatures.CAMERA_DOUBLERGB, ProductFeatures.AIRECOGNITION_SETTING, ProductFeatures.AIRECOGNITION_PET, ProductFeatures.AIRECOGNITION_OBSTACLE, ProductFeatures.REALTIMEVIDEO, ] SINGLE_LINE_CAMERA_FEATURES = [ ProductFeatures.CAMERA_SINGLELINE, ProductFeatures.AIRECOGNITION_SETTING, ProductFeatures.AIRECOGNITION_OBSTACLE, ] DUAL_LINE_CAMERA_FEATURES = [ ProductFeatures.CAMERA_DUALLINE, ProductFeatures.AIRECOGNITION_SETTING, ProductFeatures.AIRECOGNITION_OBSTACLE, ProductFeatures.AIRECOGNITION_PET, ] NEW_DEFAULT_FEATURES = [ProductFeatures.REMOTE_BACK, ProductFeatures.CLEANMODE_MAXPLUS] PEARL_FEATURES = SINGLE_LINE_CAMERA_FEATURES + [ProductFeatures.CLEANMODE_MAXPLUS, ProductFeatures.MOP_SPIN_MODULE] PEARL_PLUS_FEATURES = NEW_DEFAULT_FEATURES + RGB_CAMERA_FEATURES + [ProductFeatures.MOP_SPIN_MODULE] ULTRON_FEATURES = NEW_DEFAULT_FEATURES + DUAL_LINE_CAMERA_FEATURES + [ProductFeatures.MOP_SHAKE_MODULE] ULTRONSV_FEATURES = NEW_DEFAULT_FEATURES + RGB_CAMERA_FEATURES + [ProductFeatures.MOP_SHAKE_MODULE] TANOSS_FEATURES = [ProductFeatures.REMOTE_BACK, ProductFeatures.MOP_SHAKE_MODULE] TOPAZSPOWER_FEATURES = [ProductFeatures.CLEANMODE_MAXPLUS, ProductFeatures.MOP_SHAKE_MODULE] PRODUCTS_WITHOUT_CUSTOM_CLEAN: set[RoborockProductNickname] = { RoborockProductNickname.TANOS, RoborockProductNickname.RUBYPLUS, RoborockProductNickname.RUBYSC, RoborockProductNickname.RUBYSE, } PRODUCTS_WITHOUT_DEFAULT_3D_MAP: set[RoborockProductNickname] = { RoborockProductNickname.TANOS, RoborockProductNickname.TANOSSPLUS, RoborockProductNickname.TANOSE, RoborockProductNickname.TANOSV, RoborockProductNickname.RUBYPLUS, RoborockProductNickname.RUBYSC, RoborockProductNickname.RUBYSE, } PRODUCTS_WITHOUT_PURE_CLEAN_MOP: set[RoborockProductNickname] = { RoborockProductNickname.TANOS, RoborockProductNickname.TANOSE, RoborockProductNickname.TANOSV, RoborockProductNickname.TANOSSLITE, RoborockProductNickname.TANOSSE, RoborockProductNickname.TANOSSC, RoborockProductNickname.ULTRONLITE, RoborockProductNickname.ULTRONE, RoborockProductNickname.RUBYPLUS, RoborockProductNickname.RUBYSLITE, RoborockProductNickname.RUBYSC, RoborockProductNickname.RUBYSE, } # Base map containing the initial, unconditional features for each product. 
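# PRODUCT_FEATURE_MAP (built right after this map) layers defaults on top of it:
# DEFAULT_CLEANMODECUSTOM, DEFAULT_MAP3D and CLEANMODE_PURECLEANMOP are appended
# unless the product is listed in the matching PRODUCTS_WITHOUT_* set above.
# Rough lookup sketch (hypothetical helper, not part of this module):
#   def product_supports(nickname: RoborockProductNickname, feature: ProductFeatures) -> bool:
#       return feature in PRODUCT_FEATURE_MAP.get(nickname, [])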
_BASE_PRODUCT_FEATURE_MAP: dict[RoborockProductNickname, list[ProductFeatures]] = { RoborockProductNickname.PEARL: PEARL_FEATURES, RoborockProductNickname.PEARLS: PEARL_FEATURES, RoborockProductNickname.PEARLPLUS: PEARL_PLUS_FEATURES, RoborockProductNickname.VIVIAN: PEARL_PLUS_FEATURES, RoborockProductNickname.CORAL: PEARL_PLUS_FEATURES, RoborockProductNickname.ULTRON: ULTRON_FEATURES, RoborockProductNickname.ULTRONE: [ProductFeatures.CLEANMODE_NONE_PURECLEANMOP_WITH_MAXPLUS], RoborockProductNickname.ULTRONSV: ULTRONSV_FEATURES, RoborockProductNickname.TOPAZSPOWER: TOPAZSPOWER_FEATURES, RoborockProductNickname.TANOSS: TANOSS_FEATURES, RoborockProductNickname.PEARLC: PEARL_FEATURES, RoborockProductNickname.PEARLPLUSS: PEARL_PLUS_FEATURES, RoborockProductNickname.PEARLSLITE: PEARL_FEATURES, RoborockProductNickname.PEARLE: PEARL_FEATURES, RoborockProductNickname.PEARLELITE: PEARL_FEATURES, RoborockProductNickname.VIVIANC: [ProductFeatures.CLEANMODE_MAXPLUS, ProductFeatures.MOP_SPIN_MODULE] + SINGLE_LINE_CAMERA_FEATURES, RoborockProductNickname.CORALPRO: PEARL_PLUS_FEATURES, RoborockProductNickname.ULTRONLITE: SINGLE_LINE_CAMERA_FEATURES + [ProductFeatures.CLEANMODE_NONE_PURECLEANMOP_WITH_MAXPLUS, ProductFeatures.MOP_ELECTRONIC_MODULE], RoborockProductNickname.ULTRONSC: ULTRON_FEATURES, RoborockProductNickname.ULTRONSE: [ ProductFeatures.CLEANMODE_NONE_PURECLEANMOP_WITH_MAXPLUS, ProductFeatures.MOP_ELECTRONIC_MODULE, ], RoborockProductNickname.ULTRONSPLUS: ULTRON_FEATURES, RoborockProductNickname.VERDELITE: ULTRONSV_FEATURES, RoborockProductNickname.TOPAZS: [ProductFeatures.REMOTE_BACK, ProductFeatures.MOP_SHAKE_MODULE], RoborockProductNickname.TOPAZSPLUS: NEW_DEFAULT_FEATURES + DUAL_LINE_CAMERA_FEATURES + [ProductFeatures.MOP_SHAKE_MODULE], RoborockProductNickname.TOPAZSC: TOPAZSPOWER_FEATURES + SINGLE_LINE_CAMERA_FEATURES, RoborockProductNickname.TOPAZSV: NEW_DEFAULT_FEATURES + RGB_CAMERA_FEATURES + [ProductFeatures.MOP_SHAKE_MODULE], RoborockProductNickname.TANOSSPLUS: TANOSS_FEATURES + DUAL_LINE_CAMERA_FEATURES, RoborockProductNickname.TANOSSLITE: [ProductFeatures.MOP_ELECTRONIC_MODULE], RoborockProductNickname.TANOSSC: [], RoborockProductNickname.TANOSSE: [], RoborockProductNickname.TANOSSMAX: NEW_DEFAULT_FEATURES + DUAL_LINE_CAMERA_FEATURES + [ProductFeatures.MOP_SHAKE_MODULE], RoborockProductNickname.TANOS: [ProductFeatures.REMOTE_BACK], RoborockProductNickname.TANOSE: [ProductFeatures.MOP_ELECTRONIC_MODULE, ProductFeatures.REMOTE_BACK], RoborockProductNickname.TANOSV: DOUBLE_RGB_CAMERA_FEATURES + [ProductFeatures.REMOTE_BACK, ProductFeatures.MOP_ELECTRONIC_MODULE], RoborockProductNickname.RUBYPLUS: [], RoborockProductNickname.RUBYSC: [], RoborockProductNickname.RUBYSE: [], RoborockProductNickname.RUBYSLITE: [ProductFeatures.MOP_ELECTRONIC_MODULE], } PRODUCT_FEATURE_MAP: dict[RoborockProductNickname, list[ProductFeatures]] = { product: ( features + ([ProductFeatures.DEFAULT_CLEANMODECUSTOM] if product not in PRODUCTS_WITHOUT_CUSTOM_CLEAN else []) + ([ProductFeatures.DEFAULT_MAP3D] if product not in PRODUCTS_WITHOUT_DEFAULT_3D_MAP else []) + ([ProductFeatures.CLEANMODE_PURECLEANMOP] if product not in PRODUCTS_WITHOUT_PURE_CLEAN_MOP else []) ) for product, features in _BASE_PRODUCT_FEATURE_MAP.items() } @dataclass class DeviceFeatures(RoborockBase): """Represents the features supported by a Roborock device.""" # Features from robot_new_features (lower 32 bits) is_show_clean_finish_reason_supported: bool = field(metadata={"robot_new_features": 1}) is_re_segment_supported: bool = 
field(metadata={"robot_new_features": 4}) is_video_monitor_supported: bool = field(metadata={"robot_new_features": 8}) is_any_state_transit_goto_supported: bool = field(metadata={"robot_new_features": 16}) is_fw_filter_obstacle_supported: bool = field(metadata={"robot_new_features": 32}) is_video_setting_supported: bool = field(metadata={"robot_new_features": 64}) is_ignore_unknown_map_object_supported: bool = field(metadata={"robot_new_features": 128}) is_set_child_supported: bool = field(metadata={"robot_new_features": 256}) is_carpet_supported: bool = field(metadata={"robot_new_features": 512}) is_record_allowed: bool = field(metadata={"robot_new_features": 1024}) is_mop_path_supported: bool = field(metadata={"robot_new_features": 2048}) is_multi_map_segment_timer_supported: bool = field(metadata={"robot_new_features": 4096}) is_current_map_restore_enabled: bool = field(metadata={"robot_new_features": 8192}) is_room_name_supported: bool = field(metadata={"robot_new_features": 16384}) is_shake_mop_set_supported: bool = field(metadata={"robot_new_features": 262144}) is_map_beautify_internal_debug_supported: bool = field(metadata={"robot_new_features": 2097152}) is_new_data_for_clean_history: bool = field(metadata={"robot_new_features": 4194304}) is_new_data_for_clean_history_detail: bool = field(metadata={"robot_new_features": 8388608}) is_flow_led_setting_supported: bool = field(metadata={"robot_new_features": 16777216}) is_dust_collection_setting_supported: bool = field(metadata={"robot_new_features": 33554432}) is_rpc_retry_supported: bool = field(metadata={"robot_new_features": 67108864}) is_avoid_collision_supported: bool = field(metadata={"robot_new_features": 134217728}) is_support_set_switch_map_mode: bool = field(metadata={"robot_new_features": 268435456}) is_map_carpet_add_support: bool = field(metadata={"robot_new_features": 1073741824}) is_custom_water_box_distance_supported: bool = field(metadata={"robot_new_features": 2147483648}) # Features from robot_new_features (upper 32 bits) is_support_smart_scene: bool = field(metadata={"upper_32_bits": 1}) is_support_floor_edit: bool = field(metadata={"upper_32_bits": 3}) is_support_furniture: bool = field(metadata={"upper_32_bits": 4}) is_wash_then_charge_cmd_supported: bool = field(metadata={"upper_32_bits": 5}) is_support_room_tag: bool = field(metadata={"upper_32_bits": 6}) is_support_quick_map_builder: bool = field(metadata={"upper_32_bits": 7}) is_support_smart_global_clean_with_custom_mode: bool = field(metadata={"upper_32_bits": 8}) is_careful_slow_mop_supported: bool = field(metadata={"upper_32_bits": 9}) is_egg_mode_supported_from_new_features: bool = field(metadata={"upper_32_bits": 10}) is_carpet_show_on_map: bool = field(metadata={"upper_32_bits": 12}) is_supported_valley_electricity: bool = field(metadata={"upper_32_bits": 13}) is_unsave_map_reason_supported: bool = field(metadata={"upper_32_bits": 14}) is_supported_drying: bool = field(metadata={"upper_32_bits": 15}) is_supported_download_test_voice: bool = field(metadata={"upper_32_bits": 16}) is_support_backup_map: bool = field(metadata={"upper_32_bits": 17}) is_support_custom_mode_in_cleaning: bool = field(metadata={"upper_32_bits": 18}) is_support_remote_control_in_call: bool = field(metadata={"upper_32_bits": 19}) # Features from new_feature_info_str (masking last 8 chars / 32 bits) is_support_set_volume_in_call: bool = field(metadata={"new_feature_str_mask": (1, 8)}) is_support_clean_estimate: bool = field(metadata={"new_feature_str_mask": (2, 8)}) 
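    # Decoding sketch (mirrors from_feature_flags below): a metadata value of
    # (mask, 8) means the last 8 hex characters of new_feature_info_str are parsed
    # as a 32-bit integer and ANDed with the mask. For example, for
    # is_support_clean_estimate just above (mask 2):
    #   value = int(new_feature_info_str[-8:], 16)
    #   supported = bool(value & 2)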
is_support_custom_dnd: bool = field(metadata={"new_feature_str_mask": (4, 8)}) is_carpet_deep_clean_supported: bool = field(metadata={"new_feature_str_mask": (8, 8)}) is_support_stuck_zone: bool = field(metadata={"new_feature_str_mask": (16, 8)}) is_support_custom_door_sill: bool = field(metadata={"new_feature_str_mask": (32, 8)}) is_wifi_manage_supported: bool = field(metadata={"new_feature_str_mask": (128, 8)}) is_clean_route_fast_mode_supported: bool = field(metadata={"new_feature_str_mask": (256, 8)}) is_support_cliff_zone: bool = field(metadata={"new_feature_str_mask": (512, 8)}) is_support_smart_door_sill: bool = field(metadata={"new_feature_str_mask": (1024, 8)}) is_support_floor_direction: bool = field(metadata={"new_feature_str_mask": (2048, 8)}) is_back_charge_auto_wash_supported: bool = field(metadata={"new_feature_str_mask": (4096, 8)}) is_support_incremental_map: bool = field(metadata={"new_feature_str_mask": (4194304, 8)}) is_offline_map_supported: bool = field(metadata={"new_feature_str_mask": (16384, 8)}) is_super_deep_wash_supported: bool = field(metadata={"new_feature_str_mask": (32768, 8)}) is_ces2022_supported: bool = field(metadata={"new_feature_str_mask": (65536, 8)}) is_dss_believable: bool = field(metadata={"new_feature_str_mask": (131072, 8)}) is_main_brush_up_down_supported_from_str: bool = field(metadata={"new_feature_str_mask": (262144, 8)}) is_goto_pure_clean_path_supported: bool = field(metadata={"new_feature_str_mask": (524288, 8)}) is_water_up_down_drain_supported: bool = field(metadata={"new_feature_str_mask": (1048576, 8)}) is_setting_carpet_first_supported: bool = field(metadata={"new_feature_str_mask": (8388608, 8)}) is_clean_route_deep_slow_plus_supported: bool = field(metadata={"new_feature_str_mask": (16777216, 8)}) is_dynamically_skip_clean_zone_supported: bool = field(metadata={"new_feature_str_mask": (33554432, 8)}) is_dynamically_add_clean_zones_supported: bool = field(metadata={"new_feature_str_mask": (67108864, 8)}) is_left_water_drain_supported: bool = field(metadata={"new_feature_str_mask": (134217728, 8)}) is_clean_count_setting_supported: bool = field(metadata={"new_feature_str_mask": (1073741824, 8)}) is_corner_clean_mode_supported: bool = field(metadata={"new_feature_str_mask": (2147483648, 8)}) # Features from new_feature_info_str (by bit index) is_two_key_real_time_video_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.TWO_KEY_REAL_TIME_VIDEO} ) is_two_key_rtv_in_charging_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.TWO_KEY_RTV_IN_CHARGING} ) is_dirty_replenish_clean_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.DIRTY_REPLENISH_CLEAN} ) is_auto_delivery_field_in_global_status_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.AUTO_DELIVERY_FIELD_IN_GLOBAL_STATUS} ) is_avoid_collision_mode_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.AVOID_COLLISION_MODE} ) is_voice_control_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.VOICE_CONTROL}) is_new_endpoint_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.NEW_ENDPOINT}) is_pumping_water_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.PUMPING_WATER}) is_corner_mop_stretch_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.CORNER_MOP_STRETCH}) is_hot_wash_towel_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.HOT_WASH_TOWEL}) 
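    # Decoding sketch for the "new_feature_str_bit" fields (mirrors
    # from_feature_flags below): the NewFeatureStrBit value is a bit index into
    # new_feature_info_str, counted from the right at four bits per hex character.
    # For example, for is_hot_wash_towel_supported just above (HOT_WASH_TOWEL = bit 41):
    #   char_index_from_end = 1 + 41 // 4   # 11th character from the end
    #   nibble = int(new_feature_info_str[-char_index_from_end], 16)
    #   supported = bool((nibble >> (41 % 4)) & 1)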
is_floor_dir_clean_any_time_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.FLOOR_DIR_CLEAN_ANY_TIME} ) is_pet_supplies_deep_clean_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.PET_SUPPLIES_DEEP_CLEAN} ) is_mop_shake_water_max_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.MOP_SHAKE_WATER_MAX} ) is_exact_custom_mode_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.EXACT_CUSTOM_MODE}) is_video_patrol_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.VIDEO_PATROL}) is_carpet_custom_clean_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.CARPET_CUSTOM_CLEAN} ) is_pet_snapshot_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.PET_SNAPSHOT}) is_custom_clean_mode_count_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.CUSTOM_CLEAN_MODE_COUNT} ) is_new_ai_recognition_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.NEW_AI_RECOGNITION}) is_auto_collection_2_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.AUTO_COLLECTION_2}) is_right_brush_stretch_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.RIGHT_BRUSH_STRETCH} ) is_smart_clean_mode_set_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.SMART_CLEAN_MODE_SET} ) is_dirty_object_detect_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.DIRTY_OBJECT_DETECT} ) is_no_need_carpet_press_set_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.NO_NEED_CARPET_PRESS_SET} ) is_voice_control_led_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.VOICE_CONTROL_LED}) is_water_leak_check_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.WATER_LEAK_CHECK}) is_min_battery_15_to_clean_task_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.MIN_BATTERY_15_TO_CLEAN_TASK} ) is_gap_deep_clean_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.GAP_DEEP_CLEAN}) is_object_detect_check_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.OBJECT_DETECT_CHECK} ) is_identify_room_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.IDENTIFY_ROOM}) is_matter_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.MATTER}) is_workday_holiday_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.WORKDAY_HOLIDAY}) is_clean_direct_status_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.CLEAN_DIRECT_STATUS} ) is_map_eraser_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.MAP_ERASER}) is_optimize_battery_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.OPTIMIZE_BATTERY}) is_activate_video_charging_and_standby_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.ACTIVATE_VIDEO_CHARGING_AND_STANDBY} ) is_carpet_long_haired_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.CARPET_LONG_HAIRED}) is_clean_history_time_line_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.CLEAN_HISTORY_TIME_LINE} ) is_max_zone_opened_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.MAX_ZONE_OPENED}) is_exhibition_function_supported: bool = field( metadata={"new_feature_str_bit": 
NewFeatureStrBit.EXHIBITION_FUNCTION} ) is_lds_lifting_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.LDS_LIFTING}) is_auto_tear_down_mop_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.AUTO_TEAR_DOWN_MOP}) is_small_side_mop_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.SMALL_SIDE_MOP}) is_support_side_brush_up_down_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.SUPPORT_SIDE_BRUSH_UP_DOWN} ) is_dry_interval_timer_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.DRY_INTERVAL_TIMER}) is_uvc_sterilize_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.UVC_STERILIZE}) is_midway_back_to_dock_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.MIDWAY_BACK_TO_DOCK} ) is_support_main_brush_up_down_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.SUPPORT_MAIN_BRUSH_UP_DOWN} ) is_egg_dance_mode_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.EGG_DANCE_MODE}) is_mechanical_arm_mode_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.MECHANICAL_ARM_MODE} ) is_tidyup_zones_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.TIDYUP_ZONES}) is_clean_time_line_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.CLEAN_TIME_LINE}) is_clean_then_mop_mode_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.CLEAN_THEN_MOP_MODE} ) is_type_identify_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.TYPE_IDENTIFY}) is_support_get_particular_status_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.SUPPORT_GET_PARTICULAR_STATUS} ) is_three_d_mapping_inner_test_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.THREE_D_MAPPING_INNER_TEST} ) is_sync_server_name_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.SYNC_SERVER_NAME}) is_should_show_arm_over_load_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.SHOULD_SHOW_ARM_OVER_LOAD} ) is_collect_dust_count_show_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.COLLECT_DUST_COUNT_SHOW} ) is_support_api_app_stop_grasp_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.SUPPORT_API_APP_STOP_GRASP} ) is_ctm_with_repeat_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.CTM_WITH_REPEAT}) is_side_brush_lift_carpet_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.SIDE_BRUSH_LIFT_CARPET} ) is_detect_wire_carpet_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.DETECT_WIRE_CARPET}) is_water_slide_mode_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.WATER_SLIDE_MODE}) is_soak_and_wash_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.SOAK_AND_WASH}) is_clean_efficiency_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.CLEAN_EFFICIENCY}) is_back_wash_new_smart_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.BACK_WASH_NEW_SMART} ) is_dual_band_wi_fi_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.DUAL_BAND_WI_FI}) is_program_mode_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.PROGRAM_MODE}) is_clean_fluid_delivery_supported: bool = field( metadata={"new_feature_str_bit": 
NewFeatureStrBit.CLEAN_FLUID_DELIVERY} ) is_carpet_long_haired_ex_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.CARPET_LONG_HAIRED_EX} ) is_over_sea_ctm_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.OVER_SEA_CTM}) is_full_duples_switch_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.FULL_DUPLES_SWITCH}) is_low_area_access_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.LOW_AREA_ACCESS}) is_follow_low_obs_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.FOLLOW_LOW_OBS}) is_two_gears_no_collision_supported: bool = field( metadata={"new_feature_str_bit": NewFeatureStrBit.TWO_GEARS_NO_COLLISION} ) is_carpet_shape_type_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.CARPET_SHAPE_TYPE}) is_sr_map_supported: bool = field(metadata={"new_feature_str_bit": NewFeatureStrBit.SR_MAP}) # Features from feature_info list is_led_status_switch_supported: bool = field(metadata={"robot_features": 119}) is_multi_floor_supported: bool = field(metadata={"robot_features": 120}) is_support_fetch_timer_summary: bool = field(metadata={"robot_features": 122}) is_order_clean_supported: bool = field(metadata={"robot_features": 123}) is_analysis_supported: bool = field(metadata={"robot_features": 124}) is_remote_supported: bool = field(metadata={"robot_features": 125}) is_support_voice_control_debug: bool = field(metadata={"robot_features": 130}) # Features from model whitelists/blacklists or other flags is_mop_forbidden_supported: bool = field( metadata={ "model_whitelist": [ RoborockProductNickname.TANOSV, RoborockProductNickname.TOPAZSV, RoborockProductNickname.TANOS, RoborockProductNickname.TANOSE, RoborockProductNickname.TANOSSLITE, RoborockProductNickname.TANOSS, RoborockProductNickname.TANOSSPLUS, RoborockProductNickname.TANOSSMAX, RoborockProductNickname.ULTRON, RoborockProductNickname.ULTRONLITE, RoborockProductNickname.PEARL, RoborockProductNickname.RUBYSLITE, ] } ) is_soft_clean_mode_supported: bool = field( metadata={ "model_whitelist": [ RoborockProductNickname.TANOSV, RoborockProductNickname.TANOSE, RoborockProductNickname.TANOS, ] } ) is_custom_mode_supported: bool = field(metadata={"model_blacklist": [RoborockProductNickname.TANOS]}) is_support_custom_carpet: bool = field(metadata={"model_whitelist": [RoborockProductNickname.ULTRONLITE]}) is_show_general_obstacle_supported: bool = field(metadata={"model_whitelist": [RoborockProductNickname.TANOSSPLUS]}) is_show_obstacle_photo_supported: bool = field( metadata={ "model_whitelist": [ RoborockProductNickname.TANOSSPLUS, RoborockProductNickname.TANOSSMAX, RoborockProductNickname.ULTRON, ] } ) is_rubber_brush_carpet_supported: bool = field(metadata={"model_whitelist": [RoborockProductNickname.ULTRONLITE]}) is_carpet_pressure_use_origin_paras_supported: bool = field( metadata={"model_whitelist": [RoborockProductNickname.ULTRONLITE]} ) is_support_mop_back_pwm_set: bool = field(metadata={"model_whitelist": [RoborockProductNickname.PEARL]}) is_collect_dust_mode_supported: bool = field(metadata={"model_blacklist": [RoborockProductNickname.PEARL]}) is_support_water_mode: bool = field( metadata={ "product_features": [ ProductFeatures.MOP_ELECTRONIC_MODULE, ProductFeatures.MOP_SHAKE_MODULE, ProductFeatures.MOP_SPIN_MODULE, ] } ) is_pure_clean_mop_supported: bool = field(metadata={"product_features": [ProductFeatures.CLEANMODE_PURECLEANMOP]}) is_new_remote_view_supported: bool = 
field(metadata={"product_features": [ProductFeatures.REMOTE_BACK]}) is_max_plus_mode_supported: bool = field(metadata={"product_features": [ProductFeatures.CLEANMODE_MAXPLUS]}) is_none_pure_clean_mop_with_max_plus: bool = field( metadata={"product_features": [ProductFeatures.CLEANMODE_NONE_PURECLEANMOP_WITH_MAXPLUS]} ) is_clean_route_setting_supported: bool = field( metadata={"product_features": [ProductFeatures.MOP_SHAKE_MODULE, ProductFeatures.MOP_SPIN_MODULE]} ) is_mop_shake_module_supported: bool = field(metadata={"product_features": [ProductFeatures.MOP_SHAKE_MODULE]}) is_customized_clean_supported: bool = field( metadata={"product_features": [ProductFeatures.MOP_SHAKE_MODULE, ProductFeatures.MOP_SPIN_MODULE]} ) @classmethod def from_feature_flags( cls, new_feature_info: int, new_feature_info_str: str, feature_info: list[int], product_nickname: RoborockProductNickname | None, ) -> DeviceFeatures: """Creates a DeviceFeatures instance from raw feature flags. :param new_feature_info: A int from get_init_status (sometimes can be found in homedata, but it is not always) :param new_feature_info_str: A hex string from get_init_status or home_data. :param feature_info: A list of ints from get_init_status :param product_nickname: The product nickname of the device.""" # For any future reverse engineerining: # RobotNewFeatures = new_feature_info # newFeatureInfoStr = new_feature_info_str # feature_info =robotFeatures kwargs: dict[str, Any] = {} for f in fields(cls): # Default all features to False. kwargs[f.name] = False if not f.metadata: continue if (mask := f.metadata.get("robot_new_features")) is not None: kwargs[f.name] = bool(mask & new_feature_info) elif (bit_index := f.metadata.get("upper_32_bits")) is not None: # Check bits in the upper 32-bit integer of new_feature_info if new_feature_info: kwargs[f.name] = bool(((new_feature_info >> 32) >> bit_index) & 1) elif (mask_info := f.metadata.get("new_feature_str_mask")) is not None: # Check bitmask against a slice of the hex string if new_feature_info_str: try: mask, slice_count = mask_info if len(new_feature_info_str) >= slice_count: last_chars = new_feature_info_str[-slice_count:] value = int(last_chars, 16) kwargs[f.name] = bool(mask & value) except (ValueError, IndexError): pass # Keep it False elif (bit := f.metadata.get("new_feature_str_bit")) is not None: # Check a specific bit in the hex string using its index if new_feature_info_str: try: # Bit index defines which character and which bit inside it to check char_index_from_end = 1 + bit.value // 4 if char_index_from_end <= len(new_feature_info_str): char_hex = new_feature_info_str[-char_index_from_end] nibble = int(char_hex, 16) bit_in_nibble = bit.value % 4 kwargs[f.name] = bool((nibble >> bit_in_nibble) & 1) except (ValueError, IndexError): pass # Keep it False elif (feature_id := f.metadata.get("robot_features")) is not None: kwargs[f.name] = feature_id in feature_info elif (whitelist := f.metadata.get("model_whitelist")) is not None: # If product_nickname is None, assume it is not in the whitelist kwargs[f.name] = product_nickname in whitelist or product_nickname is None elif (blacklist := f.metadata.get("model_blacklist")) is not None: # If product_nickname is None, assume it is not in the blacklist. 
if product_nickname is None: kwargs[f.name] = True else: kwargs[f.name] = product_nickname not in blacklist elif (product_features := f.metadata.get("product_features")) is not None: if product_nickname is not None: available_features = PRODUCT_FEATURE_MAP.get(product_nickname, []) if any(feat in available_features for feat in product_features): # type: ignore kwargs[f.name] = True return cls(**kwargs) def get_supported_features(self) -> list[str]: """Returns a list of supported features (Primarily used for logging purposes).""" return [k for k, v in vars(self).items() if v] Python-roborock-python-roborock-32df4f3/roborock/devices/000077500000000000000000000000001507503702500236315ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/roborock/devices/README.md000066400000000000000000000122721507503702500251140ustar00rootroot00000000000000# Roborock Device Discovery This page documents the full lifecycle of device discovery across Cloud and Network. ## Init account setup ### Login - Login can happen with either email and password or email and sending a code. We currently prefer email with sending a code -- however the roborock no longer supports this method of login. In the future we may want to migrate to password if this login method is no longer supported. - The Login API provides a `userData` object with information on connecting to the cloud APIs - This `rriot` data contains per-session information, unique each time you login. - This contains information used to connect to MQTT - You get an `-eu` suffix in the API URLs if you are in the eu and `-us` if you are in the us ## Home Data The `HomeData` includes information about the various devices in the home. We use `v3` and it is notable that if devices don't show up in the `home_data` response it is likely that a newer version of the API should be used. - `products`: This is a list of all of the products you have on your account. These objects are always the same (i.e. a s7 maxv is always the exact same.) - It only shows the products for devices available on your account - `devices` and `received_devices`: - These both share the same objects, but one is for devices that have been shared with you and one is those that are on your account. - The big things here are (MOST are static): - `duid`: A unique identifier for your device (this is always the same i think) - `name`: The name of the device in your app - `local_key`: The local key that is needed for encoding and decoding messages for the device. This stays the same unless someone sets their vacuum back up. - `pv`: the protocol version (i.e. 1.0 or A1 or B1) - `product_id`: The id of the product from the above products list. - `device_status`: An initial status for some of the data we care about, though this changes on each update. - `rooms`: The rooms in the home. - This changes if the user adds a new room or changes its name. - We have to combine this with the room numbers from `GET_ROOM_MAPPING` on the device - There is another REST request `get_rooms` that will do the same thing. - Note: If we cache home_data, we likely need to use `get_rooms` to get rooms fresh ## Device Connections ### MQTT connection - Initial device information must be obtained from MQTT - We typically set up the MQTT device connection before the local device connection. - The `NetworkingInfo` needs to be fetched to get additional information about connecting to the device: - e.g. 
Local IP Address - This networking info can be cached to reduce network calls - MQTT is also the only way to get the device Map - Incoming and outgoing messages are decoded/encoded using the device `local_key` - Otherwise all commands may be performed locally. ## Local connection - We can use the `ip` from the `NetworkingInfo` to find the device - The local connection is preferred for improved latency and for reducing load on the cloud servers to avoid rate limiting. - Connections are made using a normal TCP socket on port `58867` - Incoming and outgoing messages are decoded/encoded using the device `local_key` - Messages received on the stream may be partially received, so we keep a running buffer as messages are incrementally decoded ## Design ### Current API Issues - Complex Inheritance Hierarchy: Multiple inheritance with classes like RoborockMqttClientV1 inheriting from both RoborockMqttClient and RoborockClientV1 - Callback-Heavy Design: Heavy reliance on callbacks and listeners in RoborockClientV1.on_message_received and the ListenerModel system - Version Fragmentation: Separate v1 and A01 APIs with different patterns and abstractions - Mixed Concerns: Classes handle both communication protocols (MQTT/local) and device-specific logic - Complex Caching: The AttributeCache system with RepeatableTask adds complexity - Manual Connection Management: Users need to manually set up both MQTT and local clients as shown in the README example ## Design Changes - Prefer a single unified client that handles both MQTT and local connections internally. - Home and device discovery (fetching home data and device setup) will be behind a single API. - Asyncio First: Everything should be asyncio as much as possible, with fewer callbacks. - The clients should work in terms of devices. We need to detect capabilities for each device and not expose details about API versions. - Reliability issues: The current Home Assistant integration has issues with reliability and needs to be simplified. It may be that there are bugs in the exception handling; it is also too heavy on the cloud APIs and could benefit from more seamless caching. ## Implementation Details - We don't really need to worry about backwards compatibility for the new set of APIs. - We'll have a `RoborockManager` responsible for managing the connections and getting devices. - Caching can be persisted to disk. The caller can implement the cache storage themselves, but we need to give them an API to do so (see the usage sketch below). - Users don't really choose between cloud vs local. However, we will want to allow the caller to know if it is using the local connection so we can show a warning.
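### Example usage (sketch)

A rough sketch of how the pieces above are intended to fit together, assuming a `UserData` object has already been obtained from the login flow. (In the current code the manager is the `DeviceManager` created via `create_device_manager`.) The import paths mirror the module layout under `roborock/devices/`, and `InMemoryCache` is just a stand-in for a caller-provided `Cache` implementation that persists to disk.

```python
import asyncio

from roborock.containers import UserData
from roborock.devices.cache import InMemoryCache
from roborock.devices.device_manager import create_device_manager, create_home_data_api


async def main(email: str, user_data: UserData) -> None:
    # Wrap the REST API used to fetch home data (devices, products, rooms).
    home_data_api = create_home_data_api(email, user_data)

    # Create the manager; this opens the MQTT session and discovers devices.
    manager = await create_device_manager(user_data, home_data_api, cache=InMemoryCache())
    try:
        for device in await manager.get_devices():
            print(device.name, device.duid, "local:", device.is_local_connected)
    finally:
        # Closes all device connections and the MQTT session.
        await manager.close()


# asyncio.run(main("user@example.com", user_data))
```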
Python-roborock-python-roborock-32df4f3/roborock/devices/__init__.py000066400000000000000000000002351507503702500257420ustar00rootroot00000000000000"""The devices module provides functionality to discover Roborock devices on the network.""" __all__ = [ "device", "device_manager", "cache", ] Python-roborock-python-roborock-32df4f3/roborock/devices/a01_channel.py000066400000000000000000000060441507503702500262600ustar00rootroot00000000000000"""Thin wrapper around the MQTT channel for Roborock A01 devices.""" import asyncio import logging from typing import Any, overload from roborock.exceptions import RoborockException from roborock.protocols.a01_protocol import ( decode_rpc_response, encode_mqtt_payload, ) from roborock.roborock_message import ( RoborockDyadDataProtocol, RoborockMessage, RoborockZeoProtocol, ) from .mqtt_channel import MqttChannel _LOGGER = logging.getLogger(__name__) _TIMEOUT = 10.0 # Both RoborockDyadDataProtocol and RoborockZeoProtocol have the same # value for ID_QUERY _ID_QUERY = int(RoborockDyadDataProtocol.ID_QUERY) @overload async def send_decoded_command( mqtt_channel: MqttChannel, params: dict[RoborockDyadDataProtocol, Any], ) -> dict[RoborockDyadDataProtocol, Any]: ... @overload async def send_decoded_command( mqtt_channel: MqttChannel, params: dict[RoborockZeoProtocol, Any], ) -> dict[RoborockZeoProtocol, Any]: ... async def send_decoded_command( mqtt_channel: MqttChannel, params: dict[RoborockDyadDataProtocol, Any] | dict[RoborockZeoProtocol, Any], ) -> dict[RoborockDyadDataProtocol, Any] | dict[RoborockZeoProtocol, Any]: """Send a command on the MQTT channel and get a decoded response.""" _LOGGER.debug("Sending MQTT command: %s", params) roborock_message = encode_mqtt_payload(params) # For commands that set values: send the command and do not # block waiting for a response. Queries are handled below. param_values = {int(k): v for k, v in params.items()} if not (query_values := param_values.get(_ID_QUERY)): await mqtt_channel.publish(roborock_message) return {} # Merge any results together than contain the requested data. This # does not use a future since it needs to merge results across responses. # This could be simplified if we can assume there is a single response. 
finished = asyncio.Event() result: dict[int, Any] = {} def find_response(response_message: RoborockMessage) -> None: """Handle incoming messages and resolve the future.""" try: decoded = decode_rpc_response(response_message) except RoborockException as ex: _LOGGER.info("Failed to decode a01 message: %s: %s", response_message, ex) return for key, value in decoded.items(): if key in query_values: result[key] = value if len(result) != len(query_values): _LOGGER.debug("Incomplete query response: %s != %s", result, query_values) return _LOGGER.debug("Received query response: %s", result) if not finished.is_set(): finished.set() unsub = await mqtt_channel.subscribe(find_response) try: await mqtt_channel.publish(roborock_message) try: await asyncio.wait_for(finished.wait(), timeout=_TIMEOUT) except TimeoutError as ex: raise RoborockException(f"Command timed out after {_TIMEOUT}s") from ex finally: unsub() return result # type: ignore[return-value] Python-roborock-python-roborock-32df4f3/roborock/devices/b01_channel.py000066400000000000000000000012571507503702500262620ustar00rootroot00000000000000"""Thin wrapper around the MQTT channel for Roborock B01 devices.""" from __future__ import annotations import logging from roborock.protocols.b01_protocol import ( CommandType, ParamsType, encode_mqtt_payload, ) from .mqtt_channel import MqttChannel _LOGGER = logging.getLogger(__name__) async def send_decoded_command( mqtt_channel: MqttChannel, dps: int, command: CommandType, params: ParamsType, ) -> None: """Send a command on the MQTT channel and get a decoded response.""" _LOGGER.debug("Sending MQTT command: %s", params) roborock_message = encode_mqtt_payload(dps, command, params) await mqtt_channel.publish(roborock_message) Python-roborock-python-roborock-32df4f3/roborock/devices/cache.py000066400000000000000000000033351507503702500252520ustar00rootroot00000000000000"""This module provides caching functionality for the Roborock device management system. This module defines a cache interface that you may use to cache device information to avoid unnecessary API calls. Callers may implement this interface to provide their own caching mechanism. """ from dataclasses import dataclass, field from typing import Protocol from roborock.containers import CombinedMapInfo, HomeData, NetworkInfo from roborock.device_features import DeviceFeatures @dataclass class CacheData: """Data structure for caching device information.""" home_data: HomeData | None = None """Home data containing device and product information.""" network_info: dict[str, NetworkInfo] = field(default_factory=dict) """Network information indexed by device DUID.""" home_cache: dict[int, CombinedMapInfo] = field(default_factory=dict) """Home cache information indexed by map_flag.""" device_features: DeviceFeatures | None = None """Device features information.""" class Cache(Protocol): """Protocol for a cache that can store and retrieve values.""" async def get(self) -> CacheData: """Get cached value.""" ... async def set(self, value: CacheData) -> None: """Set value in the cache.""" ... 
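# Example (illustrative sketch only, not part of the library API): a
# caller-implemented cache that persists CacheData to disk. The file path
# handling and serialization strategy are left to the caller; the class and
# method bodies below are assumptions for the sketch.
#
#     class JsonFileCache:
#         """Cache implementation that stores CacheData in a file on disk."""
#
#         def __init__(self, path: pathlib.Path) -> None:
#             self._path = path
#
#         async def get(self) -> CacheData:
#             if not self._path.exists():
#                 return CacheData()
#             # Deserialize CacheData from the file contents here.
#             ...
#
#         async def set(self, value: CacheData) -> None:
#             # Serialize CacheData and write it to the file here.
#             ...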
class InMemoryCache(Cache): """In-memory cache implementation.""" def __init__(self): self._data = CacheData() async def get(self) -> CacheData: return self._data async def set(self, value: CacheData) -> None: self._data = value class NoCache(Cache): """No-op cache implementation.""" async def get(self) -> CacheData: return CacheData() async def set(self, value: CacheData) -> None: pass Python-roborock-python-roborock-32df4f3/roborock/devices/channel.py000066400000000000000000000017251507503702500256200ustar00rootroot00000000000000"""Low-level interface for connections to Roborock devices.""" import logging from collections.abc import Callable from typing import Protocol from roborock.roborock_message import RoborockMessage _LOGGER = logging.getLogger(__name__) class Channel(Protocol): """A generic channel for establishing a connection with a Roborock device. Individual channel implementations have their own methods for speaking to the device that hide some of the protocol specific complexity, but they are still specialized for the device type and protocol. """ @property def is_connected(self) -> bool: """Return true if the channel is connected.""" ... @property def is_local_connected(self) -> bool: """Return true if the channel is connected locally.""" ... async def subscribe(self, callback: Callable[[RoborockMessage], None]) -> Callable[[], None]: """Subscribe to messages from the device.""" ... Python-roborock-python-roborock-32df4f3/roborock/devices/device.py000066400000000000000000000071461507503702500254520ustar00rootroot00000000000000"""Module for Roborock devices. This interface is experimental and subject to breaking changes without notice until the API is stable. """ import logging from abc import ABC from collections.abc import Callable from roborock.containers import HomeDataDevice, HomeDataProduct from roborock.roborock_message import RoborockMessage from .channel import Channel from .traits import Trait from .traits.traits_mixin import TraitsMixin _LOGGER = logging.getLogger(__name__) __all__ = [ "RoborockDevice", ] class RoborockDevice(ABC, TraitsMixin): """A Roborock device discovered on the user's account. The device owns a communication channel that hides the protocol specific complexity for the device type and protocol in use. Attributes of the device are exposed through traits, which are mixed in through the TraitsMixin class. Traits are optional and may not be present on all devices. """ def __init__( self, device_info: HomeDataDevice, product: HomeDataProduct, channel: Channel, trait: Trait, ) -> None: """Initialize the RoborockDevice. The device takes ownership of the channel for communication with the device. Use `connect()` to establish the connection, which will set up the appropriate protocol channel. Use `close()` to clean up all connections. """ TraitsMixin.__init__(self, trait) self._duid = device_info.duid self._name = device_info.name self._device_info = device_info self._product = product self._channel = channel self._unsub: Callable[[], None] | None = None @property def duid(self) -> str: """Return the device unique identifier (DUID).""" return self._duid @property def name(self) -> str: """Return the device name.""" return self._name @property def device_info(self) -> HomeDataDevice: """Return the device information. This includes information specific to the device like its identifier or firmware version.
""" return self._device_info @property def product(self) -> HomeDataProduct: """Return the device product name. This returns product level information such as the model name. """ return self._product @property def is_connected(self) -> bool: """Return whether the device is connected.""" return self._channel.is_connected @property def is_local_connected(self) -> bool: """Return whether the device is connected locally. This can be used to determine if the device is reachable over a local network connection, as opposed to a cloud connection. This is useful for adjusting behavior like polling frequency. """ return self._channel.is_local_connected async def connect(self) -> None: """Connect to the device using the appropriate protocol channel.""" if self._unsub: raise ValueError("Already connected to the device") self._unsub = await self._channel.subscribe(self._on_message) _LOGGER.info("Connected to V1 device %s", self.name) async def close(self) -> None: """Close all connections to the device.""" if self._unsub: self._unsub() self._unsub = None def _on_message(self, message: RoborockMessage) -> None: """Handle incoming messages from the device.""" _LOGGER.debug("Received message from device: %s", message) Python-roborock-python-roborock-32df4f3/roborock/devices/device_manager.py000066400000000000000000000143451507503702500271430ustar00rootroot00000000000000"""Module for discovering Roborock devices.""" import asyncio import enum import logging from collections.abc import Awaitable, Callable import aiohttp from roborock.containers import ( HomeData, HomeDataDevice, HomeDataProduct, UserData, ) from roborock.devices.device import RoborockDevice from roborock.map.map_parser import MapParserConfig from roborock.mqtt.roborock_session import create_lazy_mqtt_session from roborock.mqtt.session import MqttSession from roborock.protocol import create_mqtt_params from roborock.web_api import RoborockApiClient from .cache import Cache, NoCache from .channel import Channel from .mqtt_channel import create_mqtt_channel from .traits import Trait, a01, b01, v1 from .v1_channel import create_v1_channel _LOGGER = logging.getLogger(__name__) __all__ = [ "create_device_manager", "create_home_data_api", "DeviceManager", ] HomeDataApi = Callable[[], Awaitable[HomeData]] DeviceCreator = Callable[[HomeData, HomeDataDevice, HomeDataProduct], RoborockDevice] class DeviceVersion(enum.StrEnum): """Enum for device versions.""" V1 = "1.0" A01 = "A01" B01 = "B01" UNKNOWN = "unknown" class DeviceManager: """Central manager for Roborock device discovery and connections.""" def __init__( self, home_data_api: HomeDataApi, device_creator: DeviceCreator, mqtt_session: MqttSession, cache: Cache, ) -> None: """Initialize the DeviceManager with user data and optional cache storage. This takes ownership of the MQTT session and will close it when the manager is closed. 
""" self._home_data_api = home_data_api self._cache = cache self._device_creator = device_creator self._devices: dict[str, RoborockDevice] = {} self._mqtt_session = mqtt_session async def discover_devices(self) -> list[RoborockDevice]: """Discover all devices for the logged-in user.""" cache_data = await self._cache.get() if not cache_data.home_data: _LOGGER.debug("No cached home data found, fetching from API") cache_data.home_data = await self._home_data_api() await self._cache.set(cache_data) home_data = cache_data.home_data device_products = home_data.device_products _LOGGER.debug("Discovered %d devices %s", len(device_products), home_data) # These are connected serially to avoid overwhelming the MQTT broker new_devices = {} for duid, (device, product) in device_products.items(): if duid in self._devices: continue new_device = self._device_creator(home_data, device, product) await new_device.connect() new_devices[duid] = new_device self._devices.update(new_devices) return list(self._devices.values()) async def get_device(self, duid: str) -> RoborockDevice | None: """Get a specific device by DUID.""" return self._devices.get(duid) async def get_devices(self) -> list[RoborockDevice]: """Get all discovered devices.""" return list(self._devices.values()) async def close(self) -> None: """Close all MQTT connections and clean up resources.""" tasks = [device.close() for device in self._devices.values()] self._devices.clear() tasks.append(self._mqtt_session.close()) await asyncio.gather(*tasks) def create_home_data_api( email: str, user_data: UserData, base_url: str | None = None, session: aiohttp.ClientSession | None = None ) -> HomeDataApi: """Create a home data API wrapper. This function creates a wrapper around the Roborock API client to fetch home data for the user. """ # Note: This will auto discover the API base URL. This can be improved # by caching this next to `UserData` if needed to avoid unnecessary API calls. client = RoborockApiClient(username=email, base_url=base_url, session=session) return create_home_data_from_api_client(client, user_data) def create_home_data_from_api_client(client: RoborockApiClient, user_data: UserData) -> HomeDataApi: """Create a home data API wrapper from an existing API client.""" async def home_data_api() -> HomeData: return await client.get_home_data_v3(user_data) return home_data_api async def create_device_manager( user_data: UserData, home_data_api: HomeDataApi, cache: Cache | None = None, map_parser_config: MapParserConfig | None = None, ) -> DeviceManager: """Convenience function to create and initialize a DeviceManager. The Home Data is fetched using the provided home_data_api callable which is exposed this way to allow for swapping out other implementations to include caching or other optimizations. 
""" if cache is None: cache = NoCache() mqtt_params = create_mqtt_params(user_data.rriot) mqtt_session = await create_lazy_mqtt_session(mqtt_params) def device_creator(home_data: HomeData, device: HomeDataDevice, product: HomeDataProduct) -> RoborockDevice: channel: Channel trait: Trait match device.pv: case DeviceVersion.V1: channel = create_v1_channel(user_data, mqtt_params, mqtt_session, device, cache) trait = v1.create( product, home_data, channel.rpc_channel, channel.mqtt_rpc_channel, channel.map_rpc_channel, cache, map_parser_config=map_parser_config, ) case DeviceVersion.A01: channel = create_mqtt_channel(user_data, mqtt_params, mqtt_session, device) trait = a01.create(product, channel) case DeviceVersion.B01: channel = create_mqtt_channel(user_data, mqtt_params, mqtt_session, device) trait = b01.create(channel) case _: raise NotImplementedError(f"Device {device.name} has unsupported version {device.pv}") return RoborockDevice(device, product, channel, trait) manager = DeviceManager(home_data_api, device_creator, mqtt_session=mqtt_session, cache=cache) await manager.discover_devices() return manager Python-roborock-python-roborock-32df4f3/roborock/devices/local_channel.py000066400000000000000000000121611507503702500267660ustar00rootroot00000000000000"""Module for communicating with Roborock devices over a local network.""" import asyncio import logging from collections.abc import Callable from dataclasses import dataclass from roborock.callbacks import CallbackList, decoder_callback from roborock.exceptions import RoborockConnectionException, RoborockException from roborock.protocol import Decoder, Encoder, create_local_decoder, create_local_encoder from roborock.roborock_message import RoborockMessage from .channel import Channel _LOGGER = logging.getLogger(__name__) _PORT = 58867 @dataclass class _LocalProtocol(asyncio.Protocol): """Callbacks for the Roborock local client transport.""" messages_cb: Callable[[bytes], None] connection_lost_cb: Callable[[Exception | None], None] def data_received(self, data: bytes) -> None: """Called when data is received from the transport.""" self.messages_cb(data) def connection_lost(self, exc: Exception | None) -> None: """Called when the transport connection is lost.""" self.connection_lost_cb(exc) class LocalChannel(Channel): """Simple RPC-style channel for communicating with a device over a local network. Handles request/response correlation and timeouts, but leaves message format most parsing to higher-level components. 
""" def __init__(self, host: str, local_key: str): self._host = host self._transport: asyncio.Transport | None = None self._protocol: _LocalProtocol | None = None self._subscribers: CallbackList[RoborockMessage] = CallbackList(_LOGGER) self._is_connected = False self._decoder: Decoder = create_local_decoder(local_key) self._encoder: Encoder = create_local_encoder(local_key) # Callback to decode messages and dispatch to subscribers self._data_received: Callable[[bytes], None] = decoder_callback(self._decoder, self._subscribers, _LOGGER) @property def is_connected(self) -> bool: """Check if the channel is currently connected.""" return self._is_connected @property def is_local_connected(self) -> bool: """Check if the channel is currently connected locally.""" return self._is_connected async def connect(self) -> None: """Connect to the device.""" if self._is_connected: _LOGGER.warning("Already connected") return _LOGGER.debug("Connecting to %s:%s", self._host, _PORT) loop = asyncio.get_running_loop() protocol = _LocalProtocol(self._data_received, self._connection_lost) try: self._transport, self._protocol = await loop.create_connection(lambda: protocol, self._host, _PORT) self._is_connected = True except OSError as e: raise RoborockConnectionException(f"Failed to connect to {self._host}:{_PORT}") from e def close(self) -> None: """Disconnect from the device.""" if self._transport: self._transport.close() else: _LOGGER.warning("Close called but transport is already None") self._transport = None self._is_connected = False def _connection_lost(self, exc: Exception | None) -> None: """Handle connection loss.""" _LOGGER.warning("Connection lost to %s", self._host, exc_info=exc) self._transport = None self._is_connected = False async def subscribe(self, callback: Callable[[RoborockMessage], None]) -> Callable[[], None]: """Subscribe to all messages from the device.""" return self._subscribers.add_callback(callback) async def publish(self, message: RoborockMessage) -> None: """Send a command message. The caller is responsible for associating the message with its response. """ if not self._transport or not self._is_connected: raise RoborockConnectionException("Not connected to device") try: encoded_msg = self._encoder(message) except Exception as err: _LOGGER.exception("Error encoding MQTT message: %s", err) raise RoborockException(f"Failed to encode MQTT message: {err}") from err try: self._transport.write(encoded_msg) except Exception as err: logging.exception("Uncaught error sending command") raise RoborockException(f"Failed to send message: {message}") from err # This module provides a factory function to create LocalChannel instances. # # TODO: Make a separate LocalSession and use it to manage retries with the host, # similar to how MqttSession works. For now this is a simple factory function # for creating channels. LocalSession = Callable[[str], LocalChannel] def create_local_session(local_key: str) -> LocalSession: """Creates a local session which can create local channels. This plays a role similar to the MqttSession but is really just a factory for creating LocalChannel instances with the same local key. 
""" def create_local_channel(host: str) -> LocalChannel: """Create a LocalChannel instance for the given host.""" return LocalChannel(host, local_key) return create_local_channel Python-roborock-python-roborock-32df4f3/roborock/devices/mqtt_channel.py000066400000000000000000000066651507503702500266750ustar00rootroot00000000000000"""Modules for communicating with specific Roborock devices over MQTT.""" import logging from collections.abc import Callable from roborock.callbacks import decoder_callback from roborock.containers import HomeDataDevice, RRiot, UserData from roborock.exceptions import RoborockException from roborock.mqtt.session import MqttParams, MqttSession, MqttSessionException from roborock.protocol import create_mqtt_decoder, create_mqtt_encoder from roborock.roborock_message import RoborockMessage from .channel import Channel _LOGGER = logging.getLogger(__name__) class MqttChannel(Channel): """Simple RPC-style channel for communicating with a device over MQTT. Handles request/response correlation and timeouts, but leaves message format most parsing to higher-level components. """ def __init__(self, mqtt_session: MqttSession, duid: str, local_key: str, rriot: RRiot, mqtt_params: MqttParams): self._mqtt_session = mqtt_session self._duid = duid self._local_key = local_key self._rriot = rriot self._mqtt_params = mqtt_params self._decoder = create_mqtt_decoder(local_key) self._encoder = create_mqtt_encoder(local_key) @property def is_connected(self) -> bool: """Return true if the channel is connected. This passes through the underlying MQTT session's connected state. """ return self._mqtt_session.connected @property def is_local_connected(self) -> bool: """Return true if the channel is connected locally.""" return False @property def _publish_topic(self) -> str: """Topic to send commands to the device.""" return f"rr/m/i/{self._rriot.u}/{self._mqtt_params.username}/{self._duid}" @property def _subscribe_topic(self) -> str: """Topic to receive responses from the device.""" return f"rr/m/o/{self._rriot.u}/{self._mqtt_params.username}/{self._duid}" async def subscribe(self, callback: Callable[[RoborockMessage], None]) -> Callable[[], None]: """Subscribe to the device's response topic. The callback will be called with the message payload when a message is received. Returns a callable that can be used to unsubscribe from the topic. """ dispatch = decoder_callback(self._decoder, callback, _LOGGER) return await self._mqtt_session.subscribe(self._subscribe_topic, dispatch) async def publish(self, message: RoborockMessage) -> None: """Publish a command message. The caller is responsible for handling any responses and associating them with the incoming request. 
""" try: encoded_msg = self._encoder(message) except Exception as e: _LOGGER.exception("Error encoding MQTT message: %s", e) raise RoborockException(f"Failed to encode MQTT message: {e}") from e try: return await self._mqtt_session.publish(self._publish_topic, encoded_msg) except MqttSessionException as e: _LOGGER.exception("Error publishing MQTT message: %s", e) raise RoborockException(f"Failed to publish MQTT message: {e}") from e def create_mqtt_channel( user_data: UserData, mqtt_params: MqttParams, mqtt_session: MqttSession, device: HomeDataDevice ) -> MqttChannel: """Create a V1Channel for the given device.""" return MqttChannel(mqtt_session, device.duid, device.local_key, user_data.rriot, mqtt_params) Python-roborock-python-roborock-32df4f3/roborock/devices/traits/000077500000000000000000000000001507503702500251375ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/roborock/devices/traits/__init__.py000066400000000000000000000002761507503702500272550ustar00rootroot00000000000000"""Module for device traits.""" from abc import ABC __all__ = [ "Trait", "traits_mixin", "v1", "a01", "b01", ] class Trait(ABC): """Base class for all traits.""" Python-roborock-python-roborock-32df4f3/roborock/devices/traits/a01/000077500000000000000000000000001507503702500255205ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/roborock/devices/traits/a01/__init__.py000066400000000000000000000045771507503702500276460ustar00rootroot00000000000000from typing import Any from roborock.containers import HomeDataProduct, RoborockCategory from roborock.devices.a01_channel import send_decoded_command from roborock.devices.mqtt_channel import MqttChannel from roborock.devices.traits import Trait from roborock.roborock_message import RoborockDyadDataProtocol, RoborockZeoProtocol __init__ = [ "DyadApi", "ZeoApi", ] class DyadApi(Trait): """API for interacting with Dyad devices.""" def __init__(self, channel: MqttChannel) -> None: """Initialize the Dyad API.""" self._channel = channel async def query_values(self, protocols: list[RoborockDyadDataProtocol]) -> dict[RoborockDyadDataProtocol, Any]: """Query the device for the values of the given Dyad protocols.""" params = {RoborockDyadDataProtocol.ID_QUERY: [int(p) for p in protocols]} return await send_decoded_command(self._channel, params) async def set_value(self, protocol: RoborockDyadDataProtocol, value: Any) -> dict[RoborockDyadDataProtocol, Any]: """Set a value for a specific protocol on the device.""" params = {protocol: value} return await send_decoded_command(self._channel, params) class ZeoApi(Trait): """API for interacting with Zeo devices.""" name = "zeo" def __init__(self, channel: MqttChannel) -> None: """Initialize the Zeo API.""" self._channel = channel async def query_values(self, protocols: list[RoborockZeoProtocol]) -> dict[RoborockZeoProtocol, Any]: """Query the device for the values of the given protocols.""" params = {RoborockZeoProtocol.ID_QUERY: [int(p) for p in protocols]} return await send_decoded_command(self._channel, params) async def set_value(self, protocol: RoborockZeoProtocol, value: Any) -> dict[RoborockZeoProtocol, Any]: """Set a value for a specific protocol on the device.""" params = {protocol: value} return await send_decoded_command(self._channel, params) def create(product: HomeDataProduct, mqtt_channel: MqttChannel) -> DyadApi | ZeoApi: """Create traits for A01 devices.""" match product.category: case RoborockCategory.WET_DRY_VAC: return DyadApi(mqtt_channel) case RoborockCategory.WASHING_MACHINE: 
return ZeoApi(mqtt_channel) case _: raise NotImplementedError(f"Unsupported category {product.category}") Python-roborock-python-roborock-32df4f3/roborock/devices/traits/b01/000077500000000000000000000000001507503702500255215ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/roborock/devices/traits/b01/__init__.py000066400000000000000000000017231507503702500276350ustar00rootroot00000000000000"""Traits for B01 devices.""" from roborock import RoborockB01Methods from roborock.devices.b01_channel import send_decoded_command from roborock.devices.mqtt_channel import MqttChannel from roborock.devices.traits import Trait from roborock.roborock_message import RoborockB01Props __init__ = [ "create_b01_traits", "PropertiesApi", ] class PropertiesApi(Trait): """API for interacting with B01 devices.""" def __init__(self, channel: MqttChannel) -> None: """Initialize the B01Props API.""" self._channel = channel async def query_values(self, props: list[RoborockB01Props]) -> None: """Query the device for the values of the given Dyad protocols.""" await send_decoded_command( self._channel, dps=10000, command=RoborockB01Methods.GET_PROP, params={"property": props} ) def create(channel: MqttChannel) -> PropertiesApi: """Create traits for B01 devices.""" return PropertiesApi(channel) Python-roborock-python-roborock-32df4f3/roborock/devices/traits/traits_mixin.py000066400000000000000000000036661507503702500302360ustar00rootroot00000000000000"""Holds device traits mixin and related code. This holds the TraitsMixin class, which is used to provide accessors for various device traits. Each trait is a class that encapsulates a specific set of functionality for a device, such as controlling a vacuum or a mop. The TraitsMixin holds traits across all protocol types. A trait is supported if it is non-None. """ from dataclasses import dataclass, fields from typing import get_args, get_origin from . import Trait, a01, b01, v1 __all__ = [ "TraitsMixin", ] @dataclass(init=False) class TraitsMixin: """Mixin to provide trait accessors.""" v1_properties: v1.PropertiesApi | None = None """V1 properties trait, if supported.""" dyad: a01.DyadApi | None = None """Dyad API, if supported.""" zeo: a01.ZeoApi | None = None """Zeo API, if supported.""" b01_properties: b01.PropertiesApi | None = None """B01 properties trait, if supported.""" def __init__(self, trait: Trait) -> None: """Initialize the TraitsMixin with the given trait. This will populate the appropriate trait attributes based on the types of the traits provided. 
""" for item in fields(self): trait_type = _get_trait_type(item) if trait_type is type(trait): setattr(self, item.name, trait) break def _get_trait_type(item) -> type[Trait]: """Get the trait type from a dataclass field.""" if get_origin(item.type) is None: raise ValueError(f"Trait {item.name} is not an optional type") if (args := get_args(item.type)) is None: raise ValueError(f"Trait {item.name} is not an optional type") if len(args) != 2 or args[1] is not type(None): raise ValueError(f"Trait {item.name} is not an optional type") trait_type = args[0] if not issubclass(trait_type, Trait): raise ValueError(f"Trait {item.name} is not a Trait subclass") return trait_type Python-roborock-python-roborock-32df4f3/roborock/devices/traits/v1/000077500000000000000000000000001507503702500254655ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/roborock/devices/traits/v1/__init__.py000066400000000000000000000145531507503702500276060ustar00rootroot00000000000000"""Create traits for V1 devices.""" import logging from dataclasses import dataclass, field, fields from typing import get_args from roborock.containers import HomeData, HomeDataProduct from roborock.devices.cache import Cache from roborock.devices.traits import Trait from roborock.devices.v1_rpc_channel import V1RpcChannel from roborock.map.map_parser import MapParserConfig from .child_lock import ChildLockTrait from .clean_summary import CleanSummaryTrait from .command import CommandTrait from .common import V1TraitMixin from .consumeable import ConsumableTrait from .device_features import DeviceFeaturesTrait from .do_not_disturb import DoNotDisturbTrait from .flow_led_status import FlowLedStatusTrait from .home import HomeTrait from .led_status import LedStatusTrait from .map_content import MapContentTrait from .maps import MapsTrait from .rooms import RoomsTrait from .status import StatusTrait from .valley_electricity_timer import ValleyElectricityTimerTrait from .volume import SoundVolumeTrait _LOGGER = logging.getLogger(__name__) __all__ = [ "create", "PropertiesApi", "StatusTrait", "DoNotDisturbTrait", "CleanSummaryTrait", "SoundVolumeTrait", "MapsTrait", "MapContentTrait", "ConsumableTrait", "HomeTrait", "DeviceFeaturesTrait", "CommandTrait", "ChildLockTrait", "FlowLedStatusTrait", "LedStatusTrait", "ValleyElectricityTimerTrait", ] @dataclass class PropertiesApi(Trait): """Common properties for V1 devices. This class holds all the traits that are common across all V1 devices. 
""" # All v1 devices have these traits status: StatusTrait command: CommandTrait dnd: DoNotDisturbTrait clean_summary: CleanSummaryTrait sound_volume: SoundVolumeTrait rooms: RoomsTrait maps: MapsTrait map_content: MapContentTrait consumables: ConsumableTrait home: HomeTrait device_features: DeviceFeaturesTrait # Optional features that may not be supported on all devices child_lock: ChildLockTrait | None = None led_status: LedStatusTrait | None = None flow_led_status: FlowLedStatusTrait | None = None valley_electricity_timer: ValleyElectricityTimerTrait | None = None def __init__( self, product: HomeDataProduct, home_data: HomeData, rpc_channel: V1RpcChannel, mqtt_rpc_channel: V1RpcChannel, map_rpc_channel: V1RpcChannel, cache: Cache, map_parser_config: MapParserConfig | None = None, ) -> None: """Initialize the V1TraitProps.""" self._rpc_channel = rpc_channel self._mqtt_rpc_channel = mqtt_rpc_channel self._map_rpc_channel = map_rpc_channel self.status = StatusTrait(product) self.rooms = RoomsTrait(home_data) self.maps = MapsTrait(self.status) self.map_content = MapContentTrait(map_parser_config) self.home = HomeTrait(self.status, self.maps, self.rooms, cache) self.device_features = DeviceFeaturesTrait(product.product_nickname, cache) # Dynamically create any traits that need to be populated for item in fields(self): if (trait := getattr(self, item.name, None)) is None: # We exclude optional features and them via discover_features if (union_args := get_args(item.type)) is None or len(union_args) > 0: continue _LOGGER.debug("Initializing trait %s", item.name) trait = item.type() setattr(self, item.name, trait) # This is a hack to allow setting the rpc_channel on all traits. This is # used so we can preserve the dataclass behavior when the values in the # traits are updated, but still want to allow them to have a reference # to the rpc channel for sending commands. 
trait._rpc_channel = self._get_rpc_channel(trait) def _get_rpc_channel(self, trait: V1TraitMixin) -> V1RpcChannel: # The decorator `@common.mqtt_rpc_channel` means that the trait needs # to use the mqtt_rpc_channel (cloud only) instead of the rpc_channel (adaptive) if hasattr(trait, "mqtt_rpc_channel"): return self._mqtt_rpc_channel elif hasattr(trait, "map_rpc_channel"): return self._map_rpc_channel else: return self._rpc_channel async def discover_features(self) -> None: """Populate any supported traits that were not initialized in __init__.""" await self.device_features.refresh() for item in fields(self): if (trait := getattr(self, item.name, None)) is not None: continue if (union_args := get_args(item.type)) is None: raise ValueError(f"Unexpected non-union type for trait {item.name}: {item.type}") if len(union_args) != 2 or type(None) not in union_args: raise ValueError(f"Unexpected non-optional type for trait {item.name}: {item.type}") # Union args may not be in declared order item_type = union_args[0] if union_args[1] is type(None) else union_args[1] trait = item_type() if not hasattr(trait, "requires_feature"): _LOGGER.debug("Trait missing required feature %s", item.name) continue _LOGGER.debug("Checking for feature %s", trait.requires_feature) is_supported = getattr(self.device_features, trait.requires_feature) # _LOGGER.debug("Device features: %s", self.device_features) if is_supported is None: raise ValueError(f"Device feature '{trait.requires_feature}' on trait '{item.name}' is unknown") if not is_supported: _LOGGER.debug("Disabling optional feature trait %s", item.name) continue _LOGGER.debug("Enabling optional feature trait %s", item.name) setattr(self, item.name, trait) trait._rpc_channel = self._get_rpc_channel(trait) def create( product: HomeDataProduct, home_data: HomeData, rpc_channel: V1RpcChannel, mqtt_rpc_channel: V1RpcChannel, map_rpc_channel: V1RpcChannel, cache: Cache, map_parser_config: MapParserConfig | None = None, ) -> PropertiesApi: """Create traits for V1 devices.""" return PropertiesApi(product, home_data, rpc_channel, mqtt_rpc_channel, map_rpc_channel, cache, map_parser_config) Python-roborock-python-roborock-32df4f3/roborock/devices/traits/v1/child_lock.py000066400000000000000000000016601507503702500301350ustar00rootroot00000000000000from roborock.containers import ChildLockStatus from roborock.devices.traits.v1 import common from roborock.roborock_typing import RoborockCommand _STATUS_PARAM = "lock_status" class ChildLockTrait(ChildLockStatus, common.V1TraitMixin, common.RoborockSwitchBase): """Trait for controlling the child lock of a Roborock device.""" command = RoborockCommand.GET_CHILD_LOCK_STATUS requires_feature = "is_set_child_supported" @property def is_on(self) -> bool: """Return whether the child lock is enabled.""" return self.lock_status == 1 async def enable(self) -> None: """Enable the child lock.""" await self.rpc_channel.send_command(RoborockCommand.SET_CHILD_LOCK_STATUS, params={_STATUS_PARAM: 1}) async def disable(self) -> None: """Disable the child lock.""" await self.rpc_channel.send_command(RoborockCommand.SET_CHILD_LOCK_STATUS, params={_STATUS_PARAM: 0}) Python-roborock-python-roborock-32df4f3/roborock/devices/traits/v1/clean_summary.py000066400000000000000000000021221507503702500306730ustar00rootroot00000000000000from typing import Self from roborock.containers import CleanSummary from roborock.devices.traits.v1 import common from roborock.roborock_typing import RoborockCommand from roborock.util import unpack_list class 
CleanSummaryTrait(CleanSummary, common.V1TraitMixin): """Trait for managing the clean summary of Roborock devices.""" command = RoborockCommand.GET_CLEAN_SUMMARY @classmethod def _parse_type_response(cls, response: common.V1ResponseData) -> Self: """Parse the response from the device into a CleanSummary.""" if isinstance(response, dict): return cls.from_dict(response) elif isinstance(response, list): clean_time, clean_area, clean_count, records = unpack_list(response, 4) return cls( clean_time=clean_time, clean_area=clean_area, clean_count=clean_count, records=records, ) elif isinstance(response, int): return cls(clean_time=response) raise ValueError(f"Unexpected clean summary format: {response!r}") Python-roborock-python-roborock-32df4f3/roborock/devices/traits/v1/command.py000066400000000000000000000013051507503702500274540ustar00rootroot00000000000000from typing import Any from roborock import RoborockCommand class CommandTrait: """Trait for sending commands to Roborock devices.""" def __post_init__(self) -> None: """Post-initialization to set up the RPC channel. This is called automatically after the dataclass is initialized by the device setup code. """ self._rpc_channel = None async def send(self, command: RoborockCommand | str, params: dict[str, Any] | None = None) -> Any: """Send a command to the device.""" if not self._rpc_channel: raise ValueError("Device trait in invalid state") return await self._rpc_channel.send_command(command, params=params) Python-roborock-python-roborock-32df4f3/roborock/devices/traits/v1/common.py000066400000000000000000000136241507503702500273350ustar00rootroot00000000000000"""Module for Roborock V1 devices common trait commands. This is an internal library and should not be used directly by consumers. """ import logging from abc import ABC, abstractmethod from dataclasses import dataclass, fields from typing import ClassVar, Self from roborock.containers import RoborockBase from roborock.devices.v1_rpc_channel import V1RpcChannel from roborock.roborock_typing import RoborockCommand _LOGGER = logging.getLogger(__name__) V1ResponseData = dict | list | int | str @dataclass class V1TraitMixin(ABC): """Base model that supports v1 traits. This class provides functioanlity for parsing responses from V1 devices into dataclass instances. It also provides a reference to the V1RpcChannel used to communicate with the device to execute commands. Each trait subclass must define a class variable `command` that specifies the RoborockCommand used to fetch the trait data from the device. The `refresh()` method can be called to update the contents of the trait data from the device. A trait can also support additional commands for updating state associated with the trait. The traits typically subclass RoborockBase to provide serialization and deserialization functionality, but this is not strictly required. """ command: ClassVar[RoborockCommand] @classmethod def _parse_type_response(cls, response: V1ResponseData) -> RoborockBase: """Parse the response from the device into a a RoborockBase. Subclasses should override this method to implement custom parsing logic as needed. 
""" if not issubclass(cls, RoborockBase): raise NotImplementedError(f"Trait {cls} does not implement RoborockBase") # Subclasses can override to implement custom parsing logic if isinstance(response, list): response = response[0] if not isinstance(response, dict): raise ValueError(f"Unexpected {cls} response format: {response!r}") return cls.from_dict(response) def _parse_response(self, response: V1ResponseData) -> RoborockBase: """Parse the response from the device into a a RoborockBase. This is used by subclasses that want to override the class behavior with instance-specific data. """ return self._parse_type_response(response) def __post_init__(self) -> None: """Post-initialization to set up the RPC channel. This is called automatically after the dataclass is initialized by the device setup code. """ self._rpc_channel = None @property def rpc_channel(self) -> V1RpcChannel: """Helper for executing commands, used internally by the trait""" if not self._rpc_channel: raise ValueError("Device trait in invalid state") return self._rpc_channel async def refresh(self) -> Self: """Refresh the contents of this trait.""" response = await self.rpc_channel.send_command(self.command) new_data = self._parse_response(response) if not isinstance(new_data, RoborockBase): raise ValueError(f"Internal error, unexpected response type: {new_data!r}") self._update_trait_values(new_data) return self def _update_trait_values(self, new_data: RoborockBase) -> None: """Update the values of this trait from another instance.""" for field in fields(new_data): new_value = getattr(new_data, field.name, None) setattr(self, field.name, new_value) def _get_value_field(clazz: type[V1TraitMixin]) -> str: """Get the name of the field marked as the main value of the RoborockValueBase.""" value_fields = [field.name for field in fields(clazz) if field.metadata.get("roborock_value", False)] if len(value_fields) != 1: raise ValueError( f"RoborockValueBase subclass {clazz} must have exactly one field marked as roborock_value, " f" but found: {value_fields}" ) return value_fields[0] @dataclass(init=False, kw_only=True) class RoborockValueBase(V1TraitMixin, RoborockBase): """Base class for traits that represent a single value. This class is intended to be subclassed by traits that represent a single value, such as volume or brightness. The subclass should define a single field with the metadata `roborock_value=True` to indicate which field represents the main value of the trait. """ @classmethod def _parse_response(cls, response: V1ResponseData) -> Self: """Parse the response from the device into a RoborockValueBase.""" if isinstance(response, list): response = response[0] if not isinstance(response, int): raise ValueError(f"Unexpected response format: {response!r}") value_field = _get_value_field(cls) return cls(**{value_field: response}) class RoborockSwitchBase(ABC): """Base class for traits that represent a boolean switch.""" @property @abstractmethod def is_on(self) -> bool: """Return whether the switch is on.""" @abstractmethod async def enable(self) -> None: """Enable the switch.""" @abstractmethod async def disable(self) -> None: """Disable the switch.""" def mqtt_rpc_channel(cls): """Decorator to mark a function as cloud only. Normally a trait uses an adaptive rpc channel that can use either local or cloud communication depending on what is available. This will force the trait to always use the cloud rpc channel. 
""" def wrapper(*args, **kwargs): return cls(*args, **kwargs) cls.mqtt_rpc_channel = True # type: ignore[attr-defined] return wrapper def map_rpc_channel(cls): """Decorator to mark a function as cloud only using the map rpc format.""" def wrapper(*args, **kwargs): return cls(*args, **kwargs) cls.map_rpc_channel = True # type: ignore[attr-defined] return wrapper Python-roborock-python-roborock-32df4f3/roborock/devices/traits/v1/consumeable.py000066400000000000000000000027431507503702500303420ustar00rootroot00000000000000"""Trait for managing consumable attributes. A consumable attribute is one that is expected to be replaced or refilled periodically, such as filters, brushes, etc. """ from enum import StrEnum from typing import Self from roborock.containers import Consumable from roborock.devices.traits.v1 import common from roborock.roborock_typing import RoborockCommand __all__ = [ "ConsumableTrait", ] class ConsumableAttribute(StrEnum): """Enum for consumable attributes.""" SENSOR_DIRTY_TIME = "sensor_dirty_time" FILTER_WORK_TIME = "filter_work_time" SIDE_BRUSH_WORK_TIME = "side_brush_work_time" MAIN_BRUSH_WORK_TIME = "main_brush_work_time" @classmethod def from_str(cls, value: str) -> Self: """Create a ConsumableAttribute from a string value.""" for member in cls: if member.value == value: return member raise ValueError(f"Unknown ConsumableAttribute: {value}") class ConsumableTrait(Consumable, common.V1TraitMixin): """Trait for managing consumable attributes on Roborock devices. After the first refresh, you can tell what consumables are supported by checking which attributes are not None. """ command = RoborockCommand.GET_CONSUMABLE async def reset_consumable(self, consumable: ConsumableAttribute) -> None: """Reset a specific consumable attribute on the device.""" await self.rpc_channel.send_command(RoborockCommand.RESET_CONSUMABLE, params=[consumable.value]) Python-roborock-python-roborock-32df4f3/roborock/devices/traits/v1/device_features.py000066400000000000000000000037621507503702500312040ustar00rootroot00000000000000from typing import Self from roborock import AppInitStatus, RoborockProductNickname from roborock.device_features import DeviceFeatures from roborock.devices.cache import Cache from roborock.devices.traits.v1 import common from roborock.roborock_typing import RoborockCommand class DeviceFeaturesTrait(DeviceFeatures, common.V1TraitMixin): """Trait for managing Do Not Disturb (DND) settings on Roborock devices.""" command = RoborockCommand.APP_GET_INIT_STATUS def __init__(self, product_nickname: RoborockProductNickname, cache: Cache) -> None: """Initialize MapContentTrait.""" self._nickname = product_nickname self._cache = cache async def refresh(self) -> Self: """Refresh the contents of this trait. This will use cached device features if available since they do not change often and this avoids unnecessary RPC calls. This would only ever change with a firmware update, so caching is appropriate. 
""" cache_data = await self._cache.get() if cache_data.device_features is not None: self._update_trait_values(cache_data.device_features) return self # Save cached device features device_features = await super().refresh() cache_data.device_features = device_features await self._cache.set(cache_data) return device_features def _parse_response(self, response: common.V1ResponseData) -> DeviceFeatures: """Parse the response from the device into a MapContentTrait instance.""" if not isinstance(response, list): raise ValueError(f"Unexpected AppInitStatus response format: {type(response)}") app_status = AppInitStatus.from_dict(response[0]) return DeviceFeatures.from_feature_flags( new_feature_info=app_status.new_feature_info, new_feature_info_str=app_status.new_feature_info_str, feature_info=app_status.feature_info, product_nickname=self._nickname, ) Python-roborock-python-roborock-32df4f3/roborock/devices/traits/v1/do_not_disturb.py000066400000000000000000000030711507503702500310560ustar00rootroot00000000000000from roborock.containers import DnDTimer from roborock.devices.traits.v1 import common from roborock.roborock_typing import RoborockCommand _ENABLED_PARAM = "enabled" class DoNotDisturbTrait(DnDTimer, common.V1TraitMixin, common.RoborockSwitchBase): """Trait for managing Do Not Disturb (DND) settings on Roborock devices.""" command = RoborockCommand.GET_DND_TIMER @property def is_on(self) -> bool: """Return whether the Do Not Disturb (DND) timer is enabled.""" return self.enabled == 1 async def set_dnd_timer(self, dnd_timer: DnDTimer) -> None: """Set the Do Not Disturb (DND) timer settings of the device.""" await self.rpc_channel.send_command(RoborockCommand.SET_DND_TIMER, params=dnd_timer.as_dict()) async def clear_dnd_timer(self) -> None: """Clear the Do Not Disturb (DND) timer settings of the device.""" await self.rpc_channel.send_command(RoborockCommand.CLOSE_DND_TIMER) async def enable(self) -> None: """Set the Do Not Disturb (DND) timer settings of the device.""" await self.rpc_channel.send_command( RoborockCommand.SET_DND_TIMER, params={ **self.as_dict(), _ENABLED_PARAM: 1, }, ) async def disable(self) -> None: """Set the Do Not Disturb (DND) timer settings of the device.""" await self.rpc_channel.send_command( RoborockCommand.SET_DND_TIMER, params={ **self.as_dict(), _ENABLED_PARAM: 0, }, ) Python-roborock-python-roborock-32df4f3/roborock/devices/traits/v1/flow_led_status.py000066400000000000000000000016731507503702500312440ustar00rootroot00000000000000from roborock.containers import FlowLedStatus from roborock.devices.traits.v1 import common from roborock.roborock_typing import RoborockCommand _STATUS_PARAM = "status" class FlowLedStatusTrait(FlowLedStatus, common.V1TraitMixin, common.RoborockSwitchBase): """Trait for controlling the Flow LED status of a Roborock device.""" command = RoborockCommand.GET_FLOW_LED_STATUS requires_feature = "is_flow_led_setting_supported" @property def is_on(self) -> bool: """Return whether the Flow LED status is enabled.""" return self.status == 1 async def enable(self) -> None: """Enable the Flow LED status.""" await self.rpc_channel.send_command(RoborockCommand.SET_FLOW_LED_STATUS, params={_STATUS_PARAM: 1}) async def disable(self) -> None: """Disable the Flow LED status.""" await self.rpc_channel.send_command(RoborockCommand.SET_FLOW_LED_STATUS, params={_STATUS_PARAM: 0}) Python-roborock-python-roborock-32df4f3/roborock/devices/traits/v1/home.py000066400000000000000000000163111507503702500267710ustar00rootroot00000000000000"""Trait that 
represents a full view of the home layout. This trait combines information about maps and rooms to provide a comprehensive view of the home layout, including room names and their corresponding segment on the map. It also makes it straight forward to fetch the map image and data. This trait depends on the MapsTrait and RoomsTrait to gather the necessary information. It provides properties to access the current map, the list of rooms with names, and the map image and data. """ import asyncio import logging from typing import Self from roborock.code_mappings import RoborockStateCode from roborock.containers import CombinedMapInfo, RoborockBase from roborock.devices.cache import Cache from roborock.devices.traits.v1 import common from roborock.exceptions import RoborockDeviceBusy, RoborockException from roborock.roborock_typing import RoborockCommand from .maps import MapsTrait from .rooms import RoomsTrait from .status import StatusTrait _LOGGER = logging.getLogger(__name__) MAP_SLEEP = 3 class HomeTrait(RoborockBase, common.V1TraitMixin): """Trait that represents a full view of the home layout.""" command = RoborockCommand.GET_MAP_V1 # This is not used def __init__( self, status_trait: StatusTrait, maps_trait: MapsTrait, rooms_trait: RoomsTrait, cache: Cache, ) -> None: """Initialize the HomeTrait. We keep track of the MapsTrait and RoomsTrait to provide a comprehensive view of the home layout. This also depends on the StatusTrait to determine the current map. See comments in MapsTrait for details on that dependency. The cache is used to store discovered home data to minimize map switching and improve performance. The cache should be persisted by the caller to ensure data is retained across restarts. After initial discovery, only information for the current map is refreshed to keep data up to date without excessive map switching. However, as users switch rooms, the current map's data will be updated to ensure accuracy. """ super().__init__() self._status_trait = status_trait self._maps_trait = maps_trait self._rooms_trait = rooms_trait self._cache = cache self._home_cache: dict[int, CombinedMapInfo] | None = None async def discover_home(self) -> None: """Iterate through all maps to discover rooms and cache them. This will be a no-op if the home cache is already populated. This cannot be called while the device is cleaning, as that would interrupt the cleaning process. This will raise `RoborockDeviceBusy` if the device is currently cleaning. After discovery, the home cache will be populated and can be accessed via the `home_cache` property. 
""" cache_data = await self._cache.get() if cache_data.home_cache: _LOGGER.debug("Home cache already populated, skipping discovery") self._home_cache = cache_data.home_cache return if self._status_trait.state == RoborockStateCode.cleaning: raise RoborockDeviceBusy("Cannot perform home discovery while the device is cleaning") await self._maps_trait.refresh() if self._maps_trait.current_map_info is None: raise RoborockException("Cannot perform home discovery without current map info") home_cache = await self._build_home_cache() _LOGGER.debug("Home discovery complete, caching data for %d maps", len(home_cache)) await self._update_home_cache(home_cache) async def _refresh_map_data(self, map_info) -> CombinedMapInfo: """Collect room data for a specific map and return CombinedMapInfo.""" await self._rooms_trait.refresh() return CombinedMapInfo( map_flag=map_info.map_flag, name=map_info.name, rooms=self._rooms_trait.rooms or [], ) async def _build_home_cache(self) -> dict[int, CombinedMapInfo]: """Perform the actual discovery and caching of home data.""" home_cache: dict[int, CombinedMapInfo] = {} # Sort map_info to process the current map last, reducing map switching. # False (non-original maps) sorts before True (original map). We ensure # we load the original map last. sorted_map_infos = sorted( self._maps_trait.map_info or [], key=lambda mi: mi.map_flag == self._maps_trait.current_map, reverse=False, ) _LOGGER.debug("Building home cache for maps: %s", [mi.map_flag for mi in sorted_map_infos]) for map_info in sorted_map_infos: # We need to load each map to get its room data if len(sorted_map_infos) > 1: _LOGGER.debug("Loading map %s", map_info.map_flag) await self._maps_trait.set_current_map(map_info.map_flag) await asyncio.sleep(MAP_SLEEP) map_data = await self._refresh_map_data(map_info) home_cache[map_info.map_flag] = map_data return home_cache async def refresh(self) -> Self: """Refresh current map's underlying map and room data, updating cache as needed. This will only refresh the current map's data and will not populate non active maps or re-discover the home. It is expected that this will keep information up to date for the current map as users switch to that map. 
""" if self._home_cache is None: raise RoborockException("Cannot refresh home data without home cache, did you call discover_home()?") # Refresh the list of map names/info await self._maps_trait.refresh() if (current_map_info := self._maps_trait.current_map_info) is None or ( map_flag := self._maps_trait.current_map ) is None: raise RoborockException("Cannot refresh home data without current map info") # Refresh the current map's room data current_map_data = self._home_cache.get(map_flag) if current_map_data: map_data = await self._refresh_map_data(current_map_info) if map_data != current_map_data: await self._update_home_cache({**self._home_cache, map_flag: map_data}) return self @property def home_cache(self) -> dict[int, CombinedMapInfo] | None: """Returns the map information for all cached maps.""" return self._home_cache @property def current_map_data(self) -> CombinedMapInfo | None: """Returns the map data for the current map.""" current_map_flag = self._maps_trait.current_map if current_map_flag is None or self._home_cache is None: return None return self._home_cache.get(current_map_flag) def _parse_response(self, response: common.V1ResponseData) -> Self: """This trait does not parse responses directly.""" raise NotImplementedError("HomeTrait does not support direct command responses") async def _update_home_cache(self, home_cache: dict[int, CombinedMapInfo]) -> None: cache_data = await self._cache.get() cache_data.home_cache = home_cache await self._cache.set(cache_data) self._home_cache = home_cache Python-roborock-python-roborock-32df4f3/roborock/devices/traits/v1/led_status.py000066400000000000000000000027221507503702500302110ustar00rootroot00000000000000from roborock.containers import LedStatus from roborock.devices.traits.v1 import common from roborock.roborock_typing import RoborockCommand from .common import V1ResponseData class LedStatusTrait(LedStatus, common.V1TraitMixin, common.RoborockSwitchBase): """Trait for controlling the LED status of a Roborock device.""" command = RoborockCommand.GET_LED_STATUS requires_feature = "is_led_status_switch_supported" @property def is_on(self) -> bool: """Return whether the LED status is enabled.""" return self.status == 1 async def enable(self) -> None: """Enable the LED status.""" await self.rpc_channel.send_command(RoborockCommand.SET_LED_STATUS, params=[1]) async def disable(self) -> None: """Disable the LED status.""" await self.rpc_channel.send_command(RoborockCommand.SET_LED_STATUS, params=[0]) @classmethod def _parse_type_response(cls, response: V1ResponseData) -> LedStatus: """Parse the response from the device into a a RoborockBase. Subclasses should override this method to implement custom parsing logic as needed. 
""" if not isinstance(response, list): raise ValueError(f"Unexpected {cls} response format: {response!r}") response = response[0] if not isinstance(response, int): raise ValueError(f"Unexpected {cls} response format: {response!r}") return cls.from_dict({"status": response}) Python-roborock-python-roborock-32df4f3/roborock/devices/traits/v1/map_content.py000066400000000000000000000032331507503702500303470ustar00rootroot00000000000000"""Trait for fetching the map content from Roborock devices.""" import logging from dataclasses import dataclass from vacuum_map_parser_base.map_data import MapData from roborock.containers import RoborockBase from roborock.devices.traits.v1 import common from roborock.map.map_parser import MapParser, MapParserConfig from roborock.roborock_typing import RoborockCommand _LOGGER = logging.getLogger(__name__) @dataclass class MapContent(RoborockBase): """Dataclass representing map content.""" image_content: bytes | None = None """The rendered image of the map in PNG format.""" map_data: MapData | None = None """The parsed map data which contains metadata for points on the map.""" @common.map_rpc_channel class MapContentTrait(MapContent, common.V1TraitMixin): """Trait for fetching the map content.""" command = RoborockCommand.GET_MAP_V1 def __init__(self, map_parser_config: MapParserConfig | None = None) -> None: """Initialize MapContentTrait.""" super().__init__() self._map_parser = MapParser(map_parser_config or MapParserConfig()) def _parse_response(self, response: common.V1ResponseData) -> MapContent: """Parse the response from the device into a MapContentTrait instance.""" if not isinstance(response, bytes): raise ValueError(f"Unexpected MapContentTrait response format: {type(response)}") parsed_data = self._map_parser.parse(response) if parsed_data is None: raise ValueError("Failed to parse map data") return MapContent( image_content=parsed_data.image_content, map_data=parsed_data.map_data, ) Python-roborock-python-roborock-32df4f3/roborock/devices/traits/v1/maps.py000066400000000000000000000064221507503702500270030ustar00rootroot00000000000000"""Trait for managing maps and room mappings on Roborock devices. New datatypes are introduced here to manage the additional information associated with maps and rooms, such as map names and room names. These override the base container datatypes to add additional fields. """ import logging from typing import Self from roborock.containers import MultiMapsList, MultiMapsListMapInfo from roborock.devices.traits.v1 import common from roborock.roborock_typing import RoborockCommand from .status import StatusTrait _LOGGER = logging.getLogger(__name__) @common.mqtt_rpc_channel class MapsTrait(MultiMapsList, common.V1TraitMixin): """Trait for managing the maps of Roborock devices. A device may have multiple maps, each identified by a unique map_flag. Each map can have multiple rooms associated with it, in a `RoomMapping`. The MapsTrait depends on the StatusTrait to determine the currently active map. It is the responsibility of the caller to ensure that the StatusTrait is up to date before using this trait. However, there is a possibility of races if another client changes the current map between the time the StatusTrait is refreshed and when the MapsTrait is used. This is mitigated by the fact that the map list is unlikely to change frequently, and the current map is only changed when the user explicitly switches maps. 
""" command = RoborockCommand.GET_MULTI_MAPS_LIST def __init__(self, status_trait: StatusTrait) -> None: """Initialize the MapsTrait. We keep track of the StatusTrait to ensure we have the latest status information when dealing with maps. """ super().__init__() self._status_trait = status_trait @property def current_map(self) -> int | None: """Returns the currently active map (map_flag), if available.""" return self._status_trait.current_map @property def current_map_info(self) -> MultiMapsListMapInfo | None: """Returns the currently active map info, if available.""" if (current_map := self.current_map) is None or self.map_info is None: return None for map_info in self.map_info: if map_info.map_flag == current_map: return map_info return None async def set_current_map(self, map_flag: int) -> None: """Update the current map of the device by it's map_flag id.""" await self.rpc_channel.send_command(RoborockCommand.LOAD_MULTI_MAP, params=[map_flag]) # Refresh our status to make sure it reflects the new map await self._status_trait.refresh() def _parse_response(self, response: common.V1ResponseData) -> Self: """Parse the response from the device into a MapsTrait instance. This overrides the base implementation to handle the specific response format for the multi maps list. This is needed because we have a custom constructor that requires the StatusTrait. """ if not isinstance(response, list): raise ValueError(f"Unexpected MapsTrait response format: {response!r}") response = response[0] if not isinstance(response, dict): raise ValueError(f"Unexpected MapsTrait response format: {response!r}") return MultiMapsList.from_dict(response) Python-roborock-python-roborock-32df4f3/roborock/devices/traits/v1/rooms.py000066400000000000000000000057051507503702500272050ustar00rootroot00000000000000"""Trait for managing room mappings on Roborock devices.""" import logging from dataclasses import dataclass from roborock.containers import HomeData, NamedRoomMapping, RoborockBase from roborock.devices.traits.v1 import common from roborock.roborock_typing import RoborockCommand _LOGGER = logging.getLogger(__name__) _DEFAULT_NAME = "Unknown" @dataclass class Rooms(RoborockBase): """Dataclass representing a collection of room mappings.""" rooms: list[NamedRoomMapping] | None = None """List of room mappings.""" @property def room_map(self) -> dict[int, NamedRoomMapping]: """Returns a mapping of segment_id to NamedRoomMapping.""" if self.rooms is None: return {} return {room.segment_id: room for room in self.rooms} class RoomsTrait(Rooms, common.V1TraitMixin): """Trait for managing the room mappings of Roborock devices.""" command = RoborockCommand.GET_ROOM_MAPPING def __init__(self, home_data: HomeData) -> None: """Initialize the RoomsTrait.""" super().__init__() self._home_data = home_data @property def _iot_id_room_name_map(self) -> dict[str, str]: """Returns a dictionary of Room IOT IDs to room names.""" return {str(room.id): room.name for room in self._home_data.rooms or ()} def _parse_response(self, response: common.V1ResponseData) -> Rooms: """Parse the response from the device into a list of NamedRoomMapping.""" if not isinstance(response, list): raise ValueError(f"Unexpected RoomsTrait response format: {response!r}") name_map = self._iot_id_room_name_map segment_pairs = _extract_segment_pairs(response) return Rooms( rooms=[ NamedRoomMapping(segment_id=segment_id, iot_id=iot_id, name=name_map.get(iot_id, _DEFAULT_NAME)) for segment_id, iot_id in segment_pairs ] ) def _extract_segment_pairs(response: list) -> 
list[tuple[int, str]]:
    """Extract segment_id and iot_id pairs from the response.

    The response format can be either a flat list of [segment_id, iot_id] or a
    list of lists, where each inner list is a pair of [segment_id, iot_id].
    This function normalizes the response into a list of (segment_id, iot_id)
    tuples.

    NOTE: We currently have only partial samples of the room mapping formats,
    so improving test coverage with samples from a real device with this
    format would be helpful.
    """
    if len(response) == 2 and not isinstance(response[0], list):
        segment_id, iot_id = response[0], response[1]
        return [(segment_id, iot_id)]
    segment_pairs: list[tuple[int, str]] = []
    for part in response:
        if not isinstance(part, list) or len(part) < 2:
            _LOGGER.warning("Unexpected room mapping entry format: %r", part)
            continue
        segment_id, iot_id = part[0], part[1]
        segment_pairs.append((segment_id, iot_id))
    return segment_pairs
Python-roborock-python-roborock-32df4f3/roborock/devices/traits/v1/status.py000066400000000000000000000017171507503702500273700ustar00rootroot00000000000000
from typing import Self

from roborock.containers import HomeDataProduct, ModelStatus, S7MaxVStatus, Status
from roborock.devices.traits.v1 import common
from roborock.roborock_typing import RoborockCommand


class StatusTrait(Status, common.V1TraitMixin):
    """Trait for managing the status of Roborock devices."""

    command = RoborockCommand.GET_STATUS

    def __init__(self, product_info: HomeDataProduct) -> None:
        """Initialize the StatusTrait."""
        self._product_info = product_info

    def _parse_response(self, response: common.V1ResponseData) -> Self:
        """Parse the response from the device into a Status object."""
        status_type: type[Status] = ModelStatus.get(self._product_info.model, S7MaxVStatus)
        if isinstance(response, list):
            response = response[0]
        if isinstance(response, dict):
            return status_type.from_dict(response)
        raise ValueError(f"Unexpected status format: {response!r}")
Python-roborock-python-roborock-32df4f3/roborock/devices/traits/v1/valley_electricity_timer.py000066400000000000000000000033071507503702500331360ustar00rootroot00000000000000
from roborock.containers import ValleyElectricityTimer
from roborock.devices.traits.v1 import common
from roborock.roborock_typing import RoborockCommand

_ENABLED_PARAM = "enabled"


class ValleyElectricityTimerTrait(ValleyElectricityTimer, common.V1TraitMixin):
    """Trait for managing Valley Electricity Timer settings on Roborock devices."""

    command = RoborockCommand.GET_VALLEY_ELECTRICITY_TIMER
    requires_feature = "is_supported_valley_electricity"

    @property
    def is_on(self) -> bool:
        """Return whether the Valley Electricity Timer is enabled."""
        return self.enabled == 1

    async def set_timer(self, timer: ValleyElectricityTimer) -> None:
        """Set the Valley Electricity Timer settings of the device."""
        await self.rpc_channel.send_command(RoborockCommand.SET_VALLEY_ELECTRICITY_TIMER, params=timer.as_dict())

    async def clear_timer(self) -> None:
        """Clear the Valley Electricity Timer settings of the device."""
        await self.rpc_channel.send_command(RoborockCommand.CLOSE_VALLEY_ELECTRICITY_TIMER)

    async def enable(self) -> None:
        """Enable the Valley Electricity Timer settings of the device."""
        await self.rpc_channel.send_command(
            RoborockCommand.SET_VALLEY_ELECTRICITY_TIMER,
            params={
                **self.as_dict(),
                _ENABLED_PARAM: 1,
            },
        )

    async def disable(self) -> None:
        """Disable the Valley Electricity Timer settings of the device."""
        await self.rpc_channel.send_command(
            RoborockCommand.SET_VALLEY_ELECTRICITY_TIMER,
            params={
                **self.as_dict(),
                _ENABLED_PARAM:
0, }, ) Python-roborock-python-roborock-32df4f3/roborock/devices/traits/v1/volume.py000066400000000000000000000016531507503702500273530ustar00rootroot00000000000000from dataclasses import dataclass, field from roborock.devices.traits.v1 import common from roborock.roborock_typing import RoborockCommand # TODO: This is currently the pattern for holding all the commands that hold a # single value, but it still seems too verbose. Maybe we can generate these # dynamically or somehow make them less code. @dataclass class SoundVolume(common.RoborockValueBase): """Dataclass for sound volume.""" volume: int | None = field(default=None, metadata={"roborock_value": True}) """Sound volume level (0-100).""" class SoundVolumeTrait(SoundVolume, common.V1TraitMixin): """Trait for controlling the sound volume of a Roborock device.""" command = RoborockCommand.GET_SOUND_VOLUME async def set_volume(self, volume: int) -> None: """Set the sound volume of the device.""" await self.rpc_channel.send_command(RoborockCommand.CHANGE_SOUND_VOLUME, params=[volume]) Python-roborock-python-roborock-32df4f3/roborock/devices/v1_channel.py000066400000000000000000000303321507503702500262220ustar00rootroot00000000000000"""V1 Channel for Roborock devices. This module provides a unified channel interface for V1 protocol devices, handling both MQTT and local connections with automatic fallback. """ import asyncio import datetime import logging from collections.abc import Callable from typing import TypeVar from roborock.containers import HomeDataDevice, NetworkInfo, RoborockBase, UserData from roborock.exceptions import RoborockException from roborock.mqtt.session import MqttParams, MqttSession from roborock.protocols.v1_protocol import ( SecurityData, create_security_data, ) from roborock.roborock_message import RoborockMessage from roborock.roborock_typing import RoborockCommand from .cache import Cache from .channel import Channel from .local_channel import LocalChannel, LocalSession, create_local_session from .mqtt_channel import MqttChannel from .v1_rpc_channel import ( PickFirstAvailable, V1RpcChannel, create_local_rpc_channel, create_map_rpc_channel, create_mqtt_rpc_channel, ) _LOGGER = logging.getLogger(__name__) __all__ = [ "V1Channel", ] _T = TypeVar("_T", bound=RoborockBase) # Exponential backoff parameters for reconnecting to local MIN_RECONNECT_INTERVAL = datetime.timedelta(minutes=1) MAX_RECONNECT_INTERVAL = datetime.timedelta(minutes=10) RECONNECT_MULTIPLIER = 1.5 # After this many hours, the network info is refreshed NETWORK_INFO_REFRESH_INTERVAL = datetime.timedelta(hours=12) # Interval to check that the local connection is healthy LOCAL_CONNECTION_CHECK_INTERVAL = datetime.timedelta(seconds=15) class V1Channel(Channel): """Unified V1 protocol channel with automatic MQTT/local connection handling. This channel abstracts away the complexity of choosing between MQTT and local connections, and provides high-level V1 protocol methods. It automatically handles connection setup, fallback logic, and protocol encoding/decoding. """ def __init__( self, device_uid: str, security_data: SecurityData, mqtt_channel: MqttChannel, local_session: LocalSession, cache: Cache, ) -> None: """Initialize the V1Channel. Args: mqtt_channel: MQTT channel for cloud communication local_session: Factory that creates LocalChannels for a hostname. 
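
        Example (illustrative construction sketch; `user_data`, `mqtt_params`,
        `mqtt_session`, `home_device` and `cache` are assumptions standing in
        for values produced by the surrounding device setup code, and the
        preferred entry point is the `create_v1_channel` factory in this
        module)::

            channel = create_v1_channel(user_data, mqtt_params, mqtt_session, home_device, cache)
            unsub = await channel.subscribe(lambda message: print(message))
            ...
            unsub()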
""" self._device_uid = device_uid self._mqtt_channel = mqtt_channel self._mqtt_rpc_channel = create_mqtt_rpc_channel(mqtt_channel, security_data) self._local_session = local_session self._local_channel: LocalChannel | None = None self._local_rpc_channel: V1RpcChannel | None = None # Prefer local, fallback to MQTT self._combined_rpc_channel = PickFirstAvailable( [lambda: self._local_rpc_channel, lambda: self._mqtt_rpc_channel] ) self._map_rpc_channel = create_map_rpc_channel(mqtt_channel, security_data) self._mqtt_unsub: Callable[[], None] | None = None self._local_unsub: Callable[[], None] | None = None self._callback: Callable[[RoborockMessage], None] | None = None self._cache = cache self._reconnect_task: asyncio.Task[None] | None = None self._last_network_info_refresh: datetime.datetime | None = None @property def is_connected(self) -> bool: """Return whether any connection is available.""" return self.is_mqtt_connected or self.is_local_connected @property def is_local_connected(self) -> bool: """Return whether local connection is available.""" return self._local_channel is not None and self._local_channel.is_connected @property def is_mqtt_connected(self) -> bool: """Return whether MQTT connection is available.""" return self._mqtt_unsub is not None and self._mqtt_channel.is_connected @property def rpc_channel(self) -> V1RpcChannel: """Return the combined RPC channel prefers local with a fallback to MQTT.""" return self._combined_rpc_channel @property def mqtt_rpc_channel(self) -> V1RpcChannel: """Return the MQTT RPC channel.""" return self._mqtt_rpc_channel @property def map_rpc_channel(self) -> V1RpcChannel: """Return the map RPC channel used for fetching map content.""" return self._map_rpc_channel async def subscribe(self, callback: Callable[[RoborockMessage], None]) -> Callable[[], None]: """Subscribe to all messages from the device. This will first attempt to establish a local connection to the device using cached network information if available. If that fails, it will fall back to using the MQTT connection. A background task will be started to monitor and maintain the local connection, attempting to reconnect as needed. Args: callback: Callback to invoke for each received message. Returns: Unsubscribe function to stop receiving messages and clean up resources. """ if self._callback is not None: raise ValueError("Only one subscription allowed at a time") # Make an initial, optimistic attempt to connect to local with the # cache. The cache information will be refreshed by the background task. try: await self._local_connect(use_cache=True) except RoborockException as err: _LOGGER.warning("Could not establish local connection for device %s: %s", self._device_uid, err) # Start a background task to manage the local connection health. This # happens independent of whether we were able to connect locally now. if self._reconnect_task is None: loop = asyncio.get_running_loop() self._reconnect_task = loop.create_task(self._background_reconnect()) if not self.is_local_connected: # We were not able to connect locally, so fallback to MQTT and at least # establish that connection explicitly. If this fails then raise an # error and let the caller know we failed to subscribe. 
self._mqtt_unsub = await self._mqtt_channel.subscribe(self._on_mqtt_message) _LOGGER.debug("V1Channel connected to device %s via MQTT", self._device_uid) def unsub() -> None: """Unsubscribe from all messages.""" if self._reconnect_task: self._reconnect_task.cancel() self._reconnect_task = None if self._mqtt_unsub: self._mqtt_unsub() self._mqtt_unsub = None if self._local_unsub: self._local_unsub() self._local_unsub = None _LOGGER.debug("Unsubscribed from device %s", self._device_uid) self._callback = callback return unsub async def _get_networking_info(self, *, use_cache: bool = True) -> NetworkInfo: """Retrieve networking information for the device. This is a cloud only command used to get the local device's IP address. """ cache_data = await self._cache.get() if use_cache and cache_data.network_info and (network_info := cache_data.network_info.get(self._device_uid)): _LOGGER.debug("Using cached network info for device %s", self._device_uid) return network_info try: network_info = await self._mqtt_rpc_channel.send_command( RoborockCommand.GET_NETWORK_INFO, response_type=NetworkInfo ) except RoborockException as e: raise RoborockException(f"Network info failed for device {self._device_uid}") from e _LOGGER.debug("Network info for device %s: %s", self._device_uid, network_info) self._last_network_info_refresh = datetime.datetime.now(datetime.UTC) cache_data.network_info[self._device_uid] = network_info await self._cache.set(cache_data) return network_info async def _local_connect(self, *, use_cache: bool = True) -> None: """Set up local connection if possible.""" _LOGGER.debug( "Attempting to connect to local channel for device %s (use_cache=%s)", self._device_uid, use_cache ) networking_info = await self._get_networking_info(use_cache=use_cache) host = networking_info.ip _LOGGER.debug("Connecting to local channel at %s", host) # Create a new local channel and connect local_channel = self._local_session(host) try: await local_channel.connect() except RoborockException as e: raise RoborockException(f"Error connecting to local device {self._device_uid}: {e}") from e # Wire up the new channel self._local_channel = local_channel self._local_rpc_channel = create_local_rpc_channel(self._local_channel) self._local_unsub = await self._local_channel.subscribe(self._on_local_message) _LOGGER.info("Successfully connected to local device %s", self._device_uid) async def _background_reconnect(self) -> None: """Task to run in the background to manage the local connection.""" _LOGGER.debug("Starting background task to manage local connection for %s", self._device_uid) reconnect_backoff = MIN_RECONNECT_INTERVAL local_connect_failures = 0 while True: try: if self.is_local_connected: await asyncio.sleep(LOCAL_CONNECTION_CHECK_INTERVAL.total_seconds()) continue # Not connected, so wait with backoff before trying to connect. # The first time through, we don't sleep, we just try to connect. 
local_connect_failures += 1 if local_connect_failures > 1: await asyncio.sleep(reconnect_backoff.total_seconds()) reconnect_backoff = min(reconnect_backoff * RECONNECT_MULTIPLIER, MAX_RECONNECT_INTERVAL) use_cache = self._should_use_cache(local_connect_failures) await self._local_connect(use_cache=use_cache) # Reset backoff and failures on success reconnect_backoff = MIN_RECONNECT_INTERVAL local_connect_failures = 0 except asyncio.CancelledError: _LOGGER.debug("Background reconnect task cancelled") if self._local_channel: self._local_channel.close() return except RoborockException as err: _LOGGER.debug("Background reconnect failed: %s", err) except Exception: _LOGGER.exception("Unhandled exception in background reconnect task") def _should_use_cache(self, local_connect_failures: int) -> bool: """Determine whether to use cached network info on retries. On the first retry we'll avoid the cache to handle the case where the network ip may have recently changed. Otherwise, use the cache if available then expire at some point. """ if local_connect_failures == 1: return False elif self._last_network_info_refresh and ( datetime.datetime.now(datetime.UTC) - self._last_network_info_refresh > NETWORK_INFO_REFRESH_INTERVAL ): return False return True def _on_mqtt_message(self, message: RoborockMessage) -> None: """Handle incoming MQTT messages.""" _LOGGER.debug("V1Channel received MQTT message from device %s: %s", self._device_uid, message) if self._callback: self._callback(message) def _on_local_message(self, message: RoborockMessage) -> None: """Handle incoming local messages.""" _LOGGER.debug("V1Channel received local message from device %s: %s", self._device_uid, message) if self._callback: self._callback(message) def create_v1_channel( user_data: UserData, mqtt_params: MqttParams, mqtt_session: MqttSession, device: HomeDataDevice, cache: Cache, ) -> V1Channel: """Create a V1Channel for the given device.""" security_data = create_security_data(user_data.rriot) mqtt_channel = MqttChannel(mqtt_session, device.duid, device.local_key, user_data.rriot, mqtt_params) local_session = create_local_session(device.local_key) return V1Channel(device.duid, security_data, mqtt_channel, local_session=local_session, cache=cache) Python-roborock-python-roborock-32df4f3/roborock/devices/v1_rpc_channel.py000066400000000000000000000155651507503702500271010ustar00rootroot00000000000000"""V1 Rpc Channel for Roborock devices. This is a wrapper around the V1 channel that provides a higher level interface for sending typed commands and receiving typed responses. This also provides a simple interface for sending commands and receiving responses over both MQTT and local connections, preferring local when available. """ import asyncio import logging from collections.abc import Callable from typing import Any, Protocol, TypeVar, overload from roborock.containers import RoborockBase from roborock.exceptions import RoborockException from roborock.protocols.v1_protocol import ( CommandType, MapResponse, ParamsType, RequestMessage, ResponseData, ResponseMessage, SecurityData, create_map_response_decoder, decode_rpc_response, ) from roborock.roborock_message import RoborockMessage, RoborockMessageProtocol from .local_channel import LocalChannel from .mqtt_channel import MqttChannel _LOGGER = logging.getLogger(__name__) _TIMEOUT = 10.0 _T = TypeVar("_T", bound=RoborockBase) _V = TypeVar("_V") class V1RpcChannel(Protocol): """Protocol for V1 RPC channels. 
This is a wrapper around a raw channel that provides a high-level interface for sending commands and receiving responses. """ @overload async def send_command( self, method: CommandType, *, params: ParamsType = None, ) -> Any: """Send a command and return a decoded response.""" ... @overload async def send_command( self, method: CommandType, *, response_type: type[_T], params: ParamsType = None, ) -> _T: """Send a command and return a parsed response RoborockBase type.""" ... class BaseV1RpcChannel(V1RpcChannel): """Base implementation that provides the typed response logic.""" async def send_command( self, method: CommandType, *, response_type: type[_T] | None = None, params: ParamsType = None, ) -> _T | Any: """Send a command and return either a decoded or parsed response.""" decoded_response = await self._send_raw_command(method, params=params) if response_type is not None: return response_type.from_dict(decoded_response) return decoded_response async def _send_raw_command( self, method: CommandType, *, params: ParamsType = None, ) -> Any: """Send a raw command and return the decoded response. Must be implemented by subclasses.""" raise NotImplementedError class PickFirstAvailable(BaseV1RpcChannel): """A V1 RPC channel that tries multiple channels and picks the first that works.""" def __init__( self, channel_cbs: list[Callable[[], V1RpcChannel | None]], ) -> None: """Initialize the pick-first-available channel.""" self._channel_cbs = channel_cbs async def _send_raw_command( self, method: CommandType, *, params: ParamsType = None, ) -> Any: """Send a command and return a parsed response RoborockBase type.""" for channel_cb in self._channel_cbs: if channel := channel_cb(): return await channel.send_command(method, params=params) raise RoborockException("No available connection to send command") class PayloadEncodedV1RpcChannel(BaseV1RpcChannel): """Protocol for V1 channels that send encoded commands.""" def __init__( self, name: str, channel: MqttChannel | LocalChannel, payload_encoder: Callable[[RequestMessage], RoborockMessage], decoder: Callable[[RoborockMessage], ResponseMessage] | Callable[[RoborockMessage], MapResponse | None], ) -> None: """Initialize the channel with a raw channel and an encoder function.""" self._name = name self._channel = channel self._payload_encoder = payload_encoder self._decoder = decoder async def _send_raw_command( self, method: CommandType, *, params: ParamsType = None, ) -> ResponseData | bytes: """Send a command and return a parsed response RoborockBase type.""" request_message = RequestMessage(method, params=params) _LOGGER.debug( "Sending command (%s, request_id=%s): %s, params=%s", self._name, request_message.request_id, method, params ) message = self._payload_encoder(request_message) future: asyncio.Future[ResponseData | bytes] = asyncio.Future() def find_response(response_message: RoborockMessage) -> None: try: decoded = self._decoder(response_message) except RoborockException as ex: _LOGGER.debug("Exception while decoding message (%s): %s", response_message, ex) return if decoded is None: return _LOGGER.debug("Received response (%s, request_id=%s)", self._name, decoded.request_id) if decoded.request_id == request_message.request_id: if isinstance(decoded, ResponseMessage) and decoded.api_error: future.set_exception(decoded.api_error) else: future.set_result(decoded.data) unsub = await self._channel.subscribe(find_response) try: await self._channel.publish(message) return await asyncio.wait_for(future, timeout=_TIMEOUT) except TimeoutError as 
ex: future.cancel() raise RoborockException(f"Command timed out after {_TIMEOUT}s") from ex finally: unsub() def create_mqtt_rpc_channel(mqtt_channel: MqttChannel, security_data: SecurityData) -> V1RpcChannel: """Create a V1 RPC channel using an MQTT channel.""" return PayloadEncodedV1RpcChannel( "mqtt", mqtt_channel, lambda x: x.encode_message(RoborockMessageProtocol.RPC_REQUEST, security_data=security_data), decode_rpc_response, ) def create_local_rpc_channel(local_channel: LocalChannel) -> V1RpcChannel: """Create a V1 RPC channel using a local channel.""" return PayloadEncodedV1RpcChannel( "local", local_channel, lambda x: x.encode_message(RoborockMessageProtocol.GENERAL_REQUEST), decode_rpc_response, ) def create_map_rpc_channel( mqtt_channel: MqttChannel, security_data: SecurityData, ) -> V1RpcChannel: """Create a V1 RPC channel that fetches map data. This will prefer local channels when available, falling back to MQTT channels if not. If neither is available, an exception will be raised when trying to send a command. """ return PayloadEncodedV1RpcChannel( "map", mqtt_channel, lambda x: x.encode_message(RoborockMessageProtocol.RPC_REQUEST, security_data=security_data), create_map_response_decoder(security_data=security_data), ) Python-roborock-python-roborock-32df4f3/roborock/exceptions.py000066400000000000000000000047231507503702500247500ustar00rootroot00000000000000"""Roborock exceptions.""" from __future__ import annotations class RoborockException(Exception): """Class for Roborock exceptions.""" class RoborockTimeout(RoborockException): """Class for Roborock timeout exceptions.""" class RoborockConnectionException(RoborockException): """Class for Roborock connection exceptions.""" class RoborockBackoffException(RoborockException): """Class for Roborock exceptions when many retries were made.""" class VacuumError(RoborockException): """Class for vacuum errors.""" class CommandVacuumError(RoborockException): """Class for command vacuum errors.""" def __init__(self, command: str | None, vacuum_error: VacuumError): self.message = f"{command or 'unknown'}: {str(vacuum_error)}" super().__init__(self.message) class UnknownMethodError(RoborockException): """Class for an invalid method being sent.""" class RoborockAccountDoesNotExist(RoborockException): """Class for Roborock account does not exist exceptions.""" class RoborockUrlException(RoborockException): """Class for being unable to get the URL for the Roborock account.""" class RoborockInvalidCode(RoborockException): """Class for Roborock invalid code exceptions.""" class RoborockInvalidEmail(RoborockException): """Class for Roborock invalid formatted email exceptions.""" class RoborockInvalidUserAgreement(RoborockException): """Class for Roborock invalid user agreement exceptions.""" class RoborockNoUserAgreement(RoborockException): """Class for Roborock no user agreement exceptions.""" class RoborockInvalidCredentials(RoborockException): """Class for Roborock credentials have expired or changed.""" class RoborockTooFrequentCodeRequests(RoborockException): """Class for Roborock too frequent code requests exceptions.""" class RoborockMissingParameters(RoborockException): """Class for Roborock missing parameters exceptions.""" class RoborockTooManyRequest(RoborockException): """Class for Roborock too many request exceptions.""" class RoborockRateLimit(RoborockException): """Class for our rate limits exceptions.""" class RoborockNoResponseFromBaseURL(RoborockException): """We could not find an url that had a record of the given 
account.""" class RoborockDeviceBusy(RoborockException): """Class for Roborock device busy exceptions.""" class RoborockUnsupportedFeature(RoborockException): """Class for Roborock unsupported feature exceptions.""" Python-roborock-python-roborock-32df4f3/roborock/map/000077500000000000000000000000001507503702500227645ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/roborock/map/__init__.py000066400000000000000000000002221507503702500250710ustar00rootroot00000000000000"""Module for Roborock map related data classes.""" from .map_parser import MapParserConfig, ParsedMapData __all__ = [ "MapParserConfig", ] Python-roborock-python-roborock-32df4f3/roborock/map/map_parser.py000066400000000000000000000067371507503702500255040ustar00rootroot00000000000000"""Module for parsing v1 Roborock map content.""" import io import logging from dataclasses import dataclass, field from vacuum_map_parser_base.config.color import ColorsPalette, SupportedColor from vacuum_map_parser_base.config.drawable import Drawable from vacuum_map_parser_base.config.image_config import ImageConfig from vacuum_map_parser_base.config.size import Size, Sizes from vacuum_map_parser_base.map_data import MapData from vacuum_map_parser_roborock.map_data_parser import RoborockMapDataParser from roborock.exceptions import RoborockException _LOGGER = logging.getLogger(__name__) DEFAULT_DRAWABLES = { Drawable.CHARGER: True, Drawable.CLEANED_AREA: False, Drawable.GOTO_PATH: False, Drawable.IGNORED_OBSTACLES: False, Drawable.IGNORED_OBSTACLES_WITH_PHOTO: False, Drawable.MOP_PATH: False, Drawable.NO_CARPET_AREAS: False, Drawable.NO_GO_AREAS: False, Drawable.NO_MOPPING_AREAS: False, Drawable.OBSTACLES: False, Drawable.OBSTACLES_WITH_PHOTO: False, Drawable.PATH: True, Drawable.PREDICTED_PATH: False, Drawable.VACUUM_POSITION: True, Drawable.VIRTUAL_WALLS: False, Drawable.ZONES: False, } DEFAULT_MAP_SCALE = 4 MAP_FILE_FORMAT = "PNG" def _default_drawable_factory() -> list[Drawable]: return [drawable for drawable, default_value in DEFAULT_DRAWABLES.items() if default_value] @dataclass class MapParserConfig: """Configuration for the Roborock map parser.""" drawables: list[Drawable] = field(default_factory=_default_drawable_factory) """List of drawables to include in the map rendering.""" show_background: bool = True """Whether to show the background of the map.""" map_scale: int = DEFAULT_MAP_SCALE """Scale factor for the map.""" @dataclass class ParsedMapData: """Roborock Map Data. This class holds the parsed map data and the rendered image. """ image_content: bytes | None """The rendered image of the map in PNG format.""" map_data: MapData | None """The parsed map data which contains metadata for points on the map.""" class MapParser: """Roborock Map Parser. This class is used to parse the map data from the device and render it into an image. 
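
    Example (illustrative sketch; `raw_map_bytes` is a placeholder for the raw
    bytes returned by the device's map command)::

        parser = MapParser(MapParserConfig(map_scale=2))
        parsed = parser.parse(raw_map_bytes)
        if parsed is not None and parsed.image_content is not None:
            with open("map.png", "wb") as f:
                f.write(parsed.image_content)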
""" def __init__(self, config: MapParserConfig) -> None: """Initialize the MapParser.""" self._map_parser = _create_map_data_parser(config) def parse(self, map_bytes: bytes) -> ParsedMapData | None: """Parse map_bytes and return MapData and the image.""" try: parsed_map = self._map_parser.parse(map_bytes) except (IndexError, ValueError) as err: raise RoborockException("Failed to parse map data") from err if parsed_map.image is None: raise RoborockException("Failed to render map image") img_byte_arr = io.BytesIO() parsed_map.image.data.save(img_byte_arr, format=MAP_FILE_FORMAT) return ParsedMapData(image_content=img_byte_arr.getvalue(), map_data=parsed_map) def _create_map_data_parser(config: MapParserConfig) -> RoborockMapDataParser: """Create a RoborockMapDataParser based on the config entry.""" colors = ColorsPalette() if not config.show_background: colors = ColorsPalette({SupportedColor.MAP_OUTSIDE: (0, 0, 0, 0)}) return RoborockMapDataParser( colors, Sizes({k: v * config.map_scale for k, v in Sizes.SIZES.items() if k != Size.MOP_PATH_WIDTH}), config.drawables, ImageConfig(scale=config.map_scale), [], ) Python-roborock-python-roborock-32df4f3/roborock/mqtt/000077500000000000000000000000001507503702500231745ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/roborock/mqtt/__init__.py000066400000000000000000000006371507503702500253130ustar00rootroot00000000000000"""This module contains the low level MQTT client for the Roborock vacuum cleaner. This is not meant to be used directly, but rather as a base for the higher level modules. """ # This module is part of the Roborock Python library, which provides a way to # interact with Roborock devices using MQTT. It is not intended to be used directly, # but rather as a base for higher level modules. __all__: list[str] = [] Python-roborock-python-roborock-32df4f3/roborock/mqtt/roborock_session.py000066400000000000000000000272071507503702500271410ustar00rootroot00000000000000"""An MQTT session for sending and receiving messages. See create_mqtt_session for a factory function to create an MQTT session. This is a thin wrapper around the async MQTT client that handles dispatching messages from a topic to a callback function, since the async MQTT client does not support this out of the box. It also handles the authentication process and receiving messages from the vacuum cleaner. """ import asyncio import datetime import logging from collections.abc import Callable from contextlib import asynccontextmanager import aiomqtt from aiomqtt import MqttError, TLSParameters from roborock.callbacks import CallbackMap from .session import MqttParams, MqttSession, MqttSessionException _LOGGER = logging.getLogger(__name__) _MQTT_LOGGER = logging.getLogger(f"{__name__}.aiomqtt") KEEPALIVE = 60 # Exponential backoff parameters MIN_BACKOFF_INTERVAL = datetime.timedelta(seconds=10) MAX_BACKOFF_INTERVAL = datetime.timedelta(minutes=30) BACKOFF_MULTIPLIER = 1.5 class RoborockMqttSession(MqttSession): """An MQTT session for sending and receiving messages. You can start a session invoking the start() method which will connect to the MQTT broker. A caller may subscribe to a topic, and the session keeps track of which callbacks to invoke for each topic. The client is run as a background task that will run until shutdown. Once connected, the client will wait for messages to be received in a loop. If the connection is lost, the client will be re-created and reconnected. There is backoff to avoid spamming the broker with connection attempts. 
The client will automatically re-establish any subscriptions when the connection is re-established. """ def __init__(self, params: MqttParams): self._params = params self._background_task: asyncio.Task[None] | None = None self._healthy = False self._stop = False self._backoff = MIN_BACKOFF_INTERVAL self._client: aiomqtt.Client | None = None self._client_lock = asyncio.Lock() self._listeners: CallbackMap[str, bytes] = CallbackMap(_LOGGER) @property def connected(self) -> bool: """True if the session is connected to the broker.""" return self._healthy async def start(self) -> None: """Start the MQTT session. This has special behavior for the first connection attempt where any failures are raised immediately. This is to allow the caller to handle the failure and retry if desired itself. Once connected, the session will retry connecting in the background. """ start_future: asyncio.Future[None] = asyncio.Future() loop = asyncio.get_event_loop() self._background_task = loop.create_task(self._run_task(start_future)) try: await start_future except MqttError as err: raise MqttSessionException(f"Error starting MQTT session: {err}") from err except Exception as err: raise MqttSessionException(f"Unexpected error starting session: {err}") from err else: _LOGGER.debug("MQTT session started successfully") async def close(self) -> None: """Cancels the MQTT loop and shutdown the client library.""" self._stop = True if self._background_task: self._background_task.cancel() try: await self._background_task except asyncio.CancelledError: pass async with self._client_lock: if self._client: await self._client.close() self._healthy = False async def _run_task(self, start_future: asyncio.Future[None] | None) -> None: """Run the MQTT loop.""" _LOGGER.info("Starting MQTT session") while True: try: async with self._mqtt_client(self._params) as client: # Reset backoff once we've successfully connected self._backoff = MIN_BACKOFF_INTERVAL self._healthy = True if start_future: start_future.set_result(None) start_future = None await self._process_message_loop(client) except MqttError as err: if start_future: _LOGGER.info("MQTT error starting session: %s", err) start_future.set_exception(err) return _LOGGER.info("MQTT error: %s", err) except asyncio.CancelledError as err: if start_future: _LOGGER.debug("MQTT loop was cancelled while starting") start_future.set_exception(err) _LOGGER.debug("MQTT loop was cancelled") return # Catch exceptions to avoid crashing the loop # and to allow the loop to retry. except Exception as err: # This error is thrown when the MQTT loop is cancelled # and the generator is not stopped. 
if "generator didn't stop" in str(err) or "generator didn't yield" in str(err): _LOGGER.debug("MQTT loop was cancelled") return if start_future: _LOGGER.error("Uncaught error starting MQTT session: %s", err) start_future.set_exception(err) return _LOGGER.exception("Uncaught error during MQTT session: %s", err) self._healthy = False if self._stop: _LOGGER.debug("MQTT session closed, stopping retry loop") return _LOGGER.info("MQTT session disconnected, retrying in %s seconds", self._backoff.total_seconds()) await asyncio.sleep(self._backoff.total_seconds()) self._backoff = min(self._backoff * BACKOFF_MULTIPLIER, MAX_BACKOFF_INTERVAL) @asynccontextmanager async def _mqtt_client(self, params: MqttParams) -> aiomqtt.Client: """Connect to the MQTT broker and listen for messages.""" _LOGGER.debug("Connecting to %s:%s for %s", params.host, params.port, params.username) try: async with aiomqtt.Client( hostname=params.host, port=params.port, username=params.username, password=params.password, keepalive=KEEPALIVE, protocol=aiomqtt.ProtocolVersion.V5, tls_params=TLSParameters() if params.tls else None, timeout=params.timeout, logger=_MQTT_LOGGER, ) as client: _LOGGER.debug("Connected to MQTT broker") # Re-establish any existing subscriptions async with self._client_lock: self._client = client for topic in self._listeners.keys(): _LOGGER.debug("Re-establishing subscription to topic %s", topic) # TODO: If this fails it will break the whole connection. Make # this retry again in the background with backoff. await client.subscribe(topic) yield client finally: async with self._client_lock: self._client = None async def _process_message_loop(self, client: aiomqtt.Client) -> None: _LOGGER.debug("Processing MQTT messages") async for message in client.messages: _LOGGER.debug("Received message: %s", message) self._listeners(message.topic.value, message.payload) async def subscribe(self, topic: str, callback: Callable[[bytes], None]) -> Callable[[], None]: """Subscribe to messages on the specified topic and invoke the callback for new messages. The callback will be called with the message payload as a bytes object. The callback should not block since it runs in the async loop. It should not raise any exceptions. The returned callable unsubscribes from the topic when called. """ _LOGGER.debug("Subscribing to topic %s", topic) unsub = self._listeners.add_callback(topic, callback) async with self._client_lock: if self._client: _LOGGER.debug("Establishing subscription to topic %s", topic) try: await self._client.subscribe(topic) except MqttError as err: raise MqttSessionException(f"Error subscribing to topic: {err}") from err else: _LOGGER.debug("Client not connected, will establish subscription later") return unsub async def publish(self, topic: str, message: bytes) -> None: """Publish a message on the topic.""" _LOGGER.debug("Sending message to topic %s: %s", topic, message) client: aiomqtt.Client async with self._client_lock: if self._client is None: raise MqttSessionException("Could not publish message, MQTT client not connected") client = self._client try: await client.publish(topic, message) except MqttError as err: raise MqttSessionException(f"Error publishing message: {err}") from err class LazyMqttSession(MqttSession): """An MQTT session that is started on first attempt to subscribe. This is a wrapper around an existing MqttSession that will only start the underlying session when the first attempt to subscribe or publish is made. 
""" def __init__(self, session: RoborockMqttSession) -> None: """Initialize the lazy session with an existing session.""" self._lock = asyncio.Lock() self._started = False self._session = session @property def connected(self) -> bool: """True if the session is connected to the broker.""" return self._session.connected async def _maybe_start(self) -> None: """Start the MQTT session if not already started.""" async with self._lock: if not self._started: await self._session.start() self._started = True async def subscribe(self, device_id: str, callback: Callable[[bytes], None]) -> Callable[[], None]: """Invoke the callback when messages are received on the topic. The returned callable unsubscribes from the topic when called. """ await self._maybe_start() return await self._session.subscribe(device_id, callback) async def publish(self, topic: str, message: bytes) -> None: """Publish a message on the specified topic. This will raise an exception if the message could not be sent. """ await self._maybe_start() return await self._session.publish(topic, message) async def close(self) -> None: """Cancels the mqtt loop. This will close the underlying session and will not allow it to be restarted again. """ await self._session.close() async def create_mqtt_session(params: MqttParams) -> MqttSession: """Create an MQTT session. This function is a factory for creating an MQTT session. This will raise an exception if initial attempt to connect fails. Once connected, the session will retry connecting on failure in the background. """ session = RoborockMqttSession(params) await session.start() return session async def create_lazy_mqtt_session(params: MqttParams) -> MqttSession: """Create a lazy MQTT session. This function is a factory for creating an MQTT session that will only connect when the first attempt to subscribe or publish is made. """ return LazyMqttSession(RoborockMqttSession(params)) Python-roborock-python-roborock-32df4f3/roborock/mqtt/session.py000066400000000000000000000031541507503702500252340ustar00rootroot00000000000000"""An MQTT session for sending and receiving messages.""" from abc import ABC, abstractmethod from collections.abc import Callable from dataclasses import dataclass from roborock.exceptions import RoborockException DEFAULT_TIMEOUT = 30.0 @dataclass class MqttParams: """MQTT parameters for the connection.""" host: str """MQTT host to connect to.""" port: int """MQTT port to connect to.""" tls: bool """Use TLS for the connection.""" username: str """MQTT username to use for authentication.""" password: str """MQTT password to use for authentication.""" timeout: float = DEFAULT_TIMEOUT """Timeout for communications with the broker in seconds.""" class MqttSession(ABC): """An MQTT session for sending and receiving messages.""" @property @abstractmethod def connected(self) -> bool: """True if the session is connected to the broker.""" @abstractmethod async def subscribe(self, device_id: str, callback: Callable[[bytes], None]) -> Callable[[], None]: """Invoke the callback when messages are received on the topic. The returned callable unsubscribes from the topic when called. """ @abstractmethod async def publish(self, topic: str, message: bytes) -> None: """Publish a message on the specified topic. This will raise an exception if the message could not be sent. 
""" @abstractmethod async def close(self) -> None: """Cancels the mqtt loop""" class MqttSessionException(RoborockException): """ "Raised when there is an error communicating with MQTT.""" Python-roborock-python-roborock-32df4f3/roborock/protocol.py000066400000000000000000000442231507503702500244270ustar00rootroot00000000000000from __future__ import annotations import binascii import gzip import hashlib import logging from collections.abc import Callable from urllib.parse import urlparse from construct import ( # type: ignore Bytes, Checksum, ChecksumError, Construct, Container, GreedyBytes, GreedyRange, Int16ub, Int32ub, Optional, Peek, RawCopy, Struct, bytestringtype, stream_seek, stream_tell, ) from Crypto.Cipher import AES from Crypto.Util.Padding import pad, unpad from roborock.containers import RRiot from roborock.exceptions import RoborockException from roborock.mqtt.session import MqttParams from roborock.roborock_message import RoborockMessage _LOGGER = logging.getLogger(__name__) SALT = b"TXdfu$jyZ#TZHsg4" A01_HASH = "726f626f726f636b2d67a6d6da" B01_HASH = "5wwh9ikChRjASpMU8cxg7o1d2E" AP_CONFIG = 1 SOCK_DISCOVERY = 2 def md5hex(message: str) -> str: md5 = hashlib.md5() md5.update(message.encode()) return md5.hexdigest() class Utils: """Util class for protocol manipulation.""" @staticmethod def verify_token(token: bytes): """Checks if the given token is of correct type and length.""" if not isinstance(token, bytes): raise TypeError("Token must be bytes") if len(token) != 16: raise ValueError("Wrong token length") @staticmethod def ensure_bytes(msg: bytes | str) -> bytes: if isinstance(msg, str): return msg.encode() return msg @staticmethod def encode_timestamp(_timestamp: int) -> bytes: hex_value = f"{_timestamp:x}".zfill(8) return "".join(list(map(lambda idx: hex_value[idx], [5, 6, 3, 7, 1, 2, 0, 4]))).encode() @staticmethod def md5(data: bytes) -> bytes: """Calculates a md5 hashsum for the given bytes object.""" checksum = hashlib.md5() # nosec checksum.update(data) return checksum.digest() @staticmethod def encrypt_ecb(plaintext: bytes, token: bytes) -> bytes: """Encrypt plaintext with a given token using ecb mode. :param bytes plaintext: Plaintext (json) to encrypt :param bytes token: Token to use :return: Encrypted bytes """ if not isinstance(plaintext, bytes): raise TypeError("plaintext requires bytes") Utils.verify_token(token) cipher = AES.new(token, AES.MODE_ECB) if plaintext: plaintext = pad(plaintext, AES.block_size) return cipher.encrypt(plaintext) return plaintext @staticmethod def decrypt_ecb(ciphertext: bytes, token: bytes) -> bytes: """Decrypt ciphertext with a given token using ecb mode. :param bytes ciphertext: Ciphertext to decrypt :param bytes token: Token to use :return: Decrypted bytes object """ if not isinstance(ciphertext, bytes): raise TypeError("ciphertext requires bytes") if ciphertext: Utils.verify_token(token) aes_key = token decipher = AES.new(aes_key, AES.MODE_ECB) return unpad(decipher.decrypt(ciphertext), AES.block_size) return ciphertext @staticmethod def encrypt_cbc(plaintext: bytes, token: bytes) -> bytes: """Encrypt plaintext with a given token using cbc mode. This is currently used for testing purposes only. 
:param bytes plaintext: Plaintext (json) to encrypt :param bytes token: Token to use :return: Encrypted bytes """ if not isinstance(plaintext, bytes): raise TypeError("plaintext requires bytes") Utils.verify_token(token) iv = bytes(AES.block_size) cipher = AES.new(token, AES.MODE_CBC, iv) if plaintext: plaintext = pad(plaintext, AES.block_size) return cipher.encrypt(plaintext) return plaintext @staticmethod def decrypt_cbc(ciphertext: bytes, token: bytes) -> bytes: """Decrypt ciphertext with a given token using cbc mode. :param bytes ciphertext: Ciphertext to decrypt :param bytes token: Token to use :return: Decrypted bytes object """ if not isinstance(ciphertext, bytes): raise TypeError("ciphertext requires bytes") if ciphertext: Utils.verify_token(token) iv = bytes(AES.block_size) decipher = AES.new(token, AES.MODE_CBC, iv) return unpad(decipher.decrypt(ciphertext), AES.block_size) return ciphertext @staticmethod def _l01_key(local_key: str, timestamp: int) -> bytes: """Derive key for L01 protocol.""" hash_input = Utils.encode_timestamp(timestamp) + Utils.ensure_bytes(local_key) + SALT return hashlib.sha256(hash_input).digest() @staticmethod def _l01_iv(timestamp: int, nonce: int, sequence: int) -> bytes: """Derive IV for L01 protocol.""" digest_input = sequence.to_bytes(4, "big") + nonce.to_bytes(4, "big") + timestamp.to_bytes(4, "big") digest = hashlib.sha256(digest_input).digest() return digest[:12] @staticmethod def _l01_aad(timestamp: int, nonce: int, sequence: int, connect_nonce: int, ack_nonce: int) -> bytes: """Derive AAD for L01 protocol.""" return ( sequence.to_bytes(4, "big") + connect_nonce.to_bytes(4, "big") + ack_nonce.to_bytes(4, "big") + nonce.to_bytes(4, "big") + timestamp.to_bytes(4, "big") ) @staticmethod def encrypt_gcm_l01( plaintext: bytes, local_key: str, timestamp: int, sequence: int, nonce: int, connect_nonce: int, ack_nonce: int, ) -> bytes: """Encrypt plaintext for L01 protocol using AES-256-GCM.""" if not isinstance(plaintext, bytes): raise TypeError("plaintext requires bytes") key = Utils._l01_key(local_key, timestamp) iv = Utils._l01_iv(timestamp, nonce, sequence) aad = Utils._l01_aad(timestamp, nonce, sequence, connect_nonce, ack_nonce) cipher = AES.new(key, AES.MODE_GCM, nonce=iv) cipher.update(aad) ciphertext, tag = cipher.encrypt_and_digest(plaintext) return ciphertext + tag @staticmethod def decrypt_gcm_l01( payload: bytes, local_key: str, timestamp: int, sequence: int, nonce: int, connect_nonce: int, ack_nonce: int, ) -> bytes: """Decrypt payload for L01 protocol using AES-256-GCM.""" if not isinstance(payload, bytes): raise TypeError("payload requires bytes") key = Utils._l01_key(local_key, timestamp) iv = Utils._l01_iv(timestamp, nonce, sequence) aad = Utils._l01_aad(timestamp, nonce, sequence, connect_nonce, ack_nonce) if len(payload) < 16: raise ValueError("Invalid payload length for GCM decryption") tag = payload[-16:] ciphertext = payload[:-16] cipher = AES.new(key, AES.MODE_GCM, nonce=iv) cipher.update(aad) try: return cipher.decrypt_and_verify(ciphertext, tag) except ValueError as e: raise RoborockException("GCM tag verification failed") from e @staticmethod def crc(data: bytes) -> int: """Gather bytes for checksum calculation.""" return binascii.crc32(data) @staticmethod def decompress(compressed_data: bytes): """Decompress data using gzip.""" return gzip.decompress(compressed_data) class EncryptionAdapter(Construct): """Adapter to handle communication encryption.""" def __init__(self, token_func: Callable): super().__init__() self.token_func 
= token_func def _parse(self, stream, context, path): subcon1 = Optional(Int16ub) length = subcon1.parse_stream(stream, **context) if not length: if length == 0: subcon1.parse_stream(stream, **context) # seek 2 return None subcon2 = Bytes(length) obj = subcon2.parse_stream(stream, **context) return self._decode(obj, context, path) def _build(self, obj, stream, context, path): if obj is not None: obj2 = self._encode(obj, context, path) subcon1 = Int16ub length = len(obj2) subcon1.build_stream(length, stream, **context) subcon2 = Bytes(length) subcon2.build_stream(obj2, stream, **context) return obj def _encode(self, obj, context, _): """Encrypt the given payload with the token stored in the context. :param obj: JSON object to encrypt """ if context.version == b"A01": iv = md5hex(format(context.random, "08x") + A01_HASH)[8:24] decipher = AES.new(bytes(context.search("local_key"), "utf-8"), AES.MODE_CBC, bytes(iv, "utf-8")) f = decipher.encrypt(obj) return f elif context.version == b"B01": iv = md5hex(f"{context.random:08x}" + B01_HASH)[9:25] decipher = AES.new(bytes(context.search("local_key"), "utf-8"), AES.MODE_CBC, bytes(iv, "utf-8")) return decipher.encrypt(obj) elif context.version == b"L01": return Utils.encrypt_gcm_l01( plaintext=obj, local_key=context.search("local_key"), timestamp=context.timestamp, sequence=context.seq, nonce=context.random, connect_nonce=context.search("connect_nonce"), ack_nonce=context.search("ack_nonce"), ) token = self.token_func(context) encrypted = Utils.encrypt_ecb(obj, token) return encrypted def _decode(self, obj, context, _): """Decrypts the given payload with the token stored in the context.""" if context.version == b"A01": iv = md5hex(format(context.random, "08x") + A01_HASH)[8:24] decipher = AES.new(bytes(context.search("local_key"), "utf-8"), AES.MODE_CBC, bytes(iv, "utf-8")) f = decipher.decrypt(obj) return f elif context.version == b"B01": iv = md5hex(f"{context.random:08x}" + B01_HASH)[9:25] decipher = AES.new(bytes(context.search("local_key"), "utf-8"), AES.MODE_CBC, bytes(iv, "utf-8")) return decipher.decrypt(obj) elif context.version == b"L01": return Utils.decrypt_gcm_l01( payload=obj, local_key=context.search("local_key"), timestamp=context.timestamp, sequence=context.seq, nonce=context.random, connect_nonce=context.search("connect_nonce"), ack_nonce=context.search("ack_nonce"), ) token = self.token_func(context) decrypted = Utils.decrypt_ecb(obj, token) return decrypted class OptionalChecksum(Checksum): def _parse(self, stream, context, path): if not context.message.value.payload: return hash1 = self.checksumfield.parse_stream(stream, **context) hash2 = self.hashfunc(self.bytesfunc(context)) if hash1 != hash2: raise ChecksumError( f"wrong checksum, read {hash1 if not isinstance(hash1, bytestringtype) else binascii.hexlify(hash1)}, " f"computed {hash2 if not isinstance(hash2, bytestringtype) else binascii.hexlify(hash2)}", path=path, ) return hash1 class PrefixedStruct(Struct): def _parse(self, stream, context, path): subcon1 = Peek(Optional(Bytes(3))) peek_version = subcon1.parse_stream(stream, **context) if peek_version not in (b"1.0", b"A01", b"B01", b"L01"): subcon2 = Bytes(4) subcon2.parse_stream(stream, **context) return super()._parse(stream, context, path) def _build(self, obj, stream, context, path): prefixed = context.search("prefixed") if not prefixed: return super()._build(obj, stream, context, path) offset = stream_tell(stream, path) stream_seek(stream, offset + 4, 0, path) super()._build(obj, stream, context, path) new_offset = 
stream_tell(stream, path) subcon1 = Bytes(4) stream_seek(stream, offset, 0, path) subcon1.build_stream(new_offset - offset - subcon1.sizeof(**context), stream, **context) stream_seek(stream, new_offset + 4, 0, path) return obj _Message = RawCopy( Struct( "version" / Bytes(3), "seq" / Int32ub, "random" / Int32ub, "timestamp" / Int32ub, "protocol" / Int16ub, "payload" / EncryptionAdapter( lambda ctx: Utils.md5( Utils.encode_timestamp(ctx.timestamp) + Utils.ensure_bytes(ctx.search("local_key")) + SALT ), ), ) ) _Messages = Struct( "messages" / GreedyRange( PrefixedStruct( "message" / _Message, "checksum" / OptionalChecksum(Optional(Int32ub), Utils.crc, lambda ctx: ctx.message.data), ) ), "remaining" / Optional(GreedyBytes), ) class _Parser: def __init__(self, con: Construct, required_local_key: bool): self.con = con self.required_local_key = required_local_key def parse( self, data: bytes, local_key: str | None = None, connect_nonce: int | None = None, ack_nonce: int | None = None ) -> tuple[list[RoborockMessage], bytes]: if self.required_local_key and local_key is None: raise RoborockException("Local key is required") parsed = self.con.parse(data, local_key=local_key, connect_nonce=connect_nonce, ack_nonce=ack_nonce) parsed_messages = [Container({"message": parsed.message})] if parsed.get("message") else parsed.messages messages = [] for message in parsed_messages: messages.append( RoborockMessage( version=message.message.value.version, seq=message.message.value.get("seq"), random=message.message.value.get("random"), timestamp=message.message.value.get("timestamp"), protocol=message.message.value.get("protocol"), payload=message.message.value.payload, ) ) remaining = parsed.get("remaining") or b"" return messages, remaining def build( self, roborock_messages: list[RoborockMessage] | RoborockMessage, local_key: str, prefixed: bool = True, connect_nonce: int | None = None, ack_nonce: int | None = None, ) -> bytes: if isinstance(roborock_messages, RoborockMessage): roborock_messages = [roborock_messages] messages = [] for roborock_message in roborock_messages: messages.append( { "message": { "value": { "version": roborock_message.version, "seq": roborock_message.seq, "random": roborock_message.random, "timestamp": roborock_message.timestamp, "protocol": roborock_message.protocol, "payload": roborock_message.payload, } }, } ) return self.con.build( {"messages": [message for message in messages], "remaining": b""}, local_key=local_key, prefixed=prefixed, connect_nonce=connect_nonce, ack_nonce=ack_nonce, ) MessageParser: _Parser = _Parser(_Messages, True) def create_mqtt_params(rriot: RRiot) -> MqttParams: """Return the MQTT parameters for this user.""" url = urlparse(rriot.r.m) if not isinstance(url.hostname, str): raise RoborockException(f"Url parsing '{rriot.r.m}' returned an invalid hostname") if not url.port: raise RoborockException(f"Url parsing '{rriot.r.m}' returned an invalid port") hashed_user = md5hex(rriot.u + ":" + rriot.k)[2:10] hashed_password = md5hex(rriot.s + ":" + rriot.k)[16:] return MqttParams( host=str(url.hostname), port=url.port, tls=(url.scheme == "ssl"), username=hashed_user, password=hashed_password, ) Decoder = Callable[[bytes], list[RoborockMessage]] Encoder = Callable[[RoborockMessage], bytes] def create_mqtt_decoder(local_key: str) -> Decoder: """Create a decoder for MQTT messages.""" def decode(data: bytes) -> list[RoborockMessage]: """Parse the given data into Roborock messages.""" messages, _ = MessageParser.parse(data, local_key) return messages return decode def 
create_mqtt_encoder(local_key: str) -> Encoder: """Create an encoder for MQTT messages.""" def encode(messages: RoborockMessage) -> bytes: """Build the given Roborock messages into a byte string.""" return MessageParser.build(messages, local_key, prefixed=False) return encode def create_local_decoder(local_key: str, connect_nonce: int | None = None, ack_nonce: int | None = None) -> Decoder: """Create a decoder for local API messages.""" # This buffer is used to accumulate bytes until a complete message can be parsed. # It is defined outside the decode function to maintain state across calls. buffer: bytes = b"" def decode(bytes_data: bytes) -> list[RoborockMessage]: """Parse the given data into Roborock messages.""" nonlocal buffer buffer += bytes_data parsed_messages, remaining = MessageParser.parse( buffer, local_key=local_key, connect_nonce=connect_nonce, ack_nonce=ack_nonce ) buffer = remaining return parsed_messages return decode def create_local_encoder(local_key: str, connect_nonce: int | None = None, ack_nonce: int | None = None) -> Encoder: """Create an encoder for local API messages.""" def encode(message: RoborockMessage) -> bytes: """Called when data is sent to the transport.""" return MessageParser.build(message, local_key=local_key, connect_nonce=connect_nonce, ack_nonce=ack_nonce) return encode Python-roborock-python-roborock-32df4f3/roborock/protocols/000077500000000000000000000000001507503702500242335ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/roborock/protocols/a01_protocol.py000066400000000000000000000036331507503702500271140ustar00rootroot00000000000000"""Roborock A01 Protocol encoding and decoding.""" import json import logging from typing import Any from Crypto.Cipher import AES from Crypto.Util.Padding import pad, unpad from roborock.exceptions import RoborockException from roborock.roborock_message import ( RoborockDyadDataProtocol, RoborockMessage, RoborockMessageProtocol, RoborockZeoProtocol, ) _LOGGER = logging.getLogger(__name__) A01_VERSION = b"A01" def encode_mqtt_payload( data: dict[RoborockDyadDataProtocol, Any] | dict[RoborockZeoProtocol, Any] | dict[RoborockDyadDataProtocol | RoborockZeoProtocol, Any], ) -> RoborockMessage: """Encode payload for A01 commands over MQTT.""" dps_data = {"dps": data} payload = pad(json.dumps(dps_data).encode("utf-8"), AES.block_size) return RoborockMessage( protocol=RoborockMessageProtocol.RPC_REQUEST, version=A01_VERSION, payload=payload, ) def decode_rpc_response(message: RoborockMessage) -> dict[int, Any]: """Decode a V1 RPC_RESPONSE message.""" if not message.payload: raise RoborockException("Invalid A01 message format: missing payload") try: unpadded = unpad(message.payload, AES.block_size) except ValueError as err: raise RoborockException(f"Unable to unpad A01 payload: {err}") try: payload = json.loads(unpadded.decode()) except (json.JSONDecodeError, TypeError) as e: raise RoborockException(f"Invalid A01 message payload: {e} for {message.payload!r}") from e datapoints = payload.get("dps", {}) if not isinstance(datapoints, dict): raise RoborockException(f"Invalid A01 message format: 'dps' should be a dictionary for {message.payload!r}") try: return {int(key): value for key, value in datapoints.items()} except ValueError: raise RoborockException(f"Invalid A01 message format: 'dps' key should be an integer for {message.payload!r}") Python-roborock-python-roborock-32df4f3/roborock/protocols/b01_protocol.py000066400000000000000000000041541507503702500271140ustar00rootroot00000000000000"""Roborock B01 
Protocol encoding and decoding.""" import json import logging from typing import Any from Crypto.Cipher import AES from Crypto.Util.Padding import pad, unpad from roborock import RoborockB01Methods from roborock.exceptions import RoborockException from roborock.roborock_message import ( RoborockMessage, RoborockMessageProtocol, ) from roborock.util import get_next_int _LOGGER = logging.getLogger(__name__) B01_VERSION = b"B01" CommandType = RoborockB01Methods | str ParamsType = list | dict | int | None def encode_mqtt_payload(dps: int, command: CommandType, params: ParamsType) -> RoborockMessage: """Encode payload for B01 commands over MQTT.""" dps_data = { "dps": { dps: { "method": str(command), "msgId": str(get_next_int(100000000000, 999999999999)), "params": params or [], } } } payload = pad(json.dumps(dps_data).encode("utf-8"), AES.block_size) return RoborockMessage( protocol=RoborockMessageProtocol.RPC_REQUEST, version=B01_VERSION, payload=payload, ) def decode_rpc_response(message: RoborockMessage) -> dict[int, Any]: """Decode a B01 RPC_RESPONSE message.""" if not message.payload: raise RoborockException("Invalid B01 message format: missing payload") try: unpadded = unpad(message.payload, AES.block_size) except ValueError as err: raise RoborockException(f"Unable to unpad B01 payload: {err}") try: payload = json.loads(unpadded.decode()) except (json.JSONDecodeError, TypeError) as e: raise RoborockException(f"Invalid B01 message payload: {e} for {message.payload!r}") from e datapoints = payload.get("dps", {}) if not isinstance(datapoints, dict): raise RoborockException(f"Invalid B01 message format: 'dps' should be a dictionary for {message.payload!r}") try: return {int(key): value for key, value in datapoints.items()} except ValueError: raise RoborockException(f"Invalid B01 message format: 'dps' key should be an integer for {message.payload!r}") Python-roborock-python-roborock-32df4f3/roborock/protocols/v1_protocol.py000066400000000000000000000163361507503702500270650ustar00rootroot00000000000000"""Roborock V1 Protocol Encoder.""" from __future__ import annotations import base64 import json import logging import math import secrets import struct import time from collections.abc import Callable from dataclasses import dataclass, field from typing import Any from roborock.containers import RRiot from roborock.exceptions import RoborockException, RoborockUnsupportedFeature from roborock.protocol import Utils from roborock.roborock_message import RoborockMessage, RoborockMessageProtocol from roborock.roborock_typing import RoborockCommand from roborock.util import get_next_int _LOGGER = logging.getLogger(__name__) __all__ = [ "SecurityData", "create_security_data", "decode_rpc_response", ] CommandType = RoborockCommand | str ParamsType = list | dict | int | None @dataclass(frozen=True, kw_only=True) class SecurityData: """Security data included in the request for some V1 commands.""" endpoint: str nonce: bytes def to_dict(self) -> dict[str, Any]: """Convert security data to a dictionary for sending in the payload.""" return {"security": {"endpoint": self.endpoint, "nonce": self.nonce.hex().lower()}} def to_diagnostic_data(self) -> dict[str, Any]: """Convert security data to a dictionary for debugging purposes.""" return {"nonce": self.nonce.hex().lower()} def create_security_data(rriot: RRiot) -> SecurityData: """Create a SecurityData instance for the given endpoint and nonce.""" nonce = secrets.token_bytes(16) endpoint = base64.b64encode(Utils.md5(rriot.k.encode())[8:14]).decode() return 
SecurityData(endpoint=endpoint, nonce=nonce) @dataclass class RequestMessage: """Data structure for v1 RoborockMessage payloads.""" method: RoborockCommand | str params: ParamsType timestamp: int = field(default_factory=lambda: math.floor(time.time())) request_id: int = field(default_factory=lambda: get_next_int(10000, 32767)) def encode_message( self, protocol: RoborockMessageProtocol, security_data: SecurityData | None = None, version: str = "1.0" ) -> RoborockMessage: """Convert the request message to a RoborockMessage.""" return RoborockMessage( timestamp=self.timestamp, protocol=protocol, payload=self._as_payload(security_data=security_data), version=version.encode(), ) def _as_payload(self, security_data: SecurityData | None) -> bytes: """Convert the request arguments to a dictionary.""" inner = { "id": self.request_id, "method": self.method, "params": self.params or [], **(security_data.to_dict() if security_data else {}), } return bytes( json.dumps( { "dps": {"101": json.dumps(inner, separators=(",", ":"))}, "t": self.timestamp, }, separators=(",", ":"), ).encode() ) ResponseData = dict[str, Any] | list | int @dataclass(kw_only=True, frozen=True) class ResponseMessage: """Data structure for v1 RoborockMessage responses.""" request_id: int | None """The request ID of the response.""" data: ResponseData """The data of the response, where the type depends on the command.""" api_error: RoborockException | None = None """The API error message of the response if any.""" def decode_rpc_response(message: RoborockMessage) -> ResponseMessage: """Decode a V1 RPC_RESPONSE message. This will raise a RoborockException if the message cannot be parsed. A response object will be returned even if there is an error in the response, as long as we can extract the request ID. This is so we can associate an API response with a request even if there was an error. 
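As a concrete illustration (all values here are made up), a payload that decodes to {"dps": {"102": "{\"id\": 20001, \"result\": \"ok\"}"}} yields ResponseMessage(request_id=20001, data={}, api_error=None): "102" is the RPC_RESPONSE data point, and a bare "ok" result is normalized to an empty dict.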
""" if not message.payload: return ResponseMessage(request_id=message.seq, data={}) try: payload = json.loads(message.payload.decode()) except (json.JSONDecodeError, TypeError) as e: raise RoborockException(f"Invalid V1 message payload: {e} for {message.payload!r}") from e _LOGGER.debug("Decoded V1 message payload: %s", payload) datapoints = payload.get("dps", {}) if not isinstance(datapoints, dict): raise RoborockException(f"Invalid V1 message format: 'dps' should be a dictionary for {message.payload!r}") if not (data_point := datapoints.get(str(RoborockMessageProtocol.RPC_RESPONSE))): raise RoborockException( f"Invalid V1 message format: missing '{RoborockMessageProtocol.RPC_RESPONSE}' data point" ) try: data_point_response = json.loads(data_point) except (json.JSONDecodeError, TypeError) as e: raise RoborockException( f"Invalid V1 message data point '{RoborockMessageProtocol.RPC_RESPONSE}': {e} for {message.payload!r}" ) from e request_id: int | None = data_point_response.get("id") exc: RoborockException | None = None if error := data_point_response.get("error"): exc = RoborockException(error) if not (result := data_point_response.get("result")): exc = RoborockException(f"Invalid V1 message format: missing 'result' in data point for {message.payload!r}") else: _LOGGER.debug("Decoded V1 message result: %s", result) if isinstance(result, str): if result == "unknown_method": exc = RoborockUnsupportedFeature("The method called is not recognized by the device.") elif result != "ok": exc = RoborockException(f"Unexpected API Result: {result}") result = {} if not isinstance(result, dict | list | int): raise RoborockException( f"Invalid V1 message format: 'result' was unexpected type {type(result)}. {message.payload!r}" ) if not request_id and exc: raise exc return ResponseMessage(request_id=request_id, data=result, api_error=exc) @dataclass class MapResponse: """Data structure for the V1 Map response.""" request_id: int """The request ID of the map response.""" data: bytes """The map data, decrypted and decompressed.""" def create_map_response_decoder(security_data: SecurityData) -> Callable[[RoborockMessage], MapResponse | None]: """Create a decoder for V1 map response messages.""" def _decode_map_response(message: RoborockMessage) -> MapResponse | None: """Decode a V1 map response message.""" if not message.payload or len(message.payload) < 24: raise RoborockException("Invalid V1 map response format: missing payload") header, body = message.payload[:24], message.payload[24:] [endpoint, _, request_id, _] = struct.unpack("<8s8sH6s", header) if not endpoint.decode().startswith(security_data.endpoint): _LOGGER.debug("Received map response not requested by this device, ignoring.") return None try: decrypted = Utils.decrypt_cbc(body, security_data.nonce) except ValueError as err: raise RoborockException("Failed to decode map message payload") from err decompressed = Utils.decompress(decrypted) return MapResponse(request_id=request_id, data=decompressed) return _decode_map_response Python-roborock-python-roborock-32df4f3/roborock/py.typed000066400000000000000000000000001507503702500236740ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/roborock/roborock_future.py000066400000000000000000000020071507503702500257720ustar00rootroot00000000000000from __future__ import annotations import asyncio from asyncio import Future from typing import Any from .exceptions import VacuumError class RoborockFuture: def __init__(self, protocol: int): self.protocol = protocol self.fut: Future = 
Future() self.loop = self.fut.get_loop() def _set_result(self, item: Any) -> None: if not self.fut.cancelled(): self.fut.set_result(item) def set_result(self, item: Any) -> None: self.loop.call_soon_threadsafe(self._set_result, item) def _set_exception(self, exc: VacuumError) -> None: if not self.fut.cancelled(): self.fut.set_exception(exc) def set_exception(self, exc: VacuumError) -> None: self.loop.call_soon_threadsafe(self._set_exception, exc) async def async_get(self, timeout: float | int) -> tuple[Any, VacuumError | None]: try: async with asyncio.timeout(timeout): return await self.fut finally: self.fut.cancel() Python-roborock-python-roborock-32df4f3/roborock/roborock_message.py000066400000000000000000000145351507503702500261150ustar00rootroot00000000000000from __future__ import annotations import math import time from dataclasses import dataclass, field from enum import StrEnum from roborock import RoborockEnum from roborock.util import get_next_int class RoborockMessageProtocol(RoborockEnum): HELLO_REQUEST = 0 HELLO_RESPONSE = 1 PING_REQUEST = 2 PING_RESPONSE = 3 GENERAL_REQUEST = 4 GENERAL_RESPONSE = 5 RPC_REQUEST = 101 RPC_RESPONSE = 102 MAP_RESPONSE = 301 class RoborockDataProtocol(RoborockEnum): ERROR_CODE = 120 STATE = 121 BATTERY = 122 FAN_POWER = 123 WATER_BOX_MODE = 124 MAIN_BRUSH_WORK_TIME = 125 SIDE_BRUSH_WORK_TIME = 126 FILTER_WORK_TIME = 127 ADDITIONAL_PROPS = 128 TASK_COMPLETE = 130 TASK_CANCEL_LOW_POWER = 131 TASK_CANCEL_IN_MOTION = 132 CHARGE_STATUS = 133 DRYING_STATUS = 134 OFFLINE_STATUS = 135 @classmethod def _missing_(cls: type[RoborockEnum], key) -> RoborockEnum: raise ValueError("%s not a valid key for Data Protocol", key) class RoborockDyadDataProtocol(RoborockEnum): DRYING_STATUS = 134 START = 200 STATUS = 201 SELF_CLEAN_MODE = 202 SELF_CLEAN_LEVEL = 203 WARM_LEVEL = 204 CLEAN_MODE = 205 SUCTION = 206 WATER_LEVEL = 207 BRUSH_SPEED = 208 POWER = 209 COUNTDOWN_TIME = 210 AUTO_SELF_CLEAN_SET = 212 AUTO_DRY = 213 MESH_LEFT = 214 BRUSH_LEFT = 215 ERROR = 216 MESH_RESET = 218 BRUSH_RESET = 219 VOLUME_SET = 221 STAND_LOCK_AUTO_RUN = 222 AUTO_SELF_CLEAN_SET_MODE = 223 AUTO_DRY_MODE = 224 SILENT_DRY_DURATION = 225 SILENT_MODE = 226 SILENT_MODE_START_TIME = 227 SILENT_MODE_END_TIME = 228 RECENT_RUN_TIME = 229 TOTAL_RUN_TIME = 230 FEATURE_INFO = 235 RECOVER_SETTINGS = 236 DRY_COUNTDOWN = 237 ID_QUERY = 10000 F_C = 10001 SCHEDULE_TASK = 10002 SND_SWITCH = 10003 SND_STATE = 10004 PRODUCT_INFO = 10005 PRIVACY_INFO = 10006 OTA_NFO = 10007 RPC_REQUEST = 10101 RPC_RESPONSE = 10102 class RoborockZeoProtocol(RoborockEnum): START = 200 # rw PAUSE = 201 # rw SHUTDOWN = 202 # rw STATE = 203 # ro MODE = 204 # rw PROGRAM = 205 # rw CHILD_LOCK = 206 # rw TEMP = 207 # rw RINSE_TIMES = 208 # rw SPIN_LEVEL = 209 # rw DRYING_MODE = 210 # rw DETERGENT_SET = 211 # rw SOFTENER_SET = 212 # rw DETERGENT_TYPE = 213 # rw SOFTENER_TYPE = 214 # rw COUNTDOWN = 217 # rw WASHING_LEFT = 218 # ro DOORLOCK_STATE = 219 # ro ERROR = 220 # ro CUSTOM_PARAM_SAVE = 221 # rw CUSTOM_PARAM_GET = 222 # ro SOUND_SET = 223 # rw TIMES_AFTER_CLEAN = 224 # ro DEFAULT_SETTING = 225 # rw DETERGENT_EMPTY = 226 # ro SOFTENER_EMPTY = 227 # ro LIGHT_SETTING = 229 # rw DETERGENT_VOLUME = 230 # rw SOFTENER_VOLUME = 231 # rw APP_AUTHORIZATION = 232 # rw ID_QUERY = 10000 F_C = 10001 SND_STATE = 10004 PRODUCT_INFO = 10005 PRIVACY_INFO = 10006 OTA_NFO = 10007 WASHING_LOG = 10008 RPC_REQ = 10101 RPC_RESp = 10102 class RoborockB01Protocol(RoborockEnum): RPC_REQUEST = 101 RPC_RESPONSE = 102 ERROR_CODE = 120 STATE = 121 BATTERY = 
122 FAN_POWER = 123 WATER_BOX_MODE = 124 MAIN_BRUSH_LIFE = 125 SIDE_BRUSH_LIFE = 126 FILTER_LIFE = 127 OFFLINE_STATUS = 135 CLEAN_TIMES = 136 CLEANING_PREFERENCE = 137 CLEAN_TASK_TYPE = 138 BACK_TYPE = 139 DOCK_TASK_TYPE = 140 CLEANING_PROGRESS = 141 FC_STATE = 142 START_CLEAN_TASK = 201 START_BACK_DOCK_TASK = 202 START_DOCK_TASK = 203 PAUSE = 204 RESUME = 205 STOP = 206 CEIP = 207 class RoborockB01Props(StrEnum): """Properties requested by the Roborock B01 model.""" STATUS = "status" FAULT = "fault" WIND = "wind" WATER = "water" MODE = "mode" QUANTITY = "quantity" ALARM = "alarm" VOLUME = "volume" HYPA = "hypa" MAIN_BRUSH = "main_brush" SIDE_BRUSH = "side_brush" MOP_LIFE = "mop_life" MAIN_SENSOR = "main_sensor" NET_STATUS = "net_status" REPEAT_STATE = "repeat_state" TANK_STATE = "tank_state" SWEEP_TYPE = "sweep_type" CLEAN_PATH_PREFERENCE = "clean_path_preference" CLOTH_STATE = "cloth_state" TIME_ZONE = "time_zone" TIME_ZONE_INFO = "time_zone_info" LANGUAGE = "language" CLEANING_TIME = "cleaning_time" REAL_CLEAN_TIME = "real_clean_time" CLEANING_AREA = "cleaning_area" CUSTOM_TYPE = "custom_type" SOUND = "sound" WORK_MODE = "work_mode" STATION_ACT = "station_act" CHARGE_STATE = "charge_state" CURRENT_MAP_ID = "current_map_id" MAP_NUM = "map_num" DUST_ACTION = "dust_action" QUIET_IS_OPEN = "quiet_is_open" QUIET_BEGIN_TIME = "quiet_begin_time" QUIET_END_TIME = "quiet_end_time" CLEAN_FINISH = "clean_finish" VOICE_TYPE = "voice_type" VOICE_TYPE_VERSION = "voice_type_version" ORDER_TOTAL = "order_total" BUILD_MAP = "build_map" PRIVACY = "privacy" DUST_AUTO_STATE = "dust_auto_state" DUST_FREQUENCY = "dust_frequency" CHILD_LOCK = "child_lock" MULTI_FLOOR = "multi_floor" MAP_SAVE = "map_save" LIGHT_MODE = "light_mode" GREEN_LASER = "green_laser" DUST_BAG_USED = "dust_bag_used" ORDER_SAVE_MODE = "order_save_mode" MANUFACTURER = "manufacturer" BACK_TO_WASH = "back_to_wash" CHARGE_STATION_TYPE = "charge_station_type" PV_CUT_CHARGE = "pv_cut_charge" PV_CHARGING = "pv_charging" SERIAL_NUMBER = "serial_number" RECOMMEND = "recommend" ADD_SWEEP_STATUS = "add_sweep_status" ROBOROCK_DATA_STATUS_PROTOCOL = [ RoborockDataProtocol.ERROR_CODE, RoborockDataProtocol.STATE, RoborockDataProtocol.BATTERY, RoborockDataProtocol.FAN_POWER, RoborockDataProtocol.WATER_BOX_MODE, RoborockDataProtocol.CHARGE_STATUS, ] ROBOROCK_DATA_CONSUMABLE_PROTOCOL = [ RoborockDataProtocol.MAIN_BRUSH_WORK_TIME, RoborockDataProtocol.SIDE_BRUSH_WORK_TIME, RoborockDataProtocol.FILTER_WORK_TIME, ] @dataclass class RoborockMessage: protocol: RoborockMessageProtocol payload: bytes | None = None seq: int = field(default_factory=lambda: get_next_int(100000, 999999)) version: bytes = b"1.0" random: int = field(default_factory=lambda: get_next_int(10000, 99999)) timestamp: int = field(default_factory=lambda: math.floor(time.time())) Python-roborock-python-roborock-32df4f3/roborock/roborock_typing.py000066400000000000000000000340541507503702500260010ustar00rootroot00000000000000from __future__ import annotations from dataclasses import dataclass, field from enum import Enum, StrEnum from .containers import ( CleanRecord, CleanSummary, Consumable, DustCollectionMode, RoborockBase, SmartWashParams, Status, WashTowelMode, ) class RoborockCommand(str, Enum): ADD_MOP_TEMPLATE_PARAMS = "add_mop_template_params" APP_AMETHYST_SELF_CHECK = "app_amethyst_self_check" APP_CHARGE = "app_charge" APP_DELETE_WIFI = "app_delete_wifi" APP_GET_AMETHYST_STATUS = "app_get_amethyst_status" APP_GET_CARPET_DEEP_CLEAN_STATUS = "app_get_carpet_deep_clean_status" 
APP_GET_CLEAN_ESTIMATE_INFO = "app_get_clean_estimate_info" APP_GET_DRYER_SETTING = "app_get_dryer_setting" APP_GET_INIT_STATUS = "app_get_init_status" APP_GET_LOCALE = "app_get_locale" APP_GET_WIFI_LIST = "app_get_wifi_list" APP_GOTO_TARGET = "app_goto_target" APP_KEEP_EASTER_EGG = "app_keep_easter_egg" APP_PAUSE = "app_pause" APP_RC_END = "app_rc_end" APP_RC_MOVE = "app_rc_move" APP_RC_START = "app_rc_start" APP_RC_STOP = "app_rc_stop" APP_RESUME_BUILD_MAP = "app_resume_build_map" APP_RESUME_PATROL = "app_resume_patrol" APP_SEGMENT_CLEAN = "app_segment_clean" APP_SET_AMETHYST_STATUS = "app_set_amethyst_status" APP_SET_CARPET_DEEP_CLEAN_STATUS = "app_set_carpet_deep_clean_status" APP_SET_CROSS_CARPET_CLEANING_STATUS = "app_set_cross_carpet_cleaning_status" APP_SET_DOOR_SILL_BLOCKS = "app_set_door_sill_blocks" APP_SET_DIRTY_REPLENISH_CLEAN_STATUS = "app_set_dirty_replenish_clean_status" APP_SET_DRYER_SETTING = "app_set_dryer_setting" APP_SET_DRYER_STATUS = "app_set_dryer_status" APP_SET_DYNAMIC_CONFIG = "app_set_dynamic_config" APP_SET_IGNORE_STUCK_POINT = "app_set_ignore_stuck_point" APP_SET_SMART_CLIFF_FORBIDDEN = "app_set_smart_cliff_forbidden" APP_SET_SMART_DOOR_SILL = "app_set_smart_door_sill" APP_SPOT = "app_spot" APP_START = "app_start" APP_START_BUILD_MAP = "app_start_build_map" APP_START_COLLECT_DUST = "app_start_collect_dust" APP_START_EASTER_EGG = "app_start_easter_egg" APP_START_PATROL = "app_start_patrol" APP_START_PET_PATROL = "app_start_pet_patrol" APP_START_WASH = "app_start_wash" APP_STAT = "app_stat" APP_STOP = "app_stop" APP_STOP_COLLECT_DUST = "app_stop_collect_dust" APP_STOP_WASH = "app_stop_wash" APP_UPDATE_UNSAVE_MAP = "app_update_unsave_map" APP_WAKEUP_ROBOT = "app_wakeup_robot" APP_ZONED_CLEAN = "app_zoned_clean" CHANGE_SOUND_VOLUME = "change_sound_volume" CHECK_HOMESEC_PASSWORD = "check_homesec_password" CLOSE_DND_TIMER = "close_dnd_timer" CLOSE_VALLEY_ELECTRICITY_TIMER = "close_valley_electricity_timer" DEL_CLEAN_RECORD = "del_clean_record" DEL_CLEAN_RECORD_MAP_V2 = "del_clean_record_map_v2" DEL_MAP = "del_map" DEL_MOP_TEMPLATE_PARAMS = "del_mop_template_params" DEL_SERVER_TIMER = "del_server_timer" DEL_TIMER = "del_timer" DNLD_INSTALL_SOUND = "dnld_install_sound" ENABLE_HOMESEC_VOICE = "enable_homesec_voice" ENABLE_LOG_UPLOAD = "enable_log_upload" END_EDIT_MAP = "end_edit_map" FIND_ME = "find_me" GET_AUTO_DELIVERY_CLEANING_FLUID = "get_auto_delivery_cleaning_fluid" GET_CAMERA_STATUS = "get_camera_status" GET_CARPET_CLEAN_MODE = "get_carpet_clean_mode" GET_CARPET_MODE = "get_carpet_mode" GET_CHILD_LOCK_STATUS = "get_child_lock_status" GET_CLEAN_FOLLOW_GROUND_MATERIAL_STATUS = "get_clean_follow_ground_material_status" GET_CLEAN_MOTOR_MODE = "get_clean_motor_mode" GET_CLEAN_RECORD = "get_clean_record" GET_CLEAN_RECORD_MAP = "get_clean_record_map" GET_CLEAN_SEQUENCE = "get_clean_sequence" GET_CLEAN_SUMMARY = "get_clean_summary" GET_COLLISION_AVOID_STATUS = "get_collision_avoid_status" GET_CONSUMABLE = "get_consumable" GET_CURRENT_SOUND = "get_current_sound" GET_CUSTOM_MODE = "get_custom_mode" GET_CUSTOMIZE_CLEAN_MODE = "get_customize_clean_mode" GET_DEVICE_ICE = "get_device_ice" GET_DEVICE_SDP = "get_device_sdp" GET_DND_TIMER = "get_dnd_timer" GET_DOCK_INFO = "get_dock_info" GET_DUST_COLLECTION_MODE = "get_dust_collection_mode" GET_DUST_COLLECTION_SWITCH_STATUS = "get_dust_collection_switch_status" GET_DYNAMIC_DATA = "get_dynamic_data" GET_DYNAMIC_MAP_DIFF = "get_dynamic_map_diff" GET_FAN_MOTOR_WORK_TIMEOUT = "get_fan_motor_work_timeout" GET_FLOW_LED_STATUS = 
"get_flow_led_status" GET_FRESH_MAP = "get_fresh_map" GET_FW_FEATURES = "get_fw_features" GET_HOMESEC_CONNECT_STATUS = "get_homesec_connect_status" GET_IDENTIFY_FURNITURE_STATUS = "get_identify_furniture_status" GET_IDENTIFY_GROUND_MATERIAL_STATUS = "get_identify_ground_material_status" GET_LED_STATUS = "get_led_status" GET_LOG_UPLOAD_STATUS = "get_log_upload_status" GET_MAP = "get_map" GET_MAP_BEAUTIFICATION_STATUS = "get_map_beautification_status" GET_MAP_STATUS = "get_map_status" GET_MAP_V1 = "get_map_v1" GET_MAP_V2 = "get_map_v2" GET_MAP_CALIBRATION = "get_map_calibration" # Custom command GET_MOP_MOTOR_STATUS = "get_mop_motor_status" GET_MOP_TEMPLATE_PARAMS_BY_ID = "get_mop_template_params_by_id" GET_MOP_TEMPLATE_PARAMS_SUMMARY = "get_mop_template_params_summary" GET_MULTI_MAP = "get_multi_map" GET_MULTI_MAPS_LIST = "get_multi_maps_list" GET_NETWORK_INFO = "get_network_info" GET_OFFLINE_MAP_STATUS = "get_offline_map_status" GET_PERSIST = "get_persist_map" GET_PROP = "get_prop" GET_RANDOM_PKEY = "get_random_pkey" GET_RECOVER_MAP = "get_recover_map" GET_RECOVER_MAPS = "get_recover_maps" GET_ROOM_MAPPING = "get_room_mapping" GET_SCENES_VALID_TIDS = "get_scenes_valid_tids" GET_SEGMENT_STATUS = "get_segment_status" GET_SERIAL_NUMBER = "get_serial_number" GET_SERVER_TIMER = "get_server_timer" GET_SMART_WASH_PARAMS = "get_smart_wash_params" GET_SOUND_PROGRESS = "get_sound_progress" GET_SOUND_VOLUME = "get_sound_volume" GET_STATUS = "get_status" GET_TESTID = "get_testid" GET_TIMER = "get_timer" GET_TIMER_DETAIL = "get_timer_detail" GET_TIMER_SUMMARY = "get_timer_summary" GET_TIMEZONE = "get_timezone" GET_TURN_SERVER = "get_turn_server" GET_VALLEY_ELECTRICITY_TIMER = "get_valley_electricity_timer" GET_WASH_DEBUG_PARAMS = "get_wash_debug_params" GET_WASH_TOWEL_MODE = "get_wash_towel_mode" GET_WASH_TOWEL_PARAMS = "get_wash_towel_params" GET_WATER_BOX_CUSTOM_MODE = "get_water_box_custom_mode" LOAD_MULTI_MAP = "load_multi_map" MANUAL_BAK_MAP = "manual_bak_map" MANUAL_SEGMENT_MAP = "manual_segment_map" MERGE_SEGMENT = "merge_segment" MOP_MODE = "mop_mode" MOP_TEMPLATE_ID = "mop_template_id" NAME_MULTI_MAP = "name_multi_map" NAME_SEGMENT = "name_segment" PLAY_AUDIO = "play_audio" RECOVER_MAP = "recover_map" RECOVER_MULTI_MAP = "recover_multi_map" RESET_CONSUMABLE = "reset_consumable" RESET_HOMESEC_PASSWORD = "reset_homesec_password" RESET_MAP = "reset_map" RESOLVE_ERROR = "resolve_error" RESUME_SEGMENT_CLEAN = "resume_segment_clean" RESUME_ZONED_CLEAN = "resume_zoned_clean" RETRY_REQUEST = "retry_request" REUNION_SCENES = "reunion_scenes" SAVE_FURNITURES = "save_furnitures" SAVE_MAP = "save_map" SEND_ICE_TO_ROBOT = "send_ice_to_robot" SEND_SDP_TO_ROBOT = "send_sdp_to_robot" SET_AIRDRY_HOURS = "set_airdry_hours" SET_APP_TIMEZONE = "set_app_timezone" SET_AUTO_DELIVERY_CLEANING_FLUID = "set_auto_delivery_cleaning_fluid" SET_CAMERA_STATUS = "set_camera_status" SET_CARPET_AREA = "set_carpet_area" SET_CARPET_CLEAN_MODE = "set_carpet_clean_mode" SET_CARPET_MODE = "set_carpet_mode" SET_CHILD_LOCK_STATUS = "set_child_lock_status" SET_CLEAN_FOLLOW_GROUND_MATERIAL_STATUS = "set_clean_follow_ground_material_status" SET_CLEAN_MOTOR_MODE = "set_clean_motor_mode" SET_CLEAN_SEQUENCE = "set_clean_sequence" SET_CLEAN_REPEAT_TIMES = "set_clean_repeat_times" SET_COLLISION_AVOID_STATUS = "set_collision_avoid_status" SET_CUSTOM_MODE = "set_custom_mode" SET_CUSTOMIZE_CLEAN_MODE = "set_customize_clean_mode" SET_DND_TIMER = "set_dnd_timer" SET_DND_TIMER_ACTIONS = "set_dnd_timer_actions" SET_DUST_COLLECTION_MODE = 
"set_dust_collection_mode" SET_DUST_COLLECTION_SWITCH_STATUS = "set_dust_collection_switch_status" SET_FAN_MOTOR_WORK_TIMEOUT = "set_fan_motor_work_timeout" SET_FDS_ENDPOINT = "set_fds_endpoint" SET_FLOW_LED_STATUS = "set_flow_led_status" SET_HOMESEC_PASSWORD = "set_homesec_password" SET_IDENTIFY_FURNITURE_STATUS = "set_identify_furniture_status" SET_IDENTIFY_GROUND_MATERIAL_STATUS = "set_identify_ground_material_status" SET_IGNORE_CARPET_ZONE = "set_ignore_carpet_zone" SET_IGNORE_IDENTIFY_AREA = "set_ignore_identify_area" SET_LAB_STATUS = "set_lab_status" SET_LED_STATUS = "set_led_status" SET_MAP_BEAUTIFICATION_STATUS = "set_map_beautification_status" SET_MOP_MODE = "set_mop_mode" SET_MOP_MOTOR_STATUS = "set_mop_motor_status" SET_MOP_TEMPLATE_ID = "set_mop_template_id" SET_OFFLINE_MAP_STATUS = "set_offline_map_status" SET_SCENES_SEGMENTS = "set_scenes_segments" SET_SCENES_ZONES = "set_scenes_zones" SET_SEGMENT_GROUND_MATERIAL = "set_segment_ground_material" SET_SERVER_TIMER = "set_server_timer" SET_SMART_WASH_PARAMS = "set_smart_wash_params" SET_SWITCH_MOP_MODE = "set_switch_map_mode" SET_TIMER = "set_timer" SET_TIMEZONE = "set_timezone" SET_VALLEY_ELECTRICITY_TIMER = "set_valley_electricity_timer" SET_VOICE_CHAT_VOLUME = "set_voice_chat_volume" SET_WASH_DEBUG_PARAMS = "set_wash_debug_params" SET_WASH_TOWEL_MODE = "set_wash_towel_mode" SET_WASH_TOWEL_PARAMS = "set_wash_towel_params" SET_WATER_BOX_CUSTOM_MODE = "set_water_box_custom_mode" SET_WATER_BOX_DISTANCE_OFF = "set_water_box_distance_off" SORT_MOP_TEMPLATE_PARAMS = "sort_mop_template_params" SPLIT_SEGMENT = "split_segment" START_CAMERA_PREVIEW = "start_camera_preview" START_CLEAN = "start_clean" START_EDIT_MAP = "start_edit_map" START_VOICE_CHAT = "start_voice_chat" START_WASH_THEN_CHARGE = "start_wash_then_charge" STOP_CAMERA_PREVIEW = "stop_camera_preview" STOP_FAN_MOTOR_WORK = "stop_fan_motor_work" STOP_GOTO_TARGET = "stop_goto_target" STOP_SEGMENT_CLEAN = "stop_segment_clean" STOP_VOICE_CHAT = "stop_voice_chat" STOP_ZONED_CLEAN = "stop_zoned_clean" SWITCH_VIDEO_QUALITY = "switch_video_quality" SWITCH_WATER_MARK = "switch_water_mark" TEST_SOUND_VOLUME = "test_sound_volume" UPD_SERVER_TIMER = "upd_server_timer" UPD_TIMER = "upd_timer" UPDATE_DOCK = "update_dock" UPDATE_MOP_TEMPLATE_PARAMS = "update_mop_template_params" UPLOAD_DATA_FOR_DEBUG_MODE = "upload_data_for_debug_mode" UPLOAD_PHOTO = "upload_photo" USE_NEW_MAP = "use_new_map" USE_OLD_MAP = "use_old_map" USER_UPLOAD_LOG = "user_upload_log" SET_STRETCH_TAG_STATUS = "set_stretch_tag_status" GET_STRETCH_TAG_STATUS = "get_stretch_tag_status" SET_RIGHT_BRUSH_STRETCH_STATUS = "set_right_brush_stretch_status" GET_RIGHT_BRUSH_STRETCH_STATUS = "get_right_brush_stretch_status" SET_DIRTY_OBJECT_DETECT_STATUS = "set_dirty_object_detect_status" GET_DIRTY_OBJECT_DETECT_STATUS = "get_dirty_object_detect_status" SET_WASH_WATER_TEMPERATURE = "set_wash_water_temperature" GET_WASH_WATER_TEMPERATURE = "get_wash_water_temperature" APP_EMPTY_RINSE_TANK_WATER = "app_empty_rinse_tank_water" SET_PET_SUPPLIES_DEEP_CLEAN_STATUS = "set_pet_supplies_deep_clean_status" GET_PET_SUPPLIES_DEEP_CLEAN_STATUS = "get_pet_supplies_deep_clean_status" SET_AP_MIC_LED_STATUS = "set_ap_mic_led_status" GET_AP_MIC_LED_STATUS = "get_ap_mic_led_status" SET_HANDLE_LEAK_WATER_STATUS = "set_handle_leak_water_status" GET_HANDLE_LEAK_WATER_STATUS = "get_handle_leak_water_status" APP_IGNORE_DIRTY_OBJECTS = "app_ignore_dirty_objects" MATTER_GET_STATUS = "matter.get_status" MATTER_DNLD_KEY = "matter.dnld_key" MATTER_RESET = 
"matter.reset" SET_GAP_DEEP_CLEAN_STATUS = "set_gap_deep_clean_status" GET_GAP_DEEP_CLEAN_STATUS = "get_gap_deep_clean_status" APP_SET_ROBOT_SETTING = "app_set_robot_setting" APP_GET_ROBOT_SETTING = "app_get_robot_setting" class RoborockB01Methods(StrEnum): """Methods used by the Roborock B01 model.""" GET_PROP = "prop.get" GET_MAP_LIST = "service.get_map_list" UPLOAD_BY_MAPTYPE = "service.upload_by_maptype" SET_PROP = "prop.set" GET_PREFERENCE = "service.get_preference" GET_RECORD_LIST = "service.get_record_list" GET_ORDER = "service.get_order" EVENT_ORDER_LIST_POST = "event.order_list.post" POST_PROP = "prop.post" @dataclass class DockSummary(RoborockBase): dust_collection_mode: DustCollectionMode | None = None wash_towel_mode: WashTowelMode | None = None smart_wash_params: SmartWashParams | None = None @dataclass class DeviceProp(RoborockBase): status: Status = field(default_factory=Status) clean_summary: CleanSummary = field(default_factory=CleanSummary) consumable: Consumable = field(default_factory=Consumable) last_clean_record: CleanRecord | None = None dock_summary: DockSummary | None = None dust_collection_mode_name: str | None = None def __post_init__(self) -> None: if ( self.dock_summary and self.dock_summary.dust_collection_mode is not None and self.dock_summary.dust_collection_mode.mode is not None ): self.dust_collection_mode_name = self.dock_summary.dust_collection_mode.mode.name def update(self, device_prop: DeviceProp) -> None: if device_prop.status: self.status = device_prop.status if device_prop.clean_summary: self.clean_summary = device_prop.clean_summary if device_prop.consumable: self.consumable = device_prop.consumable if device_prop.last_clean_record: self.last_clean_record = device_prop.last_clean_record if device_prop.dock_summary: self.dock_summary = device_prop.dock_summary self.__post_init__() Python-roborock-python-roborock-32df4f3/roborock/util.py000066400000000000000000000065171507503702500235470ustar00rootroot00000000000000from __future__ import annotations import asyncio import datetime import logging from asyncio import TimerHandle from collections.abc import Callable, Coroutine, MutableMapping from typing import Any, TypeVar from roborock import RoborockException T = TypeVar("T") DEFAULT_TIME_ZONE: datetime.tzinfo | None = datetime.datetime.now().astimezone().tzinfo def unpack_list(value: list[T], size: int) -> list[T | None]: return (value + [None] * size)[:size] # type: ignore def parse_datetime_to_roborock_datetime( start_datetime: datetime.datetime, end_datetime: datetime.datetime ) -> tuple[datetime.datetime, datetime.datetime]: now = datetime.datetime.now(DEFAULT_TIME_ZONE) start_datetime = start_datetime.replace( year=now.year, month=now.month, day=now.day, second=0, microsecond=0, tzinfo=DEFAULT_TIME_ZONE ) end_datetime = end_datetime.replace( year=now.year, month=now.month, day=now.day, second=0, microsecond=0, tzinfo=DEFAULT_TIME_ZONE ) if start_datetime > end_datetime: end_datetime += datetime.timedelta(days=1) elif end_datetime < now: start_datetime += datetime.timedelta(days=1) end_datetime += datetime.timedelta(days=1) return start_datetime, end_datetime def parse_time_to_datetime( start_time: datetime.time, end_time: datetime.time ) -> tuple[datetime.datetime, datetime.datetime]: """Help to handle time data.""" start_datetime = datetime.datetime.now(DEFAULT_TIME_ZONE).replace( hour=start_time.hour, minute=start_time.minute, second=0, microsecond=0 ) end_datetime = datetime.datetime.now(DEFAULT_TIME_ZONE).replace( hour=end_time.hour, 
minute=end_time.minute, second=0, microsecond=0 ) return parse_datetime_to_roborock_datetime(start_datetime, end_datetime) class RepeatableTask: def __init__(self, callback: Callable[[], Coroutine], interval: int): self.callback = callback self.interval = interval self._task: TimerHandle | None = None async def _run_task(self): response = None try: response = await self.callback() except RoborockException: pass loop = asyncio.get_running_loop() self._task = loop.call_later(self.interval, self._run_task_soon) return response def _run_task_soon(self): asyncio.create_task(self._run_task()) def cancel(self): if self._task: self._task.cancel() async def reset(self): self.cancel() return await self._run_task() class RoborockLoggerAdapter(logging.LoggerAdapter): def __init__(self, prefix: str, logger: logging.Logger) -> None: super().__init__(logger, {}) self.prefix = prefix def process(self, msg: str, kwargs: MutableMapping[str, Any]) -> tuple[str, MutableMapping[str, Any]]: return f"[{self.prefix}] {msg}", kwargs counter_map: dict[tuple[int, int], int] = {} def get_next_int(min_val: int, max_val: int) -> int: """Gets a random int in the range, precached to help keep it fast.""" if (min_val, max_val) not in counter_map: # If we have never seen this range, or if the cache is getting low, make a bunch of preshuffled values. counter_map[(min_val, max_val)] = min_val counter_map[(min_val, max_val)] += 1 return counter_map[(min_val, max_val)] % max_val + min_val Python-roborock-python-roborock-32df4f3/roborock/version_1_apis/000077500000000000000000000000001507503702500251305ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/roborock/version_1_apis/__init__.py000066400000000000000000000002671507503702500272460ustar00rootroot00000000000000from .roborock_client_v1 import AttributeCache, RoborockClientV1 from .roborock_local_client_v1 import RoborockLocalClientV1 from .roborock_mqtt_client_v1 import RoborockMqttClientV1 Python-roborock-python-roborock-32df4f3/roborock/version_1_apis/roborock_client_v1.py000066400000000000000000000550071507503702500312750ustar00rootroot00000000000000import asyncio import dataclasses import json import time from abc import ABC, abstractmethod from collections.abc import Callable, Coroutine from typing import Any, TypeVar, final from roborock import ( AppInitStatus, DeviceProp, DockSummary, RoborockCommand, RoborockDockTypeCode, RoborockException, UnknownMethodError, VacuumError, ) from roborock.api import RoborockClient from roborock.command_cache import ( CacheableAttribute, CommandType, RoborockAttribute, find_cacheable_attribute, get_cache_map, ) from roborock.containers import ( ChildLockStatus, CleanRecord, CleanSummary, Consumable, DeviceData, DnDTimer, DustCollectionMode, FlowLedStatus, ModelStatus, MultiMapsList, NetworkInfo, RoborockBase, RoomMapping, S7MaxVStatus, ServerTimer, SmartWashParams, Status, ValleyElectricityTimer, WashTowelMode, ) from roborock.protocols.v1_protocol import MapResponse, SecurityData, create_map_response_decoder from roborock.roborock_message import ( ROBOROCK_DATA_CONSUMABLE_PROTOCOL, ROBOROCK_DATA_STATUS_PROTOCOL, RoborockDataProtocol, RoborockMessage, RoborockMessageProtocol, ) from roborock.util import RepeatableTask, unpack_list CUSTOM_COMMANDS = {RoborockCommand.GET_MAP_CALIBRATION} COMMANDS_SECURED = { RoborockCommand.GET_MAP_V1, RoborockCommand.GET_MULTI_MAP, } CLOUD_REQUIRED = COMMANDS_SECURED.union(CUSTOM_COMMANDS) WASH_N_FILL_DOCK = [ RoborockDockTypeCode.empty_wash_fill_dock, RoborockDockTypeCode.s8_dock, 
RoborockDockTypeCode.p10_dock, RoborockDockTypeCode.p10_pro_dock, RoborockDockTypeCode.s8_maxv_ultra_dock, RoborockDockTypeCode.qrevo_s_dock, RoborockDockTypeCode.saros_r10_dock, RoborockDockTypeCode.qrevo_curv_dock, ] RT = TypeVar("RT", bound=RoborockBase) EVICT_TIME = 60 _SendCommandT = Callable[[RoborockCommand | str, list | dict | int | None], Any] class AttributeCache: def __init__(self, attribute: RoborockAttribute, send_command: _SendCommandT): self.attribute = attribute self._send_command = send_command self.attribute = attribute self.task = RepeatableTask(self._async_value, EVICT_TIME) self._value: Any = None self._mutex = asyncio.Lock() self.unsupported: bool = False @property def value(self): return self._value async def _async_value(self): if self.unsupported: return None try: self._value = await self._send_command(self.attribute.get_command, None) except UnknownMethodError as err: # Limit the amount of times we call unsupported methods self.unsupported = True raise err return self._value async def async_value(self, force: bool = False): async with self._mutex: if self._value is None or force: return await self.task.reset() return self._value def stop(self): self.task.cancel() async def update_value(self, params) -> None: if self.attribute.set_command is None: raise RoborockException(f"{self.attribute.attribute} have no set command") response = await self._send_command(self.attribute.set_command, params) await self._async_value() return response async def add_value(self, params): if self.attribute.add_command is None: raise RoborockException(f"{self.attribute.attribute} have no add command") response = await self._send_command(self.attribute.add_command, params) await self._async_value() return response async def close_value(self, params=None) -> None: if self.attribute.close_command is None: raise RoborockException(f"{self.attribute.attribute} have no close command") response = await self._send_command(self.attribute.close_command, params) await self._async_value() return response async def refresh_value(self): await self._async_value() @dataclasses.dataclass class ListenerModel: protocol_handlers: dict[RoborockDataProtocol, list[Callable[[Status | Consumable], None]]] cache: dict[CacheableAttribute, AttributeCache] class RoborockClientV1(RoborockClient, ABC): """Roborock client base class for version 1 devices.""" _listeners: dict[str, ListenerModel] = {} _map_response_decoder: Callable[[RoborockMessage], MapResponse | None] | None = None def __init__(self, device_info: DeviceData, security_data: SecurityData | None) -> None: """Initializes the Roborock client.""" super().__init__(device_info) if security_data is not None: self._diagnostic_data.update({"misc_info": security_data.to_diagnostic_data()}) self._map_response_decoder = create_map_response_decoder(security_data) self._status_type: type[Status] = ModelStatus.get(device_info.model, S7MaxVStatus) self.cache: dict[CacheableAttribute, AttributeCache] = { cacheable_attribute: AttributeCache(attr, self._send_command) for cacheable_attribute, attr in get_cache_map().items() } if device_info.device.duid not in self._listeners: self._listeners[device_info.device.duid] = ListenerModel({}, self.cache) self.listener_model = self._listeners[device_info.device.duid] async def async_release(self) -> None: await super().async_release() [item.stop() for item in self.cache.values()] @property def status_type(self) -> type[Status]: """Gets the status type for this device""" return self._status_type async def get_status(self) -> Status: 
data = self._status_type.from_dict(await self.cache[CacheableAttribute.status].async_value(force=True)) if data is None: return self._status_type() return data async def get_dnd_timer(self) -> DnDTimer | None: return DnDTimer.from_dict(await self.cache[CacheableAttribute.dnd_timer].async_value()) async def get_valley_electricity_timer(self) -> ValleyElectricityTimer | None: return ValleyElectricityTimer.from_dict( await self.cache[CacheableAttribute.valley_electricity_timer].async_value() ) async def get_clean_summary(self) -> CleanSummary | None: clean_summary: dict | list | int = await self.send_command(RoborockCommand.GET_CLEAN_SUMMARY) if isinstance(clean_summary, dict): return CleanSummary.from_dict(clean_summary) elif isinstance(clean_summary, list): clean_time, clean_area, clean_count, records = unpack_list(clean_summary, 4) return CleanSummary( clean_time=clean_time, clean_area=clean_area, clean_count=clean_count, records=records, ) elif isinstance(clean_summary, int): return CleanSummary(clean_time=clean_summary) return None async def get_clean_record(self, record_id: int) -> CleanRecord | None: record: dict | list = await self.send_command(RoborockCommand.GET_CLEAN_RECORD, [record_id]) if isinstance(record, dict): return CleanRecord.from_dict(record) elif isinstance(record, list): if isinstance(record[-1], dict): records = [CleanRecord.from_dict(rec) for rec in record] final_record = records[-1] try: # This code is semi-presumptions - so it is put in a try finally to be safe. final_record.begin = records[0].begin final_record.begin_datetime = records[0].begin_datetime final_record.start_type = records[0].start_type for rec in records[0:-1]: final_record.duration += rec.duration if rec.duration is not None else 0 final_record.area += rec.area if rec.area is not None else 0 final_record.avoid_count += rec.avoid_count if rec.avoid_count is not None else 0 final_record.wash_count += rec.wash_count if rec.wash_count is not None else 0 final_record.square_meter_area += ( rec.square_meter_area if rec.square_meter_area is not None else 0 ) except Exception: # Return final record when an exception occurred return final_record # There are still a few unknown variables in this. begin, end, duration, area = unpack_list(record, 4) return CleanRecord(begin=begin, end=end, duration=duration, area=area) else: self._logger.warning("Clean record was of a new type, please submit an issue request: %s", record) return None async def get_consumable(self) -> Consumable: data = Consumable.from_dict(await self.cache[CacheableAttribute.consumable].async_value()) if data is None: return Consumable() return data async def get_wash_towel_mode(self) -> WashTowelMode | None: return WashTowelMode.from_dict(await self.cache[CacheableAttribute.wash_towel_mode].async_value()) async def get_dust_collection_mode(self) -> DustCollectionMode | None: return DustCollectionMode.from_dict(await self.cache[CacheableAttribute.dust_collection_mode].async_value()) async def get_smart_wash_params(self) -> SmartWashParams | None: return SmartWashParams.from_dict(await self.cache[CacheableAttribute.smart_wash_params].async_value()) async def get_dock_summary(self, dock_type: RoborockDockTypeCode) -> DockSummary: """Gets the status summary from the dock with the methods available for a given dock. 
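Docks listed in WASH_N_FILL_DOCK additionally report the wash towel mode and smart wash parameters; other dock types only report the dust collection mode.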
:param dock_type: RoborockDockTypeCode""" commands: list[ Coroutine[ Any, Any, DustCollectionMode | WashTowelMode | SmartWashParams | None, ] ] = [self.get_dust_collection_mode()] if dock_type in WASH_N_FILL_DOCK: commands += [ self.get_wash_towel_mode(), self.get_smart_wash_params(), ] [dust_collection_mode, wash_towel_mode, smart_wash_params] = unpack_list( list(await asyncio.gather(*commands)), 3 ) # type: DustCollectionMode, WashTowelMode | None, SmartWashParams | None # type: ignore return DockSummary(dust_collection_mode, wash_towel_mode, smart_wash_params) async def get_prop(self) -> DeviceProp | None: """Gets device general properties.""" # Mypy thinks that each one of these is typed as a union of all the others. so we do type ignore. status, clean_summary, consumable = await asyncio.gather( *[ self.get_status(), self.get_clean_summary(), self.get_consumable(), ] ) # type: Status, CleanSummary, Consumable # type: ignore last_clean_record = None if clean_summary and clean_summary.records and len(clean_summary.records) > 0: last_clean_record = await self.get_clean_record(clean_summary.records[0]) dock_summary = None if status and status.dock_type is not None and status.dock_type != RoborockDockTypeCode.no_dock: dock_summary = await self.get_dock_summary(status.dock_type) if any([status, clean_summary, consumable]): return DeviceProp( status, clean_summary, consumable, last_clean_record, dock_summary, ) return None async def get_multi_maps_list(self) -> MultiMapsList | None: return await self.send_command(RoborockCommand.GET_MULTI_MAPS_LIST, return_type=MultiMapsList) async def get_networking(self) -> NetworkInfo | None: return await self.send_command(RoborockCommand.GET_NETWORK_INFO, return_type=NetworkInfo) async def get_room_mapping(self) -> list[RoomMapping] | None: """Gets the mapping from segment id -> iot id. 
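Each entry is a RoomMapping pairing the map segment id with the room's iot id, for example RoomMapping(segment_id=16, iot_id="2362048") (values here are illustrative).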
Only works on local api.""" mapping: list = await self.send_command(RoborockCommand.GET_ROOM_MAPPING) if isinstance(mapping, list): if len(mapping) == 2 and not isinstance(mapping[0], list): return [RoomMapping(segment_id=mapping[0], iot_id=mapping[1])] return [ RoomMapping(segment_id=segment_id, iot_id=iot_id) # type: ignore for segment_id, iot_id in [unpack_list(room, 2) for room in mapping if isinstance(room, list)] ] return None async def get_child_lock_status(self) -> ChildLockStatus: """Gets current child lock status.""" return ChildLockStatus.from_dict(await self.cache[CacheableAttribute.child_lock_status].async_value()) async def get_flow_led_status(self) -> FlowLedStatus: """Gets current flow led status.""" return FlowLedStatus.from_dict(await self.cache[CacheableAttribute.flow_led_status].async_value()) async def get_sound_volume(self) -> int | None: """Gets current volume level.""" return await self.cache[CacheableAttribute.sound_volume].async_value() async def get_server_timer(self) -> list[ServerTimer]: """Gets current server timer.""" server_timers = await self.cache[CacheableAttribute.server_timer].async_value() if server_timers: if isinstance(server_timers[0], list): return [ServerTimer(*server_timer) for server_timer in server_timers] return [ServerTimer(*server_timers)] return [] async def load_multi_map(self, map_flag: int) -> None: """Load the map into the vacuum's memory.""" await self.send_command(RoborockCommand.LOAD_MULTI_MAP, [map_flag]) async def get_app_init_status(self) -> AppInitStatus: """Gets the app init status (needed for determining vacuum capabilities).""" return await self.send_command(RoborockCommand.APP_GET_INIT_STATUS, return_type=AppInitStatus) @abstractmethod async def _send_command( self, method: RoborockCommand | str, params: list | dict | int | None = None, ) -> Any: """Send a command to the Roborock device.""" def on_message_received(self, messages: list[RoborockMessage]) -> None: try: self._last_device_msg_in = time.monotonic() for data in messages: protocol = data.protocol if data.payload and protocol in [ RoborockMessageProtocol.RPC_RESPONSE, RoborockMessageProtocol.GENERAL_REQUEST, ]: payload = json.loads(data.payload.decode()) for data_point_number, data_point in payload.get("dps").items(): if data_point_number == "102": data_point_response = json.loads(data_point) request_id = data_point_response.get("id") queue = self._waiting_queue.get(request_id) if queue and queue.protocol == protocol: error = data_point_response.get("error") if error: queue.set_exception( VacuumError( error.get("code"), error.get("message"), ), ) else: result = data_point_response.get("result") if isinstance(result, list) and len(result) == 1: result = result[0] queue.set_result(result) else: self._logger.debug("Received response for unknown request id %s", request_id) else: try: data_protocol = RoborockDataProtocol(int(data_point_number)) self._logger.debug(f"Got device update for {data_protocol.name}: {data_point}") if data_protocol in ROBOROCK_DATA_STATUS_PROTOCOL: if data_protocol not in self.listener_model.protocol_handlers: self._logger.debug( f"Got status update({data_protocol.name}) before get_status was called." 
) return value = self.listener_model.cache[CacheableAttribute.status].value value[data_protocol.name] = data_point status = self._status_type.from_dict(value) for listener in self.listener_model.protocol_handlers.get(data_protocol, []): listener(status) elif data_protocol in ROBOROCK_DATA_CONSUMABLE_PROTOCOL: if data_protocol not in self.listener_model.protocol_handlers: self._logger.debug( f"Got consumable update({data_protocol.name})" + "before get_consumable was called." ) return value = self.listener_model.cache[CacheableAttribute.consumable].value value[data_protocol.name] = data_point consumable = Consumable.from_dict(value) for listener in self.listener_model.protocol_handlers.get(data_protocol, []): listener(consumable) elif data_protocol in { RoborockDataProtocol.ADDITIONAL_PROPS, RoborockDataProtocol.DRYING_STATUS, }: # Known data protocol, but not yet sure how to correctly utilize it. return else: self._logger.warning( f"Unknown data protocol {data_point_number}, please create an " f"issue on the python-roborock repository" ) self._logger.info(data) return except ValueError: self._logger.warning( f"Got listener data for {data_point_number}, data: {data_point}. " f"This lets us update data quicker, please open an issue " f"at https://github.com/humbertogontijo/python-roborock/issues" ) pass dps = {data_point_number: data_point} self._logger.debug(f"Got unknown data point {dps}") elif data.payload and protocol == RoborockMessageProtocol.MAP_RESPONSE: if self._map_response_decoder is not None: map_response = self._map_response_decoder(data) if map_response is not None: queue = self._waiting_queue.get(map_response.request_id) if queue: queue.set_result(map_response.data) else: self._logger.debug( "Received unsolicited map response for request_id %s", map_response.request_id ) elif data.protocol == RoborockMessageProtocol.GENERAL_RESPONSE and data.payload is None: # Api will often send blank messages with matching sequences, we can ignore these. 
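# (Ignoring them here also prevents the empty payload from resolving a matching entry in the waiting queue with no data.)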
continue else: queue = self._waiting_queue.get(data.seq) if queue: if data.protocol == RoborockMessageProtocol.HELLO_RESPONSE: queue.set_result(data) else: queue.set_result(data.payload) else: self._logger.debug("Received response for unknown request id %s", data.seq) except Exception as ex: self._logger.exception(ex) async def get_from_cache(self, key: CacheableAttribute) -> AttributeCache | None: val = self.cache.get(key) if val is not None: return await val.async_value() return None def add_listener( self, protocol: RoborockDataProtocol, listener: Callable, cache: dict[CacheableAttribute, AttributeCache] ) -> None: self.listener_model.cache = cache if protocol not in self.listener_model.protocol_handlers: self.listener_model.protocol_handlers[protocol] = [] self.listener_model.protocol_handlers[protocol].append(listener) def remove_listener(self, protocol: RoborockDataProtocol, listener: Callable) -> None: self.listener_model.protocol_handlers[protocol].remove(listener) @final async def send_command( self, method: RoborockCommand | str, params: list | dict | int | None = None, return_type: type[RT] | None = None, ) -> RT: cacheable_attribute_result = find_cacheable_attribute(method) cache = None command_type = None if cacheable_attribute_result is not None: cache = self.cache[cacheable_attribute_result.attribute] command_type = cacheable_attribute_result.type response: Any = None if cache is not None and command_type == CommandType.GET: response = await cache.async_value() else: response = await self._send_command(method, params) if cache is not None and command_type == CommandType.CHANGE: await cache.refresh_value() if return_type: return return_type.from_dict(response) return response Python-roborock-python-roborock-32df4f3/roborock/version_1_apis/roborock_local_client_v1.py000066400000000000000000000245741507503702500324540ustar00rootroot00000000000000import asyncio import logging from asyncio import Lock, TimerHandle, Transport, get_running_loop from collections.abc import Callable from dataclasses import dataclass from enum import StrEnum from .. import CommandVacuumError, DeviceData, RoborockCommand from ..api import RoborockClient from ..exceptions import RoborockConnectionException, RoborockException, VacuumError from ..protocol import create_local_decoder, create_local_encoder from ..protocols.v1_protocol import RequestMessage from ..roborock_message import RoborockMessage, RoborockMessageProtocol from ..util import RoborockLoggerAdapter, get_next_int from .roborock_client_v1 import CLOUD_REQUIRED, RoborockClientV1 _LOGGER = logging.getLogger(__name__) class LocalProtocolVersion(StrEnum): """Supported local protocol versions. 
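Pass one of these as local_protocol_version to RoborockLocalClientV1 to force a specific handshake instead of letting hello() negotiate one.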
Different from vacuum protocol versions.""" L01 = "L01" V1 = "1.0" @dataclass class _LocalProtocol(asyncio.Protocol): """Callbacks for the Roborock local client transport.""" messages_cb: Callable[[bytes], None] connection_lost_cb: Callable[[Exception | None], None] def data_received(self, bytes) -> None: """Called when data is received from the transport.""" self.messages_cb(bytes) def connection_lost(self, exc: Exception | None) -> None: """Called when the transport connection is lost.""" self.connection_lost_cb(exc) class RoborockLocalClientV1(RoborockClientV1, RoborockClient): """Roborock local client for v1 devices.""" def __init__( self, device_data: DeviceData, queue_timeout: int = 4, local_protocol_version: LocalProtocolVersion | None = None, ): """Initialize the Roborock local client.""" if device_data.host is None: raise RoborockException("Host is required") self.host = device_data.host self._batch_structs: list[RoborockMessage] = [] self._executing = False self.transport: Transport | None = None self._mutex = Lock() self.keep_alive_task: TimerHandle | None = None RoborockClientV1.__init__(self, device_data, security_data=None) RoborockClient.__init__(self, device_data) self._local_protocol = _LocalProtocol(self._data_received, self._connection_lost) self._local_protocol_version = local_protocol_version self._connect_nonce = get_next_int(10000, 32767) self._ack_nonce: int | None = None self._set_encoder_decoder() self.queue_timeout = queue_timeout self._logger = RoborockLoggerAdapter(device_data.device.name, _LOGGER) @property def local_protocol_version(self) -> LocalProtocolVersion: return LocalProtocolVersion.V1 if self._local_protocol_version is None else self._local_protocol_version def _data_received(self, message): """Called when data is received from the transport.""" parsed_msg = self._decoder(message) self.on_message_received(parsed_msg) def _connection_lost(self, exc: Exception | None): """Called when the transport connection is lost.""" self._sync_disconnect() self.on_connection_lost(exc) def is_connected(self): return self.transport and self.transport.is_reading() async def keep_alive_func(self, _=None): try: await self.ping() except RoborockException: pass loop = asyncio.get_running_loop() self.keep_alive_task = loop.call_later(10, lambda: asyncio.create_task(self.keep_alive_func())) async def async_connect(self) -> None: should_ping = False async with self._mutex: try: if not self.is_connected(): self._sync_disconnect() async with asyncio.timeout(self.queue_timeout): self._logger.debug(f"Connecting to {self.host}") loop = get_running_loop() self.transport, _ = await loop.create_connection( # type: ignore lambda: self._local_protocol, self.host, 58867 ) self._logger.info(f"Connected to {self.host}") should_ping = True except BaseException as e: raise RoborockConnectionException(f"Failed connecting to {self.host}") from e if should_ping: await self.hello() await self.keep_alive_func() def _sync_disconnect(self) -> None: loop = asyncio.get_running_loop() if self.transport and loop.is_running(): self._logger.debug(f"Disconnecting from {self.host}") self.transport.close() if self.keep_alive_task: self.keep_alive_task.cancel() async def async_disconnect(self) -> None: async with self._mutex: self._sync_disconnect() def _set_encoder_decoder(self): """Updates the encoder decoder. These are updated with nonces after the first hello. 
Only L01 uses the nonces.""" self._encoder = create_local_encoder(self.device_info.device.local_key, self._connect_nonce, self._ack_nonce) self._decoder = create_local_decoder(self.device_info.device.local_key, self._connect_nonce, self._ack_nonce) async def _do_hello(self, local_protocol_version: LocalProtocolVersion) -> bool: """Perform the initial handshaking.""" self._logger.debug( "Attempting to use the %s protocol for client %s...", local_protocol_version, self.device_info.device.duid, ) request = RoborockMessage( protocol=RoborockMessageProtocol.HELLO_REQUEST, version=local_protocol_version.encode(), random=self._connect_nonce, seq=1, ) try: response = await self._send_message( roborock_message=request, request_id=request.seq, response_protocol=RoborockMessageProtocol.HELLO_RESPONSE, ) self._ack_nonce = response.random self._set_encoder_decoder() self._local_protocol_version = local_protocol_version self._logger.debug( "Client %s speaks the %s protocol.", self.device_info.device.duid, local_protocol_version, ) return True except RoborockException as e: self._logger.debug( "Client %s did not respond or does not speak the %s protocol. %s", self.device_info.device.duid, local_protocol_version, e, ) return False async def hello(self): """Send hello to the device to negotiate protocol.""" if self._local_protocol_version: # version is forced if not await self._do_hello(self._local_protocol_version): raise RoborockException(f"Failed to connect to device with protocol {self._local_protocol_version}") else: # try 1.0, then L01 if not await self._do_hello(LocalProtocolVersion.V1): if not await self._do_hello(LocalProtocolVersion.L01): raise RoborockException("Failed to connect to device with any known protocol") async def ping(self) -> None: ping_message = RoborockMessage( protocol=RoborockMessageProtocol.PING_REQUEST, version=self.local_protocol_version.encode() ) await self._send_message( roborock_message=ping_message, request_id=ping_message.seq, response_protocol=RoborockMessageProtocol.PING_RESPONSE, ) async def _validate_connection(self) -> None: if not self.should_keepalive(): self._logger.info("Resetting Roborock connection due to keepalive timeout") await self.async_disconnect() await self.async_connect() def _send_msg_raw(self, data: bytes): try: if not self.transport: raise RoborockException("Can not send message without connection") self.transport.write(data) except Exception as e: raise RoborockException(e) from e async def _send_command( self, method: RoborockCommand | str, params: list | dict | int | None = None, ): if method in CLOUD_REQUIRED: raise RoborockException(f"Method {method} is not supported over local connection") request_message = RequestMessage(method=method, params=params) roborock_message = request_message.encode_message( RoborockMessageProtocol.GENERAL_REQUEST, version=self.local_protocol_version, ) self._logger.debug("Building message id %s for method %s", request_message.request_id, method) return await self._send_message( roborock_message, request_id=request_message.request_id, response_protocol=RoborockMessageProtocol.GENERAL_REQUEST, method=method, params=params, ) async def _send_message( self, roborock_message: RoborockMessage, request_id: int, response_protocol: int, method: str | None = None, params: list | dict | int | None = None, ) -> RoborockMessage: await self._validate_connection() msg = self._encoder(roborock_message) if method: self._logger.debug(f"id={request_id} Requesting method {method} with {params}") # Send the command to the Roborock 
device async_response = self._async_response(request_id, response_protocol) self._send_msg_raw(msg) diagnostic_key = method if method is not None else "unknown" try: response = await async_response except VacuumError as err: self._diagnostic_data[diagnostic_key] = { "params": params, "error": err, } raise CommandVacuumError(method, err) from err self._diagnostic_data[diagnostic_key] = { "params": params, "response": response, } if roborock_message.protocol == RoborockMessageProtocol.GENERAL_REQUEST: self._logger.debug(f"id={request_id} Response from method {method}: {response}") if response == "retry": raise RoborockException(f"Command {method} failed with 'retry' message; Device is busy, try again later") return response Python-roborock-python-roborock-32df4f3/roborock/version_1_apis/roborock_mqtt_client_v1.py000066400000000000000000000076001507503702500323360ustar00rootroot00000000000000import logging from vacuum_map_parser_base.config.color import ColorsPalette from vacuum_map_parser_base.config.image_config import ImageConfig from vacuum_map_parser_base.config.size import Sizes from vacuum_map_parser_roborock.map_data_parser import RoborockMapDataParser from roborock.cloud_api import RoborockMqttClient from ..containers import DeviceData, UserData from ..exceptions import CommandVacuumError, RoborockException, VacuumError from ..protocols.v1_protocol import RequestMessage, create_security_data from ..roborock_message import ( RoborockMessageProtocol, ) from ..roborock_typing import RoborockCommand from ..util import RoborockLoggerAdapter from .roborock_client_v1 import COMMANDS_SECURED, CUSTOM_COMMANDS, RoborockClientV1 _LOGGER = logging.getLogger(__name__) class RoborockMqttClientV1(RoborockMqttClient, RoborockClientV1): """Roborock mqtt client for v1 devices.""" def __init__(self, user_data: UserData, device_info: DeviceData, queue_timeout: int = 10) -> None: """Initialize the Roborock mqtt client.""" rriot = user_data.rriot if rriot is None: raise RoborockException("Got no rriot data from user_data") RoborockMqttClient.__init__(self, user_data, device_info) security_data = create_security_data(rriot) RoborockClientV1.__init__(self, device_info, security_data=security_data) self.queue_timeout = queue_timeout self._logger = RoborockLoggerAdapter(device_info.device.name, _LOGGER) self._security_data = security_data async def _send_command( self, method: RoborockCommand | str, params: list | dict | int | None = None, ): if method in CUSTOM_COMMANDS: # When we have more custom commands do something more complicated here return await self._get_calibration_points() request_message = RequestMessage(method=method, params=params) roborock_message = request_message.encode_message( RoborockMessageProtocol.RPC_REQUEST, security_data=self._security_data, ) self._logger.debug("Building message id %s for method %s", request_message.request_id, method) await self._validate_connection() request_id = request_message.request_id response_protocol = ( RoborockMessageProtocol.MAP_RESPONSE if method in COMMANDS_SECURED else RoborockMessageProtocol.RPC_RESPONSE ) msg = self._encoder(roborock_message) self._logger.debug(f"id={request_id} Requesting method {method} with {params}") async_response = self._async_response(request_id, response_protocol) self._send_msg_raw(msg) diagnostic_key = method if method is not None else "unknown" try: response = await async_response except VacuumError as err: self._diagnostic_data[diagnostic_key] = { "params": params, "error": err, } raise CommandVacuumError(method, err) 
from err self._diagnostic_data[diagnostic_key] = { "params": params, "response": response, } if response_protocol == RoborockMessageProtocol.MAP_RESPONSE: self._logger.debug(f"id={request_id} Response from {method}: {len(response)} bytes") else: self._logger.debug(f"id={request_id} Response from {method}: {response}") return response async def _get_calibration_points(self): map: bytes = await self.send_command(RoborockCommand.GET_MAP_V1) parser = RoborockMapDataParser(ColorsPalette(), Sizes(), [], ImageConfig(), []) parsed_map = parser.parse(map) calibration = parsed_map.calibration() self._logger.info(parsed_map.calibration()) return calibration async def get_map_v1(self) -> bytes | None: return await self.send_command(RoborockCommand.GET_MAP_V1) Python-roborock-python-roborock-32df4f3/roborock/version_a01_apis/000077500000000000000000000000001507503702500253515ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/roborock/version_a01_apis/__init__.py000066400000000000000000000001571507503702500274650ustar00rootroot00000000000000from .roborock_client_a01 import RoborockClientA01 from .roborock_mqtt_client_a01 import RoborockMqttClientA01 Python-roborock-python-roborock-32df4f3/roborock/version_a01_apis/roborock_client_a01.py000066400000000000000000000156731507503702500315560ustar00rootroot00000000000000import logging from abc import ABC, abstractmethod from collections.abc import Callable from datetime import time from typing import Any from roborock import DeviceData from roborock.api import RoborockClient from roborock.code_mappings import ( DyadBrushSpeed, DyadCleanMode, DyadError, DyadSelfCleanLevel, DyadSelfCleanMode, DyadSuction, DyadWarmLevel, DyadWaterLevel, RoborockDyadStateCode, ZeoDetergentType, ZeoDryingMode, ZeoError, ZeoMode, ZeoProgram, ZeoRinse, ZeoSoftenerType, ZeoSpin, ZeoState, ZeoTemperature, ) from roborock.containers import DyadProductInfo, DyadSndState, RoborockCategory from roborock.exceptions import RoborockException from roborock.protocols.a01_protocol import decode_rpc_response from roborock.roborock_message import ( RoborockDyadDataProtocol, RoborockMessage, RoborockMessageProtocol, RoborockZeoProtocol, ) _LOGGER = logging.getLogger(__name__) DYAD_PROTOCOL_ENTRIES: dict[RoborockDyadDataProtocol, Callable] = { RoborockDyadDataProtocol.STATUS: lambda val: RoborockDyadStateCode(val).name, RoborockDyadDataProtocol.SELF_CLEAN_MODE: lambda val: DyadSelfCleanMode(val).name, RoborockDyadDataProtocol.SELF_CLEAN_LEVEL: lambda val: DyadSelfCleanLevel(val).name, RoborockDyadDataProtocol.WARM_LEVEL: lambda val: DyadWarmLevel(val).name, RoborockDyadDataProtocol.CLEAN_MODE: lambda val: DyadCleanMode(val).name, RoborockDyadDataProtocol.SUCTION: lambda val: DyadSuction(val).name, RoborockDyadDataProtocol.WATER_LEVEL: lambda val: DyadWaterLevel(val).name, RoborockDyadDataProtocol.BRUSH_SPEED: lambda val: DyadBrushSpeed(val).name, RoborockDyadDataProtocol.POWER: lambda val: int(val), RoborockDyadDataProtocol.AUTO_DRY: lambda val: bool(val), RoborockDyadDataProtocol.MESH_LEFT: lambda val: int(360000 - val * 60), RoborockDyadDataProtocol.BRUSH_LEFT: lambda val: int(360000 - val * 60), RoborockDyadDataProtocol.ERROR: lambda val: DyadError(val).name, RoborockDyadDataProtocol.VOLUME_SET: lambda val: int(val), RoborockDyadDataProtocol.STAND_LOCK_AUTO_RUN: lambda val: bool(val), RoborockDyadDataProtocol.AUTO_DRY_MODE: lambda val: bool(val), RoborockDyadDataProtocol.SILENT_DRY_DURATION: lambda val: int(val), # in minutes RoborockDyadDataProtocol.SILENT_MODE: lambda val: 
bool(val), RoborockDyadDataProtocol.SILENT_MODE_START_TIME: lambda val: time( hour=int(val / 60), minute=val % 60 ), # in minutes since 00:00 RoborockDyadDataProtocol.SILENT_MODE_END_TIME: lambda val: time( hour=int(val / 60), minute=val % 60 ), # in minutes since 00:00 RoborockDyadDataProtocol.RECENT_RUN_TIME: lambda val: [ int(v) for v in val.split(",") ], # minutes of cleaning in past few days. RoborockDyadDataProtocol.TOTAL_RUN_TIME: lambda val: int(val), RoborockDyadDataProtocol.SND_STATE: lambda val: DyadSndState.from_dict(val), RoborockDyadDataProtocol.PRODUCT_INFO: lambda val: DyadProductInfo.from_dict(val), } ZEO_PROTOCOL_ENTRIES: dict[RoborockZeoProtocol, Callable] = { # ro RoborockZeoProtocol.STATE: lambda val: ZeoState(val).name, RoborockZeoProtocol.COUNTDOWN: lambda val: int(val), RoborockZeoProtocol.WASHING_LEFT: lambda val: int(val), RoborockZeoProtocol.ERROR: lambda val: ZeoError(val).name, RoborockZeoProtocol.TIMES_AFTER_CLEAN: lambda val: int(val), RoborockZeoProtocol.DETERGENT_EMPTY: lambda val: bool(val), RoborockZeoProtocol.SOFTENER_EMPTY: lambda val: bool(val), # rw RoborockZeoProtocol.MODE: lambda val: ZeoMode(val).name, RoborockZeoProtocol.PROGRAM: lambda val: ZeoProgram(val).name, RoborockZeoProtocol.TEMP: lambda val: ZeoTemperature(val).name, RoborockZeoProtocol.RINSE_TIMES: lambda val: ZeoRinse(val).name, RoborockZeoProtocol.SPIN_LEVEL: lambda val: ZeoSpin(val).name, RoborockZeoProtocol.DRYING_MODE: lambda val: ZeoDryingMode(val).name, RoborockZeoProtocol.DETERGENT_TYPE: lambda val: ZeoDetergentType(val).name, RoborockZeoProtocol.SOFTENER_TYPE: lambda val: ZeoSoftenerType(val).name, RoborockZeoProtocol.SOUND_SET: lambda val: bool(val), } def convert_dyad_value(protocol: int, value: Any) -> Any: """Convert a dyad protocol value to its corresponding type.""" protocol_value = RoborockDyadDataProtocol(protocol) if (converter := DYAD_PROTOCOL_ENTRIES.get(protocol_value)) is not None: return converter(value) return None def convert_zeo_value(protocol: int, value: Any) -> Any: """Convert a zeo protocol value to its corresponding type.""" protocol_value = RoborockZeoProtocol(protocol) if (converter := ZEO_PROTOCOL_ENTRIES.get(protocol_value)) is not None: return converter(value) return None class RoborockClientA01(RoborockClient, ABC): """Roborock client base class for A01 devices.""" value_converter: Callable[[int, Any], Any] | None = None def __init__(self, device_info: DeviceData, category: RoborockCategory): """Initialize the Roborock client.""" super().__init__(device_info) if category == RoborockCategory.WET_DRY_VAC: self.value_converter = convert_dyad_value elif category == RoborockCategory.WASHING_MACHINE: self.value_converter = convert_zeo_value else: _LOGGER.debug("Device category %s is not (yet) supported", category) self.value_converter = None def on_message_received(self, messages: list[RoborockMessage]) -> None: if self.value_converter is None: return for message in messages: protocol = message.protocol if message.payload and protocol in [ RoborockMessageProtocol.RPC_RESPONSE, RoborockMessageProtocol.GENERAL_REQUEST, ]: try: data_points = decode_rpc_response(message) except RoborockException as err: self._logger.debug("Failed to decode message: %s", err) continue for data_point_number, data_point in data_points.items(): self._logger.debug("received msg with dps, protocol: %s, %s", data_point_number, protocol) if converted_response := self.value_converter(data_point_number, data_point): queue = self._waiting_queue.get(int(data_point_number)) if queue and 
queue.protocol == protocol: queue.set_result(converted_response) else: self._logger.debug( "Received unknown data point %s for protocol %s, ignoring", data_point_number, protocol ) @abstractmethod async def update_values( self, dyad_data_protocols: list[RoborockDyadDataProtocol | RoborockZeoProtocol] ) -> dict[RoborockDyadDataProtocol | RoborockZeoProtocol, Any]: """This should handle updating for each given protocol.""" Python-roborock-python-roborock-32df4f3/roborock/version_a01_apis/roborock_mqtt_client_a01.py000066400000000000000000000057711507503702500326210ustar00rootroot00000000000000import asyncio import json import logging import typing from Crypto.Cipher import AES from Crypto.Util.Padding import unpad from roborock.cloud_api import RoborockMqttClient from roborock.containers import DeviceData, RoborockCategory, UserData from roborock.exceptions import RoborockException from roborock.protocols.a01_protocol import encode_mqtt_payload from roborock.roborock_message import ( RoborockDyadDataProtocol, RoborockMessage, RoborockMessageProtocol, RoborockZeoProtocol, ) from ..util import RoborockLoggerAdapter from .roborock_client_a01 import RoborockClientA01 _LOGGER = logging.getLogger(__name__) class RoborockMqttClientA01(RoborockMqttClient, RoborockClientA01): """Roborock mqtt client for A01 devices.""" def __init__( self, user_data: UserData, device_info: DeviceData, category: RoborockCategory, queue_timeout: int = 10 ) -> None: """Initialize the Roborock mqtt client.""" rriot = user_data.rriot if rriot is None: raise RoborockException("Got no rriot data from user_data") RoborockMqttClient.__init__(self, user_data, device_info) RoborockClientA01.__init__(self, device_info, category) self.queue_timeout = queue_timeout self._logger = RoborockLoggerAdapter(device_info.device.name, _LOGGER) async def _send_message(self, roborock_message: RoborockMessage): await self._validate_connection() response_protocol = RoborockMessageProtocol.RPC_RESPONSE m = self._encoder(roborock_message) payload = json.loads(unpad(roborock_message.payload, AES.block_size)) futures = [] if "10000" in payload["dps"]: for dps in json.loads(payload["dps"]["10000"]): futures.append(self._async_response(dps, response_protocol)) self._send_msg_raw(m) responses = await asyncio.gather(*futures, return_exceptions=True) dps_responses: dict[int, typing.Any] = {} if "10000" in payload["dps"]: for i, dps in enumerate(json.loads(payload["dps"]["10000"])): response = responses[i] if isinstance(response, BaseException): dps_responses[dps] = None else: dps_responses[dps] = response return dps_responses async def update_values( self, dyad_data_protocols: list[RoborockDyadDataProtocol | RoborockZeoProtocol] ) -> dict[RoborockDyadDataProtocol | RoborockZeoProtocol, typing.Any]: message = encode_mqtt_payload( {RoborockDyadDataProtocol.ID_QUERY: str([int(protocol) for protocol in dyad_data_protocols])} ) return await self._send_message(message) async def set_value( self, protocol: RoborockDyadDataProtocol | RoborockZeoProtocol, value: typing.Any ) -> dict[int, typing.Any]: """Set a value for a specific protocol on the A01 device.""" message = encode_mqtt_payload({protocol: value}) return await self._send_message(message) Python-roborock-python-roborock-32df4f3/roborock/web_api.py000066400000000000000000000743241507503702500242010ustar00rootroot00000000000000from __future__ import annotations import base64 import hashlib import hmac import logging import math import secrets import string import time from dataclasses import dataclass 
import aiohttp from aiohttp import ContentTypeError, FormData from pyrate_limiter import BucketFullException, Duration, Limiter, Rate from roborock.containers import HomeData, HomeDataRoom, HomeDataScene, ProductResponse, RRiot, UserData from roborock.exceptions import ( RoborockAccountDoesNotExist, RoborockException, RoborockInvalidCode, RoborockInvalidCredentials, RoborockInvalidEmail, RoborockInvalidUserAgreement, RoborockMissingParameters, RoborockNoResponseFromBaseURL, RoborockNoUserAgreement, RoborockRateLimit, RoborockTooFrequentCodeRequests, ) _LOGGER = logging.getLogger(__name__) BASE_URLS = [ "https://usiot.roborock.com", "https://euiot.roborock.com", "https://cniot.roborock.com", "https://ruiot.roborock.com", ] @dataclass class IotLoginInfo: """Information about the login to the iot server.""" base_url: str country_code: str country: str class RoborockApiClient: _LOGIN_RATES = [ Rate(1, Duration.SECOND), Rate(3, Duration.MINUTE), Rate(10, Duration.HOUR), Rate(20, Duration.DAY), ] _HOME_DATA_RATES = [ Rate(1, Duration.SECOND), Rate(5, Duration.MINUTE), Rate(15, Duration.HOUR), Rate(40, Duration.DAY), ] _login_limiter = Limiter(_LOGIN_RATES) _home_data_limiter = Limiter(_HOME_DATA_RATES) def __init__( self, username: str, base_url: str | None = None, session: aiohttp.ClientSession | None = None ) -> None: """Sample API Client.""" self._username = username self._base_url = base_url self._device_identifier = secrets.token_urlsafe(16) self.session = session self._iot_login_info: IotLoginInfo | None = None async def _get_iot_login_info(self) -> IotLoginInfo: if self._iot_login_info is None: valid_urls = BASE_URLS if self._base_url is None else [self._base_url] for iot_url in valid_urls: url_request = PreparedRequest(iot_url, self.session) response = await url_request.request( "post", "/api/v1/getUrlByEmail", params={"email": self._username, "needtwostepauth": "false"}, ) if response is None: continue response_code = response.get("code") if response_code != 200: if response_code == 2003: raise RoborockInvalidEmail("Your email was incorrectly formatted.") elif response_code == 1001: raise RoborockMissingParameters( "You are missing parameters for this request, are you sure you entered your username?" ) else: raise RoborockException(f"{response.get('msg')} - response code: {response_code}") country_code = response["data"]["countrycode"] country = response["data"]["country"] if country_code is not None or country is not None: self._iot_login_info = IotLoginInfo( base_url=response["data"]["url"], country=country, country_code=country_code, ) _LOGGER.debug("Country determined to be %s and code is %s", country, country_code) return self._iot_login_info raise RoborockNoResponseFromBaseURL( "No account was found for any base url we tried. Either your email is incorrect or we do not have a" " record of the roborock server your device is on." 
) return self._iot_login_info @property async def base_url(self): if self._base_url is not None: return self._base_url return (await self._get_iot_login_info()).base_url @property async def country(self): return (await self._get_iot_login_info()).country @property async def country_code(self): return (await self._get_iot_login_info()).country_code def _get_header_client_id(self): md5 = hashlib.md5() md5.update(self._username.encode()) md5.update(self._device_identifier.encode()) return base64.b64encode(md5.digest()).decode() async def nc_prepare(self, user_data: UserData, timezone: str) -> dict: """This gets a few critical parameters for adding a device to your account.""" if ( user_data.rriot is None or user_data.rriot.r is None or user_data.rriot.u is None or user_data.rriot.r.a is None ): raise RoborockException("Your userdata is missing critical attributes.") base_url = user_data.rriot.r.a prepare_request = PreparedRequest(base_url, self.session) hid = await self._get_home_id(user_data) data = FormData() data.add_field("hid", hid) data.add_field("tzid", timezone) prepare_response = await prepare_request.request( "post", "/nc/prepare", headers={ "Authorization": _get_hawk_authentication( user_data.rriot, "/nc/prepare", {"hid": hid, "tzid": timezone} ), }, data=data, ) if prepare_response is None: raise RoborockException("prepare_response is None") if not prepare_response.get("success"): raise RoborockException(f"{prepare_response.get('msg')} - response code: {prepare_response.get('code')}") return prepare_response["result"] async def add_device(self, user_data: UserData, s: str, t: str) -> dict: """This will add a new device to your account it is recommended to only use this during a pairing cycle with a device. Please see here: https://github.com/Python-roborock/Roborockmitmproxy/blob/main/handshake_protocol.md """ if ( user_data.rriot is None or user_data.rriot.r is None or user_data.rriot.u is None or user_data.rriot.r.a is None ): raise RoborockException("Your userdata is missing critical attributes.") base_url = user_data.rriot.r.a add_device_request = PreparedRequest(base_url, self.session) add_device_response = await add_device_request.request( "GET", "/user/devices/newadd", headers={ "Authorization": _get_hawk_authentication( user_data.rriot, "/user/devices/newadd", params={"s": s, "t": t} ), }, params={"s": s, "t": t}, ) if add_device_response is None: raise RoborockException("add_device is None") if not add_device_response.get("success"): raise RoborockException( f"{add_device_response.get('msg')} - response code: {add_device_response.get('code')}" ) return add_device_response["result"] async def request_code(self) -> None: try: self._login_limiter.try_acquire("login") except BucketFullException as ex: _LOGGER.info(ex.meta_info) raise RoborockRateLimit("Reached maximum requests for login. 
Please try again later.") from ex base_url = await self.base_url header_clientid = self._get_header_client_id() code_request = PreparedRequest(base_url, self.session, {"header_clientid": header_clientid}) code_response = await code_request.request( "post", "/api/v1/sendEmailCode", params={ "username": self._username, "type": "auth", }, ) if code_response is None: raise RoborockException("Failed to get a response from send email code") response_code = code_response.get("code") if response_code != 200: _LOGGER.info("Request code failed for %s with the following context: %s", self._username, code_response) if response_code == 2008: raise RoborockAccountDoesNotExist("Account does not exist - check your login and try again.") elif response_code == 9002: raise RoborockTooFrequentCodeRequests("You have attempted to request too many codes. Try again later") else: raise RoborockException(f"{code_response.get('msg')} - response code: {code_response.get('code')}") async def request_code_v4(self) -> None: """Request a code using the v4 endpoint.""" if await self.country_code is None or await self.country is None: _LOGGER.info("No country code or country found, trying old version of request code.") return await self.request_code() try: self._login_limiter.try_acquire("login") except BucketFullException as ex: _LOGGER.info(ex.meta_info) raise RoborockRateLimit("Reached maximum requests for login. Please try again later.") from ex base_url = await self.base_url header_clientid = self._get_header_client_id() code_request = PreparedRequest( base_url, self.session, { "header_clientid": header_clientid, "Content-Type": "application/x-www-form-urlencoded", "header_clientlang": "en", }, ) code_response = await code_request.request( "post", "/api/v4/email/code/send", params={"email": self._username, "type": "login", "platform": ""}, ) if code_response is None: raise RoborockException("Failed to get a response from send email code") response_code = code_response.get("code") if response_code != 200: _LOGGER.info("Request code failed for %s with the following context: %s", self._username, code_response) if response_code == 2008: raise RoborockAccountDoesNotExist("Account does not exist - check your login and try again.") elif response_code == 9002: raise RoborockTooFrequentCodeRequests("You have attempted to request too many codes. Try again later") else: raise RoborockException(f"{code_response.get('msg')} - response code: {code_response.get('code')}") async def _sign_key_v3(self, s: str) -> str: """Sign a randomly generated string.""" base_url = await self.base_url header_clientid = self._get_header_client_id() code_request = PreparedRequest(base_url, self.session, {"header_clientid": header_clientid}) code_response = await code_request.request( "post", "/api/v3/key/sign", params={"s": s}, ) if not code_response or "data" not in code_response or "k" not in code_response["data"]: raise RoborockException("Failed to get a response from sign key") response_code = code_response.get("code") if response_code != 200: _LOGGER.info("Request code failed for %s with the following context: %s", self._username, code_response) raise RoborockException(f"{code_response.get('msg')} - response code: {code_response.get('code')}") return code_response["data"]["k"] async def code_login_v4( self, code: int | str, country: str | None = None, country_code: int | None = None ) -> UserData: """ Login via code authentication. :param code: The code from the email. :param country: The two-character representation of the country, i.e. 
"US" :param country_code: the country phone number code i.e. 1 for US. """ base_url = await self.base_url if country is None: country = await self.country if country_code is None: country_code = await self.country_code if country_code is None or country is None: _LOGGER.info("No country code or country found, trying old version of code login.") return await self.code_login(code) header_clientid = self._get_header_client_id() x_mercy_ks = "".join(secrets.choice(string.ascii_letters + string.digits) for _ in range(16)) x_mercy_k = await self._sign_key_v3(x_mercy_ks) login_request = PreparedRequest( base_url, self.session, {"header_clientid": header_clientid, "x-mercy-ks": x_mercy_ks, "x-mercy-k": x_mercy_k}, ) login_response = await login_request.request( "post", "/api/v4/auth/email/login/code", params={ "country": country, "countryCode": country_code, "email": self._username, "code": code, # Major and minor version are the user agreement version, we will need to see if this needs to be # dynamic https://usiot.roborock.com/api/v3/app/agreement/latest?country=US "majorVersion": 14, "minorVersion": 0, }, ) if login_response is None: raise RoborockException("Login request response is None") response_code = login_response.get("code") if response_code != 200: _LOGGER.info("Login failed for %s with the following context: %s", self._username, login_response) if response_code == 2018: raise RoborockInvalidCode("Invalid code - check your code and try again.") if response_code == 3009: raise RoborockNoUserAgreement("You must accept the user agreement in the Roborock app to continue.") if response_code == 3006: raise RoborockInvalidUserAgreement( "User agreement must be accepted again - or you are attempting to use the Mi Home app account." ) raise RoborockException(f"{login_response.get('msg')} - response code: {response_code}") user_data = login_response.get("data") if not isinstance(user_data, dict): raise RoborockException("Got unexpected data type for user_data") return UserData.from_dict(user_data) async def pass_login(self, password: str) -> UserData: try: self._login_limiter.try_acquire("login") except BucketFullException as ex: _LOGGER.info(ex.meta_info) raise RoborockRateLimit("Reached maximum requests for login. Please try again later.") from ex base_url = await self.base_url header_clientid = self._get_header_client_id() login_request = PreparedRequest(base_url, self.session, {"header_clientid": header_clientid}) login_response = await login_request.request( "post", "/api/v1/login", params={ "username": self._username, "password": password, "needtwostepauth": "false", }, ) if login_response is None: raise RoborockException("Login response is none") if login_response.get("code") != 200: _LOGGER.info("Login failed for %s with the following context: %s", self._username, login_response) raise RoborockException(f"{login_response.get('msg')} - response code: {login_response.get('code')}") user_data = login_response.get("data") if not isinstance(user_data, dict): raise RoborockException("Got unexpected data type for user_data") return UserData.from_dict(user_data) async def pass_login_v3(self, password: str) -> UserData: """Seemingly it follows the format below, but password is encrypted in some manner. 
# login_response = await login_request.request( # "post", # "/api/v3/auth/email/login", # params={ # "email": self._username, # "password": password, # "twoStep": 1, # "version": 0 # }, # ) """ raise NotImplementedError("Pass_login_v3 has not yet been implemented") async def code_login(self, code: int | str) -> UserData: base_url = await self.base_url header_clientid = self._get_header_client_id() login_request = PreparedRequest(base_url, self.session, {"header_clientid": header_clientid}) login_response = await login_request.request( "post", "/api/v1/loginWithCode", params={ "username": self._username, "verifycode": code, "verifycodetype": "AUTH_EMAIL_CODE", }, ) if login_response is None: raise RoborockException("Login request response is None") response_code = login_response.get("code") if response_code != 200: _LOGGER.info("Login failed for %s with the following context: %s", self._username, login_response) if response_code == 2018: raise RoborockInvalidCode("Invalid code - check your code and try again.") if response_code == 3009: raise RoborockNoUserAgreement("You must accept the user agreement in the Roborock app to continue.") if response_code == 3006: raise RoborockInvalidUserAgreement( "User agreement must be accepted again - or you are attempting to use the Mi Home app account." ) raise RoborockException(f"{login_response.get('msg')} - response code: {response_code}") user_data = login_response.get("data") if not isinstance(user_data, dict): raise RoborockException("Got unexpected data type for user_data") return UserData.from_dict(user_data) async def _get_home_id(self, user_data: UserData): base_url = await self.base_url header_clientid = self._get_header_client_id() home_id_request = PreparedRequest(base_url, self.session, {"header_clientid": header_clientid}) home_id_response = await home_id_request.request( "get", "/api/v1/getHomeDetail", headers={"Authorization": user_data.token}, ) if home_id_response is None: raise RoborockException("home_id_response is None") if home_id_response.get("code") != 200: _LOGGER.info("Get Home Id failed with the following context: %s", home_id_response) if home_id_response.get("code") == 2010: raise RoborockInvalidCredentials( f"Invalid credentials ({home_id_response.get('msg')}) - check your login and try again." ) raise RoborockException(f"{home_id_response.get('msg')} - response code: {home_id_response.get('code')}") return home_id_response["data"]["rrHomeId"] async def get_home_data(self, user_data: UserData) -> HomeData: try: self._home_data_limiter.try_acquire("home_data") except BucketFullException as ex: _LOGGER.info(ex.meta_info) raise RoborockRateLimit("Reached maximum requests for home data. 
Please try again later.") from ex rriot = user_data.rriot if rriot is None: raise RoborockException("rriot is none") home_id = await self._get_home_id(user_data) if rriot.r.a is None: raise RoborockException("Missing field 'a' in rriot reference") home_request = PreparedRequest( rriot.r.a, self.session, { "Authorization": _get_hawk_authentication(rriot, f"/user/homes/{str(home_id)}"), }, ) home_response = await home_request.request("get", "/user/homes/" + str(home_id)) if not home_response.get("success"): raise RoborockException(home_response) home_data = home_response.get("result") if isinstance(home_data, dict): return HomeData.from_dict(home_data) else: raise RoborockException("home_response result was an unexpected type") async def get_home_data_v2(self, user_data: UserData) -> HomeData: """This is the same as get_home_data, but uses a different endpoint and includes non-robotic vacuums.""" try: self._home_data_limiter.try_acquire("home_data") except BucketFullException as ex: _LOGGER.info(ex.meta_info) raise RoborockRateLimit("Reached maximum requests for home data. Please try again later.") from ex rriot = user_data.rriot if rriot is None: raise RoborockException("rriot is none") home_id = await self._get_home_id(user_data) if rriot.r.a is None: raise RoborockException("Missing field 'a' in rriot reference") home_request = PreparedRequest( rriot.r.a, self.session, { "Authorization": _get_hawk_authentication(rriot, "/v2/user/homes/" + str(home_id)), }, ) home_response = await home_request.request("get", "/v2/user/homes/" + str(home_id)) if not home_response.get("success"): raise RoborockException(home_response) home_data = home_response.get("result") if isinstance(home_data, dict): return HomeData.from_dict(home_data) else: raise RoborockException("home_response result was an unexpected type") async def get_home_data_v3(self, user_data: UserData) -> HomeData: """This is the same as get_home_data, but uses a different endpoint and includes non-robotic vacuums.""" try: self._home_data_limiter.try_acquire("home_data") except BucketFullException as ex: _LOGGER.info(ex.meta_info) raise RoborockRateLimit("Reached maximum requests for home data. 
Please try again later.") from ex rriot = user_data.rriot home_id = await self._get_home_id(user_data) if rriot.r.a is None: raise RoborockException("Missing field 'a' in rriot reference") home_request = PreparedRequest( rriot.r.a, self.session, { "Authorization": _get_hawk_authentication(rriot, "/v3/user/homes/" + str(home_id)), }, ) home_response = await home_request.request("get", "/v3/user/homes/" + str(home_id)) if not home_response.get("success"): raise RoborockException(home_response) home_data = home_response.get("result") if isinstance(home_data, dict): return HomeData.from_dict(home_data) raise RoborockException(f"home_response result was an unexpected type: {home_data}") async def get_rooms(self, user_data: UserData, home_id: int | None = None) -> list[HomeDataRoom]: rriot = user_data.rriot if rriot is None: raise RoborockException("rriot is none") if home_id is None: home_id = await self._get_home_id(user_data) if rriot.r.a is None: raise RoborockException("Missing field 'a' in rriot reference") room_request = PreparedRequest( rriot.r.a, self.session, { "Authorization": _get_hawk_authentication(rriot, "/v2/user/homes/" + str(home_id)), }, ) room_response = await room_request.request("get", f"/user/homes/{str(home_id)}/rooms" + str(home_id)) if not room_response.get("success"): raise RoborockException(room_response) rooms = room_response.get("result") if isinstance(rooms, list): output_list = [] for room in rooms: output_list.append(HomeDataRoom.from_dict(room)) return output_list else: raise RoborockException("home_response result was an unexpected type") async def get_scenes(self, user_data: UserData, device_id: str) -> list[HomeDataScene]: rriot = user_data.rriot if rriot is None: raise RoborockException("rriot is none") if rriot.r.a is None: raise RoborockException("Missing field 'a' in rriot reference") scenes_request = PreparedRequest( rriot.r.a, self.session, { "Authorization": _get_hawk_authentication(rriot, f"/user/scene/device/{str(device_id)}"), }, ) scenes_response = await scenes_request.request("get", f"/user/scene/device/{str(device_id)}") if not scenes_response.get("success"): raise RoborockException(scenes_response) scenes = scenes_response.get("result") if isinstance(scenes, list): return [HomeDataScene.from_dict(scene) for scene in scenes] else: raise RoborockException("scene_response result was an unexpected type") async def execute_scene(self, user_data: UserData, scene_id: int) -> None: rriot = user_data.rriot if rriot is None: raise RoborockException("rriot is none") if rriot.r.a is None: raise RoborockException("Missing field 'a' in rriot reference") execute_scene_request = PreparedRequest( rriot.r.a, self.session, { "Authorization": _get_hawk_authentication(rriot, f"/user/scene/{str(scene_id)}/execute"), }, ) execute_scene_response = await execute_scene_request.request("POST", f"/user/scene/{str(scene_id)}/execute") if not execute_scene_response.get("success"): raise RoborockException(execute_scene_response) async def get_products(self, user_data: UserData) -> ProductResponse: """Gets all products and their schemas, good for determining status codes and model numbers.""" base_url = await self.base_url header_clientid = self._get_header_client_id() product_request = PreparedRequest(base_url, self.session, {"header_clientid": header_clientid}) product_response = await product_request.request( "get", "/api/v4/product", headers={"Authorization": user_data.token}, ) if product_response is None: raise RoborockException("home_id_response is None") if 
product_response.get("code") != 200: raise RoborockException(f"{product_response.get('msg')} - response code: {product_response.get('code')}") result = product_response.get("data") if isinstance(result, dict): return ProductResponse.from_dict(result) raise RoborockException("product result was an unexpected type") async def download_code(self, user_data: UserData, product_id: int): base_url = await self.base_url header_clientid = self._get_header_client_id() product_request = PreparedRequest(base_url, self.session, {"header_clientid": header_clientid}) request = {"apilevel": 99999, "productids": [product_id], "type": 2} response = await product_request.request( "post", "/api/v1/appplugin", json=request, headers={"Authorization": user_data.token, "Content-Type": "application/json"}, ) return response["data"][0]["url"] async def download_category_code(self, user_data: UserData): base_url = await self.base_url header_clientid = self._get_header_client_id() product_request = PreparedRequest(base_url, self.session, {"header_clientid": header_clientid}) response = await product_request.request( "get", "api/v1/plugins?apiLevel=99999&type=2", headers={ "Authorization": user_data.token, }, ) return {r["category"]: r["url"] for r in response["data"]["categoryPluginList"]} class PreparedRequest: def __init__( self, base_url: str, session: aiohttp.ClientSession | None = None, base_headers: dict | None = None ) -> None: self.base_url = base_url self.base_headers = base_headers or {} self.session = session async def request(self, method: str, url: str, params=None, data=None, headers=None, json=None) -> dict: _url = "/".join(s.strip("/") for s in [self.base_url, url]) _headers = {**self.base_headers, **(headers or {})} close_session = self.session is None session = self.session if self.session is not None else aiohttp.ClientSession() try: async with session.request(method, _url, params=params, data=data, headers=_headers, json=json) as resp: return await resp.json() except ContentTypeError as err: """If we get an error, lets log everything for debugging.""" try: resp_json = await resp.json(content_type=None) _LOGGER.info("Resp: %s", resp_json) except ContentTypeError as err_2: _LOGGER.info(err_2) resp_raw = await resp.read() _LOGGER.info("Resp raw: %s", resp_raw) # Still raise the err so that it's clear it failed. 
raise err finally: if close_session: await session.close() def _process_extra_hawk_values(values: dict | None) -> str: if values is None: return "" else: sorted_keys = sorted(values.keys()) result = [] for key in sorted_keys: value = values.get(key) result.append(f"{key}={value}") return hashlib.md5("&".join(result).encode()).hexdigest() def _get_hawk_authentication(rriot: RRiot, url: str, formdata: dict | None = None, params: dict | None = None) -> str: timestamp = math.floor(time.time()) nonce = secrets.token_urlsafe(6) formdata_str = _process_extra_hawk_values(formdata) params_str = _process_extra_hawk_values(params) prestr = ":".join( [ rriot.u, rriot.s, nonce, str(timestamp), hashlib.md5(url.encode()).hexdigest(), params_str, formdata_str, ] ) mac = base64.b64encode(hmac.new(rriot.h.encode(), prestr.encode(), hashlib.sha256).digest()).decode() return f'Hawk id="{rriot.u}",s="{rriot.s}",ts="{timestamp}",nonce="{nonce}",mac="{mac}"' Python-roborock-python-roborock-32df4f3/tests/000077500000000000000000000000001507503702500215315ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/tests/__init__.py000066400000000000000000000000001507503702500236300ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/tests/__snapshots__/000077500000000000000000000000001507503702500243475ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/tests/__snapshots__/test_containers.ambr000066400000000000000000000007101507503702500304140ustar00rootroot00000000000000# serializer version: 1 # name: test_multi_maps_list_info MultiMapsList(max_multi_map=4, max_bak_map=1, multi_map_count=2, map_info=[MultiMapsListMapInfo(map_flag=0, name='Downstairs', add_time=1757636125, length=10, bak_maps=[MultiMapsListMapInfoBakMaps(mapflag=None, add_time=1739205442)]), MultiMapsListMapInfo(map_flag=1, name='Foyer', add_time=1734283706, length=5, bak_maps=[MultiMapsListMapInfoBakMaps(mapflag=None, add_time=1728184107)])]) # --- Python-roborock-python-roborock-32df4f3/tests/conftest.py000066400000000000000000000357021507503702500237370ustar00rootroot00000000000000import asyncio import io import logging import re from asyncio import Protocol from collections.abc import AsyncGenerator, Callable, Generator from queue import Queue from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest from aioresponses import aioresponses from roborock import HomeData, UserData from roborock.containers import DeviceData from roborock.roborock_message import RoborockMessage from roborock.version_1_apis.roborock_local_client_v1 import RoborockLocalClientV1 from roborock.version_1_apis.roborock_mqtt_client_v1 import RoborockMqttClientV1 from tests.mock_data import HOME_DATA_RAW, HOME_DATA_SCENES_RAW, TEST_LOCAL_API_HOST, USER_DATA _LOGGER = logging.getLogger(__name__) # Used by fixtures to handle incoming requests and prepare responses RequestHandler = Callable[[bytes], bytes | None] QUEUE_TIMEOUT = 10 class FakeSocketHandler: """Fake socket used by the test to simulate a connection to the broker. The socket handler is used to intercept the socket send and recv calls and populate the response buffer with data to be sent back to the client. The handle request callback handles the incoming requests and prepares the responses. 
""" def __init__(self, handle_request: RequestHandler, response_queue: Queue[bytes]) -> None: self.response_buf = io.BytesIO() self.handle_request = handle_request self.response_queue = response_queue def pending(self) -> int: """Return the number of bytes in the response buffer.""" return len(self.response_buf.getvalue()) def handle_socket_recv(self, read_size: int) -> bytes: """Intercept a client recv() and populate the buffer.""" if self.pending() == 0: raise BlockingIOError("No response queued") self.response_buf.seek(0) data = self.response_buf.read(read_size) _LOGGER.debug("Response: 0x%s", data.hex()) # Consume the rest of the data in the buffer remaining_data = self.response_buf.read() self.response_buf = io.BytesIO(remaining_data) return data def handle_socket_send(self, client_request: bytes) -> int: """Receive an incoming request from the client.""" _LOGGER.debug("Request: 0x%s", client_request.hex()) if (response := self.handle_request(client_request)) is not None: # Enqueue a response to be sent back to the client in the buffer. # The buffer will be emptied when the client calls recv() on the socket _LOGGER.debug("Queued: 0x%s", response.hex()) self.response_buf.write(response) return len(client_request) def push_response(self) -> None: """Push a response to the client.""" if not self.response_queue.empty(): response = self.response_queue.get() # Enqueue a response to be sent back to the client in the buffer. # The buffer will be emptied when the client calls recv() on the socket _LOGGER.debug("Queued: 0x%s", response.hex()) self.response_buf.write(response) @pytest.fixture(name="received_requests") def received_requests_fixture() -> Queue[bytes]: """Fixture that provides access to the received requests.""" return Queue() @pytest.fixture(name="response_queue") def response_queue_fixture() -> Generator[Queue[bytes], None, None]: """Fixture that provides access to the received requests.""" response_queue: Queue[bytes] = Queue() yield response_queue assert response_queue.empty(), "Not all fake responses were consumed" @pytest.fixture(name="request_handler") def request_handler_fixture(received_requests: Queue[bytes], response_queue: Queue[bytes]) -> RequestHandler: """Fixture records incoming requests and replies with responses from the queue.""" def handle_request(client_request: bytes) -> bytes | None: """Handle an incoming request from the client.""" received_requests.put(client_request) # Insert a prepared response into the response buffer if not response_queue.empty(): return response_queue.get() return None return handle_request @pytest.fixture(name="fake_socket_handler") def fake_socket_handler_fixture(request_handler: RequestHandler, response_queue: Queue[bytes]) -> FakeSocketHandler: """Fixture that creates a fake MQTT broker.""" return FakeSocketHandler(request_handler, response_queue) @pytest.fixture(name="mock_sock") def mock_sock_fixture(fake_socket_handler: FakeSocketHandler) -> Mock: """Fixture that creates a mock socket connection and wires it to the handler.""" mock_sock = Mock() mock_sock.recv = fake_socket_handler.handle_socket_recv mock_sock.send = fake_socket_handler.handle_socket_send mock_sock.pending = fake_socket_handler.pending return mock_sock @pytest.fixture(name="mock_create_connection") def create_connection_fixture(mock_sock: Mock) -> Generator[None, None, None]: """Fixture that overrides the MQTT socket creation to wire it up to the mock socket.""" with patch("paho.mqtt.client.socket.create_connection", return_value=mock_sock): yield 
@pytest.fixture(name="mock_select") def select_fixture(mock_sock: Mock, fake_socket_handler: FakeSocketHandler) -> Generator[None, None, None]: """Fixture that overrides the MQTT client select calls to make select work on the mock socket. This patch select to activate our mock socket when ready with data. Internal mqtt sockets are always ready since they are used internally to wake the select loop. Ours is ready if there is data in the buffer. """ def is_ready(sock: Any) -> bool: return sock is not mock_sock or (fake_socket_handler.pending() > 0) def handle_select(rlist: list, wlist: list, *args: Any) -> list: return [list(filter(is_ready, rlist)), list(filter(is_ready, wlist))] with patch("paho.mqtt.client.select.select", side_effect=handle_select): yield @pytest.fixture(name="mqtt_client") async def mqtt_client(mock_create_connection: None, mock_select: None) -> AsyncGenerator[RoborockMqttClientV1, None]: user_data = UserData.from_dict(USER_DATA) home_data = HomeData.from_dict(HOME_DATA_RAW) device_info = DeviceData( device=home_data.devices[0], model=home_data.products[0].model, ) client = RoborockMqttClientV1(user_data, device_info, queue_timeout=QUEUE_TIMEOUT) try: yield client finally: if not client.is_connected(): try: await client.async_release() except Exception: pass @pytest.fixture(name="mock_rest", autouse=True) def mock_rest() -> aioresponses: """Mock all rest endpoints so they won't hit real endpoints""" with aioresponses() as mocked: # Match the base URL and allow any query params mocked.post( re.compile(r"https://.*iot\.roborock\.com/api/v1/getUrlByEmail.*"), status=200, payload={ "code": 200, "data": {"country": "US", "countrycode": "1", "url": "https://usiot.roborock.com"}, "msg": "success", }, ) mocked.post( re.compile(r"https://.*iot\.roborock\.com/api/v1/login.*"), status=200, payload={"code": 200, "data": USER_DATA, "msg": "success"}, ) mocked.post( re.compile(r"https://.*iot\.roborock\.com/api/v1/loginWithCode.*"), status=200, payload={"code": 200, "data": USER_DATA, "msg": "success"}, ) mocked.post( re.compile(r"https://.*iot\.roborock\.com/api/v1/sendEmailCode.*"), status=200, payload={"code": 200, "data": None, "msg": "success"}, ) mocked.get( re.compile(r"https://.*iot\.roborock\.com/api/v1/getHomeDetail.*"), status=200, payload={ "code": 200, "data": {"deviceListOrder": None, "id": 123456, "name": "My Home", "rrHomeId": 123456, "tuyaHomeId": 0}, "msg": "success", }, ) mocked.get( re.compile(r"https://api-.*\.roborock\.com/v2/user/homes*"), status=200, payload={"api": None, "code": 200, "result": HOME_DATA_RAW, "status": "ok", "success": True}, ) mocked.post( re.compile(r"https://api-.*\.roborock\.com/nc/prepare"), status=200, payload={ "api": None, "result": {"r": "US", "s": "ffffff", "t": "eOf6d2BBBB"}, "status": "ok", "success": True, }, ) mocked.get( re.compile(r"https://api-.*\.roborock\.com/user/devices/newadd/*"), status=200, payload={ "api": "获取新增设备信息", "result": { "activeTime": 1737724598, "attribute": None, "cid": None, "createTime": 0, "deviceStatus": None, "duid": "rand_duid", "extra": "{}", "f": False, "featureSet": "0", "fv": "02.16.12", "iconUrl": "", "lat": None, "localKey": "random_lk", "lon": None, "name": "S7", "newFeatureSet": "0000000000002000", "online": True, "productId": "rand_prod_id", "pv": "1.0", "roomId": None, "runtimeEnv": None, "setting": None, "share": False, "shareTime": None, "silentOtaSwitch": False, "sn": "Rand_sn", "timeZoneId": "America/New_York", "tuyaMigrated": False, "tuyaUuid": None, }, "status": "ok", "success": True, }, 
) mocked.get( re.compile(r"https://api-.*\.roborock\.com/user/scene/device/.*"), status=200, payload={"api": None, "code": 200, "result": HOME_DATA_SCENES_RAW, "status": "ok", "success": True}, ) mocked.post( re.compile(r"https://api-.*\.roborock\.com/user/scene/.*/execute"), status=200, payload={"api": None, "code": 200, "result": None, "status": "ok", "success": True}, ) mocked.post( re.compile(r"https://.*iot\.roborock\.com/api/v4/email/code/send.*"), status=200, payload={"code": 200, "data": None, "msg": "success"}, ) mocked.post( re.compile(r"https://.*iot\.roborock\.com/api/v3/key/sign.*"), status=200, payload={"code": 200, "data": {"k": "mock_k"}, "msg": "success"}, ) mocked.post( re.compile(r"https://.*iot\.roborock\.com/api/v4/auth/email/login/code.*"), status=200, payload={"code": 200, "data": USER_DATA, "msg": "success"}, ) yield mocked @pytest.fixture(autouse=True) def skip_rate_limit(): """Don't rate limit tests as they aren't actually hitting the api.""" with ( patch("roborock.web_api.RoborockApiClient._login_limiter.try_acquire"), patch("roborock.web_api.RoborockApiClient._home_data_limiter.try_acquire"), ): yield @pytest.fixture(name="mock_create_local_connection") def create_local_connection_fixture(request_handler: RequestHandler) -> Generator[None, None, None]: """Fixture that overrides the transport creation to wire it up to the mock socket.""" async def create_connection(protocol_factory: Callable[[], Protocol], *args) -> tuple[Any, Any]: protocol = protocol_factory() def handle_write(data: bytes) -> None: _LOGGER.debug("Received: %s", data) response = request_handler(data) if response is not None: _LOGGER.debug("Replying with %s", response) loop = asyncio.get_running_loop() loop.call_soon(protocol.data_received, response) closed = asyncio.Event() mock_transport = Mock() mock_transport.write = handle_write mock_transport.close = closed.set mock_transport.is_reading = lambda: not closed.is_set() return (mock_transport, "proto") with patch("roborock.version_1_apis.roborock_local_client_v1.get_running_loop") as mock_loop: mock_loop.return_value.create_connection.side_effect = create_connection yield @pytest.fixture(name="local_client") async def local_client_fixture(mock_create_local_connection: None) -> AsyncGenerator[RoborockLocalClientV1, None]: home_data = HomeData.from_dict(HOME_DATA_RAW) device_info = DeviceData( device=home_data.devices[0], model=home_data.products[0].model, host=TEST_LOCAL_API_HOST, ) client = RoborockLocalClientV1(device_info, queue_timeout=QUEUE_TIMEOUT) try: yield client finally: if not client.is_connected(): try: await client.async_release() except Exception: pass class FakeChannel: """A fake channel that handles publish and subscribe calls.""" def __init__(self): """Initialize the fake channel.""" self.subscribers: list[Callable[[RoborockMessage], None]] = [] self.published_messages: list[RoborockMessage] = [] self.response_queue: list[RoborockMessage] = [] self._is_connected = False self.publish_side_effect: Exception | None = None self.publish = AsyncMock(side_effect=self._publish) self.subscribe = AsyncMock(side_effect=self._subscribe) self.connect = AsyncMock(side_effect=self._connect) self.close = MagicMock(side_effect=self._close) async def _connect(self) -> None: self._is_connected = True def _close(self) -> None: self._is_connected = False @property def is_connected(self) -> bool: """Return true if connected.""" return self._is_connected async def _publish(self, message: RoborockMessage) -> None: """Simulate publishing a message and 
triggering a response.""" self.published_messages.append(message) if self.publish_side_effect: raise self.publish_side_effect # When a message is published, simulate a response if self.response_queue: response = self.response_queue.pop(0) # Give a chance for the subscriber to be registered for subscriber in list(self.subscribers): subscriber(response) async def _subscribe(self, callback: Callable[[RoborockMessage], None]) -> Callable[[], None]: """Simulate subscribing to messages.""" self.subscribers.append(callback) return lambda: self.subscribers.remove(callback) Python-roborock-python-roborock-32df4f3/tests/devices/000077500000000000000000000000001507503702500231535ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/tests/devices/__init__.py000066400000000000000000000000431507503702500252610ustar00rootroot00000000000000"""Tests for the device module.""" Python-roborock-python-roborock-32df4f3/tests/devices/__snapshots__/000077500000000000000000000000001507503702500257715ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/tests/devices/__snapshots__/test_v1_device.ambr000066400000000000000000000050261507503702500315430ustar00rootroot00000000000000# serializer version: 1 # name: test_device_trait_command_parsing[payload0-] StatusTrait(adbumper_status=None, auto_dust_collection=None, avoid_count=None, back_type=None, battery=100, camera_status=None, charge_status=None, clean_area=91287500, clean_fluid_status=None, clean_percent=None, clean_time=5405, clear_water_box_status=None, collision_avoid_status=None, command=, common_status=None, corner_clean_mode=None, current_map=0, debug_mode=None, dirty_water_box_status=None, distance_off=0, dnd_enabled=1, dock_cool_fan_status=None, dock_error_status=None, dock_type=None, dry_status=None, dss=None, dust_bag_status=None, dust_collection_status=None, error_code=, error_code_name=None, fan_power=, fan_power_name='custom', fan_power_options=['off', 'quiet', 'balanced', 'turbo', 'max', 'custom', 'max_plus'], hatch_door_status=None, home_sec_enable_password=None, home_sec_status=None, in_cleaning=, in_fresh_state=1, in_returning=0, in_warmup=None, is_exploring=None, is_locating=0, kct=None, lab_status=1, last_clean_t=None, lock_status=0, map_present=1, map_status=3, mop_forbidden_enable=0, mop_mode=None, mop_mode_name=None, msg_seq=515, msg_ver=2, rdt=None, repeat=None, replenish_mode=None, rss=None, square_meter_clean_area=91.3, state=, state_name='charging', subdivision_sets=None, switch_map_mode=None, unsave_map_flag=0, unsave_map_reason=4, wash_phase=None, wash_ready=None, wash_status=None, water_box_carriage_status=0, water_box_filter_status=None, water_box_mode=, water_box_mode_name='custom', water_box_status=0, water_shortage_status=None) # --- # name: test_device_trait_command_parsing[payload1-] DoNotDisturbTrait(start_hour=22, start_minute=0, end_hour=8, end_minute=0, enabled=1) # --- # name: test_device_trait_command_parsing[payload2-] CleanSummaryTrait(clean_area=24258125000, clean_count=296, clean_time=1442559, command=, dust_collection_count=None, last_clean_t=None, records=[1756848207, 1754930385, 1753203976, 1752183435, 1747427370, 1746204046, 1745601543, 1744387080, 1743528522, 1742489154, 1741022299, 1740433682, 1739902516, 1738875106, 1738864366, 1738620067, 1736873889, 1736197544, 1736121269, 1734458038], square_meter_clean_area=24258.1) # --- # name: test_device_trait_command_parsing[payload3-] SoundVolumeTrait(volume=90) # --- 
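# NOTE: illustrative sketch only -- not part of the original test suite. It shows how the
# FakeChannel test double defined in tests/conftest.py above is typically driven: responses
# are queued up front, publish() pops the next queued RoborockMessage and fans it out to the
# registered subscribers, and every published request is recorded for later assertions.
# The `fake_request` / `fake_response` objects below are hypothetical placeholder messages.
#
#     channel = FakeChannel()
#     await channel.connect()
#     received: list[RoborockMessage] = []
#     unsub = await channel.subscribe(received.append)
#     channel.response_queue.append(fake_response)   # hypothetical RoborockMessage
#     await channel.publish(fake_request)            # hypothetical RoborockMessage
#     assert channel.published_messages == [fake_request]
#     assert received == [fake_response]
#     unsub()
#     channel.close()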
Python-roborock-python-roborock-32df4f3/tests/devices/test_a01_channel.py000066400000000000000000000030551507503702500266400ustar00rootroot00000000000000"""Tests for the a01_channel.""" from typing import Any import pytest from roborock.devices.a01_channel import send_decoded_command from roborock.protocols.a01_protocol import encode_mqtt_payload from roborock.roborock_message import ( RoborockDyadDataProtocol, RoborockMessage, RoborockMessageProtocol, ) from ..conftest import FakeChannel @pytest.fixture def mock_mqtt_channel() -> FakeChannel: """Fixture for a fake MQTT channel.""" return FakeChannel() async def test_id_query(mock_mqtt_channel: FakeChannel): """Test successful command sending and response decoding.""" # Command parameters to send params: dict[RoborockDyadDataProtocol, Any] = { RoborockDyadDataProtocol.ID_QUERY: [ RoborockDyadDataProtocol.WARM_LEVEL, RoborockDyadDataProtocol.POWER, ] } encoded = encode_mqtt_payload( { RoborockDyadDataProtocol.WARM_LEVEL: 101, RoborockDyadDataProtocol.POWER: 75, } ) response_message = RoborockMessage( protocol=RoborockMessageProtocol.RPC_RESPONSE, payload=encoded.payload, version=encoded.version ) mock_mqtt_channel.response_queue.append(response_message) # Call the function to be tested result = await send_decoded_command(mock_mqtt_channel, params) # type: ignore[call-overload] # Assertions assert result == {RoborockDyadDataProtocol.WARM_LEVEL: 101, RoborockDyadDataProtocol.POWER: 75} mock_mqtt_channel.publish.assert_awaited_once() mock_mqtt_channel.subscribe.assert_awaited_once() Python-roborock-python-roborock-32df4f3/tests/devices/test_device_manager.py000066400000000000000000000110051507503702500275120ustar00rootroot00000000000000"""Tests for the DeviceManager class.""" from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch import pytest from roborock.containers import HomeData, UserData from roborock.devices.cache import CacheData, InMemoryCache from roborock.devices.device_manager import create_device_manager, create_home_data_api from roborock.exceptions import RoborockException from .. 
import mock_data USER_DATA = UserData.from_dict(mock_data.USER_DATA) NETWORK_INFO = mock_data.NETWORK_INFO @pytest.fixture(autouse=True, name="mqtt_session") def setup_mqtt_session() -> Generator[Mock, None, None]: """Fixture to set up the MQTT session for the tests.""" with patch("roborock.devices.device_manager.create_lazy_mqtt_session") as mock_create_session: yield mock_create_session @pytest.fixture(autouse=True) def channel_fixture() -> Generator[Mock, None, None]: """Fixture to set up the local session for the tests.""" with patch("roborock.devices.device_manager.create_v1_channel") as mock_channel: mock_unsub = Mock() mock_channel.return_value.subscribe = AsyncMock() mock_channel.return_value.subscribe.return_value = mock_unsub yield mock_channel async def home_home_data_no_devices() -> HomeData: """Mock home data API that returns no devices.""" return HomeData( id=1, name="Test Home", devices=[], products=[], ) async def mock_home_data() -> HomeData: """Mock home data API that returns devices.""" return HomeData.from_dict(mock_data.HOME_DATA_RAW) async def test_no_devices() -> None: """Test the DeviceManager created with no devices returned from the API.""" device_manager = await create_device_manager(USER_DATA, home_home_data_no_devices) devices = await device_manager.get_devices() assert devices == [] async def test_with_device() -> None: """Test the DeviceManager created with devices returned from the API.""" device_manager = await create_device_manager(USER_DATA, mock_home_data) devices = await device_manager.get_devices() assert len(devices) == 1 assert devices[0].duid == "abc123" assert devices[0].name == "Roborock S7 MaxV" device = await device_manager.get_device("abc123") assert device is not None assert device.duid == "abc123" assert device.name == "Roborock S7 MaxV" await device_manager.close() async def test_get_non_existent_device() -> None: """Test getting a non-existent device.""" device_manager = await create_device_manager(USER_DATA, mock_home_data) device = await device_manager.get_device("non_existent_duid") assert device is None await device_manager.close() async def test_home_data_api_exception() -> None: """Test the home data API with an exception.""" async def home_data_api_exception() -> HomeData: raise RoborockException("Test exception") with pytest.raises(RoborockException, match="Test exception"): await create_device_manager(USER_DATA, home_data_api_exception) async def test_create_home_data_api_exception() -> None: """Test that exceptions from the home data API are propagated through the wrapper.""" with patch("roborock.devices.device_manager.RoborockApiClient.get_home_data_v3") as mock_get_home_data: mock_get_home_data.side_effect = RoborockException("Test exception") api = create_home_data_api(USER_DATA, mock_get_home_data) with pytest.raises(RoborockException, match="Test exception"): await api() async def test_cache_logic() -> None: """Test that the cache logic works correctly.""" call_count = 0 async def mock_home_data_with_counter() -> HomeData: nonlocal call_count call_count += 1 return HomeData.from_dict(mock_data.HOME_DATA_RAW) class TestCache: def __init__(self): self._data = CacheData() async def get(self) -> CacheData: return self._data async def set(self, value: CacheData) -> None: self._data = value # First call happens during create_device_manager initialization device_manager = await create_device_manager(USER_DATA, mock_home_data_with_counter, cache=InMemoryCache()) assert call_count == 1 # Second call should use cache, not increment 
call_count devices2 = await device_manager.discover_devices() assert call_count == 1 # Should still be 1, not 2 assert len(devices2) == 1 await device_manager.close() assert len(devices2) == 1 await device_manager.close() Python-roborock-python-roborock-32df4f3/tests/devices/test_local_channel.py000066400000000000000000000204231507503702500273470ustar00rootroot00000000000000"""Tests for the LocalChannel class.""" import asyncio import json from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch import pytest from roborock.devices.local_channel import LocalChannel from roborock.exceptions import RoborockConnectionException from roborock.protocol import create_local_decoder, create_local_encoder from roborock.roborock_message import RoborockMessage, RoborockMessageProtocol TEST_HOST = "192.168.1.100" TEST_LOCAL_KEY = "local_key" TEST_PORT = 58867 TEST_REQUEST = RoborockMessage( protocol=RoborockMessageProtocol.RPC_REQUEST, payload=json.dumps({"dps": {"101": json.dumps({"id": 12345, "method": "get_status"})}}).encode(), ) TEST_RESPONSE = RoborockMessage( protocol=RoborockMessageProtocol.RPC_RESPONSE, payload=json.dumps({"dps": {"102": json.dumps({"id": 12345, "result": {"state": "cleaning"}})}}).encode(), ) TEST_REQUEST2 = RoborockMessage( protocol=RoborockMessageProtocol.RPC_REQUEST, payload=json.dumps({"dps": {"101": json.dumps({"id": 54321, "method": "get_status"})}}).encode(), ) TEST_RESPONSE2 = RoborockMessage( protocol=RoborockMessageProtocol.RPC_RESPONSE, payload=json.dumps({"dps": {"102": json.dumps({"id": 54321, "result": {"state": "cleaning"}})}}).encode(), ) ENCODER = create_local_encoder(TEST_LOCAL_KEY) DECODER = create_local_decoder(TEST_LOCAL_KEY) @pytest.fixture(name="mock_transport") def setup_mock_transport() -> Mock: """Mock transport for testing.""" transport = Mock() transport.write = Mock() transport.close = Mock() return transport @pytest.fixture(name="mock_loop") def setup_mock_loop(mock_transport: Mock) -> Generator[Mock, None, None]: """Mock event loop for testing.""" loop = Mock() loop.create_connection = AsyncMock(return_value=(mock_transport, Mock())) with patch("asyncio.get_running_loop", return_value=loop): yield loop @pytest.fixture(name="local_channel") def setup_local_channel() -> LocalChannel: """Fixture to set up the local channel for tests.""" return LocalChannel(host=TEST_HOST, local_key=TEST_LOCAL_KEY) @pytest.fixture(name="received_messages") async def setup_subscribe_callback(local_channel: LocalChannel) -> list[RoborockMessage]: """Fixture to record messages received by the subscriber.""" messages: list[RoborockMessage] = [] await local_channel.subscribe(messages.append) return messages async def test_successful_connection(local_channel: LocalChannel, mock_loop: Mock, mock_transport: Mock) -> None: """Test successful connection to device.""" await local_channel.connect() mock_loop.create_connection.assert_called_once() call_args = mock_loop.create_connection.call_args assert call_args[0][1] == TEST_HOST assert call_args[0][2] == TEST_PORT assert local_channel._is_connected is True async def test_connection_failure(local_channel: LocalChannel, mock_loop: Mock) -> None: """Test connection failure handling.""" mock_loop.create_connection.side_effect = OSError("Connection failed") with pytest.raises(RoborockConnectionException, match="Failed to connect to 192.168.1.100:58867"): await local_channel.connect() assert local_channel._is_connected is False async def test_already_connected_warning( local_channel: LocalChannel, 
mock_loop: Mock, caplog: pytest.LogCaptureFixture ) -> None: """Test warning when trying to connect when already connected.""" await local_channel.connect() await local_channel.connect() # Second connection attempt assert "Already connected" in caplog.text assert mock_loop.create_connection.call_count == 1 async def test_close_connection(local_channel: LocalChannel, mock_loop: Mock, mock_transport: Mock) -> None: """Test closing the connection.""" await local_channel.connect() local_channel.close() mock_transport.close.assert_called_once() assert local_channel._is_connected is False async def test_close_without_connection(local_channel: LocalChannel) -> None: """Test closing when not connected.""" local_channel.close() assert local_channel._is_connected is False async def test_publish_not_connected(local_channel: LocalChannel) -> None: """Test sending command when not connected raises exception.""" with pytest.raises(RoborockConnectionException, match="Not connected to device"): await local_channel.publish(TEST_REQUEST) async def test_successful_command_response(local_channel: LocalChannel, mock_loop: Mock, mock_transport: Mock) -> None: """Test successful command sending and response handling.""" await local_channel.connect() # Send command in background task await local_channel.publish(TEST_REQUEST) await asyncio.sleep(0.01) # yield # Simulate receiving response via the protocol callback local_channel._data_received(ENCODER(TEST_RESPONSE)) await asyncio.sleep(0.01) # yield # Verify command was sent mock_transport.write.assert_called_once() sent_data = mock_transport.write.call_args[0][0] decoded_sent = next(iter(DECODER(sent_data))) assert decoded_sent == TEST_REQUEST async def test_message_decode_error(local_channel: LocalChannel, caplog: pytest.LogCaptureFixture) -> None: """Test handling of message decode errors.""" local_channel._data_received(b"invalid_payload") await asyncio.sleep(0.01) # yield assert len(caplog.records) == 1 assert caplog.records[0].levelname == "WARNING" assert "Failed to decode message" in caplog.records[0].message async def test_subscribe_callback( local_channel: LocalChannel, received_messages: list[RoborockMessage], mock_loop: Mock ) -> None: """Test that subscribe callback receives all messages.""" await local_channel.connect() # Send some messages without an RPC local_channel._data_received(ENCODER(TEST_RESPONSE)) local_channel._data_received(ENCODER(TEST_RESPONSE2)) await asyncio.sleep(0.01) # yield assert received_messages == [TEST_RESPONSE, TEST_RESPONSE2] async def test_subscribe_callback_exception_handling( local_channel: LocalChannel, mock_loop: Mock, caplog: pytest.LogCaptureFixture ) -> None: """Test that exceptions in subscriber callbacks are handled gracefully.""" def failing_callback(message: RoborockMessage) -> None: raise ValueError("Test exception") await local_channel.subscribe(failing_callback) await local_channel.connect() # Send message that will cause callback to fail local_channel._data_received(ENCODER(TEST_RESPONSE)) await asyncio.sleep(0.01) # yield # Should log the exception but not crash assert any("Uncaught error in callback 'failing_callback'" in record.message for record in caplog.records) async def test_unsubscribe(local_channel: LocalChannel, mock_loop: Mock) -> None: """Test unsubscribing from messages.""" messages: list[RoborockMessage] = [] unsubscribe = await local_channel.subscribe(messages.append) await local_channel.connect() # Send message while subscribed local_channel._data_received(ENCODER(TEST_RESPONSE)) await 
asyncio.sleep(0.01) # yield assert len(messages) == 1 # Unsubscribe and send another message unsubscribe() local_channel._data_received(ENCODER(TEST_RESPONSE2)) await asyncio.sleep(0.01) # yield # Should still have only one message assert len(messages) == 1 async def test_connection_lost_callback( local_channel: LocalChannel, mock_loop: Mock, caplog: pytest.LogCaptureFixture ) -> None: """Test connection lost callback handling.""" await local_channel.connect() # Simulate connection loss test_exception = OSError("Connection lost") local_channel._connection_lost(test_exception) assert local_channel._is_connected is False assert local_channel._transport is None assert "Connection lost to 192.168.1.100" in caplog.text async def test_connection_lost_without_exception( local_channel: LocalChannel, mock_loop: Mock, caplog: pytest.LogCaptureFixture ) -> None: """Test connection lost callback without exception.""" await local_channel.connect() # Simulate connection loss without exception local_channel._connection_lost(None) assert local_channel._is_connected is False assert local_channel._transport is None assert "Connection lost to 192.168.1.100" in caplog.text Python-roborock-python-roborock-32df4f3/tests/devices/test_mqtt_channel.py000066400000000000000000000231551507503702500272470ustar00rootroot00000000000000"""Tests for the MqttChannel class.""" import asyncio import json import logging from collections.abc import AsyncGenerator, Callable from unittest.mock import AsyncMock, Mock import pytest from roborock.containers import HomeData, UserData from roborock.devices.mqtt_channel import MqttChannel from roborock.mqtt.session import MqttParams from roborock.protocol import create_mqtt_decoder, create_mqtt_encoder from roborock.roborock_message import RoborockMessage, RoborockMessageProtocol from .. 
import mock_data USER_DATA = UserData.from_dict(mock_data.USER_DATA) TEST_MQTT_PARAMS = MqttParams( host="localhost", port=1883, tls=False, username="username", password="password", timeout=10.0, ) TEST_LOCAL_KEY = "local_key" TEST_REQUEST = RoborockMessage( protocol=RoborockMessageProtocol.RPC_REQUEST, payload=json.dumps({"dps": {"101": json.dumps({"id": 12345, "method": "get_status"})}}).encode(), ) TEST_RESPONSE = RoborockMessage( protocol=RoborockMessageProtocol.RPC_RESPONSE, payload=json.dumps({"dps": {"102": json.dumps({"id": 12345, "result": {"state": "cleaning"}})}}).encode(), ) TEST_REQUEST2 = RoborockMessage( protocol=RoborockMessageProtocol.RPC_REQUEST, payload=json.dumps({"dps": {"101": json.dumps({"id": 54321, "method": "get_status"})}}).encode(), ) TEST_RESPONSE2 = RoborockMessage( protocol=RoborockMessageProtocol.RPC_RESPONSE, payload=json.dumps({"dps": {"102": json.dumps({"id": 54321, "result": {"state": "cleaning"}})}}).encode(), ) ENCODER = create_mqtt_encoder(TEST_LOCAL_KEY) DECODER = create_mqtt_decoder(TEST_LOCAL_KEY) @pytest.fixture(name="mqtt_session", autouse=True) def setup_mqtt_session() -> Mock: """Fixture to set up the MQTT session for the tests.""" return AsyncMock() @pytest.fixture(name="mqtt_channel", autouse=True) def setup_mqtt_channel(mqtt_session: Mock) -> MqttChannel: """Fixture to set up the MQTT channel for the tests.""" return MqttChannel( mqtt_session, duid="abc123", local_key=TEST_LOCAL_KEY, rriot=USER_DATA.rriot, mqtt_params=TEST_MQTT_PARAMS ) @pytest.fixture(name="mqtt_subscribers", autouse=True) async def setup_subscribe_callback(mqtt_session: Mock) -> AsyncGenerator[list[Callable[[bytes], None]], None]: """Fixture to record messages received by the subscriber.""" subscriber_callbacks = [] def mock_subscribe(_: str, callback: Callable[[bytes], None]) -> Callable[[], None]: subscriber_callbacks.append(callback) return lambda: subscriber_callbacks.remove(callback) mqtt_session.subscribe.side_effect = mock_subscribe yield subscriber_callbacks assert not subscriber_callbacks, "Not all subscribers were unsubscribed" @pytest.fixture(name="mqtt_message_handler") async def setup_message_handler(mqtt_subscribers: list[Callable[[bytes], None]]) -> Callable[[bytes], None]: """Fixture to allow simulating incoming MQTT messages.""" def invoke_all_callbacks(message: bytes) -> None: for callback in mqtt_subscribers: callback(message) return invoke_all_callbacks @pytest.fixture def warning_caplog( caplog: pytest.LogCaptureFixture, ) -> pytest.LogCaptureFixture: """Fixture to capture warning messages.""" caplog.set_level(logging.WARNING) return caplog async def home_home_data_no_devices() -> HomeData: """Mock home data API that returns no devices.""" return HomeData( id=1, name="Test Home", devices=[], products=[], ) async def mock_home_data() -> HomeData: """Mock home data API that returns devices.""" return HomeData.from_dict(mock_data.HOME_DATA_RAW) async def test_publish_success( mqtt_session: Mock, mqtt_channel: MqttChannel, mqtt_message_handler: Callable[[bytes], None], ) -> None: """Test successful RPC command sending and response handling.""" # Send a test request. We use a task so we can simulate receiving the response # while the command is still being processed. 
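    # (In this flow publish() is awaited directly; the encoded TEST_RESPONSE is then fed back
    # through the captured subscriber callback, and the asyncio.sleep(0.01) calls simply yield
    # control to the event loop so the channel can process it.)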
await mqtt_channel.publish(TEST_REQUEST) await asyncio.sleep(0.01) # yield # Simulate receiving the response message via MQTT mqtt_message_handler(ENCODER(TEST_RESPONSE)) await asyncio.sleep(0.01) # yield # Verify the command was sent assert mqtt_session.publish.called assert mqtt_session.publish.call_args[0][0] == "rr/m/i/user123/username/abc123" raw_sent_msg = mqtt_session.publish.call_args[0][1] # == b"encoded_message" decoded_message = next(iter(DECODER(raw_sent_msg))) assert decoded_message == TEST_REQUEST assert decoded_message.protocol == RoborockMessageProtocol.RPC_REQUEST @pytest.mark.parametrize(("connected"), [(True), (False)]) async def test_connection_status( mqtt_session: Mock, mqtt_channel: MqttChannel, connected: bool, ) -> None: """Test successful RPC command sending and response handling.""" mqtt_session.connected = connected assert mqtt_channel.is_connected is connected assert mqtt_channel.is_local_connected is False async def test_message_decode_error( mqtt_channel: MqttChannel, mqtt_message_handler: Callable[[bytes], None], caplog: pytest.LogCaptureFixture, ) -> None: """Test an error during message decoding.""" callback = Mock() unsub = await mqtt_channel.subscribe(callback) mqtt_message_handler(b"invalid_payload") await asyncio.sleep(0.01) # yield assert len(caplog.records) == 1 assert caplog.records[0].levelname == "WARNING" assert "Failed to decode message" in caplog.records[0].message unsub() async def test_concurrent_subscribers(mqtt_session: Mock, mqtt_channel: MqttChannel) -> None: """Test multiple concurrent subscribers receive all messages.""" # Set up multiple subscribers subscriber1_messages: list[RoborockMessage] = [] subscriber2_messages: list[RoborockMessage] = [] subscriber3_messages: list[RoborockMessage] = [] unsub1 = await mqtt_channel.subscribe(subscriber1_messages.append) unsub2 = await mqtt_channel.subscribe(subscriber2_messages.append) unsub3 = await mqtt_channel.subscribe(subscriber3_messages.append) # Verify that each subscription creates a separate call to the MQTT session assert mqtt_session.subscribe.call_count == 3 # All subscriptions should be to the same topic for call in mqtt_session.subscribe.call_args_list: assert call[0][0] == "rr/m/o/user123/username/abc123" # Get the message handlers for each subscriber handler1 = mqtt_session.subscribe.call_args_list[0][0][1] handler2 = mqtt_session.subscribe.call_args_list[1][0][1] handler3 = mqtt_session.subscribe.call_args_list[2][0][1] # Simulate receiving messages - each handler should decode the message independently handler1(ENCODER(TEST_REQUEST)) handler2(ENCODER(TEST_REQUEST)) handler3(ENCODER(TEST_REQUEST)) await asyncio.sleep(0.01) # yield # All subscribers should receive the message assert len(subscriber1_messages) == 1 assert len(subscriber2_messages) == 1 assert len(subscriber3_messages) == 1 assert subscriber1_messages[0] == TEST_REQUEST assert subscriber2_messages[0] == TEST_REQUEST assert subscriber3_messages[0] == TEST_REQUEST # Send another message to all handlers handler1(ENCODER(TEST_RESPONSE)) handler2(ENCODER(TEST_RESPONSE)) handler3(ENCODER(TEST_RESPONSE)) await asyncio.sleep(0.01) # yield # All subscribers should have received both messages assert len(subscriber1_messages) == 2 assert len(subscriber2_messages) == 2 assert len(subscriber3_messages) == 2 assert subscriber1_messages == [TEST_REQUEST, TEST_RESPONSE] assert subscriber2_messages == [TEST_REQUEST, TEST_RESPONSE] assert subscriber3_messages == [TEST_REQUEST, TEST_RESPONSE] # Test unsubscribing one subscriber 
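    # (unsub1 is the remover returned by mqtt_channel.subscribe(); calling it detaches subscriber 1,
    # so the TEST_REQUEST2 messages delivered below should only reach the remaining two subscribers.)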
unsub1() # Send another message only to remaining handlers handler2(ENCODER(TEST_REQUEST2)) handler3(ENCODER(TEST_REQUEST2)) await asyncio.sleep(0.01) # yield # First subscriber should not have received the new message assert len(subscriber1_messages) == 2 assert len(subscriber2_messages) == 3 assert len(subscriber3_messages) == 3 assert subscriber2_messages[2] == TEST_REQUEST2 assert subscriber3_messages[2] == TEST_REQUEST2 # Unsubscribe remaining subscribers unsub2() unsub3() async def test_concurrent_subscribers_with_callback_exception( mqtt_session: Mock, mqtt_channel: MqttChannel, caplog: pytest.LogCaptureFixture ) -> None: """Test that exception in one subscriber callback doesn't affect others.""" caplog.set_level(logging.ERROR) def failing_callback(message: RoborockMessage) -> None: raise ValueError("Callback error") subscriber2_messages: list[RoborockMessage] = [] unsub1 = await mqtt_channel.subscribe(failing_callback) unsub2 = await mqtt_channel.subscribe(subscriber2_messages.append) # Get the message handlers handler1 = mqtt_session.subscribe.call_args_list[0][0][1] handler2 = mqtt_session.subscribe.call_args_list[1][0][1] # Simulate receiving a message - first handler will raise exception handler1(ENCODER(TEST_REQUEST)) handler2(ENCODER(TEST_REQUEST)) await asyncio.sleep(0.01) # yield # Exception should be logged but other subscribers should still work assert len(subscriber2_messages) == 1 assert subscriber2_messages[0] == TEST_REQUEST # Check that exception was logged error_records = [record for record in caplog.records if record.levelname == "ERROR"] assert len(error_records) == 1 assert "Uncaught error in callback 'failing_callback'" in error_records[0].message # Unsubscribe all remaining subscribers unsub1() unsub2() Python-roborock-python-roborock-32df4f3/tests/devices/test_v1_channel.py000066400000000000000000000420331507503702500266040ustar00rootroot00000000000000"""Tests for the V1Channel class. This test simulates communication across both the MQTT and local connections and failure modes, ensuring the V1Channel behaves correctly in various scenarios. """ import json import logging from collections.abc import Iterator from unittest.mock import AsyncMock, Mock, patch import pytest from roborock.containers import NetworkInfo, RoborockStateCode, S5MaxStatus, UserData from roborock.devices.cache import CacheData, InMemoryCache from roborock.devices.local_channel import LocalSession from roborock.devices.v1_channel import V1Channel from roborock.exceptions import RoborockException from roborock.protocol import ( create_local_decoder, create_local_encoder, create_mqtt_decoder, create_mqtt_encoder, ) from roborock.protocols.v1_protocol import MapResponse, SecurityData from roborock.roborock_message import RoborockMessage, RoborockMessageProtocol from roborock.roborock_typing import RoborockCommand from .. 
import mock_data from ..conftest import FakeChannel USER_DATA = UserData.from_dict(mock_data.USER_DATA) TEST_DEVICE_UID = "abc123" TEST_LOCAL_KEY = "local_key" TEST_SECURITY_DATA = SecurityData(endpoint="test_endpoint", nonce=b"test_nonce_16byte") TEST_HOST = "1.1.1.1" # Test messages for V1 protocol TEST_REQUEST = RoborockMessage( protocol=RoborockMessageProtocol.RPC_REQUEST, payload=json.dumps({"dps": {"101": json.dumps({"id": 12346, "method": "get_status"})}}).encode(), ) TEST_RESPONSE = RoborockMessage( protocol=RoborockMessageProtocol.RPC_RESPONSE, payload=json.dumps( {"dps": {"102": json.dumps({"id": 12346, "result": {"state": RoborockStateCode.cleaning}})}} ).encode(), ) TEST_RESPONSE_2 = RoborockMessage( protocol=RoborockMessageProtocol.RPC_RESPONSE, payload=json.dumps( {"dps": {"102": json.dumps({"id": 12347, "result": {"state": RoborockStateCode.cleaning}})}} ).encode(), ) TEST_NETWORK_INFO_RESPONSE = RoborockMessage( protocol=RoborockMessageProtocol.RPC_RESPONSE, payload=json.dumps({"dps": {"102": json.dumps({"id": 12345, "result": mock_data.NETWORK_INFO})}}).encode(), ) TEST_NETWORKING_INFO = NetworkInfo.from_dict(mock_data.NETWORK_INFO) # Encoders/Decoders MQTT_ENCODER = create_mqtt_encoder(TEST_LOCAL_KEY) MQTT_DECODER = create_mqtt_decoder(TEST_LOCAL_KEY) LOCAL_ENCODER = create_local_encoder(TEST_LOCAL_KEY) LOCAL_DECODER = create_local_decoder(TEST_LOCAL_KEY) @pytest.fixture(name="mock_mqtt_channel") async def setup_mock_mqtt_channel() -> FakeChannel: """Mock MQTT channel for testing.""" channel = FakeChannel() await channel.connect() return channel @pytest.fixture(name="mock_local_channel") async def setup_mock_local_channel() -> FakeChannel: """Mock Local channel for testing.""" return FakeChannel() @pytest.fixture(name="mock_local_session") def setup_mock_local_session(mock_local_channel: Mock) -> Mock: """Mock Local session factory for testing.""" mock_session = Mock(spec=LocalSession) mock_session.return_value = mock_local_channel return mock_session @pytest.fixture(name="mock_request_id", autouse=True) def setup_mock_request_id() -> Iterator[None]: """Assign sequential request ids for testing.""" next_id = 12345 def fake_next_int(*args) -> int: nonlocal next_id id_to_return = next_id next_id += 1 return id_to_return with patch("roborock.protocols.v1_protocol.get_next_int", side_effect=fake_next_int): yield @pytest.fixture(name="mock_create_map_response_decoder") def setup_mock_map_decoder() -> Iterator[Mock]: """Mock the map response decoder to control its behavior in tests.""" with patch("roborock.devices.v1_rpc_channel.create_map_response_decoder") as mock_create_decoder: yield mock_create_decoder @pytest.fixture(name="v1_channel") def setup_v1_channel( mock_mqtt_channel: Mock, mock_local_session: Mock, mock_create_map_response_decoder: Mock, ) -> V1Channel: """Fixture to set up the V1 channel for tests.""" return V1Channel( device_uid=TEST_DEVICE_UID, security_data=TEST_SECURITY_DATA, mqtt_channel=mock_mqtt_channel, local_session=mock_local_session, cache=InMemoryCache(), ) @pytest.fixture(name="warning_caplog") def setup_warning_caplog(caplog: pytest.LogCaptureFixture) -> pytest.LogCaptureFixture: """Fixture to capture warning messages.""" caplog.set_level(logging.WARNING) return caplog async def test_v1_channel_subscribe_mqtt_only_success( v1_channel: V1Channel, mock_mqtt_channel: FakeChannel, mock_local_session: Mock, mock_local_channel: FakeChannel, ) -> None: """Test successful subscription with MQTT only (local connection fails).""" # Setup: MQTT succeeds, 
local fails mock_mqtt_channel.response_queue.append(TEST_NETWORK_INFO_RESPONSE) mock_local_channel.connect.side_effect = RoborockException("Connection failed") callback = Mock() unsub = await v1_channel.subscribe(callback) # Verify MQTT connection was established assert mock_mqtt_channel.subscribers # Verify local connection was attempted but failed mock_local_session.assert_called_once_with(TEST_HOST) mock_local_channel.connect.assert_called_once() # Verify properties assert v1_channel.is_mqtt_connected assert not v1_channel.is_local_connected # Test unsubscribe unsub() assert not mock_mqtt_channel.subscribers async def test_v1_channel_mqtt_disconnected( v1_channel: V1Channel, mock_mqtt_channel: FakeChannel, mock_local_session: Mock, mock_local_channel: FakeChannel, ) -> None: """Test successful subscription with MQTT only (local connection fails).""" # Setup: MQTT succeeds, local fails mock_mqtt_channel.response_queue.append(TEST_NETWORK_INFO_RESPONSE) mock_local_channel.connect.side_effect = RoborockException("Connection failed") callback = Mock() unsub = await v1_channel.subscribe(callback) # Verify MQTT connection was established assert mock_mqtt_channel.subscribers # Verify local connection was attempted but failed mock_local_session.assert_called_once_with(TEST_HOST) mock_local_channel.connect.assert_called_once() # Simulate an MQTT disconnection where the channel is not healthy mock_mqtt_channel.close() # Verify properties assert not v1_channel.is_mqtt_connected assert not v1_channel.is_local_connected # Test unsubscribe unsub() assert not mock_mqtt_channel.subscribers async def test_v1_channel_subscribe_local_success( v1_channel: V1Channel, mock_mqtt_channel: Mock, mock_local_channel: Mock, mock_local_session: Mock, ) -> None: """Test successful subscription with local connections.""" mock_mqtt_channel.response_queue.append(TEST_NETWORK_INFO_RESPONSE) # Mock network info retrieval callback = Mock() unsub = await v1_channel.subscribe(callback) # Verify local connection was attempted and succeeded mock_local_session.assert_called_once_with(TEST_HOST) mock_local_channel.connect.assert_called_once() # Verify local connection established and not mqtt assert not mock_mqtt_channel.subscribers assert mock_local_channel.subscribers # Verify properties assert not v1_channel.is_mqtt_connected assert v1_channel.is_local_connected # Test unsubscribe cleans up both unsub() assert not mock_mqtt_channel.subscribers assert not mock_local_channel.subscribers async def test_v1_channel_subscribe_already_connected_error(v1_channel: V1Channel, mock_mqtt_channel: Mock) -> None: """Test error when trying to subscribe when already connected.""" mock_mqtt_channel.response_queue.append(TEST_NETWORK_INFO_RESPONSE) # First subscription succeeds await v1_channel.subscribe(Mock()) # Second subscription should fail with pytest.raises(ValueError, match="Only one subscription allowed at a time"): await v1_channel.subscribe(Mock()) async def test_v1_channel_local_connection_warning_logged( v1_channel: V1Channel, mock_mqtt_channel: Mock, mock_local_channel: Mock, warning_caplog: pytest.LogCaptureFixture, ) -> None: """Test that local connection failures are logged as warnings.""" mock_mqtt_channel.response_queue.append(TEST_NETWORK_INFO_RESPONSE) mock_local_channel.connect.side_effect = RoborockException("Local connection failed") await v1_channel.subscribe(Mock()) assert "Could not establish local connection for device abc123" in warning_caplog.text assert "Local connection failed" in warning_caplog.text async def 
test_v1_channel_send_command_local_preferred(
    v1_channel: V1Channel,
    mock_mqtt_channel: Mock,
    mock_local_channel: Mock,
) -> None:
    """Test command sending prefers local connection when available."""
    # Establish connections
    mock_mqtt_channel.response_queue.append(TEST_NETWORK_INFO_RESPONSE)
    await v1_channel.subscribe(Mock())

    # Send command
    mock_local_channel.response_queue.append(TEST_RESPONSE)
    result = await v1_channel.rpc_channel.send_command(
        RoborockCommand.CHANGE_SOUND_VOLUME,
        response_type=S5MaxStatus,
    )

    # Verify local response was parsed
    assert result.state == RoborockStateCode.cleaning


async def test_v1_channel_send_command_local_fails(
    v1_channel: V1Channel,
    mock_mqtt_channel: Mock,
    mock_local_channel: Mock,
) -> None:
    """Test case where sending with local connection fails."""
    # Establish connections
    mock_mqtt_channel.response_queue.append(TEST_NETWORK_INFO_RESPONSE)
    await v1_channel.subscribe(Mock())

    # Local command fails
    mock_local_channel.publish = Mock()
    mock_local_channel.publish.side_effect = RoborockException("Local failed")

    # Send command
    with pytest.raises(RoborockException, match="Local failed"):
        await v1_channel.rpc_channel.send_command(
            RoborockCommand.CHANGE_SOUND_VOLUME,
            response_type=S5MaxStatus,
        )


async def test_v1_channel_send_decoded_command_mqtt_only(
    v1_channel: V1Channel,
    mock_mqtt_channel: Mock,
    mock_local_channel: Mock,
) -> None:
    """Test command sending works with MQTT only."""
    # Setup: only MQTT connection
    mock_mqtt_channel.response_queue.append(TEST_NETWORK_INFO_RESPONSE)
    mock_local_channel.connect.side_effect = RoborockException("No local")
    await v1_channel.subscribe(Mock())

    # Send command
    mock_mqtt_channel.response_queue.append(TEST_RESPONSE)
    result = await v1_channel.rpc_channel.send_command(
        RoborockCommand.CHANGE_SOUND_VOLUME,
        response_type=S5MaxStatus,
    )

    # Verify only MQTT was used
    assert result.state == RoborockStateCode.cleaning


async def test_v1_channel_send_decoded_command_with_params(
    v1_channel: V1Channel,
    mock_mqtt_channel: Mock,
    mock_local_channel: Mock,
) -> None:
    """Test command sending with parameters."""
    mock_mqtt_channel.response_queue.append(TEST_NETWORK_INFO_RESPONSE)
    await v1_channel.subscribe(Mock())

    # Send command with params
    mock_local_channel.response_queue.append(TEST_RESPONSE)
    test_params = {"volume": 80}
    await v1_channel.rpc_channel.send_command(
        RoborockCommand.CHANGE_SOUND_VOLUME,
        response_type=S5MaxStatus,
        params=test_params,
    )

    # Verify command was sent with correct params
    assert mock_local_channel.published_messages
    sent_message = mock_local_channel.published_messages[0]
    assert sent_message
    assert isinstance(sent_message, RoborockMessage)
    assert sent_message.payload
    payload = sent_message.payload.decode()
    json_data = json.loads(payload)
    assert "dps" in json_data
    assert "101" in json_data["dps"]
    decoded_payload = json.loads(json_data["dps"]["101"])
    assert decoded_payload["method"] == "change_sound_volume"
    assert decoded_payload["params"] == {"volume": 80}


async def test_v1_channel_networking_info_retrieved_during_connection(
    v1_channel: V1Channel,
    mock_mqtt_channel: Mock,
    mock_local_channel: Mock,
    mock_local_session: Mock,
) -> None:
    """Test that networking information is retrieved during local connection setup."""
    # Setup: MQTT returns network info when requested
    mock_mqtt_channel.response_queue.append(TEST_NETWORK_INFO_RESPONSE)

    # Subscribe - this should trigger network info retrieval for local connection
    await v1_channel.subscribe(Mock())

    # Verify local connection was established
    assert v1_channel.is_local_connected

    #
Verify network info was requested via MQTT assert mock_mqtt_channel.published_messages # Verify local session was created with the correct IP mock_local_session.assert_called_once_with(mock_data.NETWORK_INFO["ip"]) async def test_v1_channel_networking_info_cached_during_connection( mock_mqtt_channel: Mock, mock_local_channel: Mock, mock_local_session: Mock, ) -> None: """Test that networking information is cached and reused on subsequent connections.""" # Create a cache with pre-populated network info cache_data = CacheData() cache_data.network_info[TEST_DEVICE_UID] = TEST_NETWORKING_INFO mock_cache = AsyncMock() mock_cache.get.return_value = cache_data mock_cache.set = AsyncMock() # Create V1Channel with the mock cache v1_channel = V1Channel( device_uid=TEST_DEVICE_UID, security_data=TEST_SECURITY_DATA, mqtt_channel=mock_mqtt_channel, local_session=mock_local_session, cache=mock_cache, ) # Subscribe - should use cached network info await v1_channel.subscribe(Mock()) # Verify local connections are established assert v1_channel.is_local_connected # Verify network info was NOT requested via MQTT (cache hit) assert not mock_mqtt_channel.published_messages assert not mock_local_channel.published_messages # Verify local session was created with the correct IP from cache mock_local_session.assert_called_once_with(mock_data.NETWORK_INFO["ip"]) # Verify cache was accessed but not updated (cache hit) mock_cache.get.assert_called_once() mock_cache.set.assert_not_called() # V1Channel edge cases tests async def test_v1_channel_local_connect_network_info_failure( v1_channel: V1Channel, mock_mqtt_channel: Mock, ) -> None: """Test local connection when network info retrieval fails.""" mock_mqtt_channel.publish_side_effect = RoborockException("Network info failed") with pytest.raises(RoborockException): await v1_channel._local_connect() async def test_v1_channel_command_encoding_validation( v1_channel: V1Channel, mock_mqtt_channel: Mock, mock_local_channel: Mock, ) -> None: """Test that command encoding works for different protocols.""" mock_mqtt_channel.response_queue.append(TEST_NETWORK_INFO_RESPONSE) await v1_channel.subscribe(Mock()) # Send mqtt command and capture the request mock_mqtt_channel.response_queue.append(TEST_RESPONSE) await v1_channel.mqtt_rpc_channel.send_command(RoborockCommand.CHANGE_SOUND_VOLUME, params={"volume": 50}) assert mock_mqtt_channel.published_messages mqtt_message = mock_mqtt_channel.published_messages[0] # Send local command and capture the request mock_local_channel.response_queue.append(TEST_RESPONSE_2) await v1_channel.rpc_channel.send_command(RoborockCommand.CHANGE_SOUND_VOLUME, params={"volume": 50}) assert mock_local_channel.published_messages local_message = mock_local_channel.published_messages[0] # Verify both are RoborockMessage instances assert isinstance(mqtt_message, RoborockMessage) assert isinstance(local_message, RoborockMessage) # But they should have different protocols assert mqtt_message.protocol == RoborockMessageProtocol.RPC_REQUEST assert local_message.protocol == RoborockMessageProtocol.GENERAL_REQUEST @patch("roborock.devices.v1_rpc_channel.create_map_response_decoder") async def test_v1_channel_send_map_command( mock_create_decoder: Mock, v1_channel: V1Channel, mock_mqtt_channel: Mock, mock_create_map_response_decoder: Mock, ) -> None: """Test that the map channel can correctly decode a map response.""" # Establish connections mock_mqtt_channel.response_queue.append(TEST_NETWORK_INFO_RESPONSE) await v1_channel.subscribe(Mock()) # Prepare a mock 
map response decompressed_map_data = b"this is the decompressed map data" request_id = 12346 # from the mock_request_id fixture # Mock the decoder to return a known response map_response = MapResponse(request_id=request_id, data=decompressed_map_data) mock_create_map_response_decoder.return_value.return_value = map_response # The actual message content doesn't matter as much since the decoder is mocked map_response_message = RoborockMessage( protocol=RoborockMessageProtocol.MAP_RESPONSE, payload=b"dummy_payload", ) mock_mqtt_channel.response_queue.append(map_response_message) # Send the command and get the result result = await v1_channel.map_rpc_channel.send_command(RoborockCommand.GET_MAP_V1) # Verify the result is the data from our mocked decoder assert result == decompressed_map_data Python-roborock-python-roborock-32df4f3/tests/devices/test_v1_device.py000066400000000000000000000105671507503702500264420ustar00rootroot00000000000000"""Tests for the Device class.""" import pathlib from collections.abc import Callable from unittest.mock import AsyncMock, Mock import pytest from syrupy import SnapshotAssertion from roborock.containers import HomeData, S7MaxVStatus, UserData from roborock.devices.cache import NoCache from roborock.devices.device import RoborockDevice from roborock.devices.traits import v1 from roborock.devices.traits.v1.common import V1TraitMixin from roborock.devices.v1_rpc_channel import decode_rpc_response from roborock.roborock_message import RoborockMessage, RoborockMessageProtocol from .. import mock_data USER_DATA = UserData.from_dict(mock_data.USER_DATA) HOME_DATA = HomeData.from_dict(mock_data.HOME_DATA_RAW) STATUS = S7MaxVStatus.from_dict(mock_data.STATUS) TESTDATA = pathlib.Path("tests/protocols/testdata/v1_protocol/") @pytest.fixture(autouse=True, name="channel") def device_channel_fixture() -> AsyncMock: """Fixture to set up the channel for tests.""" return AsyncMock() @pytest.fixture(autouse=True, name="rpc_channel") def rpc_channel_fixture() -> AsyncMock: """Fixture to set up the channel for tests.""" return AsyncMock() @pytest.fixture(autouse=True, name="mqtt_rpc_channel") def mqtt_rpc_channel_fixture() -> AsyncMock: """Fixture to set up the channel for tests.""" return AsyncMock() @pytest.fixture(autouse=True, name="map_rpc_channel") def map_rpc_channel_fixture() -> AsyncMock: """Fixture to set up the channel for tests.""" return AsyncMock() @pytest.fixture(autouse=True, name="device") def device_fixture(channel: AsyncMock, rpc_channel: AsyncMock, mqtt_rpc_channel: AsyncMock) -> RoborockDevice: """Fixture to set up the device for tests.""" return RoborockDevice( device_info=HOME_DATA.devices[0], product=HOME_DATA.products[0], channel=channel, trait=v1.create(HOME_DATA.products[0], HOME_DATA, rpc_channel, mqtt_rpc_channel, AsyncMock(), NoCache()), ) async def test_device_connection(device: RoborockDevice, channel: AsyncMock) -> None: """Test the Device connection setup.""" unsub = Mock() subscribe = AsyncMock() subscribe.return_value = unsub channel.subscribe = subscribe assert device.duid == "abc123" assert device.name == "Roborock S7 MaxV" assert not subscribe.called await device.connect() assert subscribe.called assert not unsub.called await device.close() assert unsub.called @pytest.mark.parametrize( ("connected", "local_connected"), [ (True, False), (False, False), (True, True), (False, True), ], ) async def test_connection_status( device: RoborockDevice, channel: AsyncMock, connected: bool, local_connected: bool, ) -> None: """Test successful RPC 
command sending and response handling.""" channel.is_connected = connected channel.is_local_connected = local_connected assert device.is_connected is connected assert device.is_local_connected is local_connected @pytest.fixture(name="setup_rpc_channel") def setup_rpc_channel_fixture(rpc_channel: AsyncMock, payload: pathlib.Path) -> AsyncMock: """Fixture to set up the RPC channel for the tests.""" # The values other than the payload are arbitrary message = RoborockMessage( protocol=RoborockMessageProtocol.GENERAL_RESPONSE, payload=payload.read_bytes(), seq=12750, version=b"1.0", random=97431, timestamp=1652547161, ) response_message = decode_rpc_response(message) rpc_channel.send_command.return_value = response_message.data return rpc_channel @pytest.mark.parametrize( ("payload", "property_method"), [ (TESTDATA / "get_status.json", lambda x: x.status), (TESTDATA / "get_dnd.json", lambda x: x.dnd), (TESTDATA / "get_clean_summary.json", lambda x: x.clean_summary), (TESTDATA / "get_volume.json", lambda x: x.sound_volume), ], ) async def test_device_trait_command_parsing( device: RoborockDevice, setup_rpc_channel: AsyncMock, snapshot: SnapshotAssertion, property_method: Callable[..., V1TraitMixin], payload: str, ) -> None: """Test the device trait command.""" trait = property_method(device.v1_properties) assert trait assert isinstance(trait, V1TraitMixin) await trait.refresh() assert setup_rpc_channel.send_command.called assert trait == snapshot Python-roborock-python-roborock-32df4f3/tests/devices/traits/000077500000000000000000000000001507503702500244615ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/tests/devices/traits/__init__.py000066400000000000000000000000371507503702500265720ustar00rootroot00000000000000"""Tests for device traits.""" Python-roborock-python-roborock-32df4f3/tests/devices/traits/v1/000077500000000000000000000000001507503702500250075ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/tests/devices/traits/v1/__init__.py000066400000000000000000000000661507503702500271220ustar00rootroot00000000000000pytest_plugins = ["tests.devices.traits.v1.fixtures"] Python-roborock-python-roborock-32df4f3/tests/devices/traits/v1/fixtures.py000066400000000000000000000037431507503702500272410ustar00rootroot00000000000000"""Fixtures for V1 trait tests.""" from unittest.mock import AsyncMock import pytest from roborock.containers import HomeData, S7MaxVStatus, UserData from roborock.devices.cache import Cache, InMemoryCache from roborock.devices.device import RoborockDevice from roborock.devices.traits import v1 from .... 
import mock_data USER_DATA = UserData.from_dict(mock_data.USER_DATA) HOME_DATA = HomeData.from_dict(mock_data.HOME_DATA_RAW) STATUS = S7MaxVStatus.from_dict(mock_data.STATUS) @pytest.fixture(autouse=True, name="channel") def device_channel_fixture() -> AsyncMock: """Fixture to set up the channel for tests.""" return AsyncMock() @pytest.fixture(autouse=True, name="mock_rpc_channel") def rpc_channel_fixture() -> AsyncMock: """Fixture to set up the channel for tests.""" return AsyncMock() @pytest.fixture(autouse=True, name="mock_mqtt_rpc_channel") def mqtt_rpc_channel_fixture() -> AsyncMock: """Fixture to set up the channel for tests.""" return AsyncMock() @pytest.fixture(autouse=True, name="mock_map_rpc_channel") def map_rpc_channel_fixture() -> AsyncMock: """Fixture to set up the channel for tests.""" return AsyncMock() @pytest.fixture(autouse=True, name="roborock_cache") def roborock_cache_fixture() -> Cache: """Fixture to provide a NoCache instance for tests.""" return InMemoryCache() @pytest.fixture(autouse=True, name="device") def device_fixture( channel: AsyncMock, mock_rpc_channel: AsyncMock, mock_mqtt_rpc_channel: AsyncMock, mock_map_rpc_channel: AsyncMock, roborock_cache: Cache, ) -> RoborockDevice: """Fixture to set up the device for tests.""" return RoborockDevice( device_info=HOME_DATA.devices[0], product=HOME_DATA.products[0], channel=channel, trait=v1.create( HOME_DATA.products[0], HOME_DATA, mock_rpc_channel, mock_mqtt_rpc_channel, mock_map_rpc_channel, roborock_cache, ), ) Python-roborock-python-roborock-32df4f3/tests/devices/traits/v1/test_clean_summary.py000066400000000000000000000067111507503702500312640ustar00rootroot00000000000000"""Tests for the CleanSummary class.""" from unittest.mock import AsyncMock import pytest from roborock.containers import CleanSummary from roborock.devices.device import RoborockDevice from roborock.devices.traits.v1.clean_summary import CleanSummaryTrait from roborock.exceptions import RoborockException from roborock.roborock_typing import RoborockCommand CLEAN_SUMMARY_DATA = [ 1442559, 24258125000, 296, [ 1756848207, 1754930385, 1753203976, 1752183435, 1747427370, 1746204046, 1745601543, 1744387080, 1743528522, 1742489154, 1741022299, 1740433682, 1739902516, 1738875106, 1738864366, 1738620067, 1736873889, 1736197544, 1736121269, 1734458038, ], ] @pytest.fixture def clean_summary_trait(device: RoborockDevice) -> CleanSummaryTrait: """Create a DoNotDisturbTrait instance with mocked dependencies.""" assert device.v1_properties return device.v1_properties.clean_summary @pytest.fixture def sample_clean_summary() -> CleanSummary: """Create a sample CleanSummary for testing.""" return CleanSummary( clean_area=100, clean_time=3600, ) async def test_get_clean_summary_success( clean_summary_trait: CleanSummaryTrait, mock_rpc_channel: AsyncMock, sample_clean_summary: CleanSummary ) -> None: """Test successfully getting clean summary.""" # Setup mock to return the sample clean summary mock_rpc_channel.send_command.return_value = CLEAN_SUMMARY_DATA # Call the method await clean_summary_trait.refresh() # Verify the result assert clean_summary_trait.clean_area == 24258125000 assert clean_summary_trait.clean_time == 1442559 assert clean_summary_trait.square_meter_clean_area == 24258.1 assert clean_summary_trait.clean_count == 296 assert clean_summary_trait.records assert len(clean_summary_trait.records) == 20 assert clean_summary_trait.records[0] == 1756848207 # Verify the RPC call was made correctly 
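    # (refresh() issues exactly one GET_CLEAN_SUMMARY request; the positional list in
    # CLEAN_SUMMARY_DATA maps to clean_time, clean_area, clean_count and the per-run
    # record timestamps asserted above.)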
mock_rpc_channel.send_command.assert_called_once_with(RoborockCommand.GET_CLEAN_SUMMARY) async def test_get_clean_summary_clean_time_only( clean_summary_trait: CleanSummaryTrait, mock_rpc_channel: AsyncMock, sample_clean_summary: CleanSummary ) -> None: """Test successfully getting clean summary where the response only has the clean time.""" mock_rpc_channel.send_command.return_value = [1442559] # Call the method await clean_summary_trait.refresh() # Verify the result assert clean_summary_trait.clean_area is None assert clean_summary_trait.clean_time == 1442559 assert clean_summary_trait.square_meter_clean_area is None assert clean_summary_trait.clean_count is None assert not clean_summary_trait.records # Verify the RPC call was made correctly mock_rpc_channel.send_command.assert_called_once_with(RoborockCommand.GET_CLEAN_SUMMARY) async def test_get_clean_summary_propagates_exception( clean_summary_trait: CleanSummaryTrait, mock_rpc_channel: AsyncMock ) -> None: """Test that exceptions from RPC channel are propagated in get_clean_summary.""" # Setup mock to raise an exception mock_rpc_channel.send_command.side_effect = RoborockException("Communication error") # Verify the exception is propagated with pytest.raises(RoborockException, match="Communication error"): await clean_summary_trait.refresh() Python-roborock-python-roborock-32df4f3/tests/devices/traits/v1/test_command.py000066400000000000000000000043171507503702500300430ustar00rootroot00000000000000"""Tests for the CommandTrait class.""" from unittest.mock import AsyncMock import pytest from roborock.devices.traits.v1.command import CommandTrait from roborock.exceptions import RoborockException from roborock.roborock_typing import RoborockCommand @pytest.fixture(name="command_trait") def command_trait_fixture() -> CommandTrait: """Create a CommandTrait instance with a mocked RPC channel.""" trait = CommandTrait() trait._rpc_channel = AsyncMock() # type: ignore[assignment] return trait async def test_send_command_success(command_trait: CommandTrait) -> None: """Test successfully sending a command.""" mock_rpc_channel = command_trait._rpc_channel assert mock_rpc_channel is not None mock_rpc_channel.send_command.return_value = {"result": "ok"} # Call the method result = await command_trait.send(RoborockCommand.APP_START) # Verify the result assert result == {"result": "ok"} # Verify the RPC call was made correctly mock_rpc_channel.send_command.assert_called_once_with(RoborockCommand.APP_START, params=None) async def test_send_command_with_params(command_trait: CommandTrait) -> None: """Test successfully sending a command with parameters.""" mock_rpc_channel = command_trait._rpc_channel assert mock_rpc_channel is not None mock_rpc_channel.send_command.return_value = {"result": "ok"} params = {"segments": [1, 2, 3]} # Call the method result = await command_trait.send(RoborockCommand.APP_SEGMENT_CLEAN, params) # Verify the result assert result == {"result": "ok"} # Verify the RPC call was made correctly mock_rpc_channel.send_command.assert_called_once_with(RoborockCommand.APP_SEGMENT_CLEAN, params=params) async def test_send_command_propagates_exception(command_trait: CommandTrait) -> None: """Test that exceptions from RPC channel are propagated.""" mock_rpc_channel = command_trait._rpc_channel assert mock_rpc_channel is not None mock_rpc_channel.send_command.side_effect = RoborockException("Communication error") # Verify the exception is propagated with pytest.raises(RoborockException, match="Communication error"): await 
command_trait.send(RoborockCommand.APP_START) Python-roborock-python-roborock-32df4f3/tests/devices/traits/v1/test_consumable.py000066400000000000000000000045551507503702500305610ustar00rootroot00000000000000"""Tests for the DoNotDisturbTrait class.""" from unittest.mock import AsyncMock import pytest from roborock.devices.device import RoborockDevice from roborock.devices.traits.v1.consumeable import ConsumableAttribute, ConsumableTrait from roborock.roborock_typing import RoborockCommand CONSUMABLE_DATA = [ { "main_brush_work_time": 879348, "side_brush_work_time": 707618, "filter_work_time": 738722, "filter_element_work_time": 0, "sensor_dirty_time": 455517, } ] @pytest.fixture def consumable_trait(device: RoborockDevice) -> ConsumableTrait: """Create a ConsumableTrait instance with mocked dependencies.""" assert device.v1_properties return device.v1_properties.consumables async def test_get_consumable_data_success(consumable_trait: ConsumableTrait, mock_rpc_channel: AsyncMock) -> None: """Test successfully getting consumable data.""" # Setup mock to return the sample consumable data mock_rpc_channel.send_command.return_value = CONSUMABLE_DATA # Call the method await consumable_trait.refresh() # Verify the result assert consumable_trait.main_brush_work_time == 879348 assert consumable_trait.side_brush_work_time == 707618 assert consumable_trait.filter_work_time == 738722 assert consumable_trait.filter_element_work_time == 0 assert consumable_trait.sensor_dirty_time == 455517 # Verify the RPC call was made correctly mock_rpc_channel.send_command.assert_called_once_with(RoborockCommand.GET_CONSUMABLE) @pytest.mark.parametrize( ("consumable", "reset_param"), [ (ConsumableAttribute.MAIN_BRUSH_WORK_TIME, "main_brush_work_time"), (ConsumableAttribute.SIDE_BRUSH_WORK_TIME, "side_brush_work_time"), (ConsumableAttribute.FILTER_WORK_TIME, "filter_work_time"), (ConsumableAttribute.SENSOR_DIRTY_TIME, "sensor_dirty_time"), ], ) async def test_reset_consumable_data( consumable_trait: ConsumableTrait, mock_rpc_channel: AsyncMock, consumable: ConsumableAttribute, reset_param: str, ) -> None: """Test successfully resetting consumable data.""" # Call the method await consumable_trait.reset_consumable(consumable) # Verify the RPC call was made correctly with expected parameters mock_rpc_channel.send_command.assert_called_once_with(RoborockCommand.RESET_CONSUMABLE, params=[reset_param]) # Python-roborock-python-roborock-32df4f3/tests/devices/traits/v1/test_dnd.py000066400000000000000000000112321507503702500271640ustar00rootroot00000000000000"""Tests for the DoNotDisturbTrait class.""" from unittest.mock import AsyncMock import pytest from roborock.containers import DnDTimer from roborock.devices.device import RoborockDevice from roborock.devices.traits.v1.do_not_disturb import DoNotDisturbTrait from roborock.roborock_typing import RoborockCommand @pytest.fixture async def dnd_trait(device: RoborockDevice) -> DoNotDisturbTrait: """Create a DoNotDisturbTrait instance with mocked dependencies.""" assert device.v1_properties assert device.v1_properties.dnd return device.v1_properties.dnd @pytest.fixture def sample_dnd_timer() -> DnDTimer: """Create a sample DnDTimer for testing.""" return DnDTimer( start_hour=22, start_minute=0, end_hour=8, end_minute=0, enabled=1, ) async def test_get_dnd_timer_success( dnd_trait: DoNotDisturbTrait, mock_rpc_channel: AsyncMock, sample_dnd_timer: DnDTimer ) -> None: """Test successfully getting DnD timer settings.""" # Setup mock to return the sample DnD timer 
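    # (The mocked GET_DND_TIMER response is the dataclass serialized via as_dict();
    # refresh() below should parse it back into the start/end times and enabled flag.)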
mock_rpc_channel.send_command.return_value = sample_dnd_timer.as_dict() # Call the method await dnd_trait.refresh() # Verify the result assert dnd_trait.start_hour == 22 assert dnd_trait.start_minute == 0 assert dnd_trait.end_hour == 8 assert dnd_trait.end_minute == 0 assert dnd_trait.enabled == 1 assert dnd_trait.is_on # Verify the RPC call was made correctly mock_rpc_channel.send_command.assert_called_once_with(RoborockCommand.GET_DND_TIMER) async def test_get_dnd_timer_disabled(dnd_trait: DoNotDisturbTrait, mock_rpc_channel: AsyncMock) -> None: """Test getting DnD timer when it's disabled.""" disabled_timer = DnDTimer( start_hour=22, start_minute=0, end_hour=8, end_minute=0, enabled=0, ) mock_rpc_channel.send_command.return_value = disabled_timer.as_dict() await dnd_trait.refresh() assert dnd_trait.enabled == 0 assert not dnd_trait.is_on mock_rpc_channel.send_command.assert_called_once_with(RoborockCommand.GET_DND_TIMER) async def test_set_dnd_timer_success( dnd_trait: DoNotDisturbTrait, mock_rpc_channel: AsyncMock, sample_dnd_timer: DnDTimer ) -> None: """Test successfully setting DnD timer settings.""" # Call the method await dnd_trait.set_dnd_timer(sample_dnd_timer) # Verify the RPC call was made correctly with dataclass converted to dict expected_params = { "startHour": 22, "startMinute": 0, "endHour": 8, "endMinute": 0, "enabled": 1, } mock_rpc_channel.send_command.assert_called_once_with(RoborockCommand.SET_DND_TIMER, params=expected_params) async def test_clear_dnd_timer_success(dnd_trait: DoNotDisturbTrait, mock_rpc_channel: AsyncMock) -> None: """Test successfully clearing DnD timer settings.""" # Call the method await dnd_trait.clear_dnd_timer() # Verify the RPC call was made correctly mock_rpc_channel.send_command.assert_called_once_with(RoborockCommand.CLOSE_DND_TIMER) async def test_get_dnd_timer_propagates_exception(dnd_trait: DoNotDisturbTrait, mock_rpc_channel: AsyncMock) -> None: """Test that exceptions from RPC channel are propagated in get_dnd_timer.""" from roborock.exceptions import RoborockException # Setup mock to raise an exception mock_rpc_channel.send_command.side_effect = RoborockException("Communication error") # Verify the exception is propagated with pytest.raises(RoborockException, match="Communication error"): await dnd_trait.refresh() async def test_set_dnd_timer_propagates_exception( dnd_trait: DoNotDisturbTrait, mock_rpc_channel: AsyncMock, sample_dnd_timer: DnDTimer ) -> None: """Test that exceptions from RPC channel are propagated in set_dnd_timer.""" from roborock.exceptions import RoborockException # Setup mock to raise an exception mock_rpc_channel.send_command.side_effect = RoborockException("Communication error") # Verify the exception is propagated with pytest.raises(RoborockException, match="Communication error"): await dnd_trait.set_dnd_timer(sample_dnd_timer) async def test_clear_dnd_timer_propagates_exception(dnd_trait: DoNotDisturbTrait, mock_rpc_channel: AsyncMock) -> None: """Test that exceptions from RPC channel are propagated in clear_dnd_timer.""" from roborock.exceptions import RoborockException # Setup mock to raise an exception mock_rpc_channel.send_command.side_effect = RoborockException("Communication error") # Verify the exception is propagated with pytest.raises(RoborockException, match="Communication error"): await dnd_trait.clear_dnd_timer() Python-roborock-python-roborock-32df4f3/tests/devices/traits/v1/test_home.py000066400000000000000000000265651507503702500273660ustar00rootroot00000000000000"""Tests for the Home related 
functionality.""" from collections.abc import Iterator from unittest.mock import AsyncMock, patch import pytest from roborock.code_mappings import RoborockStateCode from roborock.containers import CombinedMapInfo from roborock.devices.cache import InMemoryCache from roborock.devices.device import RoborockDevice from roborock.devices.traits.v1.home import HomeTrait from roborock.devices.traits.v1.maps import MapsTrait from roborock.devices.traits.v1.rooms import RoomsTrait from roborock.devices.traits.v1.status import StatusTrait from roborock.exceptions import RoborockDeviceBusy from roborock.roborock_typing import RoborockCommand from tests import mock_data MULTI_MAP_LIST_DATA = [ { "max_multi_map": 2, "max_bak_map": 1, "multi_map_count": 2, "map_info": [ { "mapFlag": 0, "add_time": 1747132930, "length": 0, "name": "Ground Floor", "bak_maps": [{"mapFlag": 4, "add_time": 1747132936}], }, { "mapFlag": 123, "add_time": 1747132940, "length": 0, "name": "Second Floor", "bak_maps": [{"mapFlag": 5, "add_time": 1747132946}], }, ], } ] ROOM_MAPPING_DATA_MAP_0 = [[16, "2362048"], [17, "2362044"]] ROOM_MAPPING_DATA_MAP_123 = [[18, "2362041"], [19, "2362042"]] UPDATED_STATUS_MAP_0 = { **mock_data.STATUS, "map_status": 0 * 4 + 3, # Set current map to 0 } UPDATED_STATUS_MAP_123 = { **mock_data.STATUS, "map_status": 123 * 4 + 3, # Set current map to 123 } @pytest.fixture(autouse=True) def no_sleep() -> Iterator[None]: """Patch sleep to avoid delays in tests.""" with patch("roborock.devices.traits.v1.home.asyncio.sleep"): yield @pytest.fixture def cache(): """Create an in-memory cache for testing.""" return InMemoryCache() @pytest.fixture(autouse=True) async def status_trait(mock_rpc_channel: AsyncMock, device: RoborockDevice) -> StatusTrait: """Create a StatusTrait instance with mocked dependencies.""" assert device.v1_properties status_trait = device.v1_properties.status # Verify initial state assert status_trait.current_map is None mock_rpc_channel.send_command.side_effect = [UPDATED_STATUS_MAP_0] await status_trait.refresh() assert status_trait.current_map == 0 mock_rpc_channel.reset_mock() return status_trait @pytest.fixture def maps_trait(device: RoborockDevice) -> MapsTrait: """Create a MapsTrait instance with mocked dependencies.""" assert device.v1_properties return device.v1_properties.maps @pytest.fixture def rooms_trait(device: RoborockDevice) -> RoomsTrait: """Create a RoomsTrait instance with mocked dependencies.""" assert device.v1_properties return device.v1_properties.rooms @pytest.fixture def home_trait(status_trait: StatusTrait, maps_trait: MapsTrait, rooms_trait: RoomsTrait, cache) -> HomeTrait: """Create a HomeTrait instance with mocked dependencies.""" return HomeTrait(status_trait, maps_trait, rooms_trait, cache) async def test_discover_home_empty_cache( status_trait: StatusTrait, home_trait: HomeTrait, mock_rpc_channel: AsyncMock, mock_mqtt_rpc_channel: AsyncMock, ) -> None: """Test discovering home when cache is empty.""" # Setup mocks for the discovery process mock_rpc_channel.send_command.side_effect = [ UPDATED_STATUS_MAP_123, # Status after switching to map 123 ROOM_MAPPING_DATA_MAP_123, # Rooms for map 123 UPDATED_STATUS_MAP_0, # Status after switching back to map 0 ROOM_MAPPING_DATA_MAP_0, # Rooms for map 0 ] mock_mqtt_rpc_channel.send_command.side_effect = [ MULTI_MAP_LIST_DATA, # Multi maps list {}, # LOAD_MULTI_MAP response for map 123 {}, # LOAD_MULTI_MAP response back to map 0 ] # Before discovery, no cache should exist assert home_trait.home_cache is None assert 
home_trait.current_map_data is None # Perform home discovery await home_trait.discover_home() # Verify cache is populated assert home_trait.home_cache is not None assert len(home_trait.home_cache) == 2 # Check map 0 data map_0_data = home_trait.home_cache[0] assert map_0_data.map_flag == 0 assert map_0_data.name == "Ground Floor" assert len(map_0_data.rooms) == 2 assert map_0_data.rooms[0].segment_id == 16 assert map_0_data.rooms[0].name == "Example room 1" assert map_0_data.rooms[1].segment_id == 17 assert map_0_data.rooms[1].name == "Example room 2" # Check map 123 data map_123_data = home_trait.home_cache[123] assert map_123_data.map_flag == 123 assert map_123_data.name == "Second Floor" assert len(map_123_data.rooms) == 2 assert map_123_data.rooms[0].segment_id == 18 assert map_123_data.rooms[0].name == "Example room 3" assert map_123_data.rooms[1].segment_id == 19 assert map_123_data.rooms[1].name == "Unknown" # Not in mock home data # Verify current map data is accessible current_map_data = home_trait.current_map_data assert current_map_data is not None assert current_map_data.map_flag == 0 assert current_map_data.name == "Ground Floor" async def test_discover_home_with_existing_cache( home_trait: HomeTrait, mock_rpc_channel: AsyncMock, mock_mqtt_rpc_channel: AsyncMock, ) -> None: """Test that discovery is skipped when cache already exists.""" # Pre-populate the cache cache_data = await home_trait._cache.get() cache_data.home_cache = {0: CombinedMapInfo(map_flag=0, name="Dummy", rooms=[])} await home_trait._cache.set(cache_data) # Call discover_home await home_trait.discover_home() # Verify no RPC calls were made (discovery was skipped) assert mock_rpc_channel.send_command.call_count == 0 assert mock_mqtt_rpc_channel.send_command.call_count == 0 # Verify cache was loaded from storage assert home_trait.home_cache == {0: CombinedMapInfo(map_flag=0, name="Dummy", rooms=[])} async def test_discover_home_no_maps( home_trait: HomeTrait, mock_rpc_channel: AsyncMock, mock_mqtt_rpc_channel: AsyncMock, ) -> None: """Test discovery when no maps are available.""" # Setup mock to return empty maps list mock_mqtt_rpc_channel.send_command.side_effect = [ [{"max_multi_map": 0, "max_bak_map": 0, "multi_map_count": 0, "map_info": []}] ] with pytest.raises(Exception, match="Cannot perform home discovery without current map info"): await home_trait.discover_home() async def test_refresh_updates_current_map_cache( status_trait: StatusTrait, home_trait: HomeTrait, mock_rpc_channel: AsyncMock, mock_mqtt_rpc_channel: AsyncMock, ) -> None: """Test that refresh updates the cache for the current map.""" # Pre-populate cache with some data cache_data = await home_trait._cache.get() cache_data.home_cache = {0: CombinedMapInfo(map_flag=0, name="Old Ground Floor", rooms=[])} await home_trait._cache.set(cache_data) home_trait._home_cache = cache_data.home_cache # Setup mocks for refresh mock_rpc_channel.send_command.side_effect = [ ROOM_MAPPING_DATA_MAP_0, # Room mapping refresh ] mock_mqtt_rpc_channel.send_command.side_effect = [ MULTI_MAP_LIST_DATA, # Maps refresh ] # Perform refresh await home_trait.refresh() # Verify cache was updated for current map updated_cache = home_trait.home_cache assert updated_cache is not None assert 0 in updated_cache map_data = updated_cache[0] assert map_data.name == "Ground Floor" # Updated from "Old Ground Floor" assert len(map_data.rooms) == 2 async def test_refresh_no_cache_no_update( home_trait: HomeTrait, mock_rpc_channel: AsyncMock, mock_mqtt_rpc_channel: AsyncMock, ) -> 
None: """Test that refresh doesn't update when no cache exists.""" # Setup mocks for refresh # mock_mqtt_rpc_channel.send_command.side_effect = [ # MULTI_MAP_LIST_DATA, # Maps refresh # ] # Perform refresh without existing cache with pytest.raises(Exception, match="Cannot refresh home data without home cache, did you call discover_home()?"): await home_trait.refresh() async def test_current_map_data_property( home_trait: HomeTrait, mock_rpc_channel: AsyncMock, mock_mqtt_rpc_channel: AsyncMock, ) -> None: """Test current_map_data property returns correct data.""" # Setup discovery mock_rpc_channel.send_command.side_effect = [ UPDATED_STATUS_MAP_123, # Status after switching to map 123 ROOM_MAPPING_DATA_MAP_123, # Rooms for map 123 UPDATED_STATUS_MAP_0, # Status after switching back to map 0 ROOM_MAPPING_DATA_MAP_0, # Rooms for map 0 ] mock_mqtt_rpc_channel.send_command.side_effect = [ MULTI_MAP_LIST_DATA, # Multi maps list {}, # LOAD_MULTI_MAP response for map 123 {}, # LOAD_MULTI_MAP response back to map 0 ] await home_trait.discover_home() # Test current map data (should be map 0) current_data = home_trait.current_map_data assert current_data is not None assert current_data.map_flag == 0 assert current_data.name == "Ground Floor" # Test when no cache exists home_trait._home_cache = None assert home_trait.current_map_data is None async def test_discover_home_device_busy_cleaning( status_trait: StatusTrait, home_trait: HomeTrait, mock_rpc_channel: AsyncMock, mock_mqtt_rpc_channel: AsyncMock, ) -> None: """Test that discovery raises RoborockDeviceBusy when device is cleaning.""" # Set the status trait state to cleaning status_trait.state = RoborockStateCode.cleaning # Attempt to discover home while cleaning with pytest.raises(RoborockDeviceBusy, match="Cannot perform home discovery while the device is cleaning"): await home_trait.discover_home() # Verify no RPC calls were made (discovery was prevented) assert mock_rpc_channel.send_command.call_count == 0 assert mock_mqtt_rpc_channel.send_command.call_count == 0 async def test_single_map_no_switching( home_trait: HomeTrait, mock_rpc_channel: AsyncMock, mock_mqtt_rpc_channel: AsyncMock, ) -> None: """Test that single map discovery doesn't trigger map switching.""" single_map_data = [ { "max_multi_map": 1, "max_bak_map": 0, "multi_map_count": 1, "map_info": [ { "mapFlag": 0, "add_time": 1747132930, "length": 0, "name": "Only Floor", "bak_maps": [], }, ], } ] mock_rpc_channel.send_command.side_effect = [ ROOM_MAPPING_DATA_MAP_0, # Rooms for the single map ] mock_mqtt_rpc_channel.send_command.side_effect = [ single_map_data, # Single map list ] await home_trait.discover_home() # Verify cache is populated assert home_trait.home_cache is not None assert len(home_trait.home_cache) == 1 assert 0 in home_trait.home_cache # Verify no LOAD_MULTI_MAP commands were sent (no map switching) load_map_calls = [ call for call in mock_mqtt_rpc_channel.send_command.call_args_list if call[1].get("command") == RoborockCommand.LOAD_MULTI_MAP ] assert len(load_map_calls) == 0 Python-roborock-python-roborock-32df4f3/tests/devices/traits/v1/test_map_content.py000066400000000000000000000025621507503702500307340ustar00rootroot00000000000000"""Tests for the MapContentTrait.""" from unittest.mock import AsyncMock, MagicMock, patch import pytest from roborock.devices.device import RoborockDevice from roborock.devices.traits.v1.map_content import MapContentTrait from roborock.map.map_parser import ParsedMapData from roborock.roborock_typing import RoborockCommand 
@pytest.fixture def map_content_trait(device: RoborockDevice) -> MapContentTrait: """Create a MapContentTrait instance with mocked dependencies.""" assert device.v1_properties return device.v1_properties.map_content async def test_refresh_map_content_trait( map_content_trait: MapContentTrait, mock_map_rpc_channel: AsyncMock, ) -> None: """Test successfully getting and parsing map content.""" map_data = b"dummy_map_bytes" mock_map_rpc_channel.send_command.return_value = map_data mock_parsed_data = ParsedMapData( image_content=b"dummy_image_content", map_data=MagicMock(), ) with patch("roborock.devices.traits.v1.map_content.MapParser.parse", return_value=mock_parsed_data) as mock_parse: await map_content_trait.refresh() mock_parse.assert_called_once_with(map_data) assert map_content_trait.image_content == b"dummy_image_content" assert map_content_trait.map_data is not None mock_map_rpc_channel.send_command.assert_called_once_with(RoborockCommand.GET_MAP_V1) Python-roborock-python-roborock-32df4f3/tests/devices/traits/v1/test_maps.py000066400000000000000000000121621507503702500273620ustar00rootroot00000000000000"""Tests for the Maps related functionality.""" from unittest.mock import AsyncMock import pytest from roborock.devices.device import RoborockDevice from roborock.devices.traits.v1.maps import MapsTrait from roborock.devices.traits.v1.status import StatusTrait from roborock.roborock_typing import RoborockCommand from tests import mock_data UPDATED_STATUS = { **mock_data.STATUS, "map_status": 123 * 4 + 3, # Set current map to 123 } MULTI_MAP_LIST_DATA = [ { "max_multi_map": 1, "max_bak_map": 1, "multi_map_count": 1, "map_info": [ { "mapFlag": 0, "add_time": 1747132930, "length": 0, "name": "Map 1", "bak_maps": [{"mapFlag": 4, "add_time": 1747132936}], }, { "mapFlag": 123, "add_time": 1747132930, "length": 0, "name": "Map 2", "bak_maps": [{"mapFlag": 4, "add_time": 1747132936}], }, ], } ] @pytest.fixture def status_trait(device: RoborockDevice) -> StatusTrait: """Create a MapsTrait instance with mocked dependencies.""" assert device.v1_properties return device.v1_properties.status @pytest.fixture def maps_trait(device: RoborockDevice) -> MapsTrait: """Create a MapsTrait instance with mocked dependencies.""" assert device.v1_properties return device.v1_properties.maps async def test_refresh_maps_trait( maps_trait: MapsTrait, mock_rpc_channel: AsyncMock, mock_mqtt_rpc_channel: AsyncMock, status_trait: StatusTrait, ) -> None: """Test successfully getting multi maps list.""" # Setup mock to return the sample multi maps list mock_rpc_channel.send_command.side_effect = [ mock_data.STATUS, # Initial status fetch ] mock_mqtt_rpc_channel.send_command.side_effect = [ MULTI_MAP_LIST_DATA, ] await status_trait.refresh() assert status_trait.current_map == 0 # Populating the status information gives us the current map # flag, but we have not loaded the rest of the information. 
assert maps_trait.current_map == 0 assert maps_trait.current_map_info is None # Load the maps information await maps_trait.refresh() assert maps_trait.max_multi_map == 1 assert maps_trait.max_bak_map == 1 assert maps_trait.multi_map_count == 1 assert maps_trait.map_info assert len(maps_trait.map_info) == 2 map_infos = maps_trait.map_info assert len(map_infos) == 2 assert map_infos[0].map_flag == 0 assert map_infos[0].name == "Map 1" assert map_infos[0].add_time == 1747132930 assert map_infos[1].map_flag == 123 assert map_infos[1].name == "Map 2" assert map_infos[1].add_time == 1747132930 assert maps_trait.current_map == 0 assert maps_trait.current_map_info is not None assert maps_trait.current_map_info.map_flag == 0 assert maps_trait.current_map_info.name == "Map 1" # Verify the RPC call was made correctly assert mock_rpc_channel.send_command.call_count == 1 mock_rpc_channel.send_command.assert_any_call(RoborockCommand.GET_STATUS) assert mock_mqtt_rpc_channel.send_command.call_count == 1 mock_mqtt_rpc_channel.send_command.assert_any_call(RoborockCommand.GET_MULTI_MAPS_LIST) async def test_set_current_map( status_trait: StatusTrait, maps_trait: MapsTrait, mock_rpc_channel: AsyncMock, mock_mqtt_rpc_channel: AsyncMock, ) -> None: """Test successfully setting the current map.""" assert hasattr(maps_trait, "mqtt_rpc_channel") mock_rpc_channel.send_command.side_effect = [ mock_data.STATUS, # Initial status fetch UPDATED_STATUS, # Response for refreshing status ] mock_mqtt_rpc_channel.send_command.side_effect = [ MULTI_MAP_LIST_DATA, # Response for LOAD_MULTI_MAP {}, # Response for setting the current map ] await status_trait.refresh() # First refresh to populate initial state await maps_trait.refresh() # Verify current map assert maps_trait.current_map == 0 assert maps_trait.current_map_info assert maps_trait.current_map_info.map_flag == 0 assert maps_trait.current_map_info.name == "Map 1" # Call the method to set current map await maps_trait.set_current_map(123) # Verify the current map is updated assert maps_trait.current_map == 123 assert maps_trait.current_map_info assert maps_trait.current_map_info.map_flag == 123 assert maps_trait.current_map_info.name == "Map 2" # Verify the command sent are: # 1. GET_STATUS to get initial status # 2. GET_MULTI_MAPS_LIST to get the map list # 3. LOAD_MULTI_MAP to set the map # 4. 
GET_STATUS to refresh the current map in status assert mock_rpc_channel.send_command.call_count == 2 mock_rpc_channel.send_command.assert_any_call(RoborockCommand.GET_STATUS) assert mock_mqtt_rpc_channel.send_command.call_count == 2 mock_mqtt_rpc_channel.send_command.assert_any_call(RoborockCommand.GET_MULTI_MAPS_LIST) mock_mqtt_rpc_channel.send_command.assert_any_call(RoborockCommand.LOAD_MULTI_MAP, params=[123]) Python-roborock-python-roborock-32df4f3/tests/devices/traits/v1/test_rooms.py000066400000000000000000000044241507503702500275630ustar00rootroot00000000000000"""Tests for the RoomMapping related functionality.""" from typing import Any from unittest.mock import AsyncMock import pytest from roborock.devices.device import RoborockDevice from roborock.devices.traits.v1.rooms import RoomsTrait from roborock.devices.traits.v1.status import StatusTrait from roborock.roborock_typing import RoborockCommand @pytest.fixture def status_trait(device: RoborockDevice) -> StatusTrait: """Create a StatusTrait instance with mocked dependencies.""" assert device.v1_properties return device.v1_properties.status @pytest.fixture def rooms_trait(device: RoborockDevice) -> RoomsTrait: """Create a RoomsTrait instance with mocked dependencies.""" assert device.v1_properties return device.v1_properties.rooms # Rooms from mock_data.HOME_DATA # {"id": 2362048, "name": "Example room 1"}, # {"id": 2362044, "name": "Example room 2"}, # {"id": 2362041, "name": "Example room 3"}, @pytest.mark.parametrize( ("room_mapping_data"), [ ([[16, "2362048"], [17, "2362044"], [18, "2362041"]]), ([[16, "2362048", 6], [17, "2362044", 14], [18, "2362041", 13]]), ], ) async def test_refresh_rooms_trait( rooms_trait: RoomsTrait, mock_rpc_channel: AsyncMock, room_mapping_data: list[Any], ) -> None: """Test successfully getting room mapping.""" # Setup mock to return the sample room mapping mock_rpc_channel.send_command.side_effect = [room_mapping_data] # Before refresh, rooms should be empty assert not rooms_trait.rooms # Load the room mapping information refreshed_trait = await rooms_trait.refresh() # Verify the room mappings are now populated assert refreshed_trait.rooms rooms = refreshed_trait.rooms assert len(rooms) == 3 assert rooms[0].segment_id == 16 assert rooms[0].name == "Example room 1" assert rooms[0].iot_id == "2362048" assert rooms[1].segment_id == 17 assert rooms[1].name == "Example room 2" assert rooms[1].iot_id == "2362044" assert rooms[2].segment_id == 18 assert rooms[2].name == "Example room 3" assert rooms[2].iot_id == "2362041" # Verify the RPC call was made correctly assert mock_rpc_channel.send_command.call_count == 1 mock_rpc_channel.send_command.assert_any_call(RoborockCommand.GET_ROOM_MAPPING) Python-roborock-python-roborock-32df4f3/tests/map/000077500000000000000000000000001507503702500223065ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/tests/map/test_map_parser.py000066400000000000000000000013011507503702500260430ustar00rootroot00000000000000"""Tests for the map parser.""" from pathlib import Path import pytest from roborock.exceptions import RoborockException from roborock.map.map_parser import MapParser, MapParserConfig MAP_DATA_FILE = Path(__file__).parent / "raw_map_data" DEFAULT_MAP_CONFIG = MapParserConfig() @pytest.mark.parametrize("map_content", [b"", b"12345"]) def test_invalid_map_content(map_content: bytes): """Test that parsing map data returns the expected image and data.""" parser = MapParser(DEFAULT_MAP_CONFIG) with pytest.raises(RoborockException, match="Failed to 
parse map data"): parser.parse(map_content) # We can add additional tests here in the future that actually parse valid map data Python-roborock-python-roborock-32df4f3/tests/mock_data.py000066400000000000000000000563031507503702500240340ustar00rootroot00000000000000"""Mock data for Roborock tests.""" import hashlib import json # All data is based on a U.S. customer with a Roborock S7 MaxV Ultra USER_EMAIL = "user@domain.com" BASE_URL = "https://usiot.roborock.com" USER_ID = "user123" K_VALUE = "qiCNieZa" USER_DATA = { "uid": 123456, "tokentype": "token_type", "token": "abc123", "rruid": "abc123", "region": "us", "countrycode": "1", "country": "US", "nickname": "user_nickname", "rriot": { "u": USER_ID, "s": "pass123", "h": "unknown123", "k": K_VALUE, "r": { "r": "US", "a": "https://api-us.roborock.com", "m": "tcp://mqtt-us.roborock.com:8883", # Skip SSL code in MQTT client library "l": "https://wood-us.roborock.com", }, }, "tuyaDeviceState": 2, "avatarurl": "https://files.roborock.com/iottest/default_avatar.png", } LOCAL_KEY = "key123key123key1" # 16 bytes / 128 bits PRODUCT_ID = "product-id-123" HOME_DATA_SCENES_RAW = [ { "id": 1234567, "name": "My plan", "param": json.dumps( { "triggers": [], "action": { "type": "S", "items": [ { "id": 5, "type": "CMD", "name": "", "entityId": "EEEEEEEEEEEEEE", "param": json.dumps( { "id": 5, "method": "do_scenes_app_start", "params": [ { "fan_power": 104, "water_box_mode": 200, "mop_mode": 300, "mop_template_id": 300, "repeat": 1, "auto_dustCollection": 1, "source": 101, } ], } ), "finishDpIds": [130], }, { "id": 4, "type": "CMD", "name": "", "entityId": "EEEEEEEEEEEEEE", "param": json.dumps( { "id": 4, "method": "do_scenes_segments", "params": { "data": [ { "tid": "111111111111111111", "segs": [ {"sid": 19}, {"sid": 18}, {"sid": 22}, {"sid": 21}, {"sid": 16}, ], "map_flag": 0, "fan_power": 105, "water_box_mode": 201, "mop_mode": 300, "mop_template_id": 300, "repeat": 1, "clean_order_mode": 1, "auto_dry": 1, "auto_dustCollection": 1, "region_num": 0, } ], "source": 101, }, } ), "finishDpIds": [130], }, ], }, "matchType": "NONE", "tagId": "4444", } ), "enabled": True, "extra": None, "type": "WORKFLOW", } ] HOME_DATA_RAW = { "id": 123456, "name": "My Home", "lon": None, "lat": None, "geoName": None, "products": [ { "id": PRODUCT_ID, "name": "Roborock S7 MaxV", "code": "a27", "model": "roborock.vacuum.a27", "iconUrl": None, "attribute": None, "capability": 0, "category": "robot.vacuum.cleaner", "schema": [ { "id": "101", "name": "rpc_request", "code": "rpc_request_code", "mode": "rw", "type": "RAW", "property": None, "desc": None, }, { "id": "102", "name": "rpc_response", "code": "rpc_response", "mode": "rw", "type": "RAW", "property": None, "desc": None, }, { "id": "120", "name": "错误代码", "code": "error_code", "mode": "ro", "type": "ENUM", "property": '{"range": []}', "desc": None, }, { "id": "121", "name": "设备状态", "code": "state", "mode": "ro", "type": "ENUM", "property": '{"range": []}', "desc": None, }, { "id": "122", "name": "设备电量", "code": "battery", "mode": "ro", "type": "ENUM", "property": '{"range": []}', "desc": None, }, { "id": "123", "name": "清扫模式", "code": "fan_power", "mode": "rw", "type": "ENUM", "property": '{"range": []}', "desc": None, }, { "id": "124", "name": "拖地模式", "code": "water_box_mode", "mode": "rw", "type": "ENUM", "property": '{"range": []}', "desc": None, }, { "id": "125", "name": "主刷寿命", "code": "main_brush_life", "mode": "rw", "type": "VALUE", "property": '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', "desc": 
None, }, { "id": "126", "name": "边刷寿命", "code": "side_brush_life", "mode": "rw", "type": "VALUE", "property": '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', "desc": None, }, { "id": "127", "name": "滤网寿命", "code": "filter_life", "mode": "rw", "type": "VALUE", "property": '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', "desc": None, }, { "id": "128", "name": "额外状态", "code": "additional_props", "mode": "ro", "type": "RAW", "property": None, "desc": None, }, { "id": "130", "name": "完成事件", "code": "task_complete", "mode": "ro", "type": "RAW", "property": None, "desc": None, }, { "id": "131", "name": "电量不足任务取消", "code": "task_cancel_low_power", "mode": "ro", "type": "RAW", "property": None, "desc": None, }, { "id": "132", "name": "运动中任务取消", "code": "task_cancel_in_motion", "mode": "ro", "type": "RAW", "property": None, "desc": None, }, { "id": "133", "name": "充电状态", "code": "charge_status", "mode": "ro", "type": "RAW", "property": None, "desc": None, }, { "id": "134", "name": "烘干状态", "code": "drying_status", "mode": "ro", "type": "RAW", "property": None, "desc": None, }, ], } ], "devices": [ { "duid": "abc123", "name": "Roborock S7 MaxV", "attribute": None, "activeTime": 1672364449, "localKey": LOCAL_KEY, "runtimeEnv": None, "timeZoneId": "America/Los_Angeles", "iconUrl": "no_url", "productId": PRODUCT_ID, "lon": None, "lat": None, "share": False, "shareTime": None, "online": True, "fv": "02.56.02", "pv": "1.0", "roomId": 2362003, "tuyaUuid": None, "tuyaMigrated": False, "extra": '{"RRPhotoPrivacyVersion": "1"}', "sn": "abc123", "featureSet": "2234201184108543", "newFeatureSet": "0000000000002041", "deviceStatus": { "121": 8, "122": 100, "123": 102, "124": 203, "125": 94, "126": 90, "127": 87, "128": 0, "133": 1, "120": 0, }, "silentOtaSwitch": True, } ], "receivedDevices": [], "rooms": [ {"id": 2362048, "name": "Example room 1"}, {"id": 2362044, "name": "Example room 2"}, {"id": 2362041, "name": "Example room 3"}, ], } WASHER_PRODUCT = { "id": PRODUCT_ID, "name": "Zeo One", "model": "roborock.wm.a102", "category": "roborock.wm", "capability": 2, "schema": [ { "id": "134", "name": "烘干状态", "code": "drying_status", "mode": "ro", "type": "RAW", }, { "id": "200", "name": "启动", "code": "start", "mode": "rw", "type": "BOOL", }, { "id": "201", "name": "暂停", "code": "pause", "mode": "rw", "type": "BOOL", }, { "id": "202", "name": "关机", "code": "shutdown", "mode": "rw", "type": "BOOL", }, { "id": "203", "name": "状态", "code": "status", "mode": "ro", "type": "VALUE", }, { "id": "204", "name": "模式", "code": "mode", "mode": "rw", "type": "VALUE", }, { "id": "205", "name": "程序", "code": "program", "mode": "rw", "type": "VALUE", }, { "id": "206", "name": "童锁", "code": "child_lock", "mode": "rw", "type": "BOOL", }, { "id": "207", "name": "洗涤温度", "code": "temp", "mode": "rw", "type": "VALUE", }, { "id": "208", "name": "漂洗次数", "code": "rinse_times", "mode": "rw", "type": "VALUE", }, { "id": "209", "name": "滚筒转速", "code": "spin_level", "mode": "rw", "type": "VALUE", }, { "id": "210", "name": "干燥度", "code": "drying_mode", "mode": "rw", "type": "VALUE", }, { "id": "211", "name": "自动投放-洗衣液", "code": "detergent_set", "mode": "rw", "type": "BOOL", }, { "id": "212", "name": "自动投放-柔顺剂", "code": "softener_set", "mode": "rw", "type": "BOOL", }, { "id": "213", "name": "洗衣液投放量", "code": "detergent_type", "mode": "rw", "type": "VALUE", }, { "id": "214", "name": "柔顺剂投放量", "code": "softener_type", "mode": "rw", "type": "VALUE", }, { "id": "217", "name": "预约时间", "code": "countdown", "mode": "rw", 
"type": "VALUE", }, { "id": "218", "name": "洗衣剩余时间", "code": "washing_left", "mode": "ro", "type": "VALUE", }, { "id": "219", "name": "门锁状态", "code": "doorlock_state", "mode": "ro", "type": "BOOL", }, { "id": "220", "name": "故障", "code": "error", "mode": "ro", "type": "VALUE", }, { "id": "221", "name": "云程序设置", "code": "custom_param_save", "mode": "rw", "type": "VALUE", }, { "id": "222", "name": "云程序读取", "code": "custom_param_get", "mode": "ro", "type": "VALUE", }, { "id": "223", "name": "提示音", "code": "sound_set", "mode": "rw", "type": "BOOL", }, { "id": "224", "name": "距离上次筒自洁次数", "code": "times_after_clean", "mode": "ro", "type": "VALUE", }, { "id": "225", "name": "记忆洗衣偏好开关", "code": "default_setting", "mode": "rw", "type": "BOOL", }, { "id": "226", "name": "洗衣液用尽", "code": "detergent_empty", "mode": "ro", "type": "BOOL", }, { "id": "227", "name": "柔顺剂用尽", "code": "softener_empty", "mode": "ro", "type": "BOOL", }, { "id": "229", "name": "筒灯设定", "code": "light_setting", "mode": "rw", "type": "BOOL", }, { "id": "230", "name": "洗衣液投放量(单次)", "code": "detergent_volume", "mode": "rw", "type": "VALUE", }, { "id": "231", "name": "柔顺剂投放量(单次)", "code": "softener_volume", "mode": "rw", "type": "VALUE", }, { "id": "232", "name": "远程控制授权", "code": "app_authorization", "mode": "rw", "type": "VALUE", }, { "id": "10000", "name": "ID点查询", "code": "id_query", "mode": "rw", "type": "STRING", }, { "id": "10001", "name": "防串货", "code": "f_c", "mode": "ro", "type": "STRING", }, { "id": "10004", "name": "语音包/OBA信息", "code": "snd_state", "mode": "rw", "type": "STRING", }, { "id": "10005", "name": "产品信息", "code": "product_info", "mode": "ro", "type": "STRING", }, { "id": "10006", "name": "隐私协议", "code": "privacy_info", "mode": "rw", "type": "STRING", }, { "id": "10007", "name": "OTA info", "code": "ota_nfo", "mode": "rw", "type": "STRING", }, { "id": "10008", "name": "洗衣记录", "code": "washing_log", "mode": "ro", "type": "BOOL", }, { "id": "10101", "name": "rpc req", "code": "rpc_req", "mode": "wo", "type": "STRING", }, { "id": "10102", "name": "rpc resp", "code": "rpc_resp", "mode": "ro", "type": "STRING", }, ], } ZEO_ONE_DEVICE = { "duid": "zeo_duid", "name": "Zeo One", "localKey": LOCAL_KEY, "fv": "01.00.94", "productId": PRODUCT_ID, "activeTime": 1699964128, "timeZoneId": "Europe/Berlin", "iconUrl": "", "share": True, "shareTime": 1712763572, "online": True, "pv": "A01", "tuyaMigrated": False, "sn": "zeo_sn", "featureSet": "0", "newFeatureSet": "40", "deviceStatus": { "208": 2, "205": 33, "221": 0, "226": 0, "10001": '{"f":"t"}', "214": 2, "225": 0, "232": 0, "222": 347414, "206": 0, "200": 1, "219": 0, "223": 0, "220": 0, "201": 0, "202": 1, "10005": '{"sn":"zeo_sn","ssid":"internet","timezone":"Europe/Berlin","posix_timezone":"CET-1CEST,M3.5.0,M10.5.0/3","ip":"192.111.11.11","mac":"b0:4a:00:00:00:00","rssi":-57,"oba":{"language":"en","name":"A.03.0403_CE","bom":"A.03.0403","location":"de","wifiplan":"EU","timezone":"CET-1CEST,M3.5.0,M10.5.0/3;Europe/Berlin","logserver":"awsde0","loglevel":"4","featureset":"0"}}', # noqa: E501 "211": 1, "210": 1, "217": 0, "203": 7, "213": 2, "209": 7, "224": 21, "218": 227, "212": 1, "207": 4, "204": 1, "10007": '{"mqttOtaData":{"mqttOtaStatus":{"status":"IDLE"}}}', "227": 1, }, "silentOtaSwitch": False, "f": False, } CLEAN_RECORD = { "begin": 1672543330, "end": 1672544638, "duration": 1176, "area": 20965000, "error": 0, "complete": 1, "start_type": 2, "clean_type": 3, "finish_reason": 56, "dust_collection_status": 1, "avoid_count": 19, "wash_count": 2, "map_flag": 0, } 
CLEAN_SUMMARY = { "clean_time": 74382, "clean_area": 1159182500, "clean_count": 31, "dust_collection_count": 25, "records": [ 1672543330, 1672458041, ], } CONSUMABLE = { "main_brush_work_time": 74382, "side_brush_work_time": 74383, "filter_work_time": 74384, "filter_element_work_time": 0, "sensor_dirty_time": 74385, "strainer_work_times": 65, "dust_collection_work_times": 25, "cleaning_brush_work_times": 66, } DND_TIMER = { "start_hour": 22, "start_minute": 0, "end_hour": 7, "end_minute": 0, "enabled": 1, } STATUS = { "msg_ver": 2, "msg_seq": 458, "state": 8, "battery": 100, "clean_time": 1176, "clean_area": 20965000, "error_code": 0, "map_present": 1, "in_cleaning": 0, "in_returning": 0, "in_fresh_state": 1, "lab_status": 1, "water_box_status": 1, "back_type": -1, "wash_phase": 0, "wash_ready": 0, "fan_power": 102, "dnd_enabled": 0, "map_status": 3, "is_locating": 0, "lock_status": 0, "water_box_mode": 203, "water_box_carriage_status": 1, "mop_forbidden_enable": 1, "camera_status": 3457, "is_exploring": 0, "home_sec_status": 0, "home_sec_enable_password": 0, "adbumper_status": [0, 0, 0], "water_shortage_status": 0, "dock_type": 3, "dust_collection_status": 0, "auto_dust_collection": 1, "avoid_count": 19, "mop_mode": 300, "debug_mode": 0, "collision_avoid_status": 1, "switch_map_mode": 0, "dock_error_status": 0, "charge_status": 1, "unsave_map_reason": 0, "unsave_map_flag": 0, } BASE_URL_REQUEST = { "code": 200, "msg": "success", "data": {"url": "https://sample.com", "countrycode": 1, "country": "US"}, } GET_CODE_RESPONSE = {"code": 200, "msg": "success", "data": None} HASHED_USER = hashlib.md5((USER_ID + ":" + K_VALUE).encode()).hexdigest()[2:10] MQTT_PUBLISH_TOPIC = f"rr/m/o/{USER_ID}/{HASHED_USER}/{PRODUCT_ID}" TEST_LOCAL_API_HOST = "1.1.1.1" NETWORK_INFO = { "ip": TEST_LOCAL_API_HOST, "ssid": "test_wifi", "mac": "aa:bb:cc:dd:ee:ff", "bssid": "aa:bb:cc:dd:ee:ff", "rssi": -50, } APP_GET_INIT_STATUS = { "local_info": { "name": "custom_A.03.0069_FCC", "bom": "A.03.0069", "location": "us", "language": "en", "wifiplan": "0x39", "timezone": "US/Pacific", "logserver": "awsusor0.fds.api.xiaomi.com", "featureset": 1, }, "feature_info": [111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 122, 123, 124, 125], "new_feature_info": 633887780925447, "new_feature_info2": 8192, "new_feature_info_str": "0000000000002000", "status_info": { "state": 8, "battery": 100, "clean_time": 5610, "clean_area": 96490000, "error_code": 0, "in_cleaning": 0, "in_returning": 0, "in_fresh_state": 1, "lab_status": 1, "water_box_status": 0, "map_status": 3, "is_locating": 0, "lock_status": 0, "water_box_mode": 204, "distance_off": 0, "water_box_carriage_status": 0, "mop_forbidden_enable": 0, }, } Python-roborock-python-roborock-32df4f3/tests/mqtt/000077500000000000000000000000001507503702500225165ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/tests/mqtt/test_roborock_session.py000066400000000000000000000164561507503702500275260ustar00rootroot00000000000000"""Tests for the MQTT session module.""" import asyncio from collections.abc import AsyncGenerator, Callable from queue import Queue from typing import Any from unittest.mock import AsyncMock, Mock, patch import aiomqtt import paho.mqtt.client as mqtt import pytest from roborock.mqtt.roborock_session import create_mqtt_session from roborock.mqtt.session import MqttParams, MqttSessionException from tests import mqtt_packet from tests.conftest import FakeSocketHandler # We mock out the connection so these params are not used/verified FAKE_PARAMS = 
MqttParams( host="localhost", port=1883, tls=False, username="username", password="password", timeout=10.0, ) @pytest.fixture(autouse=True) def mqtt_server_fixture(mock_create_connection: None, mock_select: None) -> None: """Fixture to prepare a fake MQTT server.""" @pytest.fixture(autouse=True) async def mock_client_fixture() -> AsyncGenerator[None, None]: """Fixture to patch the MQTT underlying sync client. The tests use fake sockets, so this ensures that the async mqtt client does not attempt to listen on them directly. We instead just poll the socket for data ourselves. """ event_loop = asyncio.get_running_loop() orig_class = mqtt.Client async def poll_sockets(client: mqtt.Client) -> None: """Poll the mqtt client sockets in a loop to pick up new data.""" while True: event_loop.call_soon_threadsafe(client.loop_read) event_loop.call_soon_threadsafe(client.loop_write) await asyncio.sleep(0.1) task: asyncio.Task[None] | None = None def new_client(*args: Any, **kwargs: Any) -> mqtt.Client: """Create a new mqtt client and start the socket polling task.""" nonlocal task client = orig_class(*args, **kwargs) task = event_loop.create_task(poll_sockets(client)) return client with ( patch("aiomqtt.client.Client._on_socket_open"), patch("aiomqtt.client.Client._on_socket_close"), patch("aiomqtt.client.Client._on_socket_register_write"), patch("aiomqtt.client.Client._on_socket_unregister_write"), patch("aiomqtt.client.mqtt.Client", side_effect=new_client), ): yield if task: task.cancel() @pytest.fixture def push_response(response_queue: Queue, fake_socket_handler: FakeSocketHandler) -> Callable[[bytes], None]: """Fixtures to push messages.""" def push(message: bytes) -> None: response_queue.put(message) fake_socket_handler.push_response() return push class Subscriber: """Mock subscriber class. This will capture messages published on the session so the tests can verify they were received. 
""" def __init__(self) -> None: """Initialize the subscriber.""" self.messages: list[bytes] = [] self.event: asyncio.Event = asyncio.Event() def append(self, message: bytes) -> None: """Append a message to the subscriber.""" self.messages.append(message) self.event.set() async def wait(self) -> None: """Wait for a message to be received.""" await self.event.wait() self.event.clear() async def test_session(push_response: Callable[[bytes], None]) -> None: """Test the MQTT session.""" push_response(mqtt_packet.gen_connack(rc=0, flags=2)) session = await create_mqtt_session(FAKE_PARAMS) assert session.connected push_response(mqtt_packet.gen_suback(mid=1)) subscriber1 = Subscriber() unsub1 = await session.subscribe("topic-1", subscriber1.append) push_response(mqtt_packet.gen_suback(mid=2)) subscriber2 = Subscriber() await session.subscribe("topic-2", subscriber2.append) push_response(mqtt_packet.gen_publish("topic-1", mid=3, payload=b"12345")) await subscriber1.wait() assert subscriber1.messages == [b"12345"] assert not subscriber2.messages push_response(mqtt_packet.gen_publish("topic-2", mid=4, payload=b"67890")) await subscriber2.wait() assert subscriber2.messages == [b"67890"] push_response(mqtt_packet.gen_publish("topic-1", mid=5, payload=b"ABC")) await subscriber1.wait() assert subscriber1.messages == [b"12345", b"ABC"] assert subscriber2.messages == [b"67890"] # Messages are no longer received after unsubscribing unsub1() push_response(mqtt_packet.gen_publish("topic-1", payload=b"ignored")) assert subscriber1.messages == [b"12345", b"ABC"] assert session.connected await session.close() assert not session.connected async def test_session_no_subscribers(push_response: Callable[[bytes], None]) -> None: """Test the MQTT session.""" push_response(mqtt_packet.gen_connack(rc=0, flags=2)) push_response(mqtt_packet.gen_publish("topic-1", mid=3, payload=b"12345")) push_response(mqtt_packet.gen_publish("topic-2", mid=4, payload=b"67890")) session = await create_mqtt_session(FAKE_PARAMS) assert session.connected await session.close() assert not session.connected async def test_publish_command(push_response: Callable[[bytes], None]) -> None: """Test publishing during an MQTT session.""" push_response(mqtt_packet.gen_connack(rc=0, flags=2)) session = await create_mqtt_session(FAKE_PARAMS) push_response(mqtt_packet.gen_publish("topic-1", mid=3, payload=b"12345")) await session.publish("topic-1", message=b"payload") assert session.connected await session.close() assert not session.connected class FakeAsyncIterator: """Fake async iterator that waits for messages to arrive, but they never do. This is used for testing exceptions in other client functions. 
""" def __aiter__(self): return self async def __anext__(self) -> None: """Iterator that does not generate any messages.""" while True: await asyncio.sleep(1) async def test_publish_failure() -> None: """Test an MQTT error is received when publishing a message.""" mock_client = AsyncMock() mock_client.messages = FakeAsyncIterator() mock_aenter = AsyncMock() mock_aenter.return_value = mock_client with patch("roborock.mqtt.roborock_session.aiomqtt.Client.__aenter__", mock_aenter): session = await create_mqtt_session(FAKE_PARAMS) assert session.connected mock_client.publish.side_effect = aiomqtt.MqttError with pytest.raises(MqttSessionException, match="Error publishing message"): await session.publish("topic-1", message=b"payload") async def test_subscribe_failure() -> None: """Test an MQTT error while subscribing.""" mock_client = AsyncMock() mock_client.messages = FakeAsyncIterator() mock_aenter = AsyncMock() mock_aenter.return_value = mock_client mock_shim = Mock() mock_shim.return_value.__aenter__ = mock_aenter mock_shim.return_value.__aexit__ = AsyncMock() with patch("roborock.mqtt.roborock_session.aiomqtt.Client", mock_shim): session = await create_mqtt_session(FAKE_PARAMS) assert session.connected mock_client.subscribe.side_effect = aiomqtt.MqttError subscriber1 = Subscriber() with pytest.raises(MqttSessionException, match="Error subscribing to topic"): await session.subscribe("topic-1", subscriber1.append) assert not subscriber1.messages await session.close() Python-roborock-python-roborock-32df4f3/tests/mqtt_packet.py000066400000000000000000000072111507503702500244200ustar00rootroot00000000000000"""Module for crafting MQTT packets. This library is copied from the paho mqtt client library tests, with just the parts needed for some roborock messages. This message format in this file is not specific to roborock. 
""" import struct PROP_RECEIVE_MAXIMUM = 33 PROP_TOPIC_ALIAS_MAXIMUM = 34 def gen_uint16_prop(identifier: int, word: int) -> bytes: """Generate a property with a uint16_t value.""" prop = struct.pack("!BH", identifier, word) return prop def pack_varint(varint: int) -> bytes: """Pack a variable integer.""" s = b"" while True: byte = varint % 128 varint = varint // 128 # If there are more digits to encode, set the top bit of this digit if varint > 0: byte = byte | 0x80 s = s + struct.pack("!B", byte) if varint == 0: return s def prop_finalise(props: bytes) -> bytes: """Finalise the properties.""" if props is None: return pack_varint(0) else: return pack_varint(len(props)) + props def gen_connack(flags=0, rc=0, properties=b"", property_helper=True): """Generate a CONNACK packet.""" if property_helper: if properties is not None: properties = ( gen_uint16_prop(PROP_TOPIC_ALIAS_MAXIMUM, 10) + properties + gen_uint16_prop(PROP_RECEIVE_MAXIMUM, 20) ) else: properties = b"" properties = prop_finalise(properties) packet = struct.pack("!BBBB", 32, 2 + len(properties), flags, rc) + properties return packet def gen_suback(mid: int, qos: int = 0) -> bytes: """Generate a SUBACK packet.""" return struct.pack("!BBHBB", 144, 2 + 1 + 1, mid, 0, qos) def _gen_short(cmd: int, reason_code: int) -> bytes: return struct.pack("!BBB", cmd, 1, reason_code) def gen_disconnect(reason_code: int = 0) -> bytes: """Generate a DISCONNECT packet.""" return _gen_short(0xE0, reason_code) def _gen_command_with_mid(cmd: int, mid: int, reason_code: int = 0) -> bytes: return struct.pack("!BBHB", cmd, 3, mid, reason_code) def gen_puback(mid: int, reason_code: int = 0) -> bytes: """Generate a PUBACK packet.""" return _gen_command_with_mid(64, mid, reason_code) def _pack_remaining_length(remaining_length: int) -> bytes: """Pack a remaining length.""" s = b"" while True: byte = remaining_length % 128 remaining_length = remaining_length // 128 # If there are more digits to encode, set the top bit of this digit if remaining_length > 0: byte = byte | 0x80 s = s + struct.pack("!B", byte) if remaining_length == 0: return s def gen_publish( topic: str, payload: bytes | None = None, retain: bool = False, dup: bool = False, mid: int = 0, properties: bytes = b"", ) -> bytes: """Generate a PUBLISH packet.""" if isinstance(topic, str): topic_b = topic.encode("utf-8") rl = 2 + len(topic_b) pack_format = "H" + str(len(topic_b)) + "s" properties = prop_finalise(properties) rl += len(properties) # This will break if len(properties) > 127 pack_format = f"{pack_format}{len(properties)}s" if payload is not None: # payload = payload.encode("utf-8") rl = rl + len(payload) pack_format = pack_format + str(len(payload)) + "s" else: payload = b"" pack_format = pack_format + "0s" rlpacked = _pack_remaining_length(rl) cmd = 48 if retain: cmd = cmd + 1 if dup: cmd = cmd + 8 return struct.pack( "!B" + str(len(rlpacked)) + "s" + pack_format, cmd, rlpacked, len(topic_b), topic_b, properties, payload ) Python-roborock-python-roborock-32df4f3/tests/protocols/000077500000000000000000000000001507503702500235555ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/tests/protocols/__init__.py000066400000000000000000000000471507503702500256670ustar00rootroot00000000000000"""Tests for the protocols package.""" 
Python-roborock-python-roborock-32df4f3/tests/protocols/__snapshots__/000077500000000000000000000000001507503702500263735ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/tests/protocols/__snapshots__/test_v1_protocol.ambr000066400000000000000000000110031507503702500325370ustar00rootroot00000000000000# serializer version: 1 # name: test_decode_rpc_payload[app_get_init_status] 20001 # --- # name: test_decode_rpc_payload[app_get_init_status].1 ''' [ { "local_info": { "name": "custom_A.03.0069_FCC", "bom": "A.03.0069", "location": "us", "language": "en", "wifiplan": "0x39", "timezone": "US/Pacific", "logserver": "awsusor0.fds.api.xiaomi.com", "featureset": 1 }, "feature_info": [ 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 122, 123, 124, 125 ], "new_feature_info": 633887780925447, "new_feature_info2": 8192, "new_feature_info_str": "0000000000002000", "status_info": { "state": 8, "battery": 100, "clean_time": 5610, "clean_area": 96490000, "error_code": 0, "in_cleaning": 0, "in_returning": 0, "in_fresh_state": 1, "lab_status": 1, "water_box_status": 0, "map_status": 3, "is_locating": 0, "lock_status": 0, "water_box_mode": 204, "distance_off": 0, "water_box_carriage_status": 0, "mop_forbidden_enable": 0 } } ] ''' # --- # name: test_decode_rpc_payload[get_clean_summary] 20001 # --- # name: test_decode_rpc_payload[get_clean_summary].1 ''' [ 1442559, 24258125000, 296, [ 1756848207, 1754930385, 1753203976, 1752183435, 1747427370, 1746204046, 1745601543, 1744387080, 1743528522, 1742489154, 1741022299, 1740433682, 1739902516, 1738875106, 1738864366, 1738620067, 1736873889, 1736197544, 1736121269, 1734458038 ] ] ''' # --- # name: test_decode_rpc_payload[get_consumeables] 20001 # --- # name: test_decode_rpc_payload[get_consumeables].1 ''' [ { "main_brush_work_time": 879348, "side_brush_work_time": 707618, "filter_work_time": 738722, "filter_element_work_time": 0, "sensor_dirty_time": 455517 } ] ''' # --- # name: test_decode_rpc_payload[get_dnd] 20002 # --- # name: test_decode_rpc_payload[get_dnd].1 ''' [ { "start_hour": 22, "start_minute": 0, "end_hour": 8, "end_minute": 0, "enabled": 1 } ] ''' # --- # name: test_decode_rpc_payload[get_multi_maps_list] 20001 # --- # name: test_decode_rpc_payload[get_multi_maps_list].1 ''' [ { "max_multi_map": 1, "max_bak_map": 1, "multi_map_count": 1, "map_info": [ { "mapFlag": 0, "add_time": 1747132930, "length": 0, "name": "", "bak_maps": [ { "mapFlag": 4, "add_time": 1747132936 } ] } ] } ] ''' # --- # name: test_decode_rpc_payload[get_room_mapping2] 20001 # --- # name: test_decode_rpc_payload[get_room_mapping2].1 ''' [ [ 16, "2537178", 6 ], [ 17, "2537175", 14 ], [ 18, "2537174", 13 ], [ 19, "2537176", 14 ], [ 20, "10655627", 12 ], [ 21, "2537145", 2 ], [ 22, "2537147", 12 ] ] ''' # --- # name: test_decode_rpc_payload[get_room_mapping] 20001 # --- # name: test_decode_rpc_payload[get_room_mapping].1 ''' [ [ 16, "3031886" ], [ 17, "3031880" ], [ 18, "3031883" ] ] ''' # --- # name: test_decode_rpc_payload[get_status] 20001 # --- # name: test_decode_rpc_payload[get_status].1 ''' [ { "msg_ver": 2, "msg_seq": 515, "state": 8, "battery": 100, "clean_time": 5405, "clean_area": 91287500, "error_code": 0, "map_present": 1, "in_cleaning": 0, "in_returning": 0, "in_fresh_state": 1, "lab_status": 1, "water_box_status": 0, "fan_power": 106, "dnd_enabled": 1, "map_status": 3, "is_locating": 0, "lock_status": 0, "water_box_mode": 204, "distance_off": 0, "water_box_carriage_status": 0, "mop_forbidden_enable": 0, "unsave_map_reason": 4, "unsave_map_flag": 0 } ] 
''' # --- # name: test_decode_rpc_payload[get_volume] 20001 # --- # name: test_decode_rpc_payload[get_volume].1 ''' [ 90 ] ''' # --- Python-roborock-python-roborock-32df4f3/tests/protocols/test_a01_protocol.py000066400000000000000000000171461507503702500275010ustar00rootroot00000000000000"""Tests for A01 protocol encoding and decoding.""" import json from typing import Any import pytest from roborock.exceptions import RoborockException from roborock.protocols.a01_protocol import decode_rpc_response, encode_mqtt_payload from roborock.roborock_message import ( RoborockDyadDataProtocol, RoborockMessage, RoborockMessageProtocol, RoborockZeoProtocol, ) def test_encode_mqtt_payload_basic(): """Test basic MQTT payload encoding.""" # Test data with proper protocol keys data: dict[RoborockDyadDataProtocol | RoborockZeoProtocol, Any] = { RoborockDyadDataProtocol.START: {"test": "data", "number": 42} } result = encode_mqtt_payload(data) # Verify result is a RoborockMessage assert isinstance(result, RoborockMessage) assert result.protocol == RoborockMessageProtocol.RPC_REQUEST assert result.version == b"A01" assert result.payload is not None assert isinstance(result.payload, bytes) assert len(result.payload) % 16 == 0 # Should be padded to AES block size # Decode the payload to verify structure decoded_data = decode_rpc_response(result) assert decoded_data == {200: {"test": "data", "number": 42}} def test_encode_mqtt_payload_empty_data(): """Test encoding with empty data.""" data: dict[RoborockDyadDataProtocol | RoborockZeoProtocol, Any] = {} result = encode_mqtt_payload(data) assert isinstance(result, RoborockMessage) assert result.protocol == RoborockMessageProtocol.RPC_REQUEST assert result.payload is not None # Decode the payload to verify structure decoded_data = decode_rpc_response(result) assert decoded_data == {} def test_encode_mqtt_payload_complex_data(): """Test encoding with complex nested data.""" data: dict[RoborockDyadDataProtocol | RoborockZeoProtocol, Any] = { RoborockDyadDataProtocol.STATUS: { "nested": {"deep": {"value": 123}}, "list": [1, 2, 3, "test"], "boolean": True, "null": None, }, RoborockZeoProtocol.MODE: "simple_value", } result = encode_mqtt_payload(data) assert isinstance(result, RoborockMessage) assert result.protocol == RoborockMessageProtocol.RPC_REQUEST assert result.payload is not None assert isinstance(result.payload, bytes) # Decode the payload to verify structure decoded_data = decode_rpc_response(result) assert decoded_data == { 201: { "nested": {"deep": {"value": 123}}, "list": [1, 2, 3, "test"], "boolean": True, "null": None, }, 204: "simple_value", } def test_decode_rpc_response_valid_message(): """Test decoding a valid RPC response.""" # Create a valid padded JSON payload payload_data = {"dps": {"1": {"key": "value"}, "2": 42, "10": ["list", "data"]}} json_payload = json.dumps(payload_data).encode("utf-8") # Pad to AES block size (16 bytes) padding_length = 16 - (len(json_payload) % 16) padded_payload = json_payload + bytes([padding_length] * padding_length) message = RoborockMessage(protocol=RoborockMessageProtocol.RPC_RESPONSE, payload=padded_payload) result = decode_rpc_response(message) assert isinstance(result, dict) assert 1 in result assert 2 in result assert 10 in result assert result[1] == {"key": "value"} assert result[2] == 42 assert result[10] == ["list", "data"] def test_decode_rpc_response_string_keys(): """Test decoding with string keys that can be converted to integers.""" payload_data = {"dps": {"1": "first", "100": "hundred", "999": 
{"nested": "data"}}} json_payload = json.dumps(payload_data).encode("utf-8") # Pad to AES block size padding_length = 16 - (len(json_payload) % 16) padded_payload = json_payload + bytes([padding_length] * padding_length) message = RoborockMessage(protocol=RoborockMessageProtocol.RPC_RESPONSE, payload=padded_payload) result = decode_rpc_response(message) assert result[1] == "first" assert result[100] == "hundred" assert result[999] == {"nested": "data"} def test_decode_rpc_response_missing_payload(): """Test decoding fails when payload is missing.""" message = RoborockMessage(protocol=RoborockMessageProtocol.RPC_RESPONSE, payload=None) with pytest.raises(RoborockException, match="Invalid A01 message format: missing payload"): decode_rpc_response(message) def test_decode_rpc_response_invalid_padding(): """Test decoding fails with invalid padding.""" # Create invalid padded data invalid_payload = b"invalid padding data" message = RoborockMessage(protocol=RoborockMessageProtocol.RPC_RESPONSE, payload=invalid_payload) with pytest.raises(RoborockException, match="Unable to unpad A01 payload"): decode_rpc_response(message) def test_decode_rpc_response_invalid_json(): """Test decoding fails with invalid JSON after unpadding.""" # Create properly padded but invalid JSON invalid_json = b"invalid json data" padding_length = 16 - (len(invalid_json) % 16) padded_payload = invalid_json + bytes([padding_length] * padding_length) message = RoborockMessage(protocol=RoborockMessageProtocol.RPC_RESPONSE, payload=padded_payload) with pytest.raises(RoborockException, match="Invalid A01 message payload"): decode_rpc_response(message) def test_decode_rpc_response_missing_dps(): """Test decoding with missing 'dps' key returns empty dict.""" payload_data = {"other_key": "value"} json_payload = json.dumps(payload_data).encode("utf-8") # Pad to AES block size padding_length = 16 - (len(json_payload) % 16) padded_payload = json_payload + bytes([padding_length] * padding_length) message = RoborockMessage(protocol=RoborockMessageProtocol.RPC_RESPONSE, payload=padded_payload) result = decode_rpc_response(message) assert result == {} def test_decode_rpc_response_dps_not_dict(): """Test decoding fails when 'dps' is not a dictionary.""" payload_data = {"dps": "not_a_dict"} json_payload = json.dumps(payload_data).encode("utf-8") # Pad to AES block size padding_length = 16 - (len(json_payload) % 16) padded_payload = json_payload + bytes([padding_length] * padding_length) message = RoborockMessage(protocol=RoborockMessageProtocol.RPC_RESPONSE, payload=padded_payload) with pytest.raises(RoborockException, match=r"Invalid A01 message format.*'dps' should be a dictionary"): decode_rpc_response(message) def test_decode_rpc_response_invalid_key(): """Test decoding fails when dps contains non-integer keys.""" payload_data = {"dps": {"1": "valid", "not_a_number": "invalid"}} json_payload = json.dumps(payload_data).encode("utf-8") # Pad to AES block size padding_length = 16 - (len(json_payload) % 16) padded_payload = json_payload + bytes([padding_length] * padding_length) message = RoborockMessage(protocol=RoborockMessageProtocol.RPC_RESPONSE, payload=padded_payload) with pytest.raises(RoborockException, match=r"Invalid A01 message format:.*'dps' key should be an integer"): decode_rpc_response(message) def test_decode_rpc_response_empty_dps(): """Test decoding with empty dps dictionary.""" payload_data: dict[str, Any] = {"dps": {}} json_payload = json.dumps(payload_data).encode("utf-8") # Pad to AES block size padding_length = 16 - 
(len(json_payload) % 16) padded_payload = json_payload + bytes([padding_length] * padding_length) message = RoborockMessage(protocol=RoborockMessageProtocol.RPC_RESPONSE, payload=padded_payload) result = decode_rpc_response(message) assert result == {} Python-roborock-python-roborock-32df4f3/tests/protocols/test_l01_protocol.py000066400000000000000000000037761507503702500275200ustar00rootroot00000000000000from roborock.protocol import Utils def test_encryption(): """Tests the L01 GCM encryption logic.""" local_key = "b8Hj5mFk3QzT7rLp" timestamp = 1753606905 sequence = 1 nonce = 304251 connect_nonce = 893563 ack_nonce = 485592656 payload_str = ( '{"dps":{"101":"{\\"id\\":1806,\\"method\\":\\"get_prop\\",\\"params\\":[\\"get_status\\"]}"},"t":1753606905}' ) payload = payload_str.encode("utf-8") encrypted_data = Utils.encrypt_gcm_l01( plaintext=payload, local_key=local_key, timestamp=timestamp, sequence=sequence, nonce=nonce, connect_nonce=connect_nonce, ack_nonce=ack_nonce, ) expected_data = bytes.fromhex( "fd60c8daca1ccae67f6077477bfa9d37189a38d75b3c4a907c2435d3c146ee84d8f99597e3e1571a015961ceaa4d64bc3695fae024c341" "6737d77150341de29cad2f95bfaf532358f12bbff89f140fef5b1ee284c3abfe3b83a577910a72056dab4d5a75b182d1a0cba145e3e450" "f3927443" ) assert encrypted_data == expected_data def test_decryption(): """Tests the L01 GCM decryption logic.""" local_key = "b8Hj5mFk3QzT7rLp" timestamp = 1753606905 sequence = 1 nonce = 304251 connect_nonce = 893563 ack_nonce = 485592656 payload = bytes.fromhex( "fd60c8daca1ccae67f6077477bfa9d37189a38d75b3c4a907c2435d3c146ee84d8f99597e3e1571a015961ceaa4d64bc3695fae024c341" "6737d77150341de29cad2f95bfaf532358f12bbff89f140fef5b1ee284c3abfe3b83a577910a72056dab4d5a75b182d1a0cba145e3e450" "f3927443" ) decrypted_data = Utils.decrypt_gcm_l01( payload=payload, local_key=local_key, timestamp=timestamp, sequence=sequence, nonce=nonce, connect_nonce=connect_nonce, ack_nonce=ack_nonce, ) decrypted_str = decrypted_data.decode("utf-8") expected_str = ( '{"dps":{"101":"{\\"id\\":1806,\\"method\\":\\"get_prop\\",\\"params\\":[\\"get_status\\"]}"},"t":1753606905}' ) assert decrypted_str == expected_str Python-roborock-python-roborock-32df4f3/tests/protocols/test_v1_protocol.py000066400000000000000000000235401507503702500274410ustar00rootroot00000000000000"""Tests for the v1 protocol message encoding and decoding.""" import json import logging import pathlib from collections.abc import Generator from unittest.mock import patch import pytest from freezegun import freeze_time from syrupy import SnapshotAssertion from roborock.containers import RoborockBase, UserData from roborock.exceptions import RoborockException from roborock.protocol import Utils from roborock.protocols.v1_protocol import ( RequestMessage, SecurityData, create_map_response_decoder, decode_rpc_response, ) from roborock.roborock_message import RoborockMessage, RoborockMessageProtocol from roborock.roborock_typing import RoborockCommand from .. 
import mock_data USER_DATA = UserData.from_dict(mock_data.USER_DATA) TEST_REQUEST_ID = 44444 TEST_ENDPOINT = "87ItGWdb" TEST_ENDPOINT_BYTES = TEST_ENDPOINT.encode() SECURITY_DATA = SecurityData( endpoint=TEST_ENDPOINT, nonce=b"\x91\xbe\x10\xc9b+\x9d\x8a\xcdH*\x19\xf6\xfe\x81h", ) TESTDATA_PATH = pathlib.Path("tests/protocols/testdata/v1_protocol/") TESTDATA_FILES = list(TESTDATA_PATH.glob("*.json")) TESTDATA_IDS = [x.stem for x in TESTDATA_FILES] @pytest.fixture(autouse=True) def fixed_time_fixture() -> Generator[None, None, None]: """Fixture to freeze time for predictable request IDs.""" # Freeze time to a specific point so request IDs are predictable with freeze_time("2025-01-20T12:00:00"): yield @pytest.fixture(name="test_request_id", autouse=True) def request_id_fixture() -> Generator[int, None, None]: """Fixture to provide a fixed request ID.""" with patch("roborock.protocols.v1_protocol.get_next_int", return_value=TEST_REQUEST_ID): yield TEST_REQUEST_ID @pytest.mark.parametrize( ("command", "params", "expected"), [ ( RoborockCommand.GET_STATUS, None, b'{"dps":{"101":"{\\"id\\":44444,\\"method\\":\\"get_status\\",\\"params\\":[]}"},"t":1737374400}', ) ], ) def test_encode_local_payload(command, params, expected): """Test encoding of local payload for V1 commands.""" message = RequestMessage(command, params).encode_message(RoborockMessageProtocol.GENERAL_REQUEST) assert isinstance(message, RoborockMessage) assert message.protocol == RoborockMessageProtocol.GENERAL_REQUEST assert message.payload == expected @pytest.mark.parametrize( ("command", "params", "expected"), [ ( RoborockCommand.GET_STATUS, None, b'{"dps":{"101":"{\\"id\\":44444,\\"method\\":\\"get_status\\",\\"params\\":[],\\"security\\":{\\"endpoint\\":\\"87ItGWdb\\",\\"nonce\\":\\"91be10c9622b9d8acd482a19f6fe8168\\"}}"},"t":1737374400}', ) ], ) def test_encode_mqtt_payload(command, params, expected): """Test encoding of local payload for V1 commands.""" request_message = RequestMessage(command, params=params) message = request_message.encode_message(RoborockMessageProtocol.RPC_REQUEST, SECURITY_DATA) assert isinstance(message, RoborockMessage) assert message.protocol == RoborockMessageProtocol.RPC_REQUEST assert message.payload == expected @pytest.mark.parametrize( ("payload", "expected"), [ ( b'{"t":1652547161,"dps":{"102":"{\\"id\\":20005,\\"result\\":[{\\"msg_ver\\":2,\\"msg_seq\\":1072,\\"state\\":8,\\"battery\\":100,\\"clean_time\\":1041,\\"clean_area\\":37080000,\\"error_code\\":0,\\"map_present\\":1,\\"in_cleaning\\":0,\\"in_returning\\":0,\\"in_fresh_state\\":1,\\"lab_status\\":1,\\"water_box_status\\":0,\\"fan_power\\":103,\\"dnd_enabled\\":0,\\"map_status\\":3,\\"is_locating\\":0,\\"lock_status\\":0,\\"water_box_mode\\":202,\\"distance_off\\":0,\\"water_box_carriage_status\\":0,\\"mop_forbidden_enable\\":0,\\"unsave_map_reason\\":0,\\"unsave_map_flag\\":0}]}"}}', [ { "msg_ver": 2, "msg_seq": 1072, "state": 8, "battery": 100, "clean_time": 1041, "clean_area": 37080000, "error_code": 0, "map_present": 1, "in_cleaning": 0, "in_returning": 0, "in_fresh_state": 1, "lab_status": 1, "water_box_status": 0, "fan_power": 103, "dnd_enabled": 0, "map_status": 3, "is_locating": 0, "lock_status": 0, "water_box_mode": 202, "distance_off": 0, "water_box_carriage_status": 0, "mop_forbidden_enable": 0, "unsave_map_reason": 0, "unsave_map_flag": 0, } ], ), ], ) def test_decode_rpc_response(payload: bytes, expected: RoborockBase) -> None: """Test decoding a v1 RPC response protocol message.""" # The values other than the payload are 
arbitrary message = RoborockMessage( protocol=RoborockMessageProtocol.GENERAL_RESPONSE, payload=payload, seq=12750, version=b"1.0", random=97431, timestamp=1652547161, ) decoded_message = decode_rpc_response(message) assert decoded_message.request_id == 20005 assert decoded_message.data == expected @pytest.mark.parametrize("filename", TESTDATA_FILES, ids=TESTDATA_IDS) def test_decode_rpc_payload(filename: str, snapshot: SnapshotAssertion) -> None: """Test decoding a v1 RPC response protocol message.""" with open(filename, "rb") as f: payload = f.read() # The values other than the payload are arbitrary message = RoborockMessage( protocol=RoborockMessageProtocol.GENERAL_RESPONSE, payload=payload, seq=12750, version=b"1.0", random=97431, timestamp=1652547161, ) decoded_message = decode_rpc_response(message) assert decoded_message.request_id == snapshot assert json.dumps(decoded_message.data, indent=2) == snapshot def test_create_map_response_decoder(): """Test creating and using a map response decoder.""" test_data = b"some map\n" compressed_data = ( b"\x1f\x8b\x08\x08\xf9\x13\x99h\x00\x03foo\x00+\xce\xcfMU\xc8M,\xe0\x02\x00@\xdb\xc6\x1a\t\x00\x00\x00" ) # Create header: endpoint(8) + padding(8) + request_id(2) + padding(6) # request_id = 44508 (0xaddc in little endian) header = TEST_ENDPOINT_BYTES + b"\x00" * 8 + b"\xdc\xad" + b"\x00" * 6 encrypted_data = Utils.encrypt_cbc(compressed_data, SECURITY_DATA.nonce) payload = header + encrypted_data message = RoborockMessage( protocol=RoborockMessageProtocol.MAP_RESPONSE, payload=payload, seq=12750, version=b"1.0", random=97431, timestamp=1652547161, ) decoder = create_map_response_decoder(SECURITY_DATA) result = decoder(message) assert result is not None assert result.request_id == 44508 assert result.data == test_data def test_create_map_response_decoder_invalid_endpoint(caplog: pytest.LogCaptureFixture): """Test map response decoder with invalid endpoint.""" caplog.set_level(logging.DEBUG) # Create header with wrong endpoint header = b"wrongend" + b"\x00" * 8 + b"\xdc\xad" + b"\x00" * 6 payload = header + b"encrypted_data" message = RoborockMessage( protocol=RoborockMessageProtocol.MAP_RESPONSE, payload=payload, seq=12750, version=b"1.0", random=97431, timestamp=1652547161, ) decoder = create_map_response_decoder(SECURITY_DATA) assert decoder(message) is None assert "Received map response not requested by this device, ignoring." 
in caplog.text def test_create_map_response_decoder_invalid_payload(): """Test map response decoder with invalid payload.""" message = RoborockMessage( protocol=RoborockMessageProtocol.MAP_RESPONSE, payload=b"short", # Too short payload seq=12750, version=b"1.0", random=97431, timestamp=1652547161, ) decoder = create_map_response_decoder(SECURITY_DATA) with pytest.raises(RoborockException, match="Invalid V1 map response format: missing payload"): decoder(message) @pytest.mark.parametrize( ("payload", "expected_data", "expected_error"), [ ( b'{"t":1757883536,"dps":{"102":"{\\"id\\":20001,\\"result\\":\\"unknown_method\\"}"}}', {}, "The method called is not recognized by the device.", ), ( b'{"t":1757883536,"dps":{"102":"{\\"id\\":20001,\\"result\\":\\"other\\"}"}}', {}, "Unexpected API Result", ), ], ) def test_decode_result_with_error(payload: bytes, expected_data: dict[str, str], expected_error: str) -> None: """Test decoding a v1 RPC response protocol message.""" # The values other than the payload are arbitrary message = RoborockMessage( protocol=RoborockMessageProtocol.GENERAL_RESPONSE, payload=payload, seq=12750, version=b"1.0", random=97431, timestamp=1652547161, ) decoded_message = decode_rpc_response(message) assert decoded_message.request_id == 20001 assert decoded_message.data == expected_data assert decoded_message.api_error assert expected_error in str(decoded_message.api_error) def test_decode_no_request_id(): """Test map response decoder without a request id is raised as an exception.""" message = RoborockMessage( protocol=RoborockMessageProtocol.GENERAL_RESPONSE, payload=b'{"t":1757883536,"dps":{"102":"{\\"result\\":\\"unknown_method\\"}"}}', seq=12750, version=b"1.0", random=97431, timestamp=1652547161, ) with pytest.raises(RoborockException, match="The method called is not recognized by the device"): decode_rpc_response(message) Python-roborock-python-roborock-32df4f3/tests/protocols/testdata/000077500000000000000000000000001507503702500253665ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/tests/protocols/testdata/v1_protocol/000077500000000000000000000000001507503702500276355ustar00rootroot00000000000000app_get_init_status.json000066400000000000000000000015011507503702500345130ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/tests/protocols/testdata/v1_protocol{"t":1760665602,"dps":{"102":"{\"id\":20001,\"result\":[{\"local_info\":{\"name\":\"custom_A.03.0069_FCC\",\"bom\":\"A.03.0069\",\"location\":\"us\",\"language\":\"en\",\"wifiplan\":\"0x39\",\"timezone\":\"US/Pacific\",\"logserver\":\"awsusor0.fds.api.xiaomi.com\",\"featureset\":1},\"feature_info\":[111,112,113,114,115,116,117,118,119,120,122,123,124,125],\"new_feature_info\":633887780925447,\"new_feature_info2\":8192,\"new_feature_info_str\":\"0000000000002000\",\"status_info\":{\"state\":8,\"battery\":100,\"clean_time\":5610,\"clean_area\":96490000,\"error_code\":0,\"in_cleaning\":0,\"in_returning\":0,\"in_fresh_state\":1,\"lab_status\":1,\"water_box_status\":0,\"map_status\":3,\"is_locating\":0,\"lock_status\":0,\"water_box_mode\":204,\"distance_off\":0,\"water_box_carriage_status\":0,\"mop_forbidden_enable\":0}}]}"}} Python-roborock-python-roborock-32df4f3/tests/protocols/testdata/v1_protocol/get_clean_summary.json000066400000000000000000000005051507503702500342260ustar00rootroot00000000000000{ "t": 1757878288, "dps": { "102": 
"{\"id\":20001,\"result\":[1442559,24258125000,296,[1756848207,1754930385,1753203976,1752183435,1747427370,1746204046,1745601543,1744387080,1743528522,1742489154,1741022299,1740433682,1739902516,1738875106,1738864366,1738620067,1736873889,1736197544,1736121269,1734458038]]}" } } Python-roborock-python-roborock-32df4f3/tests/protocols/testdata/v1_protocol/get_consumeables.json000066400000000000000000000003271507503702500340510ustar00rootroot00000000000000{"t":1759038395,"dps":{"102":"{\"id\":20001,\"result\":[{\"main_brush_work_time\":879348,\"side_brush_work_time\":707618,\"filter_work_time\":738722,\"filter_element_work_time\":0,\"sensor_dirty_time\":455517}]}"}} Python-roborock-python-roborock-32df4f3/tests/protocols/testdata/v1_protocol/get_dnd.json000066400000000000000000000002261507503702500321340ustar00rootroot00000000000000{"t": 1755785801, "dps": {"102": "{\"id\":20002,\"result\":[{\"start_hour\":22,\"start_minute\":0,\"end_hour\":8,\"end_minute\":0,\"enabled\":1}]}"}} get_multi_maps_list.json000066400000000000000000000004041507503702500345130ustar00rootroot00000000000000Python-roborock-python-roborock-32df4f3/tests/protocols/testdata/v1_protocol{"t":1758987228,"dps":{"102":"{\"id\":20001,\"result\":[{\"max_multi_map\":1,\"max_bak_map\":1,\"multi_map_count\":1,\"map_info\":[{\"mapFlag\":0,\"add_time\":1747132930,\"length\":0,\"name\":\"\",\"bak_maps\":[{\"mapFlag\":4,\"add_time\":1747132936}]}]}]}"}} Python-roborock-python-roborock-32df4f3/tests/protocols/testdata/v1_protocol/get_room_mapping.json000066400000000000000000000001601507503702500340530ustar00rootroot00000000000000{"t":1759590351,"dps":{"102":"{\"id\":20001,\"result\":[[16,\"3031886\"],[17,\"3031880\"],[18,\"3031883\"]]}"}} Python-roborock-python-roborock-32df4f3/tests/protocols/testdata/v1_protocol/get_room_mapping2.json000066400000000000000000000003341507503702500341400ustar00rootroot00000000000000{"t":1759590351,"dps":{"102":"{\"id\":20001,\"result\":[[16, \"2537178\", 6], [17, \"2537175\", 14], [18, \"2537174\", 13], [19, \"2537176\", 14], [20, \"10655627\", 12], [21, \"2537145\", 2], [22, \"2537147\", 12]]}"}} Python-roborock-python-roborock-32df4f3/tests/protocols/testdata/v1_protocol/get_status.json000066400000000000000000000010321507503702500327060ustar00rootroot00000000000000{"t": 1755785773, "dps": {"102": "{\"id\":20001,\"result\":[{\"msg_ver\":2,\"msg_seq\":515,\"state\":8,\"battery\":100,\"clean_time\":5405,\"clean_area\":91287500,\"error_code\":0,\"map_present\":1,\"in_cleaning\":0,\"in_returning\":0,\"in_fresh_state\":1,\"lab_status\":1,\"water_box_status\":0,\"fan_power\":106,\"dnd_enabled\":1,\"map_status\":3,\"is_locating\":0,\"lock_status\":0,\"water_box_mode\":204,\"distance_off\":0,\"water_box_carriage_status\":0,\"mop_forbidden_enable\":0,\"unsave_map_reason\":4,\"unsave_map_flag\":0}]}"}} Python-roborock-python-roborock-32df4f3/tests/protocols/testdata/v1_protocol/get_volume.json000066400000000000000000000001001507503702500326650ustar00rootroot00000000000000{"t":1757903261,"dps":{"102":"{\"id\":20001,\"result\":[90]}"}} Python-roborock-python-roborock-32df4f3/tests/test_a01_api.py000066400000000000000000000210741507503702500243600ustar00rootroot00000000000000import asyncio import json from collections.abc import AsyncGenerator from queue import Queue from typing import Any from unittest.mock import patch import paho.mqtt.client as mqtt import pytest from Crypto.Cipher import AES from Crypto.Util.Padding import pad from roborock import ( HomeData, UserData, ) from roborock.code_mappings import 
ZeoState, ZeoTemperature from roborock.containers import DeviceData, RoborockCategory from roborock.exceptions import RoborockException from roborock.protocol import MessageParser from roborock.roborock_message import ( RoborockMessage, RoborockMessageProtocol, RoborockZeoProtocol, ) from roborock.version_a01_apis import RoborockMqttClientA01 from tests.mock_data import ( HOME_DATA_RAW, LOCAL_KEY, MQTT_PUBLISH_TOPIC, USER_DATA, WASHER_PRODUCT, ZEO_ONE_DEVICE, ) from . import mqtt_packet from .conftest import QUEUE_TIMEOUT RELEASE_TIMEOUT = 2 @pytest.fixture(name="a01_mqtt_client") async def a01_mqtt_client_fixture( mock_create_connection: None, mock_select: None ) -> AsyncGenerator[RoborockMqttClientA01, None]: user_data = UserData.from_dict(USER_DATA) home_data = HomeData.from_dict( { **HOME_DATA_RAW, "devices": [ZEO_ONE_DEVICE], "products": [WASHER_PRODUCT], } ) device_info = DeviceData( device=home_data.devices[0], model=home_data.products[0].model, ) client = RoborockMqttClientA01( user_data, device_info, RoborockCategory.WASHING_MACHINE, queue_timeout=QUEUE_TIMEOUT ) try: yield client finally: # Cleanup is best effort to reduce number of active threads if client.is_connected(): try: async with asyncio.timeout(RELEASE_TIMEOUT): await client.async_release() except Exception: pass @pytest.fixture(name="connected_a01_mqtt_client") async def connected_a01_mqtt_client_fixture( response_queue: Queue, a01_mqtt_client: RoborockMqttClientA01 ) -> AsyncGenerator[RoborockMqttClientA01, None]: response_queue.put(mqtt_packet.gen_connack(rc=0, flags=2)) response_queue.put(mqtt_packet.gen_suback(1, 0)) await a01_mqtt_client.async_connect() yield a01_mqtt_client async def test_async_connect(received_requests: Queue, connected_a01_mqtt_client: RoborockMqttClientA01) -> None: """Test connecting to the MQTT broker.""" assert connected_a01_mqtt_client.is_connected() # Connecting again is a no-op await connected_a01_mqtt_client.async_connect() assert connected_a01_mqtt_client.is_connected() await connected_a01_mqtt_client.async_disconnect() assert not connected_a01_mqtt_client.is_connected() # Broker received a connect and subscribe. 
Disconnect packet is not # guaranteed to be captured by the time the async_disconnect returns assert received_requests.qsize() >= 2 # Connect and Subscribe async def test_connect_failure( received_requests: Queue, response_queue: Queue, a01_mqtt_client: RoborockMqttClientA01 ) -> None: """Test the broker responding with a connect failure.""" response_queue.put(mqtt_packet.gen_connack(rc=1)) with pytest.raises(RoborockException, match="Failed to connect"): await a01_mqtt_client.async_connect() assert not a01_mqtt_client.is_connected() assert received_requests.qsize() == 1 # Connect attempt async def test_disconnect_already_disconnected(connected_a01_mqtt_client: RoborockMqttClientA01) -> None: """Test the MQTT client error handling for a no-op disconnect.""" assert connected_a01_mqtt_client.is_connected() # Make the MQTT client simulate returning that it already thinks it is disconnected with patch("roborock.cloud_api.mqtt.Client.disconnect", return_value=mqtt.MQTT_ERR_NO_CONN): await connected_a01_mqtt_client.async_disconnect() async def test_disconnect_failure(connected_a01_mqtt_client: RoborockMqttClientA01) -> None: """Test that the MQTT client ignores MQTT client error handling for a no-op disconnect.""" assert connected_a01_mqtt_client.is_connected() # Make the MQTT client returns with an error when disconnecting with ( patch("roborock.cloud_api.mqtt.Client.disconnect", return_value=mqtt.MQTT_ERR_PROTOCOL), pytest.raises(RoborockException, match="Failed to disconnect"), ): await connected_a01_mqtt_client.async_disconnect() async def test_async_release(connected_a01_mqtt_client: RoborockMqttClientA01) -> None: """Test the async_release API will disconnect the client.""" await connected_a01_mqtt_client.async_release() assert not connected_a01_mqtt_client.is_connected() async def test_subscribe_failure( received_requests: Queue, response_queue: Queue, a01_mqtt_client: RoborockMqttClientA01 ) -> None: """Test the broker responding with the wrong message type on subscribe.""" response_queue.put(mqtt_packet.gen_connack(rc=0, flags=2)) with ( patch("roborock.cloud_api.mqtt.Client.subscribe", return_value=(mqtt.MQTT_ERR_NO_CONN, None)), pytest.raises(RoborockException, match="Failed to subscribe"), ): await a01_mqtt_client.async_connect() assert received_requests.qsize() == 1 # Connect attempt # NOTE: The client is "connected" but not "subscribed" and cannot recover # from this state without disconnecting first. This can likely be improved. 
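# The assertions below confirm the stuck state described in the note above:
# the client still reports itself as connected, a repeat async_connect() is a
# no-op, and the broker has only ever received the single connect packet.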
assert a01_mqtt_client.is_connected() # Attempting to reconnect is a no-op since the client already thinks it is connected await a01_mqtt_client.async_connect() assert a01_mqtt_client.is_connected() assert received_requests.qsize() == 1 def build_rpc_response(message: dict[Any, Any]) -> bytes: """Build an encoded RPC response message.""" return MessageParser.build( [ RoborockMessage( protocol=RoborockMessageProtocol.RPC_RESPONSE, payload=pad( json.dumps( { "dps": message, # {10000: json.dumps(message)}, } ).encode(), AES.block_size, ), version=b"A01", seq=2020, ), ], local_key=LOCAL_KEY, ) async def test_update_values( received_requests: Queue, response_queue: Queue, connected_a01_mqtt_client: RoborockMqttClientA01, ) -> None: """Test sending an arbitrary MQTT message and parsing the response.""" message = build_rpc_response( { 203: 6, # spinning 207: 3, # medium } ) response_queue.put(mqtt_packet.gen_publish(MQTT_PUBLISH_TOPIC, payload=message)) data = await connected_a01_mqtt_client.update_values([RoborockZeoProtocol.STATE, RoborockZeoProtocol.TEMP]) assert data.get(RoborockZeoProtocol.STATE) == ZeoState.spinning.name assert data.get(RoborockZeoProtocol.TEMP) == ZeoTemperature.medium.name async def test_set_value( received_requests: Queue, response_queue: Queue, connected_a01_mqtt_client: RoborockMqttClientA01, ) -> None: """Test sending an arbitrary MQTT message and parsing the response.""" # Clear existing messages received during setup assert received_requests.qsize() == 2 assert received_requests.get(block=True, timeout=QUEUE_TIMEOUT) assert received_requests.get(block=True, timeout=QUEUE_TIMEOUT) assert received_requests.empty() # Prepare the response message message = build_rpc_response({}) response_queue.put(mqtt_packet.gen_publish(MQTT_PUBLISH_TOPIC, payload=message)) await connected_a01_mqtt_client.set_value(RoborockZeoProtocol.STATE, "spinning") assert received_requests.get(block=True) async def test_publish_failure( connected_a01_mqtt_client: RoborockMqttClientA01, ) -> None: """Test a failure return code when publishing a messaage.""" msg = mqtt.MQTTMessageInfo(0) msg.rc = mqtt.MQTT_ERR_PROTOCOL with ( patch("roborock.cloud_api.mqtt.Client.publish", return_value=msg), pytest.raises(RoborockException, match="Failed to publish"), ): await connected_a01_mqtt_client.update_values([RoborockZeoProtocol.STATE]) async def test_future_timeout( connected_a01_mqtt_client: RoborockMqttClientA01, ) -> None: """Test a timeout raised while waiting for an RPC response.""" with patch("roborock.roborock_future.asyncio.timeout", side_effect=asyncio.TimeoutError): data = await connected_a01_mqtt_client.update_values([RoborockZeoProtocol.STATE]) assert data.get(RoborockZeoProtocol.STATE) is None Python-roborock-python-roborock-32df4f3/tests/test_api.py000066400000000000000000000300401507503702500237100ustar00rootroot00000000000000import asyncio import json import logging from collections.abc import AsyncGenerator from queue import Queue from typing import Any from unittest.mock import AsyncMock, patch import paho.mqtt.client as mqtt import pytest from roborock import ( HomeData, RoborockDockDustCollectionModeCode, RoborockDockTypeCode, RoborockDockWashTowelModeCode, UserData, ) from roborock.containers import DeviceData, RoomMapping, S7MaxVStatus from roborock.exceptions import RoborockException, RoborockTimeout from roborock.protocol import MessageParser from roborock.roborock_message import RoborockMessage, RoborockMessageProtocol from roborock.version_1_apis import RoborockMqttClientV1 from 
roborock.web_api import PreparedRequest, RoborockApiClient from tests.mock_data import ( BASE_URL_REQUEST, GET_CODE_RESPONSE, HOME_DATA_RAW, LOCAL_KEY, MQTT_PUBLISH_TOPIC, STATUS, USER_DATA, ) from . import mqtt_packet def test_can_create_prepared_request(): PreparedRequest("https://sample.com", AsyncMock()) async def test_can_create_mqtt_roborock(): home_data = HomeData.from_dict(HOME_DATA_RAW) device_info = DeviceData(device=home_data.devices[0], model=home_data.products[0].model) RoborockMqttClientV1(UserData.from_dict(USER_DATA), device_info) async def test_get_base_url_no_url(): rc = RoborockApiClient("sample@gmail.com") with patch("roborock.web_api.PreparedRequest.request") as mock_request: mock_request.return_value = BASE_URL_REQUEST await rc._get_iot_login_info() assert await rc.base_url == "https://sample.com" async def test_request_code(): rc = RoborockApiClient("sample@gmail.com") with ( patch("roborock.web_api.RoborockApiClient._get_iot_login_info"), patch("roborock.web_api.RoborockApiClient._get_header_client_id"), patch("roborock.web_api.PreparedRequest.request") as mock_request, ): mock_request.return_value = GET_CODE_RESPONSE await rc.request_code() async def test_get_home_data(): rc = RoborockApiClient("sample@gmail.com") with ( patch("roborock.web_api.RoborockApiClient._get_iot_login_info"), patch("roborock.web_api.RoborockApiClient._get_header_client_id"), patch("roborock.web_api.PreparedRequest.request") as mock_prepared_request, ): mock_prepared_request.side_effect = [ {"code": 200, "msg": "success", "data": {"rrHomeId": 1}}, {"code": 200, "success": True, "result": HOME_DATA_RAW}, ] user_data = UserData.from_dict(USER_DATA) result = await rc.get_home_data(user_data) assert result == HomeData.from_dict(HOME_DATA_RAW) async def test_get_dust_collection_mode(): home_data = HomeData.from_dict(HOME_DATA_RAW) device_info = DeviceData(device=home_data.devices[0], model=home_data.products[0].model) rmc = RoborockMqttClientV1(UserData.from_dict(USER_DATA), device_info) with patch("roborock.version_1_apis.roborock_client_v1.AttributeCache.async_value") as command: command.return_value = {"mode": 1} dust = await rmc.get_dust_collection_mode() assert dust is not None assert dust.mode == RoborockDockDustCollectionModeCode.light async def test_get_mop_wash_mode(): home_data = HomeData.from_dict(HOME_DATA_RAW) device_info = DeviceData(device=home_data.devices[0], model=home_data.products[0].model) rmc = RoborockMqttClientV1(UserData.from_dict(USER_DATA), device_info) with patch("roborock.version_1_apis.roborock_client_v1.AttributeCache.async_value") as command: command.return_value = {"smart_wash": 0, "wash_interval": 1500} mop_wash = await rmc.get_smart_wash_params() assert mop_wash is not None assert mop_wash.smart_wash == 0 assert mop_wash.wash_interval == 1500 async def test_get_washing_mode(): home_data = HomeData.from_dict(HOME_DATA_RAW) device_info = DeviceData(device=home_data.devices[0], model=home_data.products[0].model) rmc = RoborockMqttClientV1(UserData.from_dict(USER_DATA), device_info) with patch("roborock.version_1_apis.roborock_client_v1.AttributeCache.async_value") as command: command.return_value = {"wash_mode": 2} washing_mode = await rmc.get_wash_towel_mode() assert washing_mode is not None assert washing_mode.wash_mode == RoborockDockWashTowelModeCode.deep assert washing_mode.wash_mode == 2 async def test_get_prop(): home_data = HomeData.from_dict(HOME_DATA_RAW) device_info = DeviceData(device=home_data.devices[0], model=home_data.products[0].model) rmc = 
RoborockMqttClientV1(UserData.from_dict(USER_DATA), device_info) with ( patch("roborock.version_1_apis.roborock_mqtt_client_v1.RoborockMqttClientV1.get_status") as get_status, patch("roborock.version_1_apis.roborock_client_v1.RoborockClientV1.send_command"), patch("roborock.version_1_apis.roborock_client_v1.AttributeCache.async_value"), patch("roborock.version_1_apis.roborock_mqtt_client_v1.RoborockMqttClientV1.get_dust_collection_mode"), ): status = S7MaxVStatus.from_dict(STATUS) status.dock_type = RoborockDockTypeCode.auto_empty_dock_pure get_status.return_value = status props = await rmc.get_prop() assert props assert props.dock_summary assert props.dock_summary.wash_towel_mode is None assert props.dock_summary.smart_wash_params is None assert props.dock_summary.dust_collection_mode is not None @pytest.fixture(name="connected_mqtt_client") async def connected_mqtt_client_fixture( response_queue: Queue, mqtt_client: RoborockMqttClientV1 ) -> AsyncGenerator[RoborockMqttClientV1, None]: response_queue.put(mqtt_packet.gen_connack(rc=0, flags=2)) response_queue.put(mqtt_packet.gen_suback(1, 0)) await mqtt_client.async_connect() yield mqtt_client if mqtt_client.is_connected(): try: await mqtt_client.async_disconnect() except Exception: pass async def test_async_connect(received_requests: Queue, connected_mqtt_client: RoborockMqttClientV1) -> None: """Test connecting to the MQTT broker.""" assert connected_mqtt_client.is_connected() # Connecting again is a no-op await connected_mqtt_client.async_connect() assert connected_mqtt_client.is_connected() await connected_mqtt_client.async_disconnect() assert not connected_mqtt_client.is_connected() # Broker received a connect and subscribe. Disconnect packet is not # guaranteed to be captured by the time the async_disconnect returns assert received_requests.qsize() >= 2 # Connect and Subscribe async def test_connect_failure_response( received_requests: Queue, response_queue: Queue, mqtt_client: RoborockMqttClientV1 ) -> None: """Test the broker responding with a connect failure.""" response_queue.put(mqtt_packet.gen_connack(rc=1)) with pytest.raises(RoborockException, match="Failed to connect"): await mqtt_client.async_connect() assert not mqtt_client.is_connected() assert received_requests.qsize() == 1 # Connect attempt async def test_disconnect_already_disconnected(connected_mqtt_client: RoborockMqttClientV1) -> None: """Test the MQTT client error handling for a no-op disconnect.""" assert connected_mqtt_client.is_connected() # Make the MQTT client simulate returning that it already thinks it is disconnected with patch("roborock.cloud_api.mqtt.Client.disconnect", return_value=mqtt.MQTT_ERR_NO_CONN): await connected_mqtt_client.async_disconnect() async def test_disconnect_failure(connected_mqtt_client: RoborockMqttClientV1) -> None: """Test that the MQTT client ignores MQTT client error handling for a no-op disconnect.""" assert connected_mqtt_client.is_connected() # Make the MQTT client returns with an error when disconnecting with ( patch("roborock.cloud_api.mqtt.Client.disconnect", return_value=mqtt.MQTT_ERR_PROTOCOL), pytest.raises(RoborockException, match="Failed to disconnect"), ): await connected_mqtt_client.async_disconnect() async def test_disconnect_failure_response( received_requests: Queue, response_queue: Queue, connected_mqtt_client: RoborockMqttClientV1, caplog: pytest.LogCaptureFixture, ) -> None: """Test the broker responding with a connect failure.""" # Enqueue a failed message -- however, the client does not process any # further 
messages and there is no parsing error, and no failed log messages. response_queue.put(mqtt_packet.gen_disconnect(reason_code=1)) assert connected_mqtt_client.is_connected() with caplog.at_level(logging.ERROR): await connected_mqtt_client.async_disconnect() assert not connected_mqtt_client.is_connected() assert not caplog.records async def test_async_release(connected_mqtt_client: RoborockMqttClientV1) -> None: """Test the async_release API will disconnect the client.""" await connected_mqtt_client.async_release() assert not connected_mqtt_client.is_connected() async def test_subscribe_failure( received_requests: Queue, response_queue: Queue, mqtt_client: RoborockMqttClientV1 ) -> None: """Test the broker responding with the wrong message type on subscribe.""" response_queue.put(mqtt_packet.gen_connack(rc=0, flags=2)) with ( patch("roborock.cloud_api.mqtt.Client.subscribe", return_value=(mqtt.MQTT_ERR_NO_CONN, None)), pytest.raises(RoborockException, match="Failed to subscribe"), ): await mqtt_client.async_connect() assert received_requests.qsize() == 1 # Connect attempt # NOTE: The client is "connected" but not "subscribed" and cannot recover # from this state without disconnecting first. This can likely be improved. assert mqtt_client.is_connected() # Attempting to reconnect is a no-op since the client already thinks it is connected await mqtt_client.async_connect() assert mqtt_client.is_connected() assert received_requests.qsize() == 1 def build_rpc_response(message: dict[str, Any]) -> bytes: """Build an encoded RPC response message.""" return MessageParser.build( [ RoborockMessage( protocol=RoborockMessageProtocol.RPC_RESPONSE, payload=json.dumps( { "dps": {102: json.dumps(message)}, } ).encode(), seq=2020, ), ], local_key=LOCAL_KEY, ) async def test_get_room_mapping( received_requests: Queue, response_queue: Queue, connected_mqtt_client: RoborockMqttClientV1, ) -> None: """Test sending an arbitrary MQTT message and parsing the response.""" test_request_id = 5050 message = build_rpc_response( { "id": test_request_id, "result": [[16, "2362048"], [17, "2362044"]], } ) response_queue.put(mqtt_packet.gen_publish(MQTT_PUBLISH_TOPIC, payload=message)) with patch("roborock.protocols.v1_protocol.get_next_int", return_value=test_request_id): room_mapping = await connected_mqtt_client.get_room_mapping() assert room_mapping == [ RoomMapping(segment_id=16, iot_id="2362048"), RoomMapping(segment_id=17, iot_id="2362044"), ] async def test_publish_failure( connected_mqtt_client: RoborockMqttClientV1, ) -> None: """Test a failure return code when publishing a messaage.""" msg = mqtt.MQTTMessageInfo(0) msg.rc = mqtt.MQTT_ERR_PROTOCOL with ( patch("roborock.cloud_api.mqtt.Client.publish", return_value=msg), pytest.raises(RoborockException, match="Failed to publish"), ): await connected_mqtt_client.get_room_mapping() async def test_future_timeout( connected_mqtt_client: RoborockMqttClientV1, ) -> None: """Test a timeout raised while waiting for an RPC response.""" with ( patch("roborock.roborock_future.asyncio.timeout", side_effect=asyncio.TimeoutError), pytest.raises(RoborockTimeout, match="Timeout after"), ): await connected_mqtt_client.get_room_mapping() Python-roborock-python-roborock-32df4f3/tests/test_broadcast_protocol.py000066400000000000000000000017401507503702500270270ustar00rootroot00000000000000from roborock.broadcast_protocol import RoborockProtocol def test_l01_data(): data = bytes.fromhex( 
"4c30310000000000000043841496d5a31e34b5b02c1867c445509ba5a21aec1fa4b307bddeb27a75d9b366193e8a97d0534dc39851c" "980609f2670cdcaee04594ec5c93e3c5ae609b0c9a203139ac8e40c8c" ) prot = RoborockProtocol() prot.datagram_received(data, None) device = prot.devices_found[0] assert device.duid == "ZrQn1jfZtJQLoPOL7620e" assert device.ip == "192.168.1.4" assert device.version == b"L01" def test_v1_data(): data = bytes.fromhex( "312e30000003e003e80040b87035058b439f36af42f249605f8661897173f111bb849a6231831f5874a0cf220a25872ea412d796b4902ee" "57fdc120074b901b482acb1fe6d06317e3a72ddac654fe0" ) prot = RoborockProtocol() prot.datagram_received(data, None) device = prot.devices_found[0] assert device.duid == "h96rOV3e8DTPMAOLiypREl" assert device.ip == "192.168.20.250" assert device.version == b"1.0" Python-roborock-python-roborock-32df4f3/tests/test_callbacks.py000066400000000000000000000154271507503702500250720ustar00rootroot00000000000000"""Tests for the callbacks module.""" import logging from unittest.mock import Mock from roborock.callbacks import CallbackList, CallbackMap, safe_callback def test_safe_callback_successful_execution(): """Test that safe_callback executes callback successfully.""" mock_callback = Mock() wrapped = safe_callback(mock_callback) wrapped("test_value") mock_callback.assert_called_once_with("test_value") def test_safe_callback_catches_exception(): """Test that safe_callback catches and logs exceptions.""" def failing_callback(value): raise ValueError("Test exception") mock_logger = Mock(spec=logging.Logger) wrapped = safe_callback(failing_callback, mock_logger) # Should not raise exception wrapped("test_value") mock_logger.error.assert_called_once() assert "Uncaught error in callback" in mock_logger.error.call_args[0][0] def test_safe_callback_uses_default_logger(): """Test that safe_callback uses default logger when none provided.""" def failing_callback(value): raise ValueError("Test exception") wrapped = safe_callback(failing_callback) # Should not raise exception wrapped("test_value") # CallbackMap tests def test_callback_map_add_callback_and_invoke(): """Test adding callback and invoking it.""" callback_map = CallbackMap[str, str]() mock_callback = Mock() remove_fn = callback_map.add_callback("key1", mock_callback) callback_map("key1", "test_value") mock_callback.assert_called_once_with("test_value") assert callable(remove_fn) def test_callback_map_multiple_callbacks_same_key(): """Test multiple callbacks for the same key.""" callback_map = CallbackMap[str, str]() mock_callback1 = Mock() mock_callback2 = Mock() callback_map.add_callback("key1", mock_callback1) callback_map.add_callback("key1", mock_callback2) callback_map("key1", "test_value") mock_callback1.assert_called_once_with("test_value") mock_callback2.assert_called_once_with("test_value") def test_callback_map_different_keys(): """Test callbacks for different keys.""" callback_map = CallbackMap[str, str]() mock_callback1 = Mock() mock_callback2 = Mock() callback_map.add_callback("key1", mock_callback1) callback_map.add_callback("key2", mock_callback2) callback_map("key1", "value1") callback_map("key2", "value2") mock_callback1.assert_called_once_with("value1") mock_callback2.assert_called_once_with("value2") def test_callback_map_get_callbacks(): """Test getting callbacks for a key.""" callback_map = CallbackMap[str, str]() mock_callback = Mock() # No callbacks initially assert callback_map.get_callbacks("key1") == [] # Add callback callback_map.add_callback("key1", mock_callback) callbacks = 
callback_map.get_callbacks("key1") assert len(callbacks) == 1 assert callbacks[0] == mock_callback def test_callback_map_remove_callback(): """Test removing callback.""" callback_map = CallbackMap[str, str]() mock_callback = Mock() remove_fn = callback_map.add_callback("key1", mock_callback) # Callback should be there assert len(callback_map.get_callbacks("key1")) == 1 # Remove callback remove_fn() # Callback should be gone assert callback_map.get_callbacks("key1") == [] def test_callback_map_remove_callback_cleans_up_key(): """Test that removing last callback for a key removes the key.""" callback_map = CallbackMap[str, str]() mock_callback = Mock() remove_fn = callback_map.add_callback("key1", mock_callback) # Key should exist assert "key1" in callback_map._callbacks # Remove callback remove_fn() # Key should be removed assert "key1" not in callback_map._callbacks def test_callback_map_exception_handling(caplog): """Test that exceptions in callbacks are handled gracefully.""" callback_map = CallbackMap[str, str]() def failing_callback(value): raise ValueError("Test exception") callback_map.add_callback("key1", failing_callback) with caplog.at_level(logging.ERROR): callback_map("key1", "test_value") assert "Uncaught error in callback" in caplog.text def test_callback_map_custom_logger(): """Test using custom logger.""" mock_logger = Mock(spec=logging.Logger) callback_map = CallbackMap[str, str](logger=mock_logger) def failing_callback(value): raise ValueError("Test exception") callback_map.add_callback("key1", failing_callback) callback_map("key1", "test_value") mock_logger.error.assert_called_once() # CallbackList tests def test_callback_list_add_callback_and_invoke(): """Test adding callback and invoking it.""" callback_list = CallbackList[str]() mock_callback = Mock() remove_fn = callback_list.add_callback(mock_callback) callback_list("test_value") mock_callback.assert_called_once_with("test_value") assert callable(remove_fn) def test_callback_list_multiple_callbacks(): """Test multiple callbacks in the list.""" callback_list = CallbackList[str]() mock_callback1 = Mock() mock_callback2 = Mock() callback_list.add_callback(mock_callback1) callback_list.add_callback(mock_callback2) callback_list("test_value") mock_callback1.assert_called_once_with("test_value") mock_callback2.assert_called_once_with("test_value") def test_callback_list_remove_callback(): """Test removing callback from list.""" callback_list = CallbackList[str]() mock_callback1 = Mock() mock_callback2 = Mock() remove_fn1 = callback_list.add_callback(mock_callback1) callback_list.add_callback(mock_callback2) # Both should be called callback_list("test_value") assert mock_callback1.call_count == 1 assert mock_callback2.call_count == 1 # Remove first callback remove_fn1() # Only second should be called callback_list("test_value2") assert mock_callback1.call_count == 1 # Still 1 assert mock_callback2.call_count == 2 # Now 2 def test_callback_list_exception_handling(caplog): """Test that exceptions in callbacks are handled gracefully.""" callback_list = CallbackList[str]() def failing_callback(value): raise ValueError("Test exception") callback_list.add_callback(failing_callback) with caplog.at_level(logging.ERROR): callback_list("test_value") assert "Uncaught error in callback" in caplog.text def test_callback_list_custom_logger(): """Test using custom logger.""" mock_logger = Mock(spec=logging.Logger) callback_list = CallbackList[str](logger=mock_logger) def failing_callback(value): raise ValueError("Test exception") 
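# Register the failing callback; invoking the list below should swallow the
# exception and log it (verified via caplog) rather than propagate it.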
callback_list.add_callback(failing_callback) callback_list("test_value") mock_logger.error.assert_called_once() Python-roborock-python-roborock-32df4f3/tests/test_containers.py000066400000000000000000000372131507503702500253150ustar00rootroot00000000000000"""Test cases for the containers module.""" from dataclasses import dataclass from typing import Any from syrupy import SnapshotAssertion from roborock import CleanRecord, CleanSummary, Consumable, DnDTimer, HomeData, S7MaxVStatus, UserData from roborock.b01_containers import ( B01Fault, B01Props, SCWindMapping, WorkStatusMapping, ) from roborock.code_mappings import ( RoborockCategory, RoborockDockErrorCode, RoborockDockTypeCode, RoborockErrorCode, RoborockFanSpeedS7MaxV, RoborockMopIntensityS7, RoborockMopModeS7, RoborockStateCode, ) from roborock.containers import MultiMapsList, RoborockBase from .mock_data import ( CLEAN_RECORD, CLEAN_SUMMARY, CONSUMABLE, DND_TIMER, HOME_DATA_RAW, K_VALUE, LOCAL_KEY, PRODUCT_ID, STATUS, USER_DATA, ) @dataclass class SimpleObject(RoborockBase): """Simple object for testing serialization.""" name: str | None = None value: int | None = None @dataclass class ComplexObject(RoborockBase): """Complex object for testing serialization.""" simple: SimpleObject | None = None items: list[str] | None = None value: int | None = None nested_dict: dict[str, SimpleObject] | None = None nested_list: list[SimpleObject] | None = None any: Any | None = None def test_simple_object() -> None: """Test serialization and deserialization of a simple object.""" obj = SimpleObject(name="Test", value=42) serialized = obj.as_dict() assert serialized == {"name": "Test", "value": 42} deserialized = SimpleObject.from_dict(serialized) assert deserialized.name == "Test" assert deserialized.value == 42 def test_complex_object() -> None: """Test serialization and deserialization of a complex object.""" simple = SimpleObject(name="Nested", value=100) obj = ComplexObject( simple=simple, items=["item1", "item2"], value=200, nested_dict={ "nested1": SimpleObject(name="Nested1", value=1), "nested2": SimpleObject(name="Nested2", value=2), }, nested_list=[SimpleObject(name="Nested3", value=3), SimpleObject(name="Nested4", value=4)], any="This can be anything", ) serialized = obj.as_dict() assert serialized == { "simple": {"name": "Nested", "value": 100}, "items": ["item1", "item2"], "value": 200, "nestedDict": { "nested1": {"name": "Nested1", "value": 1}, "nested2": {"name": "Nested2", "value": 2}, }, "nestedList": [ {"name": "Nested3", "value": 3}, {"name": "Nested4", "value": 4}, ], "any": "This can be anything", } deserialized = ComplexObject.from_dict(serialized) assert deserialized.simple.name == "Nested" assert deserialized.simple.value == 100 assert deserialized.items == ["item1", "item2"] assert deserialized.value == 200 assert deserialized.nested_dict == { "nested1": SimpleObject(name="Nested1", value=1), "nested2": SimpleObject(name="Nested2", value=2), } assert deserialized.nested_list == [ SimpleObject(name="Nested3", value=3), SimpleObject(name="Nested4", value=4), ] assert deserialized.any == "This can be anything" def test_ignore_unknown_keys() -> None: """Test that we don't fail on unknown keys.""" data = { "ignored_key": "This key should be ignored", "name": "named_object", "value": 42, } deserialized = SimpleObject.from_dict(data) assert deserialized.name == "named_object" assert deserialized.value == 42 def test_user_data(): ud = UserData.from_dict(USER_DATA) assert ud.uid == 123456 assert ud.tokentype == "token_type" assert 
ud.token == "abc123" assert ud.rruid == "abc123" assert ud.region == "us" assert ud.country == "US" assert ud.countrycode == "1" assert ud.nickname == "user_nickname" assert ud.rriot.u == "user123" assert ud.rriot.s == "pass123" assert ud.rriot.h == "unknown123" assert ud.rriot.k == K_VALUE assert ud.rriot.r.r == "US" assert ud.rriot.r.a == "https://api-us.roborock.com" assert ud.rriot.r.m == "tcp://mqtt-us.roborock.com:8883" assert ud.rriot.r.l == "https://wood-us.roborock.com" assert ud.tuya_device_state == 2 assert ud.avatarurl == "https://files.roborock.com/iottest/default_avatar.png" def test_home_data(): hd = HomeData.from_dict(HOME_DATA_RAW) assert hd.id == 123456 assert hd.name == "My Home" assert hd.lon is None assert hd.lat is None assert hd.geo_name is None product = hd.products[0] assert product.id == PRODUCT_ID assert product.name == "Roborock S7 MaxV" assert product.code == "a27" assert product.model == "roborock.vacuum.a27" assert product.icon_url is None assert product.attribute is None assert product.capability == 0 assert product.category == RoborockCategory.VACUUM schema = product.schema assert schema[0].id == "101" assert schema[0].name == "rpc_request" assert schema[0].code == "rpc_request_code" assert schema[0].mode == "rw" assert schema[0].type == "RAW" assert schema[0].product_property is None assert schema[0].desc is None device = hd.devices[0] assert device.duid == "abc123" assert device.name == "Roborock S7 MaxV" assert device.attribute is None assert device.active_time == 1672364449 assert device.local_key == LOCAL_KEY assert device.runtime_env is None assert device.time_zone_id == "America/Los_Angeles" assert device.icon_url == "no_url" assert device.product_id == "product-id-123" assert device.lon is None assert device.lat is None assert not device.share assert device.share_time is None assert device.online assert device.fv == "02.56.02" assert device.pv == "1.0" assert device.room_id == 2362003 assert device.tuya_uuid is None assert not device.tuya_migrated assert device.extra == '{"RRPhotoPrivacyVersion": "1"}' assert device.sn == "abc123" assert device.feature_set == "2234201184108543" assert device.new_feature_set == "0000000000002041" # status = device.device_status # assert status.name == assert device.silent_ota_switch assert hd.rooms[0].id == 2362048 assert hd.rooms[0].name == "Example room 1" def test_serialize_and_unserialize(): ud = UserData.from_dict(USER_DATA) ud_dict = ud.as_dict() assert ud_dict == USER_DATA def test_consumable(): c = Consumable.from_dict(CONSUMABLE) assert c.main_brush_work_time == 74382 assert c.side_brush_work_time == 74383 assert c.filter_work_time == 74384 assert c.filter_element_work_time == 0 assert c.sensor_dirty_time == 74385 assert c.strainer_work_times == 65 assert c.dust_collection_work_times == 25 assert c.cleaning_brush_work_times == 66 def test_status(): s = S7MaxVStatus.from_dict(STATUS) assert s.msg_ver == 2 assert s.msg_seq == 458 assert s.state == RoborockStateCode.charging assert s.battery == 100 assert s.clean_time == 1176 assert s.clean_area == 20965000 assert s.square_meter_clean_area == 21.0 assert s.error_code == RoborockErrorCode.none assert s.map_present == 1 assert s.in_cleaning == 0 assert s.in_returning == 0 assert s.in_fresh_state == 1 assert s.lab_status == 1 assert s.water_box_status == 1 assert s.back_type == -1 assert s.wash_phase == 0 assert s.wash_ready == 0 assert s.fan_power == 102 assert s.dnd_enabled == 0 assert s.map_status == 3 assert s.current_map == 0 assert s.is_locating == 0 assert 
s.lock_status == 0 assert s.water_box_mode == 203 assert s.water_box_carriage_status == 1 assert s.mop_forbidden_enable == 1 assert s.camera_status == 3457 assert s.is_exploring == 0 assert s.home_sec_status == 0 assert s.home_sec_enable_password == 0 assert s.adbumper_status == [0, 0, 0] assert s.water_shortage_status == 0 assert s.dock_type == RoborockDockTypeCode.empty_wash_fill_dock assert s.dust_collection_status == 0 assert s.auto_dust_collection == 1 assert s.avoid_count == 19 assert s.mop_mode == 300 assert s.debug_mode == 0 assert s.collision_avoid_status == 1 assert s.switch_map_mode == 0 assert s.dock_error_status == RoborockDockErrorCode.ok assert s.charge_status == 1 assert s.unsave_map_reason == 0 assert s.unsave_map_flag == 0 assert s.fan_power == RoborockFanSpeedS7MaxV.balanced assert s.mop_mode == RoborockMopModeS7.standard assert s.water_box_mode == RoborockMopIntensityS7.intense def test_current_map() -> None: """Test the current map logic based on map status.""" s = S7MaxVStatus.from_dict(STATUS) assert s.map_status == 3 assert s.current_map == 0 s.map_status = 7 assert s.current_map == 1 s.map_status = 11 assert s.current_map == 2 s.map_status = None assert not s.current_map def test_dnd_timer(): dnd = DnDTimer.from_dict(DND_TIMER) assert dnd.start_hour == 22 assert dnd.start_minute == 0 assert dnd.end_hour == 7 assert dnd.end_minute == 0 assert dnd.enabled == 1 def test_clean_summary(): cs = CleanSummary.from_dict(CLEAN_SUMMARY) assert cs.clean_time == 74382 assert cs.clean_area == 1159182500 assert cs.square_meter_clean_area == 1159.2 assert cs.clean_count == 31 assert cs.dust_collection_count == 25 assert cs.records assert len(cs.records) == 2 assert cs.records[1] == 1672458041 def test_clean_record(): cr = CleanRecord.from_dict(CLEAN_RECORD) assert cr.begin == 1672543330 assert cr.end == 1672544638 assert cr.duration == 1176 assert cr.area == 20965000 assert cr.square_meter_area == 21.0 assert cr.error == 0 assert cr.complete == 1 assert cr.start_type == 2 assert cr.clean_type == 3 assert cr.finish_reason == 56 assert cr.dust_collection_status == 1 assert cr.avoid_count == 19 assert cr.wash_count == 2 assert cr.map_flag == 0 def test_no_value(): modified_status = STATUS.copy() modified_status["dock_type"] = 9999 s = S7MaxVStatus.from_dict(modified_status) assert s.dock_type == RoborockDockTypeCode.unknown assert -9999 not in RoborockDockTypeCode.keys() assert "missing" not in RoborockDockTypeCode.values() def test_b01props_deserialization(): """Test that B01Props can be deserialized after its module is dynamically imported.""" B01_PROPS_MOCK_DATA = { "status": 6, "fault": 510, "wind": 3, "water": 2, "mode": 1, "quantity": 1, "alarm": 0, "volume": 60, "hypa": 90, "mainBrush": 80, "sideBrush": 70, "mopLife": 60, "mainSensor": 50, "netStatus": { "rssi": "-60", "loss": 1, "ping": 20, "ip": "192.168.1.102", "mac": "BB:CC:DD:EE:FF:00", "ssid": "MyOtherWiFi", "frequency": 2.4, "bssid": "00:FF:EE:DD:CC:BB", }, "repeatState": 1, "tankState": 0, "sweepType": 0, "cleanPathPreference": 1, "clothState": 1, "timeZone": -5, "timeZoneInfo": "America/New_York", "language": 2, "cleaningTime": 1500, "realCleanTime": 1400, "cleaningArea": 600000, "customType": 1, "sound": 0, "workMode": 3, "stationAct": 1, "chargeState": 0, "currentMapId": 2, "mapNum": 3, "dustAction": 0, "quietIsOpen": 1, "quietBeginTime": 23, "quietEndTime": 7, "cleanFinish": 0, "voiceType": 2, "voiceTypeVersion": 1, "orderTotal": {"total": 12, "enable": 0}, "buildMap": 0, "privacy": { "aiRecognize": 1, 
"dirtRecognize": 1, "petRecognize": 1, "carpetTurbo": 1, "carpetAvoid": 1, "carpetShow": 1, "mapUploads": 1, "aiAgent": 1, "aiAvoidance": 1, "recordUploads": 1, "alongFloor": 1, "autoUpgrade": 1, }, "dustAutoState": 0, "dustFrequency": 1, "childLock": 1, "multiFloor": 0, "mapSave": 0, "lightMode": 0, "greenLaser": 0, "dustBagUsed": 1, "orderSaveMode": 0, "manufacturer": "Roborock-Test", "backToWash": 0, "chargeStationType": 2, "pvCutCharge": 1, "pvCharging": {"status": 1, "beginTime": 10, "endTime": 18}, "serialNumber": "987654321", "recommend": {"sill": 0, "wall": 0, "roomId": [4, 5, 6]}, "addSweepStatus": 1, } deserialized = B01Props.from_dict(B01_PROPS_MOCK_DATA) assert isinstance(deserialized, B01Props) assert deserialized.fault == B01Fault.F_510 assert deserialized.status == WorkStatusMapping.SWEEP_MOPING_2 assert deserialized.wind == SCWindMapping.SUPER_STRONG assert deserialized.net_status.ip == "192.168.1.102" def test_multi_maps_list_info(snapshot: SnapshotAssertion) -> None: """Test that MultiMapsListInfo can be deserialized correctly.""" data = { "max_multi_map": 4, "max_bak_map": 1, "multi_map_count": 2, "map_info": [ { "mapFlag": 0, "add_time": 1757636125, "length": 10, "name": "Downstairs", "bak_maps": [{"mapFlag": 4, "add_time": 1739205442}], "rooms": [ {"id": 16, "tag": 12, "iot_name_id": "6990322", "iot_name": "Room"}, {"id": 17, "tag": 15, "iot_name_id": "7140977", "iot_name": "Room"}, {"id": 18, "tag": 12, "iot_name_id": "6985623", "iot_name": "Room"}, {"id": 19, "tag": 14, "iot_name_id": "6990378", "iot_name": "Room"}, {"id": 20, "tag": 10, "iot_name_id": "7063728", "iot_name": "Room"}, {"id": 22, "tag": 12, "iot_name_id": "6995506", "iot_name": "Room"}, {"id": 23, "tag": 15, "iot_name_id": "7140979", "iot_name": "Room"}, {"id": 25, "tag": 13, "iot_name_id": "6990383", "iot_name": "Room"}, {"id": 24, "tag": -1, "iot_name_id": "-1", "iot_name": "Room"}, ], "furnitures": [ {"id": 1, "type": 46, "subtype": 2}, {"id": 2, "type": 47, "subtype": 0}, {"id": 3, "type": 56, "subtype": 0}, {"id": 4, "type": 43, "subtype": 0}, {"id": 5, "type": 44, "subtype": 0}, {"id": 6, "type": 44, "subtype": 0}, {"id": 7, "type": 44, "subtype": 0}, {"id": 8, "type": 46, "subtype": 0}, {"id": 9, "type": 46, "subtype": 0}, ], }, { "mapFlag": 1, "add_time": 1734283706, "length": 5, "name": "Foyer", "bak_maps": [{"mapFlag": 5, "add_time": 1728184107}], "rooms": [], "furnitures": [], }, ], } deserialized = MultiMapsList.from_dict(data) assert isinstance(deserialized, MultiMapsList) assert deserialized == snapshot def test_accurate_map_flag() -> None: """Test that we parse the map flag accurately.""" s = S7MaxVStatus.from_dict(STATUS) assert s.current_map == 0 s = S7MaxVStatus.from_dict( { **STATUS, "map_status": 252, # Code for no map } ) assert s.current_map is None Python-roborock-python-roborock-32df4f3/tests/test_local_api_v1.py000066400000000000000000000072561507503702500255050ustar00rootroot00000000000000"""Tests for the Roborock Local Client V1.""" import json from collections.abc import AsyncGenerator from queue import Queue from typing import Any from unittest.mock import patch import pytest from roborock.containers import RoomMapping from roborock.exceptions import RoborockException from roborock.protocol import MessageParser from roborock.roborock_message import RoborockMessage, RoborockMessageProtocol from roborock.version_1_apis import RoborockLocalClientV1 from .mock_data import LOCAL_KEY def build_rpc_response(seq: int, message: dict[str, Any]) -> bytes: """Build an encoded RPC 
response message.""" return build_raw_response( protocol=RoborockMessageProtocol.GENERAL_REQUEST, seq=seq, payload=json.dumps( { "dps": {102: json.dumps(message)}, } ).encode(), ) def build_raw_response(protocol: RoborockMessageProtocol, seq: int, payload: bytes) -> bytes: """Build an encoded RPC response message.""" message = RoborockMessage( protocol=protocol, random=23, seq=seq, payload=payload, ) return MessageParser.build(message, local_key=LOCAL_KEY) async def test_async_connect( local_client: RoborockLocalClientV1, received_requests: Queue, response_queue: Queue, ): """Test that we can connect to the Roborock device.""" response_queue.put(build_raw_response(RoborockMessageProtocol.HELLO_RESPONSE, 1, b"ignored")) response_queue.put(build_raw_response(RoborockMessageProtocol.PING_RESPONSE, 2, b"ignored")) await local_client.async_connect() assert local_client.is_connected() assert received_requests.qsize() == 2 await local_client.async_disconnect() assert not local_client.is_connected() @pytest.fixture(name="connected_local_client") async def connected_local_client_fixture( response_queue: Queue, local_client: RoborockLocalClientV1, ) -> AsyncGenerator[RoborockLocalClientV1, None]: response_queue.put(build_raw_response(RoborockMessageProtocol.HELLO_RESPONSE, 1, b"ignored")) response_queue.put(build_raw_response(RoborockMessageProtocol.PING_RESPONSE, 2, b"ignored")) await local_client.async_connect() yield local_client async def test_get_room_mapping( received_requests: Queue, response_queue: Queue, connected_local_client: RoborockLocalClientV1, ) -> None: """Test sending an arbitrary MQTT message and parsing the response.""" test_request_id = 5050 message = build_rpc_response( seq=test_request_id, message={ "id": test_request_id, "result": [[16, "2362048"], [17, "2362044"]], }, ) response_queue.put(message) with patch("roborock.protocols.v1_protocol.get_next_int", return_value=test_request_id): room_mapping = await connected_local_client.get_room_mapping() assert room_mapping == [ RoomMapping(segment_id=16, iot_id="2362048"), RoomMapping(segment_id=17, iot_id="2362044"), ] async def test_retry_request( received_requests: Queue, response_queue: Queue, connected_local_client: RoborockLocalClientV1, ) -> None: """Test sending an arbitrary MQTT message and parsing the response.""" test_request_id = 5050 retry_message = build_rpc_response( seq=test_request_id, message={ "id": test_request_id, "result": "retry", }, ) response_queue.put(retry_message) with ( patch("roborock.protocols.v1_protocol.get_next_int", return_value=test_request_id), pytest.raises(RoborockException, match="Device is busy, try again later"), ): await connected_local_client.get_room_mapping() Python-roborock-python-roborock-32df4f3/tests/test_queue.py000066400000000000000000000012551507503702500242710ustar00rootroot00000000000000import asyncio import pytest from roborock.exceptions import VacuumError from roborock.roborock_future import RoborockFuture async def test_can_create(): RoborockFuture(1) @pytest.mark.asyncio async def test_set_result(): rq = RoborockFuture(1) rq.set_result("test") assert await rq.async_get(1) == "test" @pytest.mark.asyncio async def test_set_exception(): rq = RoborockFuture(1) rq.set_exception(VacuumError("test")) with pytest.raises(VacuumError): assert await rq.async_get(1) @pytest.mark.asyncio async def test_get_timeout(): rq = RoborockFuture(1) with pytest.raises(asyncio.TimeoutError): await rq.async_get(0.01) 
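# Illustrative sketch building on the RoborockFuture tests above: one task
# resolves the future while another awaits async_get() with a timeout. It
# assumes only the behaviour demonstrated in test_queue.py (a constructor
# taking a request id, set_result, and async_get) and is a hedged example,
# not a definitive usage pattern.
import asyncio

from roborock.roborock_future import RoborockFuture


async def example_resolve_from_another_task() -> None:
    rq = RoborockFuture(1)

    async def resolver() -> None:
        # Simulate a response arriving later, e.g. an RPC reply callback.
        await asyncio.sleep(0)
        rq.set_result("response-payload")

    resolver_task = asyncio.ensure_future(resolver())
    # async_get waits up to the given timeout (in seconds) for the result
    # set by the resolver task above.
    assert await rq.async_get(1) == "response-payload"
    await resolver_task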
Python-roborock-python-roborock-32df4f3/tests/test_roborock_message.py000066400000000000000000000017131507503702500264700ustar00rootroot00000000000000
import json

from freezegun import freeze_time

from roborock.roborock_message import RoborockMessage, RoborockMessageProtocol


def test_roborock_message() -> None:
    """Test the RoborockMessage class is initialized."""
    with freeze_time("2025-01-20T12:00:00"):
        message1 = RoborockMessage(
            protocol=RoborockMessageProtocol.RPC_REQUEST,
            payload=json.dumps({"dps": {"101": json.dumps({"id": 4321})}}).encode(),
        )
    with freeze_time("2025-01-20T11:00:00"):  # Back in time 1hr to test timestamp
        message2 = RoborockMessage(
            protocol=RoborockMessageProtocol.RPC_RESPONSE,
            payload=json.dumps({"dps": {"94": json.dumps({"id": 444}), "102": json.dumps({"id": 333})}}).encode(),
        )
    # Ensure the sequence, random numbers, etc are initialized properly
    assert message1.seq != message2.seq
    assert message1.random != message2.random
    assert message1.timestamp > message2.timestamp
Python-roborock-python-roborock-32df4f3/tests/test_supported_features.py000066400000000000000000000034651507503702500270730ustar00rootroot00000000000000
from roborock import SHORT_MODEL_TO_ENUM
from roborock.device_features import DeviceFeatures


def test_supported_features_qrevo_maxv():
    """Ensure that a QREVO MaxV has some more complicated features enabled."""
    model = "roborock.vacuum.a87"
    product_nickname = SHORT_MODEL_TO_ENUM.get(model.split(".")[-1])
    device_features = DeviceFeatures.from_feature_flags(
        new_feature_info=4499197267967999,
        new_feature_info_str="508A977F7EFEFFFF",
        feature_info=[111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125],
        product_nickname=product_nickname,
    )
    assert device_features
    print("\n".join(device_features.get_supported_features()))
    num_true = sum(vars(device_features).values())
    print(num_true)
    assert num_true != 0
    assert device_features.is_dust_collection_setting_supported
    assert device_features.is_led_status_switch_supported
    assert not device_features.is_matter_supported
    print(device_features)


def test_supported_features_s7():
    """Ensure that a S7 has some more basic features enabled."""
    model = "roborock.vacuum.a15"
    product_nickname = SHORT_MODEL_TO_ENUM.get(model.split(".")[-1])
    device_features = DeviceFeatures.from_feature_flags(
        new_feature_info=636084721975295,
        new_feature_info_str="0000000000002000",
        feature_info=[111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 122, 123, 124, 125],
        product_nickname=product_nickname,
    )
    num_true = sum(vars(device_features).values())
    assert num_true != 0
    assert device_features
    assert device_features.is_custom_mode_supported
    assert device_features.is_led_status_switch_supported
    assert not device_features.is_hot_wash_towel_supported
    num_true = sum(vars(device_features).values())
    assert num_true != 0
Python-roborock-python-roborock-32df4f3/tests/test_util.py000066400000000000000000000041251507503702500241210ustar00rootroot00000000000000
import datetime

import pytest

from roborock.util import parse_time_to_datetime


@pytest.mark.skip
def validate(start: datetime.datetime, end: datetime.datetime) -> bool:
    duration = end - start
    return duration > datetime.timedelta()


# start_date < now < end_date
def test_start_date_lower_than_now_lower_than_end_date():
    start, end = parse_time_to_datetime(
        (datetime.datetime.now() - datetime.timedelta(hours=2)).time(),
        (datetime.datetime.now() - datetime.timedelta(hours=1)).time(),
    )
    assert validate(start, end)


# start_date > now > end_date
def test_start_date_greater_than_now_greater_than_end_date():
    start, end = parse_time_to_datetime(
        (datetime.datetime.now() + datetime.timedelta(hours=1)).time(),
        (datetime.datetime.now() + datetime.timedelta(hours=2)).time(),
    )
    assert validate(start, end)


# start_date < now > end_date
def test_start_date_lower_than_now_greater_than_end_date():
    start, end = parse_time_to_datetime(
        (datetime.datetime.now() - datetime.timedelta(hours=1)).time(),
        (datetime.datetime.now() + datetime.timedelta(hours=1)).time(),
    )
    assert validate(start, end)


# start_date > now < end_date
def test_start_date_greater_than_now_lower_than_end_date():
    start, end = parse_time_to_datetime(
        (datetime.datetime.now() + datetime.timedelta(hours=1)).time(),
        (datetime.datetime.now() - datetime.timedelta(hours=1)).time(),
    )
    assert validate(start, end)


# start_date < end_date < now
def test_start_date_lower_than_end_date_lower_than_now():
    start, end = parse_time_to_datetime(
        (datetime.datetime.now() - datetime.timedelta(hours=2)).time(),
        (datetime.datetime.now() - datetime.timedelta(hours=1)).time(),
    )
    assert validate(start, end)


# start_date > end_date > now
def test_start_date_greater_than_end_date_greater_than_now():
    start, end = parse_time_to_datetime(
        (datetime.datetime.now() + datetime.timedelta(hours=2)).time(),
        (datetime.datetime.now() + datetime.timedelta(hours=1)).time(),
    )
    assert validate(start, end)
Python-roborock-python-roborock-32df4f3/tests/test_web_api.py000066400000000000000000000154541507503702500245570ustar00rootroot00000000000000
import re

import aiohttp
from aioresponses.compat import normalize_url

from roborock import HomeData, HomeDataScene, UserData
from roborock.web_api import IotLoginInfo, RoborockApiClient
from tests.mock_data import HOME_DATA_RAW, USER_DATA


async def test_pass_login_flow() -> None:
    """Test that we can login with a password and we get back the correct userdata object."""
    my_session = aiohttp.ClientSession()
    api = RoborockApiClient(username="test_user@gmail.com", session=my_session)
    ud = await api.pass_login("password")
    assert ud == UserData.from_dict(USER_DATA)
    assert not my_session.closed


async def test_code_login_flow() -> None:
    """Test that we can login with a code and we get back the correct userdata object."""
    api = RoborockApiClient(username="test_user@gmail.com")
    await api.request_code()
    ud = await api.code_login(4123)
    assert ud == UserData.from_dict(USER_DATA)


async def test_get_home_data_v2():
    """Test a full standard flow where we get the home data to end it off.
    This matches what HA does"""
    api = RoborockApiClient(username="test_user@gmail.com")
    await api.request_code()
    ud = await api.code_login(4123)
    hd = await api.get_home_data_v2(ud)
    assert hd == HomeData.from_dict(HOME_DATA_RAW)


async def test_nc_prepare():
    """Test adding a device and that nothing breaks"""
    api = RoborockApiClient(username="test_user@gmail.com")
    await api.request_code()
    ud = await api.code_login(4123)
    prepare = await api.nc_prepare(ud, "America/New_York")
    new_device = await api.add_device(ud, prepare["s"], prepare["t"])
    assert new_device["duid"] == "rand_duid"


async def test_get_scenes():
    """Test that we can get scenes"""
    api = RoborockApiClient(username="test_user@gmail.com")
    ud = await api.pass_login("password")
    sc = await api.get_scenes(ud, "123456")
    assert sc == [
        HomeDataScene.from_dict(
            {
                "id": 1234567,
                "name": "My plan",
            }
        )
    ]


async def test_execute_scene(mock_rest):
    """Test that we can execute a scene"""
    api = RoborockApiClient(username="test_user@gmail.com")
    ud = await api.pass_login("password")
    await api.execute_scene(ud, 123456)
    mock_rest.assert_any_call("https://api-us.roborock.com/user/scene/123456/execute", "post")


async def test_code_login_v4_flow(mock_rest) -> None:
    """Test that we can login with a code and we get back the correct userdata object."""
    api = RoborockApiClient(username="test_user@gmail.com")
    await api.request_code_v4()
    ud = await api.code_login_v4(4123, "US", 1)
    assert ud == UserData.from_dict(USER_DATA)


async def test_url_cycling(mock_rest) -> None:
    """Test that we cycle through the URLs correctly."""
    # Clear mock rest so that we can override the patches.
    mock_rest.clear()

    # 1. Mock US URL to return valid status but None for countrycode
    mock_rest.post(
        re.compile("https://usiot.roborock.com/api/v1/getUrlByEmail.*"),
        status=200,
        payload={
            "code": 200,
            "data": {"url": "https://usiot.roborock.com", "country": None, "countrycode": None},
            "msg": "Success",
        },
    )
    # 2. Mock EU URL to return valid status but None for countrycode
    mock_rest.post(
        re.compile("https://euiot.roborock.com/api/v1/getUrlByEmail.*"),
        status=200,
        payload={
            "code": 200,
            "data": {"url": "https://euiot.roborock.com", "country": None, "countrycode": None},
            "msg": "Success",
        },
    )
    # 3. Mock CN URL to return the correct, valid data
    mock_rest.post(
        re.compile("https://cniot.roborock.com/api/v1/getUrlByEmail.*"),
        status=200,
        payload={
            "code": 200,
            "data": {"url": "https://cniot.roborock.com", "country": "CN", "countrycode": "86"},
            "msg": "Success",
        },
    )
    # The RU URL should not be called, but we can mock it just in case
    # to catch unexpected behavior.
    mock_rest.post(re.compile("https://ruiot.roborock.com/api/v1/getUrlByEmail.*"), status=500)

    client = RoborockApiClient("test@example.com")

    result = await client._get_iot_login_info()

    assert result is not None
    assert isinstance(result, IotLoginInfo)
    assert result.base_url == "https://cniot.roborock.com"
    assert result.country == "CN"
    assert result.country_code == "86"
    assert client._iot_login_info == result

    # Check that all three urls were called. We have to do this kind of weirdly as aioresponses seems to have a bug.
assert ( len( mock_rest.requests[ ( "post", normalize_url( "https://usiot.roborock.com/api/v1/getUrlByEmail?email=test%2540example.com&needtwostepauth=false" ), ) ] ) == 1 ) assert ( len( mock_rest.requests[ ( "post", normalize_url( "https://euiot.roborock.com/api/v1/getUrlByEmail?email=test%2540example.com&needtwostepauth=false" ), ) ] ) == 1 ) assert ( len( mock_rest.requests[ ( "post", normalize_url( "https://cniot.roborock.com/api/v1/getUrlByEmail?email=test%2540example.com&needtwostepauth=false" ), ) ] ) == 1 ) # Make sure we just have the three we tested for above. assert len(mock_rest.requests) == 3 async def test_missing_country_login(mock_rest) -> None: """Test that we cycle through the URLs correctly.""" mock_rest.clear() # Make country None, but country code set. mock_rest.post( re.compile("https://usiot.roborock.com/api/v1/getUrlByEmail.*"), status=200, payload={ "code": 200, "data": {"url": "https://usiot.roborock.com", "country": None, "countrycode": 1}, "msg": "Success", }, ) # v4 is not mocked, so it would fail it were called. mock_rest.post( re.compile(r"https://.*iot\.roborock\.com/api/v1/loginWithCode.*"), status=200, payload={"code": 200, "data": USER_DATA, "msg": "success"}, ) mock_rest.post( re.compile(r"https://.*iot\.roborock\.com/api/v1/sendEmailCode.*"), status=200, payload={"code": 200, "data": None, "msg": "success"}, ) client = RoborockApiClient("test@example.com") await client.request_code_v4() ud = await client.code_login_v4(4123) assert ud is not None # Ensure we have no surprise REST calls. assert len(mock_rest.requests) == 3 Python-roborock-python-roborock-32df4f3/uv.lock000066400000000000000000006731161507503702500217110ustar00rootroot00000000000000version = 1 requires-python = ">=3.11, <4" [[distribution]] name = "aiohappyeyeballs" version = "2.6.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760 } wheels = [ { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265 }, ] [[distribution]] name = "aiohttp" version = "3.13.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohappyeyeballs" }, { name = "aiosignal" }, { name = "attrs" }, { name = "frozenlist" }, { name = "multidict" }, { name = "propcache" }, { name = "yarl" }, ] sdist = { url = "https://files.pythonhosted.org/packages/62/f1/8515650ac3121a9e55c7b217c60e7fae3e0134b5acfe65691781b5356929/aiohttp-3.13.0.tar.gz", hash = "sha256:378dbc57dd8cf341ce243f13fa1fa5394d68e2e02c15cd5f28eae35a70ec7f67", size = 7832348 } wheels = [ { url = "https://files.pythonhosted.org/packages/b1/db/df80cacac46cd548a736c5535b13cc18925cf6f9f83cd128cf3839842219/aiohttp-3.13.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:99eb94e97a42367fef5fc11e28cb2362809d3e70837f6e60557816c7106e2e20", size = 741374 }, { url = "https://files.pythonhosted.org/packages/ae/f9/2d6d93fd57ab4726e18a7cdab083772eda8302d682620fbf2aef48322351/aiohttp-3.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4696665b2713021c6eba3e2b882a86013763b442577fe5d2056a42111e732eca", size = 494956 }, { url = 
"https://files.pythonhosted.org/packages/89/a6/e1c061b079fed04ffd6777950c82f2e8246fd08b7b3c4f56fdd47f697e5a/aiohttp-3.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3e6a38366f7f0d0f6ed7a1198055150c52fda552b107dad4785c0852ad7685d1", size = 491154 }, { url = "https://files.pythonhosted.org/packages/fe/4d/ee8913c0d2c7da37fdc98673a342b51611eaa0871682b37b8430084e35b5/aiohttp-3.13.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aab715b1a0c37f7f11f9f1f579c6fbaa51ef569e47e3c0a4644fba46077a9409", size = 1745707 }, { url = "https://files.pythonhosted.org/packages/f9/70/26b2c97e8fa68644aec43d788940984c5f3b53a8d1468d5baaa328f809c9/aiohttp-3.13.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7972c82bed87d7bd8e374b60a6b6e816d75ba4f7c2627c2d14eed216e62738e1", size = 1702404 }, { url = "https://files.pythonhosted.org/packages/65/1e/c8aa3c293a0e8b18968b1b88e9bd8fb269eb67eb7449f504a4c3e175b159/aiohttp-3.13.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca8313cb852af788c78d5afdea24c40172cbfff8b35e58b407467732fde20390", size = 1805519 }, { url = "https://files.pythonhosted.org/packages/51/b6/a3753fe86249eb441768658cfc00f8c4e0913b255c13be00ddb8192775e1/aiohttp-3.13.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c333a2385d2a6298265f4b3e960590f787311b87f6b5e6e21bb8375914ef504", size = 1893904 }, { url = "https://files.pythonhosted.org/packages/51/6d/7b1e020fe1d2a2be7cf0ce5e35922f345e3507cf337faa1a6563c42065c1/aiohttp-3.13.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cc6d5fc5edbfb8041d9607f6a417997fa4d02de78284d386bea7ab767b5ea4f3", size = 1745043 }, { url = "https://files.pythonhosted.org/packages/e6/df/aad5dce268f9d4f29759c3eeb5fb5995c569d76abb267468dc1075218d5b/aiohttp-3.13.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7ddedba3d0043349edc79df3dc2da49c72b06d59a45a42c1c8d987e6b8d175b8", size = 1604765 }, { url = "https://files.pythonhosted.org/packages/1c/19/a84a0e97b2da2224c8b85e1aef5cac834d07b2903c17bff1a6bdbc7041d2/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23ca762140159417a6bbc959ca1927f6949711851e56f2181ddfe8d63512b5ad", size = 1721737 }, { url = "https://files.pythonhosted.org/packages/6c/61/ca6ad390128d964a08554fd63d6df5810fb5fbc7e599cb9e617f1729ae19/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:bfe824d6707a5dc3c5676685f624bc0c63c40d79dc0239a7fd6c034b98c25ebe", size = 1716052 }, { url = "https://files.pythonhosted.org/packages/2a/71/769e249e6625372c7d14be79b8b8c3b0592963a09793fb3d36758e60952c/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:3c11fa5dd2ef773a8a5a6daa40243d83b450915992eab021789498dc87acc114", size = 1783532 }, { url = "https://files.pythonhosted.org/packages/66/64/b9cd03cdbb629bc492e4a744fbe96550a8340b0cd7a0cc4a9c90cfecd8d3/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:00fdfe370cffede3163ba9d3f190b32c0cfc8c774f6f67395683d7b0e48cdb8a", size = 1593072 }, { url = "https://files.pythonhosted.org/packages/24/0e/87922c8cfdbd09f5e2197e9d87714a98c99c423560d44739e3af55400fe3/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:6475e42ef92717a678bfbf50885a682bb360a6f9c8819fb1a388d98198fdcb80", size = 1798613 }, { url = 
"https://files.pythonhosted.org/packages/c5/bb/a3adfe2af76e1ee9e3b5464522004b148b266bc99d7ec424ca7843d64a3c/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:77da5305a410910218b99f2a963092f4277d8a9c1f429c1ff1b026d1826bd0b6", size = 1737480 }, { url = "https://files.pythonhosted.org/packages/ad/53/e124dcbd64e6365602f3493fe37a11ca5b7ac0a40822a6e2bc8260cd08e0/aiohttp-3.13.0-cp311-cp311-win32.whl", hash = "sha256:2f9d9ea547618d907f2ee6670c9a951f059c5994e4b6de8dcf7d9747b420c820", size = 429824 }, { url = "https://files.pythonhosted.org/packages/3e/bd/485d98b372a2cd6998484a93ddd401ec6b6031657661c36846a10e2a1f6e/aiohttp-3.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f19f7798996d4458c669bd770504f710014926e9970f4729cf55853ae200469", size = 454137 }, { url = "https://files.pythonhosted.org/packages/3a/95/7e8bdfa6e79099a086d59d42589492f1fe9d29aae3cefb58b676015ce278/aiohttp-3.13.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1c272a9a18a5ecc48a7101882230046b83023bb2a662050ecb9bfcb28d9ab53a", size = 735585 }, { url = "https://files.pythonhosted.org/packages/9f/20/2f1d3ee06ee94eafe516810705219bff234d09f135d6951661661d5595ae/aiohttp-3.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:97891a23d7fd4e1afe9c2f4473e04595e4acb18e4733b910b6577b74e7e21985", size = 490613 }, { url = "https://files.pythonhosted.org/packages/74/15/ab8600ef6dc1dcd599009a81acfed2ea407037e654d32e47e344e0b08c34/aiohttp-3.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:475bd56492ce5f4cffe32b5533c6533ee0c406d1d0e6924879f83adcf51da0ae", size = 489750 }, { url = "https://files.pythonhosted.org/packages/33/59/752640c2b86ca987fe5703a01733b00d375e6cd2392bc7574489934e64e5/aiohttp-3.13.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c32ada0abb4bc94c30be2b681c42f058ab104d048da6f0148280a51ce98add8c", size = 1736812 }, { url = "https://files.pythonhosted.org/packages/3d/c6/dd6b86ddb852a7fdbcdc7a45b6bdc80178aef713c08279afcaee7a5a9f07/aiohttp-3.13.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4af1f8877ca46ecdd0bc0d4a6b66d4b2bddc84a79e2e8366bc0d5308e76bceb8", size = 1698535 }, { url = "https://files.pythonhosted.org/packages/33/e2/27c92d205b9e8cee7661670e8e9f187931b71e26d42796b153d2a0ba6949/aiohttp-3.13.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e04ab827ec4f775817736b20cdc8350f40327f9b598dec4e18c9ffdcbea88a93", size = 1766573 }, { url = "https://files.pythonhosted.org/packages/df/6a/1fc1ad71d130a30f7a207d8d958a41224c29b834463b5185efb2dbff6ad4/aiohttp-3.13.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a6d9487b9471ec36b0faedf52228cd732e89be0a2bbd649af890b5e2ce422353", size = 1865229 }, { url = "https://files.pythonhosted.org/packages/14/51/d0c1701a79fcb0109cff5304da16226581569b89a282d8e7f1549a7e3ec0/aiohttp-3.13.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e66c57416352f36bf98f6641ddadd47c93740a22af7150d3e9a1ef6e983f9a8", size = 1750379 }, { url = "https://files.pythonhosted.org/packages/ae/3d/2ec4b934f85856de1c0c18e90adc8902adadbfac2b3c0b831bfeb7214fc8/aiohttp-3.13.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:469167d5372f5bb3aedff4fc53035d593884fff2617a75317740e885acd48b04", size = 1560798 }, { url = 
"https://files.pythonhosted.org/packages/38/56/e23d9c3e13006e599fdce3851517c70279e177871e3e567d22cf3baf5d6c/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a9f3546b503975a69b547c9fd1582cad10ede1ce6f3e313a2f547c73a3d7814f", size = 1697552 }, { url = "https://files.pythonhosted.org/packages/56/cb/caa32c2ccaeca0a3dc39129079fd2ad02f9406c3a5f7924340435b87d4cd/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6b4174fcec98601f0cfdf308ee29a6ae53c55f14359e848dab4e94009112ee7d", size = 1718609 }, { url = "https://files.pythonhosted.org/packages/fb/c0/5911856fef9e40fd1ccbb8c54a90116875d5753a92c1cac66ce2059b390d/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a533873a7a4ec2270fb362ee5a0d3b98752e4e1dc9042b257cd54545a96bd8ed", size = 1735887 }, { url = "https://files.pythonhosted.org/packages/0e/48/8d6f4757a24c02f0a454c043556593a00645d10583859f7156db44d8b7d3/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:ce887c5e54411d607ee0959cac15bb31d506d86a9bcaddf0b7e9d63325a7a802", size = 1553079 }, { url = "https://files.pythonhosted.org/packages/39/fa/e82c9445e40b50e46770702b5b6ca2f767966d53e1a5eef03583ceac6df6/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d871f6a30d43e32fc9252dc7b9febe1a042b3ff3908aa83868d7cf7c9579a59b", size = 1762750 }, { url = "https://files.pythonhosted.org/packages/3d/e6/9d30554e7f1e700bfeae4ab6b153d5dc7441606a9ec5e929288fa93a1477/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:222c828243b4789d79a706a876910f656fad4381661691220ba57b2ab4547865", size = 1717461 }, { url = "https://files.pythonhosted.org/packages/1f/e5/29cca547990a59ea54f0674fc01de98519fc628cfceeab6175711750eca7/aiohttp-3.13.0-cp312-cp312-win32.whl", hash = "sha256:682d2e434ff2f1108314ff7f056ce44e457f12dbed0249b24e106e385cf154b9", size = 424633 }, { url = "https://files.pythonhosted.org/packages/8b/68/46dd042d7bc62eab30bafdb8569f55ef125c3a88bb174270324224f8df56/aiohttp-3.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:0a2be20eb23888df130214b91c262a90e2de1553d6fb7de9e9010cec994c0ff2", size = 451401 }, { url = "https://files.pythonhosted.org/packages/86/2c/ac53efdc9c10e41399acc2395af98f835b86d0141d5c3820857eb9f6a14a/aiohttp-3.13.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:00243e51f16f6ec0fb021659d4af92f675f3cf9f9b39efd142aa3ad641d8d1e6", size = 730090 }, { url = "https://files.pythonhosted.org/packages/13/18/1ac95683e1c1d48ef4503965c96f5401618a04c139edae12e200392daae8/aiohttp-3.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:059978d2fddc462e9211362cbc8446747ecd930537fa559d3d25c256f032ff54", size = 488041 }, { url = "https://files.pythonhosted.org/packages/fd/79/ef0d477c771a642d1a881b92d226314c43d3c74bc674c93e12e679397a97/aiohttp-3.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:564b36512a7da3b386143c611867e3f7cfb249300a1bf60889bd9985da67ab77", size = 486989 }, { url = "https://files.pythonhosted.org/packages/37/b4/0e440481a0e77a551d6c5dcab5d11f1ff6b2b2ddb8dedc24f54f5caad732/aiohttp-3.13.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4aa995b9156ae499393d949a456a7ab0b994a8241a96db73a3b73c7a090eff6a", size = 1718331 }, { url = "https://files.pythonhosted.org/packages/e6/59/76c421cc4a75bb1aceadb92f20ee6f05a990aa6960c64b59e8e0d340e3f5/aiohttp-3.13.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:55ca0e95a3905f62f00900255ed807c580775174252999286f283e646d675a49", size = 
1686263 }, { url = "https://files.pythonhosted.org/packages/ec/ac/5095f12a79c7775f402cfc3e83651b6e0a92ade10ddf7f2c78c4fed79f71/aiohttp-3.13.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:49ce7525853a981fc35d380aa2353536a01a9ec1b30979ea4e35966316cace7e", size = 1754265 }, { url = "https://files.pythonhosted.org/packages/05/d7/a48e4989bd76cc70600c505bbdd0d90ca1ad7f9053eceeb9dbcf9345a9ec/aiohttp-3.13.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2117be9883501eaf95503bd313eb4c7a23d567edd44014ba15835a1e9ec6d852", size = 1856486 }, { url = "https://files.pythonhosted.org/packages/1e/02/45b388b49e37933f316e1fb39c0de6fb1d77384b0c8f4cf6af5f2cbe3ea6/aiohttp-3.13.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d169c47e40c911f728439da853b6fd06da83761012e6e76f11cb62cddae7282b", size = 1737545 }, { url = "https://files.pythonhosted.org/packages/6c/a7/4fde058f1605c34a219348a83a99f14724cc64e68a42480fc03cf40f9ea3/aiohttp-3.13.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:703ad3f742fc81e543638a7bebddd35acadaa0004a5e00535e795f4b6f2c25ca", size = 1552958 }, { url = "https://files.pythonhosted.org/packages/d1/12/0bac4d29231981e3aa234e88d1931f6ba38135ff4c2cf3afbb7895527630/aiohttp-3.13.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5bf635c3476f4119b940cc8d94ad454cbe0c377e61b4527f0192aabeac1e9370", size = 1681166 }, { url = "https://files.pythonhosted.org/packages/71/95/b829eb5f8ac1ca1d8085bb8df614c8acf3ff32e23ad5ad1173c7c9761daa/aiohttp-3.13.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:cfe6285ef99e7ee51cef20609be2bc1dd0e8446462b71c9db8bb296ba632810a", size = 1710516 }, { url = "https://files.pythonhosted.org/packages/47/6d/15ccf4ef3c254d899f62580e0c7fc717014f4d14a3ac31771e505d2c736c/aiohttp-3.13.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:34d8af6391c5f2e69749d7f037b614b8c5c42093c251f336bdbfa4b03c57d6c4", size = 1731354 }, { url = "https://files.pythonhosted.org/packages/46/6a/8acf6c57e03b6fdcc8b4c06392e66abaff3213ea275e41db3edb20738d91/aiohttp-3.13.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:12f5d820fadc5848d4559ea838aef733cf37ed2a1103bba148ac2f5547c14c29", size = 1548040 }, { url = "https://files.pythonhosted.org/packages/75/7d/fbfd59ab2a83fe2578ce79ac3db49727b81e9f4c3376217ad09c03c6d279/aiohttp-3.13.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0f1338b61ea66f4757a0544ed8a02ccbf60e38d9cfb3225888888dd4475ebb96", size = 1756031 }, { url = "https://files.pythonhosted.org/packages/99/e7/cc9f0fdf06cab3ca61e6b62bff9a4b978b8ca736e9d76ddf54365673ab19/aiohttp-3.13.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:582770f82513419512da096e8df21ca44f86a2e56e25dc93c5ab4df0fe065bf0", size = 1714933 }, { url = "https://files.pythonhosted.org/packages/db/43/7abbe1de94748a58a71881163ee280fd3217db36e8344d109f63638fe16a/aiohttp-3.13.0-cp313-cp313-win32.whl", hash = "sha256:3194b8cab8dbc882f37c13ef1262e0a3d62064fa97533d3aa124771f7bf1ecee", size = 423799 }, { url = "https://files.pythonhosted.org/packages/c9/58/afab7f2b9e7df88c995995172eb78cae8a3d5a62d5681abaade86b3f0089/aiohttp-3.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:7897298b3eedc790257fef8a6ec582ca04e9dbe568ba4a9a890913b925b8ea21", size = 450138 }, { url = "https://files.pythonhosted.org/packages/fe/c1/93bb1e35cd0c4665bb422b1ca3d87b588f4bca2656bbe9292b963d5b76a9/aiohttp-3.13.0-cp314-cp314-macosx_10_13_universal2.whl", 
hash = "sha256:c417f8c2e1137775569297c584a8a7144e5d1237789eae56af4faf1894a0b861", size = 733187 }, { url = "https://files.pythonhosted.org/packages/5e/36/2d50eba91992d3fe7a6452506ccdab45d03685ee8d8acaa5b289384a7d4c/aiohttp-3.13.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:f84b53326abf8e56ebc28a35cebf4a0f396a13a76300f500ab11fe0573bf0b52", size = 488684 }, { url = "https://files.pythonhosted.org/packages/82/93/fa4b1d5ecdc7805bdf0815ef00257db4632ccf0a8bffd44f9fc4657b1677/aiohttp-3.13.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:990a53b9d6a30b2878789e490758e568b12b4a7fb2527d0c89deb9650b0e5813", size = 489255 }, { url = "https://files.pythonhosted.org/packages/05/0f/85241f0d158da5e24e8ac9d50c0849ed24f882cafc53dc95749ef85eef09/aiohttp-3.13.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c811612711e01b901e18964b3e5dec0d35525150f5f3f85d0aee2935f059910a", size = 1715914 }, { url = "https://files.pythonhosted.org/packages/ab/fc/c755590d6f6d2b5d1565c72d6ee658d3c30ec61acb18964d1e9bf991d9b5/aiohttp-3.13.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ee433e594d7948e760b5c2a78cc06ac219df33b0848793cf9513d486a9f90a52", size = 1665171 }, { url = "https://files.pythonhosted.org/packages/3a/de/caa61e213ff546b8815aef5e931d7eae1dbe8c840a3f11ec5aa41c5ae462/aiohttp-3.13.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:19bb08e56f57c215e9572cd65cb6f8097804412c54081d933997ddde3e5ac579", size = 1755124 }, { url = "https://files.pythonhosted.org/packages/fb/b7/40c3219dd2691aa35cf889b4fbb0c00e48a19092928707044bfe92068e01/aiohttp-3.13.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f27b7488144eb5dd9151cf839b195edd1569629d90ace4c5b6b18e4e75d1e63a", size = 1835949 }, { url = "https://files.pythonhosted.org/packages/57/e8/66e3c32841fc0e26a09539c377aa0f3bbf6deac1957ac5182cf276c5719c/aiohttp-3.13.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d812838c109757a11354a161c95708ae4199c4fd4d82b90959b20914c1d097f6", size = 1714276 }, { url = "https://files.pythonhosted.org/packages/6b/a5/c68e5b46ff0410fe3abfa508651b09372428f27036138beacf4ff6b7cb8c/aiohttp-3.13.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7c20db99da682f9180fa5195c90b80b159632fb611e8dbccdd99ba0be0970620", size = 1545929 }, { url = "https://files.pythonhosted.org/packages/7a/a6/4c97dc27f9935c0c0aa6e3e10e5b4548823ab5d056636bde374fcd297256/aiohttp-3.13.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:cf8b0870047900eb1f17f453b4b3953b8ffbf203ef56c2f346780ff930a4d430", size = 1679988 }, { url = "https://files.pythonhosted.org/packages/8e/1b/11f9c52fd72b786a47e796e6794883417280cdca8eb1032d8d0939928dfa/aiohttp-3.13.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:5b8a5557d5af3f4e3add52a58c4cf2b8e6e59fc56b261768866f5337872d596d", size = 1678031 }, { url = "https://files.pythonhosted.org/packages/ea/eb/948903d40505f3a25e53e051488d2714ded3afac1f961df135f2936680f9/aiohttp-3.13.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:052bcdd80c1c54b8a18a9ea0cd5e36f473dc8e38d51b804cea34841f677a9971", size = 1726184 }, { url = "https://files.pythonhosted.org/packages/44/14/c8ced38c7dfe80804dec17a671963ccf3cb282f12700ec70b1f689d8de7d/aiohttp-3.13.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = 
"sha256:76484ba17b2832776581b7ab466d094e48eba74cb65a60aea20154dae485e8bd", size = 1542344 }, { url = "https://files.pythonhosted.org/packages/a4/6e/f2e6bff550a51fd7c45fdab116a1dab7cc502e5d942956f10fc5c626bb15/aiohttp-3.13.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:62d8a0adcdaf62ee56bfb37737153251ac8e4b27845b3ca065862fb01d99e247", size = 1740913 }, { url = "https://files.pythonhosted.org/packages/da/00/8f057300d9b598a706348abb375b3de9a253195fb615f17c0b2be2a72836/aiohttp-3.13.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5004d727499ecb95f7c9147dd0bfc5b5670f71d355f0bd26d7af2d3af8e07d2f", size = 1695535 }, { url = "https://files.pythonhosted.org/packages/8a/ab/6919d584d8f053a14b15f0bfa3f315b3f548435c2142145459da2efa8673/aiohttp-3.13.0-cp314-cp314-win32.whl", hash = "sha256:a1c20c26af48aea984f63f96e5d7af7567c32cb527e33b60a0ef0a6313cf8b03", size = 429548 }, { url = "https://files.pythonhosted.org/packages/c5/59/5d9e78de6132079066f5077d9687bf524f764a2f8207e04d8d68790060c6/aiohttp-3.13.0-cp314-cp314-win_amd64.whl", hash = "sha256:56f7d230ec66e799fbfd8350e9544f8a45a4353f1cf40c1fea74c1780f555b8f", size = 455548 }, { url = "https://files.pythonhosted.org/packages/7c/ea/7d98da03d1e9798bb99c3ca4963229150d45c9b7a3a16210c5b4a5f89e07/aiohttp-3.13.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:2fd35177dc483ae702f07b86c782f4f4b100a8ce4e7c5778cea016979023d9fd", size = 765319 }, { url = "https://files.pythonhosted.org/packages/5c/02/37f29beced8213bb467c52ad509a5e3b41e6e967de2f6eaf7f8db63bea54/aiohttp-3.13.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:4df1984c8804ed336089e88ac81a9417b1fd0db7c6f867c50a9264488797e778", size = 502567 }, { url = "https://files.pythonhosted.org/packages/e7/22/b0afcafcfe3637bc8d7992abf08ee9452018366c0801e4e7d4efda2ed839/aiohttp-3.13.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:e68c0076052dd911a81d3acc4ef2911cc4ef65bf7cadbfbc8ae762da24da858f", size = 507078 }, { url = "https://files.pythonhosted.org/packages/49/4c/046c847b7a1993b49f3855cc3b97872d5df193d9240de835d0dc6a97b164/aiohttp-3.13.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bc95c49853cd29613e4fe4ff96d73068ff89b89d61e53988442e127e8da8e7ba", size = 1862115 }, { url = "https://files.pythonhosted.org/packages/1a/25/1449a59e3c6405da5e47b0138ee0855414dc12a8c306685d7fc3dd300e1f/aiohttp-3.13.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3b3bdc89413117b40cc39baae08fd09cbdeb839d421c4e7dce6a34f6b54b3ac1", size = 1717147 }, { url = "https://files.pythonhosted.org/packages/23/8f/50cc34ad267b38608f21c6a74327015dd08a66f1dd8e7ceac954d0953191/aiohttp-3.13.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3e77a729df23be2116acc4e9de2767d8e92445fbca68886dd991dc912f473755", size = 1841443 }, { url = "https://files.pythonhosted.org/packages/df/b9/b3ab1278faa0d1b8f434c85f9cf34eeb0a25016ffe1ee6bc361d09fef0ec/aiohttp-3.13.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e88ab34826d6eeb6c67e6e92400b9ec653faf5092a35f07465f44c9f1c429f82", size = 1933652 }, { url = "https://files.pythonhosted.org/packages/88/e2/86050aaa3bd7021b115cdfc88477b754e8cf93ef0079867840eee22d3c34/aiohttp-3.13.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:019dbef24fe28ce2301419dd63a2b97250d9760ca63ee2976c2da2e3f182f82e", size = 1790682 }, { url = 
"https://files.pythonhosted.org/packages/78/8d/9af903324c2ba24a0c4778e9bcc738b773c98dded3a4fcf8041d5211769f/aiohttp-3.13.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:2c4aeaedd20771b7b4bcdf0ae791904445df6d856c02fc51d809d12d17cffdc7", size = 1622011 }, { url = "https://files.pythonhosted.org/packages/84/97/5174971ba4986d913554ceb248b0401eb5358cb60672ea0166f9f596cd08/aiohttp-3.13.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b3a8e6a2058a0240cfde542b641d0e78b594311bc1a710cbcb2e1841417d5cb3", size = 1787148 }, { url = "https://files.pythonhosted.org/packages/dd/ae/8b397e980ac613ef3ddd8e996aa7a40a1828df958257800d4bb325657db3/aiohttp-3.13.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:f8e38d55ca36c15f36d814ea414ecb2401d860de177c49f84a327a25b3ee752b", size = 1774816 }, { url = "https://files.pythonhosted.org/packages/c7/54/0e8e2111dd92051c787e934b6bbf30c213daaa5e7ee5f51bca8913607492/aiohttp-3.13.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:a921edbe971aade1bf45bcbb3494e30ba6863a5c78f28be992c42de980fd9108", size = 1788610 }, { url = "https://files.pythonhosted.org/packages/fa/dd/c9283dbfd9325ed6fa6c91f009db6344d8d370a7bcf09f36e7b2fcbfae02/aiohttp-3.13.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:474cade59a447cb4019c0dce9f0434bf835fb558ea932f62c686fe07fe6db6a1", size = 1615498 }, { url = "https://files.pythonhosted.org/packages/8c/f6/da76230679bd9ef175d876093f89e7fd6d6476c18505e115e3026fe5ef95/aiohttp-3.13.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:99a303ad960747c33b65b1cb65d01a62ac73fa39b72f08a2e1efa832529b01ed", size = 1815187 }, { url = "https://files.pythonhosted.org/packages/d5/78/394003ac738703822616f4f922705b54e5b3d8e7185831ecc1c97904174d/aiohttp-3.13.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:bb34001fc1f05f6b323e02c278090c07a47645caae3aa77ed7ed8a3ce6abcce9", size = 1760281 }, { url = "https://files.pythonhosted.org/packages/bd/b0/4bad0a9dd5910bd01c3119f8bd3d71887cd412d4105e4acddcdacf3cfa76/aiohttp-3.13.0-cp314-cp314t-win32.whl", hash = "sha256:dea698b64235d053def7d2f08af9302a69fcd760d1c7bd9988fd5d3b6157e657", size = 462608 }, { url = "https://files.pythonhosted.org/packages/bd/af/ad12d592f623aae2bd1d3463201dc39c201ea362f9ddee0d03efd9e83720/aiohttp-3.13.0-cp314-cp314t-win_amd64.whl", hash = "sha256:1f164699a060c0b3616459d13c1464a981fddf36f892f0a5027cbd45121fb14b", size = 496010 }, ] [[distribution]] name = "aiomqtt" version = "2.4.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "paho-mqtt" }, ] sdist = { url = "https://files.pythonhosted.org/packages/45/9a/863bc34c64bc4acb9720a9950bfc77d6f324640cdf1f420bb5d9ee624975/aiomqtt-2.4.0.tar.gz", hash = "sha256:ab0f18fc5b7ffaa57451c407417d674db837b00a9c7d953cccd02be64f046c17", size = 82718 } wheels = [ { url = "https://files.pythonhosted.org/packages/98/0c/2720665998d97d3a9521c03b138a22247e035ba54c4738e934da33c68699/aiomqtt-2.4.0-py3-none-any.whl", hash = "sha256:721296e2b79df5f6c7c4dfc91700ae0166953a4127735c92637859619dbd84e4", size = 15908 }, ] [[distribution]] name = "aiosignal" version = "1.4.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "frozenlist" }, { name = "typing-extensions", marker = "python_version < '3.13'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007 } wheels = [ 
{ url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490 }, ] [[distribution]] name = "appdirs" version = "1.4.4" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/d7/d8/05696357e0311f5b5c316d7b95f46c669dd9c15aaeecbb48c7d0aeb88c40/appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", size = 13470 } wheels = [ { url = "https://files.pythonhosted.org/packages/3b/00/2344469e2084fb287c2e0b57b72910309874c3245463acd6cf5e3db69324/appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128", size = 9566 }, ] [[distribution]] name = "attrs" version = "25.4.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251 } wheels = [ { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615 }, ] [[distribution]] name = "click" version = "8.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "platform_system == 'Windows'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943 } wheels = [ { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295 }, ] [[distribution]] name = "click-shell" version = "2.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, ] sdist = { url = "https://files.pythonhosted.org/packages/9e/26/93dd93fb1714f64376989b9e809982fd64d5f26e666b6d55458066c40b53/click-shell-2.1.tar.gz", hash = "sha256:ce0c91faae284c41a39bec966f928791ad4a45763755445f1fe2041fd091aa37", size = 8421 } wheels = [ { url = "https://files.pythonhosted.org/packages/21/ce/d81dcb726c436bf3f77d0145e03bf364c189cc95e6551e797bc0511dcea0/click_shell-2.1-py2.py3-none-any.whl", hash = "sha256:2d971a2e50eb7ad387cf0ce79ba4b844e66e0580784e2efe2df58b50a2f047f0", size = 8582 }, ] [[distribution]] name = "colorama" version = "0.4.6" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } wheels = [ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, ] [[distribution]] name = "construct" version = "2.10.70" source = { registry = "https://pypi.org/simple" } sdist = { url = 
"https://files.pythonhosted.org/packages/02/77/8c84b98eca70d245a2a956452f21d57930d22ab88cbeed9290ca630cf03f/construct-2.10.70.tar.gz", hash = "sha256:4d2472f9684731e58cc9c56c463be63baa1447d674e0d66aeb5627b22f512c29", size = 86337 } wheels = [ { url = "https://files.pythonhosted.org/packages/b2/fb/08b3f4bf05da99aba8ffea52a558758def16e8516bc75ca94ff73587e7d3/construct-2.10.70-py3-none-any.whl", hash = "sha256:c80be81ef595a1a821ec69dc16099550ed22197615f4320b57cc9ce2a672cb30", size = 63020 }, ] [[distribution]] name = "frozenlist" version = "1.8.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875 } wheels = [ { url = "https://files.pythonhosted.org/packages/bc/03/077f869d540370db12165c0aa51640a873fb661d8b315d1d4d67b284d7ac/frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84", size = 86912 }, { url = "https://files.pythonhosted.org/packages/df/b5/7610b6bd13e4ae77b96ba85abea1c8cb249683217ef09ac9e0ae93f25a91/frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9", size = 50046 }, { url = "https://files.pythonhosted.org/packages/6e/ef/0e8f1fe32f8a53dd26bdd1f9347efe0778b0fddf62789ea683f4cc7d787d/frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93", size = 50119 }, { url = "https://files.pythonhosted.org/packages/11/b1/71a477adc7c36e5fb628245dfbdea2166feae310757dea848d02bd0689fd/frozenlist-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f", size = 231067 }, { url = "https://files.pythonhosted.org/packages/45/7e/afe40eca3a2dc19b9904c0f5d7edfe82b5304cb831391edec0ac04af94c2/frozenlist-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695", size = 233160 }, { url = "https://files.pythonhosted.org/packages/a6/aa/7416eac95603ce428679d273255ffc7c998d4132cfae200103f164b108aa/frozenlist-1.8.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52", size = 228544 }, { url = "https://files.pythonhosted.org/packages/8b/3d/2a2d1f683d55ac7e3875e4263d28410063e738384d3adc294f5ff3d7105e/frozenlist-1.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581", size = 243797 }, { url = "https://files.pythonhosted.org/packages/78/1e/2d5565b589e580c296d3bb54da08d206e797d941a83a6fdea42af23be79c/frozenlist-1.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567", size = 247923 }, { url = "https://files.pythonhosted.org/packages/aa/c3/65872fcf1d326a7f101ad4d86285c403c87be7d832b7470b77f6d2ed5ddc/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b", size = 230886 }, { url = 
"https://files.pythonhosted.org/packages/a0/76/ac9ced601d62f6956f03cc794f9e04c81719509f85255abf96e2510f4265/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92", size = 245731 }, { url = "https://files.pythonhosted.org/packages/b9/49/ecccb5f2598daf0b4a1415497eba4c33c1e8ce07495eb07d2860c731b8d5/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d", size = 241544 }, { url = "https://files.pythonhosted.org/packages/53/4b/ddf24113323c0bbcc54cb38c8b8916f1da7165e07b8e24a717b4a12cbf10/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd", size = 241806 }, { url = "https://files.pythonhosted.org/packages/a7/fb/9b9a084d73c67175484ba2789a59f8eebebd0827d186a8102005ce41e1ba/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967", size = 229382 }, { url = "https://files.pythonhosted.org/packages/95/a3/c8fb25aac55bf5e12dae5c5aa6a98f85d436c1dc658f21c3ac73f9fa95e5/frozenlist-1.8.0-cp311-cp311-win32.whl", hash = "sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25", size = 39647 }, { url = "https://files.pythonhosted.org/packages/0a/f5/603d0d6a02cfd4c8f2a095a54672b3cf967ad688a60fb9faf04fc4887f65/frozenlist-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b", size = 44064 }, { url = "https://files.pythonhosted.org/packages/5d/16/c2c9ab44e181f043a86f9a8f84d5124b62dbcb3a02c0977ec72b9ac1d3e0/frozenlist-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a", size = 39937 }, { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782 }, { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594 }, { url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448 }, { url = "https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411 }, { url = "https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014 }, { url = "https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909 }, { url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 250049 }, { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485 }, { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619 }, { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320 }, { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820 }, { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518 }, { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096 }, { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985 }, { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 44591 }, { url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102 }, { url = "https://files.pythonhosted.org/packages/2d/40/0832c31a37d60f60ed79e9dfb5a92e1e2af4f40a16a29abcc7992af9edff/frozenlist-1.8.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a", size = 85717 }, { url = "https://files.pythonhosted.org/packages/30/ba/b0b3de23f40bc55a7057bd38434e25c34fa48e17f20ee273bbde5e0650f3/frozenlist-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7", size = 49651 }, { url = 
"https://files.pythonhosted.org/packages/0c/ab/6e5080ee374f875296c4243c381bbdef97a9ac39c6e3ce1d5f7d42cb78d6/frozenlist-1.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40", size = 49417 }, { url = "https://files.pythonhosted.org/packages/d5/4e/e4691508f9477ce67da2015d8c00acd751e6287739123113a9fca6f1604e/frozenlist-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fb30f9626572a76dfe4293c7194a09fb1fe93ba94c7d4f720dfae3b646b45027", size = 234391 }, { url = "https://files.pythonhosted.org/packages/40/76/c202df58e3acdf12969a7895fd6f3bc016c642e6726aa63bd3025e0fc71c/frozenlist-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaa352d7047a31d87dafcacbabe89df0aa506abb5b1b85a2fb91bc3faa02d822", size = 233048 }, { url = "https://files.pythonhosted.org/packages/f9/c0/8746afb90f17b73ca5979c7a3958116e105ff796e718575175319b5bb4ce/frozenlist-1.8.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:03ae967b4e297f58f8c774c7eabcce57fe3c2434817d4385c50661845a058121", size = 226549 }, { url = "https://files.pythonhosted.org/packages/7e/eb/4c7eefc718ff72f9b6c4893291abaae5fbc0c82226a32dcd8ef4f7a5dbef/frozenlist-1.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f6292f1de555ffcc675941d65fffffb0a5bcd992905015f85d0592201793e0e5", size = 239833 }, { url = "https://files.pythonhosted.org/packages/c2/4e/e5c02187cf704224f8b21bee886f3d713ca379535f16893233b9d672ea71/frozenlist-1.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29548f9b5b5e3460ce7378144c3010363d8035cea44bc0bf02d57f5a685e084e", size = 245363 }, { url = "https://files.pythonhosted.org/packages/1f/96/cb85ec608464472e82ad37a17f844889c36100eed57bea094518bf270692/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ec3cc8c5d4084591b4237c0a272cc4f50a5b03396a47d9caaf76f5d7b38a4f11", size = 229314 }, { url = "https://files.pythonhosted.org/packages/5d/6f/4ae69c550e4cee66b57887daeebe006fe985917c01d0fff9caab9883f6d0/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:517279f58009d0b1f2e7c1b130b377a349405da3f7621ed6bfae50b10adf20c1", size = 243365 }, { url = "https://files.pythonhosted.org/packages/7a/58/afd56de246cf11780a40a2c28dc7cbabbf06337cc8ddb1c780a2d97e88d8/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1", size = 237763 }, { url = "https://files.pythonhosted.org/packages/cb/36/cdfaf6ed42e2644740d4a10452d8e97fa1c062e2a8006e4b09f1b5fd7d63/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8", size = 240110 }, { url = "https://files.pythonhosted.org/packages/03/a8/9ea226fbefad669f11b52e864c55f0bd57d3c8d7eb07e9f2e9a0b39502e1/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed", size = 233717 }, { url = "https://files.pythonhosted.org/packages/1e/0b/1b5531611e83ba7d13ccc9988967ea1b51186af64c42b7a7af465dcc9568/frozenlist-1.8.0-cp313-cp313-win32.whl", hash = "sha256:8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496", size = 39628 }, { url = 
"https://files.pythonhosted.org/packages/d8/cf/174c91dbc9cc49bc7b7aab74d8b734e974d1faa8f191c74af9b7e80848e6/frozenlist-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231", size = 43882 }, { url = "https://files.pythonhosted.org/packages/c1/17/502cd212cbfa96eb1388614fe39a3fc9ab87dbbe042b66f97acb57474834/frozenlist-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62", size = 39676 }, { url = "https://files.pythonhosted.org/packages/d2/5c/3bbfaa920dfab09e76946a5d2833a7cbdf7b9b4a91c714666ac4855b88b4/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94", size = 89235 }, { url = "https://files.pythonhosted.org/packages/d2/d6/f03961ef72166cec1687e84e8925838442b615bd0b8854b54923ce5b7b8a/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c", size = 50742 }, { url = "https://files.pythonhosted.org/packages/1e/bb/a6d12b7ba4c3337667d0e421f7181c82dda448ce4e7ad7ecd249a16fa806/frozenlist-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52", size = 51725 }, { url = "https://files.pythonhosted.org/packages/bc/71/d1fed0ffe2c2ccd70b43714c6cab0f4188f09f8a67a7914a6b46ee30f274/frozenlist-1.8.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b3210649ee28062ea6099cfda39e147fa1bc039583c8ee4481cb7811e2448c51", size = 284533 }, { url = "https://files.pythonhosted.org/packages/c9/1f/fb1685a7b009d89f9bf78a42d94461bc06581f6e718c39344754a5d9bada/frozenlist-1.8.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:581ef5194c48035a7de2aefc72ac6539823bb71508189e5de01d60c9dcd5fa65", size = 292506 }, { url = "https://files.pythonhosted.org/packages/e6/3b/b991fe1612703f7e0d05c0cf734c1b77aaf7c7d321df4572e8d36e7048c8/frozenlist-1.8.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3ef2d026f16a2b1866e1d86fc4e1291e1ed8a387b2c333809419a2f8b3a77b82", size = 274161 }, { url = "https://files.pythonhosted.org/packages/ca/ec/c5c618767bcdf66e88945ec0157d7f6c4a1322f1473392319b7a2501ded7/frozenlist-1.8.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5500ef82073f599ac84d888e3a8c1f77ac831183244bfd7f11eaa0289fb30714", size = 294676 }, { url = "https://files.pythonhosted.org/packages/7c/ce/3934758637d8f8a88d11f0585d6495ef54b2044ed6ec84492a91fa3b27aa/frozenlist-1.8.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50066c3997d0091c411a66e710f4e11752251e6d2d73d70d8d5d4c76442a199d", size = 300638 }, { url = "https://files.pythonhosted.org/packages/fc/4f/a7e4d0d467298f42de4b41cbc7ddaf19d3cfeabaf9ff97c20c6c7ee409f9/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5c1c8e78426e59b3f8005e9b19f6ff46e5845895adbde20ece9218319eca6506", size = 283067 }, { url = "https://files.pythonhosted.org/packages/dc/48/c7b163063d55a83772b268e6d1affb960771b0e203b632cfe09522d67ea5/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:eefdba20de0d938cec6a89bd4d70f346a03108a19b9df4248d3cf0d88f1b0f51", size = 292101 }, { url = 
"https://files.pythonhosted.org/packages/9f/d0/2366d3c4ecdc2fd391e0afa6e11500bfba0ea772764d631bbf82f0136c9d/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e", size = 289901 }, { url = "https://files.pythonhosted.org/packages/b8/94/daff920e82c1b70e3618a2ac39fbc01ae3e2ff6124e80739ce5d71c9b920/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0", size = 289395 }, { url = "https://files.pythonhosted.org/packages/e3/20/bba307ab4235a09fdcd3cc5508dbabd17c4634a1af4b96e0f69bfe551ebd/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41", size = 283659 }, { url = "https://files.pythonhosted.org/packages/fd/00/04ca1c3a7a124b6de4f8a9a17cc2fcad138b4608e7a3fc5877804b8715d7/frozenlist-1.8.0-cp313-cp313t-win32.whl", hash = "sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b", size = 43492 }, { url = "https://files.pythonhosted.org/packages/59/5e/c69f733a86a94ab10f68e496dc6b7e8bc078ebb415281d5698313e3af3a1/frozenlist-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888", size = 48034 }, { url = "https://files.pythonhosted.org/packages/16/6c/be9d79775d8abe79b05fa6d23da99ad6e7763a1d080fbae7290b286093fd/frozenlist-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042", size = 41749 }, { url = "https://files.pythonhosted.org/packages/f1/c8/85da824b7e7b9b6e7f7705b2ecaf9591ba6f79c1177f324c2735e41d36a2/frozenlist-1.8.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cee686f1f4cadeb2136007ddedd0aaf928ab95216e7691c63e50a8ec066336d0", size = 86127 }, { url = "https://files.pythonhosted.org/packages/8e/e8/a1185e236ec66c20afd72399522f142c3724c785789255202d27ae992818/frozenlist-1.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:119fb2a1bd47307e899c2fac7f28e85b9a543864df47aa7ec9d3c1b4545f096f", size = 49698 }, { url = "https://files.pythonhosted.org/packages/a1/93/72b1736d68f03fda5fdf0f2180fb6caaae3894f1b854d006ac61ecc727ee/frozenlist-1.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4970ece02dbc8c3a92fcc5228e36a3e933a01a999f7094ff7c23fbd2beeaa67c", size = 49749 }, { url = "https://files.pythonhosted.org/packages/a7/b2/fabede9fafd976b991e9f1b9c8c873ed86f202889b864756f240ce6dd855/frozenlist-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:cba69cb73723c3f329622e34bdbf5ce1f80c21c290ff04256cff1cd3c2036ed2", size = 231298 }, { url = "https://files.pythonhosted.org/packages/3a/3b/d9b1e0b0eed36e70477ffb8360c49c85c8ca8ef9700a4e6711f39a6e8b45/frozenlist-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:778a11b15673f6f1df23d9586f83c4846c471a8af693a22e066508b77d201ec8", size = 232015 }, { url = "https://files.pythonhosted.org/packages/dc/94/be719d2766c1138148564a3960fc2c06eb688da592bdc25adcf856101be7/frozenlist-1.8.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0325024fe97f94c41c08872db482cf8ac4800d80e79222c6b0b7b162d5b13686", size = 225038 }, { url = 
"https://files.pythonhosted.org/packages/e4/09/6712b6c5465f083f52f50cf74167b92d4ea2f50e46a9eea0523d658454ae/frozenlist-1.8.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:97260ff46b207a82a7567b581ab4190bd4dfa09f4db8a8b49d1a958f6aa4940e", size = 240130 }, { url = "https://files.pythonhosted.org/packages/f8/d4/cd065cdcf21550b54f3ce6a22e143ac9e4836ca42a0de1022da8498eac89/frozenlist-1.8.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:54b2077180eb7f83dd52c40b2750d0a9f175e06a42e3213ce047219de902717a", size = 242845 }, { url = "https://files.pythonhosted.org/packages/62/c3/f57a5c8c70cd1ead3d5d5f776f89d33110b1addae0ab010ad774d9a44fb9/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2f05983daecab868a31e1da44462873306d3cbfd76d1f0b5b69c473d21dbb128", size = 229131 }, { url = "https://files.pythonhosted.org/packages/6c/52/232476fe9cb64f0742f3fde2b7d26c1dac18b6d62071c74d4ded55e0ef94/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:33f48f51a446114bc5d251fb2954ab0164d5be02ad3382abcbfe07e2531d650f", size = 240542 }, { url = "https://files.pythonhosted.org/packages/5f/85/07bf3f5d0fb5414aee5f47d33c6f5c77bfe49aac680bfece33d4fdf6a246/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:154e55ec0655291b5dd1b8731c637ecdb50975a2ae70c606d100750a540082f7", size = 237308 }, { url = "https://files.pythonhosted.org/packages/11/99/ae3a33d5befd41ac0ca2cc7fd3aa707c9c324de2e89db0e0f45db9a64c26/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:4314debad13beb564b708b4a496020e5306c7333fa9a3ab90374169a20ffab30", size = 238210 }, { url = "https://files.pythonhosted.org/packages/b2/60/b1d2da22f4970e7a155f0adde9b1435712ece01b3cd45ba63702aea33938/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:073f8bf8becba60aa931eb3bc420b217bb7d5b8f4750e6f8b3be7f3da85d38b7", size = 231972 }, { url = "https://files.pythonhosted.org/packages/3f/ab/945b2f32de889993b9c9133216c068b7fcf257d8595a0ac420ac8677cab0/frozenlist-1.8.0-cp314-cp314-win32.whl", hash = "sha256:bac9c42ba2ac65ddc115d930c78d24ab8d4f465fd3fc473cdedfccadb9429806", size = 40536 }, { url = "https://files.pythonhosted.org/packages/59/ad/9caa9b9c836d9ad6f067157a531ac48b7d36499f5036d4141ce78c230b1b/frozenlist-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:3e0761f4d1a44f1d1a47996511752cf3dcec5bbdd9cc2b4fe595caf97754b7a0", size = 44330 }, { url = "https://files.pythonhosted.org/packages/82/13/e6950121764f2676f43534c555249f57030150260aee9dcf7d64efda11dd/frozenlist-1.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:d1eaff1d00c7751b7c6662e9c5ba6eb2c17a2306ba5e2a37f24ddf3cc953402b", size = 40627 }, { url = "https://files.pythonhosted.org/packages/c0/c7/43200656ecc4e02d3f8bc248df68256cd9572b3f0017f0a0c4e93440ae23/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d3bb933317c52d7ea5004a1c442eef86f426886fba134ef8cf4226ea6ee1821d", size = 89238 }, { url = "https://files.pythonhosted.org/packages/d1/29/55c5f0689b9c0fb765055629f472c0de484dcaf0acee2f7707266ae3583c/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8009897cdef112072f93a0efdce29cd819e717fd2f649ee3016efd3cd885a7ed", size = 50738 }, { url = "https://files.pythonhosted.org/packages/ba/7d/b7282a445956506fa11da8c2db7d276adcbf2b17d8bb8407a47685263f90/frozenlist-1.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2c5dcbbc55383e5883246d11fd179782a9d07a986c40f49abe89ddf865913930", size = 
51739 }, { url = "https://files.pythonhosted.org/packages/62/1c/3d8622e60d0b767a5510d1d3cf21065b9db874696a51ea6d7a43180a259c/frozenlist-1.8.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:39ecbc32f1390387d2aa4f5a995e465e9e2f79ba3adcac92d68e3e0afae6657c", size = 284186 }, { url = "https://files.pythonhosted.org/packages/2d/14/aa36d5f85a89679a85a1d44cd7a6657e0b1c75f61e7cad987b203d2daca8/frozenlist-1.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92db2bf818d5cc8d9c1f1fc56b897662e24ea5adb36ad1f1d82875bd64e03c24", size = 292196 }, { url = "https://files.pythonhosted.org/packages/05/23/6bde59eb55abd407d34f77d39a5126fb7b4f109a3f611d3929f14b700c66/frozenlist-1.8.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dc43a022e555de94c3b68a4ef0b11c4f747d12c024a520c7101709a2144fb37", size = 273830 }, { url = "https://files.pythonhosted.org/packages/d2/3f/22cff331bfad7a8afa616289000ba793347fcd7bc275f3b28ecea2a27909/frozenlist-1.8.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb89a7f2de3602cfed448095bab3f178399646ab7c61454315089787df07733a", size = 294289 }, { url = "https://files.pythonhosted.org/packages/a4/89/5b057c799de4838b6c69aa82b79705f2027615e01be996d2486a69ca99c4/frozenlist-1.8.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:33139dc858c580ea50e7e60a1b0ea003efa1fd42e6ec7fdbad78fff65fad2fd2", size = 300318 }, { url = "https://files.pythonhosted.org/packages/30/de/2c22ab3eb2a8af6d69dc799e48455813bab3690c760de58e1bf43b36da3e/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:168c0969a329b416119507ba30b9ea13688fafffac1b7822802537569a1cb0ef", size = 282814 }, { url = "https://files.pythonhosted.org/packages/59/f7/970141a6a8dbd7f556d94977858cfb36fa9b66e0892c6dd780d2219d8cd8/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:28bd570e8e189d7f7b001966435f9dac6718324b5be2990ac496cf1ea9ddb7fe", size = 291762 }, { url = "https://files.pythonhosted.org/packages/c1/15/ca1adae83a719f82df9116d66f5bb28bb95557b3951903d39135620ef157/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b2a095d45c5d46e5e79ba1e5b9cb787f541a8dee0433836cea4b96a2c439dcd8", size = 289470 }, { url = "https://files.pythonhosted.org/packages/ac/83/dca6dc53bf657d371fbc88ddeb21b79891e747189c5de990b9dfff2ccba1/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:eab8145831a0d56ec9c4139b6c3e594c7a83c2c8be25d5bcf2d86136a532287a", size = 289042 }, { url = "https://files.pythonhosted.org/packages/96/52/abddd34ca99be142f354398700536c5bd315880ed0a213812bc491cff5e4/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:974b28cf63cc99dfb2188d8d222bc6843656188164848c4f679e63dae4b0708e", size = 283148 }, { url = "https://files.pythonhosted.org/packages/af/d3/76bd4ed4317e7119c2b7f57c3f6934aba26d277acc6309f873341640e21f/frozenlist-1.8.0-cp314-cp314t-win32.whl", hash = "sha256:342c97bf697ac5480c0a7ec73cd700ecfa5a8a40ac923bd035484616efecc2df", size = 44676 }, { url = "https://files.pythonhosted.org/packages/89/76/c615883b7b521ead2944bb3480398cbb07e12b7b4e4d073d3752eb721558/frozenlist-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:06be8f67f39c8b1dc671f5d83aaefd3358ae5cdcf8314552c57e7ed3e6475bdd", size = 49451 }, { url = 
"https://files.pythonhosted.org/packages/e0/a3/5982da14e113d07b325230f95060e2169f5311b1017ea8af2a29b374c289/frozenlist-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:102e6314ca4da683dca92e3b1355490fed5f313b768500084fbe6371fddfdb79", size = 42507 }, { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409 }, ] [[distribution]] name = "idna" version = "3.10" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } wheels = [ { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, ] [[distribution]] name = "lxml" version = "6.0.2" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/aa/88/262177de60548e5a2bfc46ad28232c9e9cbde697bd94132aeb80364675cb/lxml-6.0.2.tar.gz", hash = "sha256:cd79f3367bd74b317dda655dc8fcfa304d9eb6e4fb06b7168c5cf27f96e0cd62", size = 4073426 } wheels = [ { url = "https://files.pythonhosted.org/packages/77/d5/becbe1e2569b474a23f0c672ead8a29ac50b2dc1d5b9de184831bda8d14c/lxml-6.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:13e35cbc684aadf05d8711a5d1b5857c92e5e580efa9a0d2be197199c8def607", size = 8634365 }, { url = "https://files.pythonhosted.org/packages/28/66/1ced58f12e804644426b85d0bb8a4478ca77bc1761455da310505f1a3526/lxml-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b1675e096e17c6fe9c0e8c81434f5736c0739ff9ac6123c87c2d452f48fc938", size = 4650793 }, { url = "https://files.pythonhosted.org/packages/11/84/549098ffea39dfd167e3f174b4ce983d0eed61f9d8d25b7bf2a57c3247fc/lxml-6.0.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8ac6e5811ae2870953390452e3476694196f98d447573234592d30488147404d", size = 4944362 }, { url = "https://files.pythonhosted.org/packages/ac/bd/f207f16abf9749d2037453d56b643a7471d8fde855a231a12d1e095c4f01/lxml-6.0.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5aa0fc67ae19d7a64c3fe725dc9a1bb11f80e01f78289d05c6f62545affec438", size = 5083152 }, { url = "https://files.pythonhosted.org/packages/15/ae/bd813e87d8941d52ad5b65071b1affb48da01c4ed3c9c99e40abb266fbff/lxml-6.0.2-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de496365750cc472b4e7902a485d3f152ecf57bd3ba03ddd5578ed8ceb4c5964", size = 5023539 }, { url = "https://files.pythonhosted.org/packages/02/cd/9bfef16bd1d874fbe0cb51afb00329540f30a3283beb9f0780adbb7eec03/lxml-6.0.2-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:200069a593c5e40b8f6fc0d84d86d970ba43138c3e68619ffa234bc9bb806a4d", size = 5344853 }, { url = "https://files.pythonhosted.org/packages/b8/89/ea8f91594bc5dbb879734d35a6f2b0ad50605d7fb419de2b63d4211765cc/lxml-6.0.2-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7d2de809c2ee3b888b59f995625385f74629707c9355e0ff856445cdcae682b7", size = 5225133 }, { url = 
"https://files.pythonhosted.org/packages/b9/37/9c735274f5dbec726b2db99b98a43950395ba3d4a1043083dba2ad814170/lxml-6.0.2-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:b2c3da8d93cf5db60e8858c17684c47d01fee6405e554fb55018dd85fc23b178", size = 4677944 }, { url = "https://files.pythonhosted.org/packages/20/28/7dfe1ba3475d8bfca3878365075abe002e05d40dfaaeb7ec01b4c587d533/lxml-6.0.2-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:442de7530296ef5e188373a1ea5789a46ce90c4847e597856570439621d9c553", size = 5284535 }, { url = "https://files.pythonhosted.org/packages/e7/cf/5f14bc0de763498fc29510e3532bf2b4b3a1c1d5d0dff2e900c16ba021ef/lxml-6.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2593c77efde7bfea7f6389f1ab249b15ed4aa5bc5cb5131faa3b843c429fbedb", size = 5067343 }, { url = "https://files.pythonhosted.org/packages/1c/b0/bb8275ab5472f32b28cfbbcc6db7c9d092482d3439ca279d8d6fa02f7025/lxml-6.0.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:3e3cb08855967a20f553ff32d147e14329b3ae70ced6edc2f282b94afbc74b2a", size = 4725419 }, { url = "https://files.pythonhosted.org/packages/25/4c/7c222753bc72edca3b99dbadba1b064209bc8ed4ad448af990e60dcce462/lxml-6.0.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2ed6c667fcbb8c19c6791bbf40b7268ef8ddf5a96940ba9404b9f9a304832f6c", size = 5275008 }, { url = "https://files.pythonhosted.org/packages/6c/8c/478a0dc6b6ed661451379447cdbec77c05741a75736d97e5b2b729687828/lxml-6.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b8f18914faec94132e5b91e69d76a5c1d7b0c73e2489ea8929c4aaa10b76bbf7", size = 5248906 }, { url = "https://files.pythonhosted.org/packages/2d/d9/5be3a6ab2784cdf9accb0703b65e1b64fcdd9311c9f007630c7db0cfcce1/lxml-6.0.2-cp311-cp311-win32.whl", hash = "sha256:6605c604e6daa9e0d7f0a2137bdc47a2e93b59c60a65466353e37f8272f47c46", size = 3610357 }, { url = "https://files.pythonhosted.org/packages/e2/7d/ca6fb13349b473d5732fb0ee3eec8f6c80fc0688e76b7d79c1008481bf1f/lxml-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e5867f2651016a3afd8dd2c8238baa66f1e2802f44bc17e236f547ace6647078", size = 4036583 }, { url = "https://files.pythonhosted.org/packages/ab/a2/51363b5ecd3eab46563645f3a2c3836a2fc67d01a1b87c5017040f39f567/lxml-6.0.2-cp311-cp311-win_arm64.whl", hash = "sha256:4197fb2534ee05fd3e7afaab5d8bfd6c2e186f65ea7f9cd6a82809c887bd1285", size = 3680591 }, { url = "https://files.pythonhosted.org/packages/f3/c8/8ff2bc6b920c84355146cd1ab7d181bc543b89241cfb1ebee824a7c81457/lxml-6.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a59f5448ba2ceccd06995c95ea59a7674a10de0810f2ce90c9006f3cbc044456", size = 8661887 }, { url = "https://files.pythonhosted.org/packages/37/6f/9aae1008083bb501ef63284220ce81638332f9ccbfa53765b2b7502203cf/lxml-6.0.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e8113639f3296706fbac34a30813929e29247718e88173ad849f57ca59754924", size = 4667818 }, { url = "https://files.pythonhosted.org/packages/f1/ca/31fb37f99f37f1536c133476674c10b577e409c0a624384147653e38baf2/lxml-6.0.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a8bef9b9825fa8bc816a6e641bb67219489229ebc648be422af695f6e7a4fa7f", size = 4950807 }, { url = "https://files.pythonhosted.org/packages/da/87/f6cb9442e4bada8aab5ae7e1046264f62fdbeaa6e3f6211b93f4c0dd97f1/lxml-6.0.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:65ea18d710fd14e0186c2f973dc60bb52039a275f82d3c44a0e42b43440ea534", size = 5109179 }, { url = 
"https://files.pythonhosted.org/packages/c8/20/a7760713e65888db79bbae4f6146a6ae5c04e4a204a3c48896c408cd6ed2/lxml-6.0.2-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c371aa98126a0d4c739ca93ceffa0fd7a5d732e3ac66a46e74339acd4d334564", size = 5023044 }, { url = "https://files.pythonhosted.org/packages/a2/b0/7e64e0460fcb36471899f75831509098f3fd7cd02a3833ac517433cb4f8f/lxml-6.0.2-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:700efd30c0fa1a3581d80a748157397559396090a51d306ea59a70020223d16f", size = 5359685 }, { url = "https://files.pythonhosted.org/packages/b9/e1/e5df362e9ca4e2f48ed6411bd4b3a0ae737cc842e96877f5bf9428055ab4/lxml-6.0.2-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c33e66d44fe60e72397b487ee92e01da0d09ba2d66df8eae42d77b6d06e5eba0", size = 5654127 }, { url = "https://files.pythonhosted.org/packages/c6/d1/232b3309a02d60f11e71857778bfcd4acbdb86c07db8260caf7d008b08f8/lxml-6.0.2-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:90a345bbeaf9d0587a3aaffb7006aa39ccb6ff0e96a57286c0cb2fd1520ea192", size = 5253958 }, { url = "https://files.pythonhosted.org/packages/35/35/d955a070994725c4f7d80583a96cab9c107c57a125b20bb5f708fe941011/lxml-6.0.2-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:064fdadaf7a21af3ed1dcaa106b854077fbeada827c18f72aec9346847cd65d0", size = 4711541 }, { url = "https://files.pythonhosted.org/packages/1e/be/667d17363b38a78c4bd63cfd4b4632029fd68d2c2dc81f25ce9eb5224dd5/lxml-6.0.2-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fbc74f42c3525ac4ffa4b89cbdd00057b6196bcefe8bce794abd42d33a018092", size = 5267426 }, { url = "https://files.pythonhosted.org/packages/ea/47/62c70aa4a1c26569bc958c9ca86af2bb4e1f614e8c04fb2989833874f7ae/lxml-6.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6ddff43f702905a4e32bc24f3f2e2edfe0f8fde3277d481bffb709a4cced7a1f", size = 5064917 }, { url = "https://files.pythonhosted.org/packages/bd/55/6ceddaca353ebd0f1908ef712c597f8570cc9c58130dbb89903198e441fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6da5185951d72e6f5352166e3da7b0dc27aa70bd1090b0eb3f7f7212b53f1bb8", size = 4788795 }, { url = "https://files.pythonhosted.org/packages/cf/e8/fd63e15da5e3fd4c2146f8bbb3c14e94ab850589beab88e547b2dbce22e1/lxml-6.0.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:57a86e1ebb4020a38d295c04fc79603c7899e0df71588043eb218722dabc087f", size = 5676759 }, { url = "https://files.pythonhosted.org/packages/76/47/b3ec58dc5c374697f5ba37412cd2728f427d056315d124dd4b61da381877/lxml-6.0.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:2047d8234fe735ab77802ce5f2297e410ff40f5238aec569ad7c8e163d7b19a6", size = 5255666 }, { url = "https://files.pythonhosted.org/packages/19/93/03ba725df4c3d72afd9596eef4a37a837ce8e4806010569bedfcd2cb68fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6f91fd2b2ea15a6800c8e24418c0775a1694eefc011392da73bc6cef2623b322", size = 5277989 }, { url = "https://files.pythonhosted.org/packages/c6/80/c06de80bfce881d0ad738576f243911fccf992687ae09fd80b734712b39c/lxml-6.0.2-cp312-cp312-win32.whl", hash = "sha256:3ae2ce7d6fedfb3414a2b6c5e20b249c4c607f72cb8d2bb7cc9c6ec7c6f4e849", size = 3611456 }, { url = "https://files.pythonhosted.org/packages/f7/d7/0cdfb6c3e30893463fb3d1e52bc5f5f99684a03c29a0b6b605cfae879cd5/lxml-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:72c87e5ee4e58a8354fb9c7c84cbf95a1c8236c127a5d1b7683f04bed8361e1f", size = 4011793 }, { url = 
"https://files.pythonhosted.org/packages/ea/7b/93c73c67db235931527301ed3785f849c78991e2e34f3fd9a6663ffda4c5/lxml-6.0.2-cp312-cp312-win_arm64.whl", hash = "sha256:61cb10eeb95570153e0c0e554f58df92ecf5109f75eacad4a95baa709e26c3d6", size = 3672836 }, { url = "https://files.pythonhosted.org/packages/53/fd/4e8f0540608977aea078bf6d79f128e0e2c2bba8af1acf775c30baa70460/lxml-6.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9b33d21594afab46f37ae58dfadd06636f154923c4e8a4d754b0127554eb2e77", size = 8648494 }, { url = "https://files.pythonhosted.org/packages/5d/f4/2a94a3d3dfd6c6b433501b8d470a1960a20ecce93245cf2db1706adf6c19/lxml-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6c8963287d7a4c5c9a432ff487c52e9c5618667179c18a204bdedb27310f022f", size = 4661146 }, { url = "https://files.pythonhosted.org/packages/25/2e/4efa677fa6b322013035d38016f6ae859d06cac67437ca7dc708a6af7028/lxml-6.0.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1941354d92699fb5ffe6ed7b32f9649e43c2feb4b97205f75866f7d21aa91452", size = 4946932 }, { url = "https://files.pythonhosted.org/packages/ce/0f/526e78a6d38d109fdbaa5049c62e1d32fdd70c75fb61c4eadf3045d3d124/lxml-6.0.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bb2f6ca0ae2d983ded09357b84af659c954722bbf04dea98030064996d156048", size = 5100060 }, { url = "https://files.pythonhosted.org/packages/81/76/99de58d81fa702cc0ea7edae4f4640416c2062813a00ff24bd70ac1d9c9b/lxml-6.0.2-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb2a12d704f180a902d7fa778c6d71f36ceb7b0d317f34cdc76a5d05aa1dd1df", size = 5019000 }, { url = "https://files.pythonhosted.org/packages/b5/35/9e57d25482bc9a9882cb0037fdb9cc18f4b79d85df94fa9d2a89562f1d25/lxml-6.0.2-cp313-cp313-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:6ec0e3f745021bfed19c456647f0298d60a24c9ff86d9d051f52b509663feeb1", size = 5348496 }, { url = "https://files.pythonhosted.org/packages/a6/8e/cb99bd0b83ccc3e8f0f528e9aa1f7a9965dfec08c617070c5db8d63a87ce/lxml-6.0.2-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:846ae9a12d54e368933b9759052d6206a9e8b250291109c48e350c1f1f49d916", size = 5643779 }, { url = "https://files.pythonhosted.org/packages/d0/34/9e591954939276bb679b73773836c6684c22e56d05980e31d52a9a8deb18/lxml-6.0.2-cp313-cp313-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ef9266d2aa545d7374938fb5c484531ef5a2ec7f2d573e62f8ce722c735685fd", size = 5244072 }, { url = "https://files.pythonhosted.org/packages/8d/27/b29ff065f9aaca443ee377aff699714fcbffb371b4fce5ac4ca759e436d5/lxml-6.0.2-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:4077b7c79f31755df33b795dc12119cb557a0106bfdab0d2c2d97bd3cf3dffa6", size = 4718675 }, { url = "https://files.pythonhosted.org/packages/2b/9f/f756f9c2cd27caa1a6ef8c32ae47aadea697f5c2c6d07b0dae133c244fbe/lxml-6.0.2-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a7c5d5e5f1081955358533be077166ee97ed2571d6a66bdba6ec2f609a715d1a", size = 5255171 }, { url = "https://files.pythonhosted.org/packages/61/46/bb85ea42d2cb1bd8395484fd72f38e3389611aa496ac7772da9205bbda0e/lxml-6.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8f8d0cbd0674ee89863a523e6994ac25fd5be9c8486acfc3e5ccea679bad2679", size = 5057175 }, { url = "https://files.pythonhosted.org/packages/95/0c/443fc476dcc8e41577f0af70458c50fe299a97bb6b7505bb1ae09aa7f9ac/lxml-6.0.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = 
"sha256:2cbcbf6d6e924c28f04a43f3b6f6e272312a090f269eff68a2982e13e5d57659", size = 4785688 }, { url = "https://files.pythonhosted.org/packages/48/78/6ef0b359d45bb9697bc5a626e1992fa5d27aa3f8004b137b2314793b50a0/lxml-6.0.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dfb874cfa53340009af6bdd7e54ebc0d21012a60a4e65d927c2e477112e63484", size = 5660655 }, { url = "https://files.pythonhosted.org/packages/ff/ea/e1d33808f386bc1339d08c0dcada6e4712d4ed8e93fcad5f057070b7988a/lxml-6.0.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:fb8dae0b6b8b7f9e96c26fdd8121522ce5de9bb5538010870bd538683d30e9a2", size = 5247695 }, { url = "https://files.pythonhosted.org/packages/4f/47/eba75dfd8183673725255247a603b4ad606f4ae657b60c6c145b381697da/lxml-6.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:358d9adae670b63e95bc59747c72f4dc97c9ec58881d4627fe0120da0f90d314", size = 5269841 }, { url = "https://files.pythonhosted.org/packages/76/04/5c5e2b8577bc936e219becb2e98cdb1aca14a4921a12995b9d0c523502ae/lxml-6.0.2-cp313-cp313-win32.whl", hash = "sha256:e8cd2415f372e7e5a789d743d133ae474290a90b9023197fd78f32e2dc6873e2", size = 3610700 }, { url = "https://files.pythonhosted.org/packages/fe/0a/4643ccc6bb8b143e9f9640aa54e38255f9d3b45feb2cbe7ae2ca47e8782e/lxml-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:b30d46379644fbfc3ab81f8f82ae4de55179414651f110a1514f0b1f8f6cb2d7", size = 4010347 }, { url = "https://files.pythonhosted.org/packages/31/ef/dcf1d29c3f530577f61e5fe2f1bd72929acf779953668a8a47a479ae6f26/lxml-6.0.2-cp313-cp313-win_arm64.whl", hash = "sha256:13dcecc9946dca97b11b7c40d29fba63b55ab4170d3c0cf8c0c164343b9bfdcf", size = 3671248 }, { url = "https://files.pythonhosted.org/packages/03/15/d4a377b385ab693ce97b472fe0c77c2b16ec79590e688b3ccc71fba19884/lxml-6.0.2-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:b0c732aa23de8f8aec23f4b580d1e52905ef468afb4abeafd3fec77042abb6fe", size = 8659801 }, { url = "https://files.pythonhosted.org/packages/c8/e8/c128e37589463668794d503afaeb003987373c5f94d667124ffd8078bbd9/lxml-6.0.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:4468e3b83e10e0317a89a33d28f7aeba1caa4d1a6fd457d115dd4ffe90c5931d", size = 4659403 }, { url = "https://files.pythonhosted.org/packages/00/ce/74903904339decdf7da7847bb5741fc98a5451b42fc419a86c0c13d26fe2/lxml-6.0.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:abd44571493973bad4598a3be7e1d807ed45aa2adaf7ab92ab7c62609569b17d", size = 4966974 }, { url = "https://files.pythonhosted.org/packages/1f/d3/131dec79ce61c5567fecf82515bd9bc36395df42501b50f7f7f3bd065df0/lxml-6.0.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:370cd78d5855cfbffd57c422851f7d3864e6ae72d0da615fca4dad8c45d375a5", size = 5102953 }, { url = "https://files.pythonhosted.org/packages/3a/ea/a43ba9bb750d4ffdd885f2cd333572f5bb900cd2408b67fdda07e85978a0/lxml-6.0.2-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:901e3b4219fa04ef766885fb40fa516a71662a4c61b80c94d25336b4934b71c0", size = 5055054 }, { url = "https://files.pythonhosted.org/packages/60/23/6885b451636ae286c34628f70a7ed1fcc759f8d9ad382d132e1c8d3d9bfd/lxml-6.0.2-cp314-cp314-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:a4bf42d2e4cf52c28cc1812d62426b9503cdb0c87a6de81442626aa7d69707ba", size = 5352421 }, { url = "https://files.pythonhosted.org/packages/48/5b/fc2ddfc94ddbe3eebb8e9af6e3fd65e2feba4967f6a4e9683875c394c2d8/lxml-6.0.2-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:b2c7fdaa4d7c3d886a42534adec7cfac73860b89b4e5298752f60aa5984641a0", size = 5673684 }, { url = "https://files.pythonhosted.org/packages/29/9c/47293c58cc91769130fbf85531280e8cc7868f7fbb6d92f4670071b9cb3e/lxml-6.0.2-cp314-cp314-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:98a5e1660dc7de2200b00d53fa00bcd3c35a3608c305d45a7bbcaf29fa16e83d", size = 5252463 }, { url = "https://files.pythonhosted.org/packages/9b/da/ba6eceb830c762b48e711ded880d7e3e89fc6c7323e587c36540b6b23c6b/lxml-6.0.2-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:dc051506c30b609238d79eda75ee9cab3e520570ec8219844a72a46020901e37", size = 4698437 }, { url = "https://files.pythonhosted.org/packages/a5/24/7be3f82cb7990b89118d944b619e53c656c97dc89c28cfb143fdb7cd6f4d/lxml-6.0.2-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8799481bbdd212470d17513a54d568f44416db01250f49449647b5ab5b5dccb9", size = 5269890 }, { url = "https://files.pythonhosted.org/packages/1b/bd/dcfb9ea1e16c665efd7538fc5d5c34071276ce9220e234217682e7d2c4a5/lxml-6.0.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9261bb77c2dab42f3ecd9103951aeca2c40277701eb7e912c545c1b16e0e4917", size = 5097185 }, { url = "https://files.pythonhosted.org/packages/21/04/a60b0ff9314736316f28316b694bccbbabe100f8483ad83852d77fc7468e/lxml-6.0.2-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:65ac4a01aba353cfa6d5725b95d7aed6356ddc0a3cd734de00124d285b04b64f", size = 4745895 }, { url = "https://files.pythonhosted.org/packages/d6/bd/7d54bd1846e5a310d9c715921c5faa71cf5c0853372adf78aee70c8d7aa2/lxml-6.0.2-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:b22a07cbb82fea98f8a2fd814f3d1811ff9ed76d0fc6abc84eb21527596e7cc8", size = 5695246 }, { url = "https://files.pythonhosted.org/packages/fd/32/5643d6ab947bc371da21323acb2a6e603cedbe71cb4c99c8254289ab6f4e/lxml-6.0.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:d759cdd7f3e055d6bc8d9bec3ad905227b2e4c785dc16c372eb5b5e83123f48a", size = 5260797 }, { url = "https://files.pythonhosted.org/packages/33/da/34c1ec4cff1eea7d0b4cd44af8411806ed943141804ac9c5d565302afb78/lxml-6.0.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:945da35a48d193d27c188037a05fec5492937f66fb1958c24fc761fb9d40d43c", size = 5277404 }, { url = "https://files.pythonhosted.org/packages/82/57/4eca3e31e54dc89e2c3507e1cd411074a17565fa5ffc437c4ae0a00d439e/lxml-6.0.2-cp314-cp314-win32.whl", hash = "sha256:be3aaa60da67e6153eb15715cc2e19091af5dc75faef8b8a585aea372507384b", size = 3670072 }, { url = "https://files.pythonhosted.org/packages/e3/e0/c96cf13eccd20c9421ba910304dae0f619724dcf1702864fd59dd386404d/lxml-6.0.2-cp314-cp314-win_amd64.whl", hash = "sha256:fa25afbadead523f7001caf0c2382afd272c315a033a7b06336da2637d92d6ed", size = 4080617 }, { url = "https://files.pythonhosted.org/packages/d5/5d/b3f03e22b3d38d6f188ef044900a9b29b2fe0aebb94625ce9fe244011d34/lxml-6.0.2-cp314-cp314-win_arm64.whl", hash = "sha256:063eccf89df5b24e361b123e257e437f9e9878f425ee9aae3144c77faf6da6d8", size = 3754930 }, { url = "https://files.pythonhosted.org/packages/5e/5c/42c2c4c03554580708fc738d13414801f340c04c3eff90d8d2d227145275/lxml-6.0.2-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:6162a86d86893d63084faaf4ff937b3daea233e3682fb4474db07395794fa80d", size = 8910380 }, { url = "https://files.pythonhosted.org/packages/bf/4f/12df843e3e10d18d468a7557058f8d3733e8b6e12401f30b1ef29360740f/lxml-6.0.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:414aaa94e974e23a3e92e7ca5b97d10c0cf37b6481f50911032c69eeb3991bba", 
size = 4775632 }, { url = "https://files.pythonhosted.org/packages/e4/0c/9dc31e6c2d0d418483cbcb469d1f5a582a1cd00a1f4081953d44051f3c50/lxml-6.0.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:48461bd21625458dd01e14e2c38dd0aea69addc3c4f960c30d9f59d7f93be601", size = 4975171 }, { url = "https://files.pythonhosted.org/packages/e7/2b/9b870c6ca24c841bdd887504808f0417aa9d8d564114689266f19ddf29c8/lxml-6.0.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:25fcc59afc57d527cfc78a58f40ab4c9b8fd096a9a3f964d2781ffb6eb33f4ed", size = 5110109 }, { url = "https://files.pythonhosted.org/packages/bf/0c/4f5f2a4dd319a178912751564471355d9019e220c20d7db3fb8307ed8582/lxml-6.0.2-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5179c60288204e6ddde3f774a93350177e08876eaf3ab78aa3a3649d43eb7d37", size = 5041061 }, { url = "https://files.pythonhosted.org/packages/12/64/554eed290365267671fe001a20d72d14f468ae4e6acef1e179b039436967/lxml-6.0.2-cp314-cp314t-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:967aab75434de148ec80597b75062d8123cadf2943fb4281f385141e18b21338", size = 5306233 }, { url = "https://files.pythonhosted.org/packages/7a/31/1d748aa275e71802ad9722df32a7a35034246b42c0ecdd8235412c3396ef/lxml-6.0.2-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d100fcc8930d697c6561156c6810ab4a508fb264c8b6779e6e61e2ed5e7558f9", size = 5604739 }, { url = "https://files.pythonhosted.org/packages/8f/41/2c11916bcac09ed561adccacceaedd2bf0e0b25b297ea92aab99fd03d0fa/lxml-6.0.2-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ca59e7e13e5981175b8b3e4ab84d7da57993eeff53c07764dcebda0d0e64ecd", size = 5225119 }, { url = "https://files.pythonhosted.org/packages/99/05/4e5c2873d8f17aa018e6afde417c80cc5d0c33be4854cce3ef5670c49367/lxml-6.0.2-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:957448ac63a42e2e49531b9d6c0fa449a1970dbc32467aaad46f11545be9af1d", size = 4633665 }, { url = "https://files.pythonhosted.org/packages/0f/c9/dcc2da1bebd6275cdc723b515f93edf548b82f36a5458cca3578bc899332/lxml-6.0.2-cp314-cp314t-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b7fc49c37f1786284b12af63152fe1d0990722497e2d5817acfe7a877522f9a9", size = 5234997 }, { url = "https://files.pythonhosted.org/packages/9c/e2/5172e4e7468afca64a37b81dba152fc5d90e30f9c83c7c3213d6a02a5ce4/lxml-6.0.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e19e0643cc936a22e837f79d01a550678da8377d7d801a14487c10c34ee49c7e", size = 5090957 }, { url = "https://files.pythonhosted.org/packages/a5/b3/15461fd3e5cd4ddcb7938b87fc20b14ab113b92312fc97afe65cd7c85de1/lxml-6.0.2-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:1db01e5cf14345628e0cbe71067204db658e2fb8e51e7f33631f5f4735fefd8d", size = 4764372 }, { url = "https://files.pythonhosted.org/packages/05/33/f310b987c8bf9e61c4dd8e8035c416bd3230098f5e3cfa69fc4232de7059/lxml-6.0.2-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:875c6b5ab39ad5291588aed6925fac99d0097af0dd62f33c7b43736043d4a2ec", size = 5634653 }, { url = "https://files.pythonhosted.org/packages/70/ff/51c80e75e0bc9382158133bdcf4e339b5886c6ee2418b5199b3f1a61ed6d/lxml-6.0.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:cdcbed9ad19da81c480dfd6dd161886db6096083c9938ead313d94b30aadf272", size = 5233795 }, { url = 
"https://files.pythonhosted.org/packages/56/4d/4856e897df0d588789dd844dbed9d91782c4ef0b327f96ce53c807e13128/lxml-6.0.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:80dadc234ebc532e09be1975ff538d154a7fa61ea5031c03d25178855544728f", size = 5257023 }, { url = "https://files.pythonhosted.org/packages/0f/85/86766dfebfa87bea0ab78e9ff7a4b4b45225df4b4d3b8cc3c03c5cd68464/lxml-6.0.2-cp314-cp314t-win32.whl", hash = "sha256:da08e7bb297b04e893d91087df19638dc7a6bb858a954b0cc2b9f5053c922312", size = 3911420 }, { url = "https://files.pythonhosted.org/packages/fe/1a/b248b355834c8e32614650b8008c69ffeb0ceb149c793961dd8c0b991bb3/lxml-6.0.2-cp314-cp314t-win_amd64.whl", hash = "sha256:252a22982dca42f6155125ac76d3432e548a7625d56f5a273ee78a5057216eca", size = 4406837 }, { url = "https://files.pythonhosted.org/packages/92/aa/df863bcc39c5e0946263454aba394de8a9084dbaff8ad143846b0d844739/lxml-6.0.2-cp314-cp314t-win_arm64.whl", hash = "sha256:bb4c1847b303835d89d785a18801a883436cdfd5dc3d62947f9c49e24f0f5a2c", size = 3822205 }, { url = "https://files.pythonhosted.org/packages/0b/11/29d08bc103a62c0eba8016e7ed5aeebbf1e4312e83b0b1648dd203b0e87d/lxml-6.0.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1c06035eafa8404b5cf475bb37a9f6088b0aca288d4ccc9d69389750d5543700", size = 3949829 }, { url = "https://files.pythonhosted.org/packages/12/b3/52ab9a3b31e5ab8238da241baa19eec44d2ab426532441ee607165aebb52/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c7d13103045de1bdd6fe5d61802565f1a3537d70cd3abf596aa0af62761921ee", size = 4226277 }, { url = "https://files.pythonhosted.org/packages/a0/33/1eaf780c1baad88224611df13b1c2a9dfa460b526cacfe769103ff50d845/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a3c150a95fbe5ac91de323aa756219ef9cf7fde5a3f00e2281e30f33fa5fa4f", size = 4330433 }, { url = "https://files.pythonhosted.org/packages/7a/c1/27428a2ff348e994ab4f8777d3a0ad510b6b92d37718e5887d2da99952a2/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60fa43be34f78bebb27812ed90f1925ec99560b0fa1decdb7d12b84d857d31e9", size = 4272119 }, { url = "https://files.pythonhosted.org/packages/f0/d0/3020fa12bcec4ab62f97aab026d57c2f0cfd480a558758d9ca233bb6a79d/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:21c73b476d3cfe836be731225ec3421fa2f048d84f6df6a8e70433dff1376d5a", size = 4417314 }, { url = "https://files.pythonhosted.org/packages/6c/77/d7f491cbc05303ac6801651aabeb262d43f319288c1ea96c66b1d2692ff3/lxml-6.0.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:27220da5be049e936c3aca06f174e8827ca6445a4353a1995584311487fc4e3e", size = 3518768 }, ] [[distribution]] name = "multidict" version = "6.7.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/80/1e/5492c365f222f907de1039b91f922b93fa4f764c713ee858d235495d8f50/multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5", size = 101834 } wheels = [ { url = "https://files.pythonhosted.org/packages/34/9e/5c727587644d67b2ed479041e4b1c58e30afc011e3d45d25bbe35781217c/multidict-6.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4d409aa42a94c0b3fa617708ef5276dfe81012ba6753a0370fcc9d0195d0a1fc", size = 76604 }, { url = "https://files.pythonhosted.org/packages/17/e4/67b5c27bd17c085a5ea8f1ec05b8a3e5cba0ca734bfcad5560fb129e70ca/multidict-6.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:14c9e076eede3b54c636f8ce1c9c252b5f057c62131211f0ceeec273810c9721", size = 44715 }, { url = "https://files.pythonhosted.org/packages/4d/e1/866a5d77be6ea435711bef2a4291eed11032679b6b28b56b4776ab06ba3e/multidict-6.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c09703000a9d0fa3c3404b27041e574cc7f4df4c6563873246d0e11812a94b6", size = 44332 }, { url = "https://files.pythonhosted.org/packages/31/61/0c2d50241ada71ff61a79518db85ada85fdabfcf395d5968dae1cbda04e5/multidict-6.7.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a265acbb7bb33a3a2d626afbe756371dce0279e7b17f4f4eda406459c2b5ff1c", size = 245212 }, { url = "https://files.pythonhosted.org/packages/ac/e0/919666a4e4b57fff1b57f279be1c9316e6cdc5de8a8b525d76f6598fefc7/multidict-6.7.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51cb455de290ae462593e5b1cb1118c5c22ea7f0d3620d9940bf695cea5a4bd7", size = 246671 }, { url = "https://files.pythonhosted.org/packages/a1/cc/d027d9c5a520f3321b65adea289b965e7bcbd2c34402663f482648c716ce/multidict-6.7.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:db99677b4457c7a5c5a949353e125ba72d62b35f74e26da141530fbb012218a7", size = 225491 }, { url = "https://files.pythonhosted.org/packages/75/c4/bbd633980ce6155a28ff04e6a6492dd3335858394d7bb752d8b108708558/multidict-6.7.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f470f68adc395e0183b92a2f4689264d1ea4b40504a24d9882c27375e6662bb9", size = 257322 }, { url = "https://files.pythonhosted.org/packages/4c/6d/d622322d344f1f053eae47e033b0b3f965af01212de21b10bcf91be991fb/multidict-6.7.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0db4956f82723cc1c270de9c6e799b4c341d327762ec78ef82bb962f79cc07d8", size = 254694 }, { url = "https://files.pythonhosted.org/packages/a8/9f/78f8761c2705d4c6d7516faed63c0ebdac569f6db1bef95e0d5218fdc146/multidict-6.7.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e56d780c238f9e1ae66a22d2adf8d16f485381878250db8d496623cd38b22bd", size = 246715 }, { url = "https://files.pythonhosted.org/packages/78/59/950818e04f91b9c2b95aab3d923d9eabd01689d0dcd889563988e9ea0fd8/multidict-6.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9d14baca2ee12c1a64740d4531356ba50b82543017f3ad6de0deb943c5979abb", size = 243189 }, { url = "https://files.pythonhosted.org/packages/7a/3d/77c79e1934cad2ee74991840f8a0110966d9599b3af95964c0cd79bb905b/multidict-6.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:295a92a76188917c7f99cda95858c822f9e4aae5824246bba9b6b44004ddd0a6", size = 237845 }, { url = "https://files.pythonhosted.org/packages/63/1b/834ce32a0a97a3b70f86437f685f880136677ac00d8bce0027e9fd9c2db7/multidict-6.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39f1719f57adbb767ef592a50ae5ebb794220d1188f9ca93de471336401c34d2", size = 246374 }, { url = "https://files.pythonhosted.org/packages/23/ef/43d1c3ba205b5dec93dc97f3fba179dfa47910fc73aaaea4f7ceb41cec2a/multidict-6.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0a13fb8e748dfc94749f622de065dd5c1def7e0d2216dba72b1d8069a389c6ff", size = 253345 }, { url = "https://files.pythonhosted.org/packages/6b/03/eaf95bcc2d19ead522001f6a650ef32811aa9e3624ff0ad37c445c7a588c/multidict-6.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:e3aa16de190d29a0ea1b48253c57d99a68492c8dd8948638073ab9e74dc9410b", size = 246940 }, { url = "https://files.pythonhosted.org/packages/e8/df/ec8a5fd66ea6cd6f525b1fcbb23511b033c3e9bc42b81384834ffa484a62/multidict-6.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a048ce45dcdaaf1defb76b2e684f997fb5abf74437b6cb7b22ddad934a964e34", size = 242229 }, { url = "https://files.pythonhosted.org/packages/8a/a2/59b405d59fd39ec86d1142630e9049243015a5f5291ba49cadf3c090c541/multidict-6.7.0-cp311-cp311-win32.whl", hash = "sha256:a90af66facec4cebe4181b9e62a68be65e45ac9b52b67de9eec118701856e7ff", size = 41308 }, { url = "https://files.pythonhosted.org/packages/32/0f/13228f26f8b882c34da36efa776c3b7348455ec383bab4a66390e42963ae/multidict-6.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:95b5ffa4349df2887518bb839409bcf22caa72d82beec453216802f475b23c81", size = 46037 }, { url = "https://files.pythonhosted.org/packages/84/1f/68588e31b000535a3207fd3c909ebeec4fb36b52c442107499c18a896a2a/multidict-6.7.0-cp311-cp311-win_arm64.whl", hash = "sha256:329aa225b085b6f004a4955271a7ba9f1087e39dcb7e65f6284a988264a63912", size = 43023 }, { url = "https://files.pythonhosted.org/packages/c2/9e/9f61ac18d9c8b475889f32ccfa91c9f59363480613fc807b6e3023d6f60b/multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184", size = 76877 }, { url = "https://files.pythonhosted.org/packages/38/6f/614f09a04e6184f8824268fce4bc925e9849edfa654ddd59f0b64508c595/multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45", size = 45467 }, { url = "https://files.pythonhosted.org/packages/b3/93/c4f67a436dd026f2e780c433277fff72be79152894d9fc36f44569cab1a6/multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa", size = 43834 }, { url = "https://files.pythonhosted.org/packages/7f/f5/013798161ca665e4a422afbc5e2d9e4070142a9ff8905e482139cd09e4d0/multidict-6.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0934f3843a1860dd465d38895c17fce1f1cb37295149ab05cd1b9a03afacb2a7", size = 250545 }, { url = "https://files.pythonhosted.org/packages/71/2f/91dbac13e0ba94669ea5119ba267c9a832f0cb65419aca75549fcf09a3dc/multidict-6.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3e34f3a1b8131ba06f1a73adab24f30934d148afcd5f5de9a73565a4404384e", size = 258305 }, { url = "https://files.pythonhosted.org/packages/ef/b0/754038b26f6e04488b48ac621f779c341338d78503fb45403755af2df477/multidict-6.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:efbb54e98446892590dc2458c19c10344ee9a883a79b5cec4bc34d6656e8d546", size = 242363 }, { url = "https://files.pythonhosted.org/packages/87/15/9da40b9336a7c9fa606c4cf2ed80a649dffeb42b905d4f63a1d7eb17d746/multidict-6.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a35c5fc61d4f51eb045061e7967cfe3123d622cd500e8868e7c0c592a09fedc4", size = 268375 }, { url = "https://files.pythonhosted.org/packages/82/72/c53fcade0cc94dfaad583105fd92b3a783af2091eddcb41a6d5a52474000/multidict-6.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29fe6740ebccba4175af1b9b87bf553e9c15cd5868ee967e010efcf94e4fd0f1", size = 269346 }, { url = 
"https://files.pythonhosted.org/packages/0d/e2/9baffdae21a76f77ef8447f1a05a96ec4bc0a24dae08767abc0a2fe680b8/multidict-6.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:123e2a72e20537add2f33a79e605f6191fba2afda4cbb876e35c1a7074298a7d", size = 256107 }, { url = "https://files.pythonhosted.org/packages/3c/06/3f06f611087dc60d65ef775f1fb5aca7c6d61c6db4990e7cda0cef9b1651/multidict-6.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b284e319754366c1aee2267a2036248b24eeb17ecd5dc16022095e747f2f4304", size = 253592 }, { url = "https://files.pythonhosted.org/packages/20/24/54e804ec7945b6023b340c412ce9c3f81e91b3bf5fa5ce65558740141bee/multidict-6.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:803d685de7be4303b5a657b76e2f6d1240e7e0a8aa2968ad5811fa2285553a12", size = 251024 }, { url = "https://files.pythonhosted.org/packages/14/48/011cba467ea0b17ceb938315d219391d3e421dfd35928e5dbdc3f4ae76ef/multidict-6.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c04a328260dfd5db8c39538f999f02779012268f54614902d0afc775d44e0a62", size = 251484 }, { url = "https://files.pythonhosted.org/packages/0d/2f/919258b43bb35b99fa127435cfb2d91798eb3a943396631ef43e3720dcf4/multidict-6.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0", size = 263579 }, { url = "https://files.pythonhosted.org/packages/31/22/a0e884d86b5242b5a74cf08e876bdf299e413016b66e55511f7a804a366e/multidict-6.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a", size = 259654 }, { url = "https://files.pythonhosted.org/packages/b2/e5/17e10e1b5c5f5a40f2fcbb45953c9b215f8a4098003915e46a93f5fcaa8f/multidict-6.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8", size = 251511 }, { url = "https://files.pythonhosted.org/packages/e3/9a/201bb1e17e7af53139597069c375e7b0dcbd47594604f65c2d5359508566/multidict-6.7.0-cp312-cp312-win32.whl", hash = "sha256:dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4", size = 41895 }, { url = "https://files.pythonhosted.org/packages/46/e2/348cd32faad84eaf1d20cce80e2bb0ef8d312c55bca1f7fa9865e7770aaf/multidict-6.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b", size = 46073 }, { url = "https://files.pythonhosted.org/packages/25/ec/aad2613c1910dce907480e0c3aa306905830f25df2e54ccc9dea450cb5aa/multidict-6.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec", size = 43226 }, { url = "https://files.pythonhosted.org/packages/d2/86/33272a544eeb36d66e4d9a920602d1a2f57d4ebea4ef3cdfe5a912574c95/multidict-6.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bee7c0588aa0076ce77c0ea5d19a68d76ad81fcd9fe8501003b9a24f9d4000f6", size = 76135 }, { url = "https://files.pythonhosted.org/packages/91/1c/eb97db117a1ebe46d457a3d235a7b9d2e6dcab174f42d1b67663dd9e5371/multidict-6.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7ef6b61cad77091056ce0e7ce69814ef72afacb150b7ac6a3e9470def2198159", size = 45117 }, { url = "https://files.pythonhosted.org/packages/f1/d8/6c3442322e41fb1dd4de8bd67bfd11cd72352ac131f6368315617de752f1/multidict-6.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c0359b1ec12b1d6849c59f9d319610b7f20ef990a6d454ab151aa0e3b9f78ca", size = 43472 }, { url = 
"https://files.pythonhosted.org/packages/75/3f/e2639e80325af0b6c6febdf8e57cc07043ff15f57fa1ef808f4ccb5ac4cd/multidict-6.7.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cd240939f71c64bd658f186330603aac1a9a81bf6273f523fca63673cb7378a8", size = 249342 }, { url = "https://files.pythonhosted.org/packages/5d/cc/84e0585f805cbeaa9cbdaa95f9a3d6aed745b9d25700623ac89a6ecff400/multidict-6.7.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a60a4d75718a5efa473ebd5ab685786ba0c67b8381f781d1be14da49f1a2dc60", size = 257082 }, { url = "https://files.pythonhosted.org/packages/b0/9c/ac851c107c92289acbbf5cfb485694084690c1b17e555f44952c26ddc5bd/multidict-6.7.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53a42d364f323275126aff81fb67c5ca1b7a04fda0546245730a55c8c5f24bc4", size = 240704 }, { url = "https://files.pythonhosted.org/packages/50/cc/5f93e99427248c09da95b62d64b25748a5f5c98c7c2ab09825a1d6af0e15/multidict-6.7.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3b29b980d0ddbecb736735ee5bef69bb2ddca56eff603c86f3f29a1128299b4f", size = 266355 }, { url = "https://files.pythonhosted.org/packages/ec/0c/2ec1d883ceb79c6f7f6d7ad90c919c898f5d1c6ea96d322751420211e072/multidict-6.7.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f8a93b1c0ed2d04b97a5e9336fd2d33371b9a6e29ab7dd6503d63407c20ffbaf", size = 267259 }, { url = "https://files.pythonhosted.org/packages/c6/2d/f0b184fa88d6630aa267680bdb8623fb69cb0d024b8c6f0d23f9a0f406d3/multidict-6.7.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ff96e8815eecacc6645da76c413eb3b3d34cfca256c70b16b286a687d013c32", size = 254903 }, { url = "https://files.pythonhosted.org/packages/06/c9/11ea263ad0df7dfabcad404feb3c0dd40b131bc7f232d5537f2fb1356951/multidict-6.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7516c579652f6a6be0e266aec0acd0db80829ca305c3d771ed898538804c2036", size = 252365 }, { url = "https://files.pythonhosted.org/packages/41/88/d714b86ee2c17d6e09850c70c9d310abac3d808ab49dfa16b43aba9d53fd/multidict-6.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:040f393368e63fb0f3330e70c26bfd336656bed925e5cbe17c9da839a6ab13ec", size = 250062 }, { url = "https://files.pythonhosted.org/packages/15/fe/ad407bb9e818c2b31383f6131ca19ea7e35ce93cf1310fce69f12e89de75/multidict-6.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b3bc26a951007b1057a1c543af845f1c7e3e71cc240ed1ace7bf4484aa99196e", size = 249683 }, { url = "https://files.pythonhosted.org/packages/8c/a4/a89abdb0229e533fb925e7c6e5c40201c2873efebc9abaf14046a4536ee6/multidict-6.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7b022717c748dd1992a83e219587aabe45980d88969f01b316e78683e6285f64", size = 261254 }, { url = "https://files.pythonhosted.org/packages/8d/aa/0e2b27bd88b40a4fb8dc53dd74eecac70edaa4c1dd0707eb2164da3675b3/multidict-6.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:9600082733859f00d79dee64effc7aef1beb26adb297416a4ad2116fd61374bd", size = 257967 }, { url = "https://files.pythonhosted.org/packages/d0/8e/0c67b7120d5d5f6d874ed85a085f9dc770a7f9d8813e80f44a9fec820bb7/multidict-6.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94218fcec4d72bc61df51c198d098ce2b378e0ccbac41ddbed5ef44092913288", size = 250085 }, { url = 
"https://files.pythonhosted.org/packages/ba/55/b73e1d624ea4b8fd4dd07a3bb70f6e4c7c6c5d9d640a41c6ffe5cdbd2a55/multidict-6.7.0-cp313-cp313-win32.whl", hash = "sha256:a37bd74c3fa9d00be2d7b8eca074dc56bd8077ddd2917a839bd989612671ed17", size = 41713 }, { url = "https://files.pythonhosted.org/packages/32/31/75c59e7d3b4205075b4c183fa4ca398a2daf2303ddf616b04ae6ef55cffe/multidict-6.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:30d193c6cc6d559db42b6bcec8a5d395d34d60c9877a0b71ecd7c204fcf15390", size = 45915 }, { url = "https://files.pythonhosted.org/packages/31/2a/8987831e811f1184c22bc2e45844934385363ee61c0a2dcfa8f71b87e608/multidict-6.7.0-cp313-cp313-win_arm64.whl", hash = "sha256:ea3334cabe4d41b7ccd01e4d349828678794edbc2d3ae97fc162a3312095092e", size = 43077 }, { url = "https://files.pythonhosted.org/packages/e8/68/7b3a5170a382a340147337b300b9eb25a9ddb573bcdfff19c0fa3f31ffba/multidict-6.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:ad9ce259f50abd98a1ca0aa6e490b58c316a0fce0617f609723e40804add2c00", size = 83114 }, { url = "https://files.pythonhosted.org/packages/55/5c/3fa2d07c84df4e302060f555bbf539310980362236ad49f50eeb0a1c1eb9/multidict-6.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07f5594ac6d084cbb5de2df218d78baf55ef150b91f0ff8a21cc7a2e3a5a58eb", size = 48442 }, { url = "https://files.pythonhosted.org/packages/fc/56/67212d33239797f9bd91962bb899d72bb0f4c35a8652dcdb8ed049bef878/multidict-6.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0591b48acf279821a579282444814a2d8d0af624ae0bc600aa4d1b920b6e924b", size = 46885 }, { url = "https://files.pythonhosted.org/packages/46/d1/908f896224290350721597a61a69cd19b89ad8ee0ae1f38b3f5cd12ea2ac/multidict-6.7.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:749a72584761531d2b9467cfbdfd29487ee21124c304c4b6cb760d8777b27f9c", size = 242588 }, { url = "https://files.pythonhosted.org/packages/ab/67/8604288bbd68680eee0ab568fdcb56171d8b23a01bcd5cb0c8fedf6e5d99/multidict-6.7.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b4c3d199f953acd5b446bf7c0de1fe25d94e09e79086f8dc2f48a11a129cdf1", size = 249966 }, { url = "https://files.pythonhosted.org/packages/20/33/9228d76339f1ba51e3efef7da3ebd91964d3006217aae13211653193c3ff/multidict-6.7.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9fb0211dfc3b51efea2f349ec92c114d7754dd62c01f81c3e32b765b70c45c9b", size = 228618 }, { url = "https://files.pythonhosted.org/packages/f8/2d/25d9b566d10cab1c42b3b9e5b11ef79c9111eaf4463b8c257a3bd89e0ead/multidict-6.7.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a027ec240fe73a8d6281872690b988eed307cd7d91b23998ff35ff577ca688b5", size = 257539 }, { url = "https://files.pythonhosted.org/packages/b6/b1/8d1a965e6637fc33de3c0d8f414485c2b7e4af00f42cab3d84e7b955c222/multidict-6.7.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1d964afecdf3a8288789df2f5751dc0a8261138c3768d9af117ed384e538fad", size = 256345 }, { url = "https://files.pythonhosted.org/packages/ba/0c/06b5a8adbdeedada6f4fb8d8f193d44a347223b11939b42953eeb6530b6b/multidict-6.7.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:caf53b15b1b7df9fbd0709aa01409000a2b4dd03a5f6f5cc548183c7c8f8b63c", size = 247934 }, { url = 
"https://files.pythonhosted.org/packages/8f/31/b2491b5fe167ca044c6eb4b8f2c9f3b8a00b24c432c365358eadac5d7625/multidict-6.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:654030da3197d927f05a536a66186070e98765aa5142794c9904555d3a9d8fb5", size = 245243 }, { url = "https://files.pythonhosted.org/packages/61/1a/982913957cb90406c8c94f53001abd9eafc271cb3e70ff6371590bec478e/multidict-6.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:2090d3718829d1e484706a2f525e50c892237b2bf9b17a79b059cb98cddc2f10", size = 235878 }, { url = "https://files.pythonhosted.org/packages/be/c0/21435d804c1a1cf7a2608593f4d19bca5bcbd7a81a70b253fdd1c12af9c0/multidict-6.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2d2cfeec3f6f45651b3d408c4acec0ebf3daa9bc8a112a084206f5db5d05b754", size = 243452 }, { url = "https://files.pythonhosted.org/packages/54/0a/4349d540d4a883863191be6eb9a928846d4ec0ea007d3dcd36323bb058ac/multidict-6.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:4ef089f985b8c194d341eb2c24ae6e7408c9a0e2e5658699c92f497437d88c3c", size = 252312 }, { url = "https://files.pythonhosted.org/packages/26/64/d5416038dbda1488daf16b676e4dbfd9674dde10a0cc8f4fc2b502d8125d/multidict-6.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e93a0617cd16998784bf4414c7e40f17a35d2350e5c6f0bd900d3a8e02bd3762", size = 246935 }, { url = "https://files.pythonhosted.org/packages/9f/8c/8290c50d14e49f35e0bd4abc25e1bc7711149ca9588ab7d04f886cdf03d9/multidict-6.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f0feece2ef8ebc42ed9e2e8c78fc4aa3cf455733b507c09ef7406364c94376c6", size = 243385 }, { url = "https://files.pythonhosted.org/packages/ef/a0/f83ae75e42d694b3fbad3e047670e511c138be747bc713cf1b10d5096416/multidict-6.7.0-cp313-cp313t-win32.whl", hash = "sha256:19a1d55338ec1be74ef62440ca9e04a2f001a04d0cc49a4983dc320ff0f3212d", size = 47777 }, { url = "https://files.pythonhosted.org/packages/dc/80/9b174a92814a3830b7357307a792300f42c9e94664b01dee8e457551fa66/multidict-6.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3da4fb467498df97e986af166b12d01f05d2e04f978a9c1c680ea1988e0bc4b6", size = 53104 }, { url = "https://files.pythonhosted.org/packages/cc/28/04baeaf0428d95bb7a7bea0e691ba2f31394338ba424fb0679a9ed0f4c09/multidict-6.7.0-cp313-cp313t-win_arm64.whl", hash = "sha256:b4121773c49a0776461f4a904cdf6264c88e42218aaa8407e803ca8025872792", size = 45503 }, { url = "https://files.pythonhosted.org/packages/e2/b1/3da6934455dd4b261d4c72f897e3a5728eba81db59959f3a639245891baa/multidict-6.7.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3bab1e4aff7adaa34410f93b1f8e57c4b36b9af0426a76003f441ee1d3c7e842", size = 75128 }, { url = "https://files.pythonhosted.org/packages/14/2c/f069cab5b51d175a1a2cb4ccdf7a2c2dabd58aa5bd933fa036a8d15e2404/multidict-6.7.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b8512bac933afc3e45fb2b18da8e59b78d4f408399a960339598374d4ae3b56b", size = 44410 }, { url = "https://files.pythonhosted.org/packages/42/e2/64bb41266427af6642b6b128e8774ed84c11b80a90702c13ac0a86bb10cc/multidict-6.7.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:79dcf9e477bc65414ebfea98ffd013cb39552b5ecd62908752e0e413d6d06e38", size = 43205 }, { url = "https://files.pythonhosted.org/packages/02/68/6b086fef8a3f1a8541b9236c594f0c9245617c29841f2e0395d979485cde/multidict-6.7.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:31bae522710064b5cbeddaf2e9f32b1abab70ac6ac91d42572502299e9953128", size = 245084 }, { url = 
"https://files.pythonhosted.org/packages/15/ee/f524093232007cd7a75c1d132df70f235cfd590a7c9eaccd7ff422ef4ae8/multidict-6.7.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a0df7ff02397bb63e2fd22af2c87dfa39e8c7f12947bc524dbdc528282c7e34", size = 252667 }, { url = "https://files.pythonhosted.org/packages/02/a5/eeb3f43ab45878f1895118c3ef157a480db58ede3f248e29b5354139c2c9/multidict-6.7.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7a0222514e8e4c514660e182d5156a415c13ef0aabbd71682fc714e327b95e99", size = 233590 }, { url = "https://files.pythonhosted.org/packages/6a/1e/76d02f8270b97269d7e3dbd45644b1785bda457b474315f8cf999525a193/multidict-6.7.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2397ab4daaf2698eb51a76721e98db21ce4f52339e535725de03ea962b5a3202", size = 264112 }, { url = "https://files.pythonhosted.org/packages/76/0b/c28a70ecb58963847c2a8efe334904cd254812b10e535aefb3bcce513918/multidict-6.7.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8891681594162635948a636c9fe0ff21746aeb3dd5463f6e25d9bea3a8a39ca1", size = 261194 }, { url = "https://files.pythonhosted.org/packages/b4/63/2ab26e4209773223159b83aa32721b4021ffb08102f8ac7d689c943fded1/multidict-6.7.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18706cc31dbf402a7945916dd5cddf160251b6dab8a2c5f3d6d5a55949f676b3", size = 248510 }, { url = "https://files.pythonhosted.org/packages/93/cd/06c1fa8282af1d1c46fd55c10a7930af652afdce43999501d4d68664170c/multidict-6.7.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f844a1bbf1d207dd311a56f383f7eda2d0e134921d45751842d8235e7778965d", size = 248395 }, { url = "https://files.pythonhosted.org/packages/99/ac/82cb419dd6b04ccf9e7e61befc00c77614fc8134362488b553402ecd55ce/multidict-6.7.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d4393e3581e84e5645506923816b9cc81f5609a778c7e7534054091acc64d1c6", size = 239520 }, { url = "https://files.pythonhosted.org/packages/fa/f3/a0f9bf09493421bd8716a362e0cd1d244f5a6550f5beffdd6b47e885b331/multidict-6.7.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:fbd18dc82d7bf274b37aa48d664534330af744e03bccf696d6f4c6042e7d19e7", size = 245479 }, { url = "https://files.pythonhosted.org/packages/8d/01/476d38fc73a212843f43c852b0eee266b6971f0e28329c2184a8df90c376/multidict-6.7.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:b6234e14f9314731ec45c42fc4554b88133ad53a09092cc48a88e771c125dadb", size = 258903 }, { url = "https://files.pythonhosted.org/packages/49/6d/23faeb0868adba613b817d0e69c5f15531b24d462af8012c4f6de4fa8dc3/multidict-6.7.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:08d4379f9744d8f78d98c8673c06e202ffa88296f009c71bbafe8a6bf847d01f", size = 252333 }, { url = "https://files.pythonhosted.org/packages/1e/cc/48d02ac22b30fa247f7dad82866e4b1015431092f4ba6ebc7e77596e0b18/multidict-6.7.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9fe04da3f79387f450fd0061d4dd2e45a72749d31bf634aecc9e27f24fdc4b3f", size = 243411 }, { url = "https://files.pythonhosted.org/packages/4a/03/29a8bf5a18abf1fe34535c88adbdfa88c9fb869b5a3b120692c64abe8284/multidict-6.7.0-cp314-cp314-win32.whl", hash = "sha256:fbafe31d191dfa7c4c51f7a6149c9fb7e914dcf9ffead27dcfd9f1ae382b3885", size = 40940 }, { url = 
"https://files.pythonhosted.org/packages/82/16/7ed27b680791b939de138f906d5cf2b4657b0d45ca6f5dd6236fdddafb1a/multidict-6.7.0-cp314-cp314-win_amd64.whl", hash = "sha256:2f67396ec0310764b9222a1728ced1ab638f61aadc6226f17a71dd9324f9a99c", size = 45087 }, { url = "https://files.pythonhosted.org/packages/cd/3c/e3e62eb35a1950292fe39315d3c89941e30a9d07d5d2df42965ab041da43/multidict-6.7.0-cp314-cp314-win_arm64.whl", hash = "sha256:ba672b26069957ee369cfa7fc180dde1fc6f176eaf1e6beaf61fbebbd3d9c000", size = 42368 }, { url = "https://files.pythonhosted.org/packages/8b/40/cd499bd0dbc5f1136726db3153042a735fffd0d77268e2ee20d5f33c010f/multidict-6.7.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:c1dcc7524066fa918c6a27d61444d4ee7900ec635779058571f70d042d86ed63", size = 82326 }, { url = "https://files.pythonhosted.org/packages/13/8a/18e031eca251c8df76daf0288e6790561806e439f5ce99a170b4af30676b/multidict-6.7.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:27e0b36c2d388dc7b6ced3406671b401e84ad7eb0656b8f3a2f46ed0ce483718", size = 48065 }, { url = "https://files.pythonhosted.org/packages/40/71/5e6701277470a87d234e433fb0a3a7deaf3bcd92566e421e7ae9776319de/multidict-6.7.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2a7baa46a22e77f0988e3b23d4ede5513ebec1929e34ee9495be535662c0dfe2", size = 46475 }, { url = "https://files.pythonhosted.org/packages/fe/6a/bab00cbab6d9cfb57afe1663318f72ec28289ea03fd4e8236bb78429893a/multidict-6.7.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7bf77f54997a9166a2f5675d1201520586439424c2511723a7312bdb4bcc034e", size = 239324 }, { url = "https://files.pythonhosted.org/packages/2a/5f/8de95f629fc22a7769ade8b41028e3e5a822c1f8904f618d175945a81ad3/multidict-6.7.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e011555abada53f1578d63389610ac8a5400fc70ce71156b0aa30d326f1a5064", size = 246877 }, { url = "https://files.pythonhosted.org/packages/23/b4/38881a960458f25b89e9f4a4fdcb02ac101cfa710190db6e5528841e67de/multidict-6.7.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:28b37063541b897fd6a318007373930a75ca6d6ac7c940dbe14731ffdd8d498e", size = 225824 }, { url = "https://files.pythonhosted.org/packages/1e/39/6566210c83f8a261575f18e7144736059f0c460b362e96e9cf797a24b8e7/multidict-6.7.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:05047ada7a2fde2631a0ed706f1fd68b169a681dfe5e4cf0f8e4cb6618bbc2cd", size = 253558 }, { url = "https://files.pythonhosted.org/packages/00/a3/67f18315100f64c269f46e6c0319fa87ba68f0f64f2b8e7fd7c72b913a0b/multidict-6.7.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:716133f7d1d946a4e1b91b1756b23c088881e70ff180c24e864c26192ad7534a", size = 252339 }, { url = "https://files.pythonhosted.org/packages/c8/2a/1cb77266afee2458d82f50da41beba02159b1d6b1f7973afc9a1cad1499b/multidict-6.7.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d1bed1b467ef657f2a0ae62844a607909ef1c6889562de5e1d505f74457d0b96", size = 244895 }, { url = "https://files.pythonhosted.org/packages/dd/72/09fa7dd487f119b2eb9524946ddd36e2067c08510576d43ff68469563b3b/multidict-6.7.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ca43bdfa5d37bd6aee89d85e1d0831fb86e25541be7e9d376ead1b28974f8e5e", size = 241862 }, { url = 
"https://files.pythonhosted.org/packages/65/92/bc1f8bd0853d8669300f732c801974dfc3702c3eeadae2f60cef54dc69d7/multidict-6.7.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:44b546bd3eb645fd26fb949e43c02a25a2e632e2ca21a35e2e132c8105dc8599", size = 232376 }, { url = "https://files.pythonhosted.org/packages/09/86/ac39399e5cb9d0c2ac8ef6e10a768e4d3bc933ac808d49c41f9dc23337eb/multidict-6.7.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a6ef16328011d3f468e7ebc326f24c1445f001ca1dec335b2f8e66bed3006394", size = 240272 }, { url = "https://files.pythonhosted.org/packages/3d/b6/fed5ac6b8563ec72df6cb1ea8dac6d17f0a4a1f65045f66b6d3bf1497c02/multidict-6.7.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:5aa873cbc8e593d361ae65c68f85faadd755c3295ea2c12040ee146802f23b38", size = 248774 }, { url = "https://files.pythonhosted.org/packages/6b/8d/b954d8c0dc132b68f760aefd45870978deec6818897389dace00fcde32ff/multidict-6.7.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:3d7b6ccce016e29df4b7ca819659f516f0bc7a4b3efa3bb2012ba06431b044f9", size = 242731 }, { url = "https://files.pythonhosted.org/packages/16/9d/a2dac7009125d3540c2f54e194829ea18ac53716c61b655d8ed300120b0f/multidict-6.7.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:171b73bd4ee683d307599b66793ac80981b06f069b62eea1c9e29c9241aa66b0", size = 240193 }, { url = "https://files.pythonhosted.org/packages/39/ca/c05f144128ea232ae2178b008d5011d4e2cea86e4ee8c85c2631b1b94802/multidict-6.7.0-cp314-cp314t-win32.whl", hash = "sha256:b2d7f80c4e1fd010b07cb26820aae86b7e73b681ee4889684fb8d2d4537aab13", size = 48023 }, { url = "https://files.pythonhosted.org/packages/ba/8f/0a60e501584145588be1af5cc829265701ba3c35a64aec8e07cbb71d39bb/multidict-6.7.0-cp314-cp314t-win_amd64.whl", hash = "sha256:09929cab6fcb68122776d575e03c6cc64ee0b8fca48d17e135474b042ce515cd", size = 53507 }, { url = "https://files.pythonhosted.org/packages/7f/ae/3148b988a9c6239903e786eac19c889fab607c31d6efa7fb2147e5680f23/multidict-6.7.0-cp314-cp314t-win_arm64.whl", hash = "sha256:cc41db090ed742f32bd2d2c721861725e6109681eddf835d0a82bd3a5c382827", size = 44804 }, { url = "https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3", size = 12317 }, ] [[distribution]] name = "packaging" version = "25.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727 } wheels = [ { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469 }, ] [[distribution]] name = "paho-mqtt" version = "2.1.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/39/15/0a6214e76d4d32e7f663b109cf71fb22561c2be0f701d67f93950cd40542/paho_mqtt-2.1.0.tar.gz", hash = "sha256:12d6e7511d4137555a3f6ea167ae846af2c7357b10bc6fa4f7c3968fc1723834", size = 148848 } wheels = [ { url = "https://files.pythonhosted.org/packages/c4/cb/00451c3cf31790287768bb12c6bec834f5d292eaf3022afc88e14b8afc94/paho_mqtt-2.1.0-py3-none-any.whl", hash = 
"sha256:6db9ba9b34ed5bc6b6e3812718c7e06e2fd7444540df2455d2c51bd58808feee", size = 67219 }, ] [[distribution]] name = "pillow" version = "11.3.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/d0d6dea55cd152ce3d6767bb38a8fc10e33796ba4ba210cbab9354b6d238/pillow-11.3.0.tar.gz", hash = "sha256:3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523", size = 47113069 } wheels = [ { url = "https://files.pythonhosted.org/packages/db/26/77f8ed17ca4ffd60e1dcd220a6ec6d71210ba398cfa33a13a1cd614c5613/pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722", size = 5316531 }, { url = "https://files.pythonhosted.org/packages/cb/39/ee475903197ce709322a17a866892efb560f57900d9af2e55f86db51b0a5/pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288", size = 4686560 }, { url = "https://files.pythonhosted.org/packages/d5/90/442068a160fd179938ba55ec8c97050a612426fae5ec0a764e345839f76d/pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d", size = 5870978 }, { url = "https://files.pythonhosted.org/packages/13/92/dcdd147ab02daf405387f0218dcf792dc6dd5b14d2573d40b4caeef01059/pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494", size = 7641168 }, { url = "https://files.pythonhosted.org/packages/6e/db/839d6ba7fd38b51af641aa904e2960e7a5644d60ec754c046b7d2aee00e5/pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58", size = 5973053 }, { url = "https://files.pythonhosted.org/packages/f2/2f/d7675ecae6c43e9f12aa8d58b6012683b20b6edfbdac7abcb4e6af7a3784/pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f", size = 6640273 }, { url = "https://files.pythonhosted.org/packages/45/ad/931694675ede172e15b2ff03c8144a0ddaea1d87adb72bb07655eaffb654/pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e", size = 6082043 }, { url = "https://files.pythonhosted.org/packages/3a/04/ba8f2b11fc80d2dd462d7abec16351b45ec99cbbaea4387648a44190351a/pillow-11.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:932c754c2d51ad2b2271fd01c3d121daaa35e27efae2a616f77bf164bc0b3e94", size = 6715516 }, { url = "https://files.pythonhosted.org/packages/48/59/8cd06d7f3944cc7d892e8533c56b0acb68399f640786313275faec1e3b6f/pillow-11.3.0-cp311-cp311-win32.whl", hash = "sha256:b4b8f3efc8d530a1544e5962bd6b403d5f7fe8b9e08227c6b255f98ad82b4ba0", size = 6274768 }, { url = "https://files.pythonhosted.org/packages/f1/cc/29c0f5d64ab8eae20f3232da8f8571660aa0ab4b8f1331da5c2f5f9a938e/pillow-11.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:1a992e86b0dd7aeb1f053cd506508c0999d710a8f07b4c791c63843fc6a807ac", size = 6986055 }, { url = "https://files.pythonhosted.org/packages/c6/df/90bd886fabd544c25addd63e5ca6932c86f2b701d5da6c7839387a076b4a/pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd", size = 2423079 }, { url = 
"https://files.pythonhosted.org/packages/40/fe/1bc9b3ee13f68487a99ac9529968035cca2f0a51ec36892060edcc51d06a/pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4", size = 5278800 }, { url = "https://files.pythonhosted.org/packages/2c/32/7e2ac19b5713657384cec55f89065fb306b06af008cfd87e572035b27119/pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69", size = 4686296 }, { url = "https://files.pythonhosted.org/packages/8e/1e/b9e12bbe6e4c2220effebc09ea0923a07a6da1e1f1bfbc8d7d29a01ce32b/pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d", size = 5871726 }, { url = "https://files.pythonhosted.org/packages/8d/33/e9200d2bd7ba00dc3ddb78df1198a6e80d7669cce6c2bdbeb2530a74ec58/pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6", size = 7644652 }, { url = "https://files.pythonhosted.org/packages/41/f1/6f2427a26fc683e00d985bc391bdd76d8dd4e92fac33d841127eb8fb2313/pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7", size = 5977787 }, { url = "https://files.pythonhosted.org/packages/e4/c9/06dd4a38974e24f932ff5f98ea3c546ce3f8c995d3f0985f8e5ba48bba19/pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024", size = 6645236 }, { url = "https://files.pythonhosted.org/packages/40/e7/848f69fb79843b3d91241bad658e9c14f39a32f71a301bcd1d139416d1be/pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809", size = 6086950 }, { url = "https://files.pythonhosted.org/packages/0b/1a/7cff92e695a2a29ac1958c2a0fe4c0b2393b60aac13b04a4fe2735cad52d/pillow-11.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6be31e3fc9a621e071bc17bb7de63b85cbe0bfae91bb0363c893cbe67247780d", size = 6723358 }, { url = "https://files.pythonhosted.org/packages/26/7d/73699ad77895f69edff76b0f332acc3d497f22f5d75e5360f78cbcaff248/pillow-11.3.0-cp312-cp312-win32.whl", hash = "sha256:7b161756381f0918e05e7cb8a371fff367e807770f8fe92ecb20d905d0e1c149", size = 6275079 }, { url = "https://files.pythonhosted.org/packages/8c/ce/e7dfc873bdd9828f3b6e5c2bbb74e47a98ec23cc5c74fc4e54462f0d9204/pillow-11.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a6444696fce635783440b7f7a9fc24b3ad10a9ea3f0ab66c5905be1c19ccf17d", size = 6986324 }, { url = "https://files.pythonhosted.org/packages/16/8f/b13447d1bf0b1f7467ce7d86f6e6edf66c0ad7cf44cf5c87a37f9bed9936/pillow-11.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542", size = 2423067 }, { url = "https://files.pythonhosted.org/packages/1e/93/0952f2ed8db3a5a4c7a11f91965d6184ebc8cd7cbb7941a260d5f018cd2d/pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:1c627742b539bba4309df89171356fcb3cc5a9178355b2727d1b74a6cf155fbd", size = 2128328 }, { url = "https://files.pythonhosted.org/packages/4b/e8/100c3d114b1a0bf4042f27e0f87d2f25e857e838034e98ca98fe7b8c0a9c/pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:30b7c02f3899d10f13d7a48163c8969e4e653f8b43416d23d13d1bbfdc93b9f8", size = 2170652 
}, { url = "https://files.pythonhosted.org/packages/aa/86/3f758a28a6e381758545f7cdb4942e1cb79abd271bea932998fc0db93cb6/pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f", size = 2227443 }, { url = "https://files.pythonhosted.org/packages/01/f4/91d5b3ffa718df2f53b0dc109877993e511f4fd055d7e9508682e8aba092/pillow-11.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c", size = 5278474 }, { url = "https://files.pythonhosted.org/packages/f9/0e/37d7d3eca6c879fbd9dba21268427dffda1ab00d4eb05b32923d4fbe3b12/pillow-11.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd", size = 4686038 }, { url = "https://files.pythonhosted.org/packages/ff/b0/3426e5c7f6565e752d81221af9d3676fdbb4f352317ceafd42899aaf5d8a/pillow-11.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e", size = 5864407 }, { url = "https://files.pythonhosted.org/packages/fc/c1/c6c423134229f2a221ee53f838d4be9d82bab86f7e2f8e75e47b6bf6cd77/pillow-11.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1", size = 7639094 }, { url = "https://files.pythonhosted.org/packages/ba/c9/09e6746630fe6372c67c648ff9deae52a2bc20897d51fa293571977ceb5d/pillow-11.3.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805", size = 5973503 }, { url = "https://files.pythonhosted.org/packages/d5/1c/a2a29649c0b1983d3ef57ee87a66487fdeb45132df66ab30dd37f7dbe162/pillow-11.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8", size = 6642574 }, { url = "https://files.pythonhosted.org/packages/36/de/d5cc31cc4b055b6c6fd990e3e7f0f8aaf36229a2698501bcb0cdf67c7146/pillow-11.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2", size = 6084060 }, { url = "https://files.pythonhosted.org/packages/d5/ea/502d938cbaeec836ac28a9b730193716f0114c41325db428e6b280513f09/pillow-11.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:45dfc51ac5975b938e9809451c51734124e73b04d0f0ac621649821a63852e7b", size = 6721407 }, { url = "https://files.pythonhosted.org/packages/45/9c/9c5e2a73f125f6cbc59cc7087c8f2d649a7ae453f83bd0362ff7c9e2aee2/pillow-11.3.0-cp313-cp313-win32.whl", hash = "sha256:a4d336baed65d50d37b88ca5b60c0fa9d81e3a87d4a7930d3880d1624d5b31f3", size = 6273841 }, { url = "https://files.pythonhosted.org/packages/23/85/397c73524e0cd212067e0c969aa245b01d50183439550d24d9f55781b776/pillow-11.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0bce5c4fd0921f99d2e858dc4d4d64193407e1b99478bc5cacecba2311abde51", size = 6978450 }, { url = "https://files.pythonhosted.org/packages/17/d2/622f4547f69cd173955194b78e4d19ca4935a1b0f03a302d655c9f6aae65/pillow-11.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580", size = 2423055 }, { url = "https://files.pythonhosted.org/packages/dd/80/a8a2ac21dda2e82480852978416cfacd439a4b490a501a288ecf4fe2532d/pillow-11.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e", size = 
5281110 }, { url = "https://files.pythonhosted.org/packages/44/d6/b79754ca790f315918732e18f82a8146d33bcd7f4494380457ea89eb883d/pillow-11.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d", size = 4689547 }, { url = "https://files.pythonhosted.org/packages/49/20/716b8717d331150cb00f7fdd78169c01e8e0c219732a78b0e59b6bdb2fd6/pillow-11.3.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced", size = 5901554 }, { url = "https://files.pythonhosted.org/packages/74/cf/a9f3a2514a65bb071075063a96f0a5cf949c2f2fce683c15ccc83b1c1cab/pillow-11.3.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c", size = 7669132 }, { url = "https://files.pythonhosted.org/packages/98/3c/da78805cbdbee9cb43efe8261dd7cc0b4b93f2ac79b676c03159e9db2187/pillow-11.3.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8", size = 6005001 }, { url = "https://files.pythonhosted.org/packages/6c/fa/ce044b91faecf30e635321351bba32bab5a7e034c60187fe9698191aef4f/pillow-11.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59", size = 6668814 }, { url = "https://files.pythonhosted.org/packages/7b/51/90f9291406d09bf93686434f9183aba27b831c10c87746ff49f127ee80cb/pillow-11.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe", size = 6113124 }, { url = "https://files.pythonhosted.org/packages/cd/5a/6fec59b1dfb619234f7636d4157d11fb4e196caeee220232a8d2ec48488d/pillow-11.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:83e1b0161c9d148125083a35c1c5a89db5b7054834fd4387499e06552035236c", size = 6747186 }, { url = "https://files.pythonhosted.org/packages/49/6b/00187a044f98255225f172de653941e61da37104a9ea60e4f6887717e2b5/pillow-11.3.0-cp313-cp313t-win32.whl", hash = "sha256:2a3117c06b8fb646639dce83694f2f9eac405472713fcb1ae887469c0d4f6788", size = 6277546 }, { url = "https://files.pythonhosted.org/packages/e8/5c/6caaba7e261c0d75bab23be79f1d06b5ad2a2ae49f028ccec801b0e853d6/pillow-11.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:857844335c95bea93fb39e0fa2726b4d9d758850b34075a7e3ff4f4fa3aa3b31", size = 6985102 }, { url = "https://files.pythonhosted.org/packages/f3/7e/b623008460c09a0cb38263c93b828c666493caee2eb34ff67f778b87e58c/pillow-11.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e", size = 2424803 }, { url = "https://files.pythonhosted.org/packages/73/f4/04905af42837292ed86cb1b1dabe03dce1edc008ef14c473c5c7e1443c5d/pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12", size = 5278520 }, { url = "https://files.pythonhosted.org/packages/41/b0/33d79e377a336247df6348a54e6d2a2b85d644ca202555e3faa0cf811ecc/pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a", size = 4686116 }, { url = "https://files.pythonhosted.org/packages/49/2d/ed8bc0ab219ae8768f529597d9509d184fe8a6c4741a6864fea334d25f3f/pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632", size = 5864597 }, { url = "https://files.pythonhosted.org/packages/b5/3d/b932bb4225c80b58dfadaca9d42d08d0b7064d2d1791b6a237f87f661834/pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673", size = 7638246 }, { url = "https://files.pythonhosted.org/packages/09/b5/0487044b7c096f1b48f0d7ad416472c02e0e4bf6919541b111efd3cae690/pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027", size = 5973336 }, { url = "https://files.pythonhosted.org/packages/a8/2d/524f9318f6cbfcc79fbc004801ea6b607ec3f843977652fdee4857a7568b/pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77", size = 6642699 }, { url = "https://files.pythonhosted.org/packages/6f/d2/a9a4f280c6aefedce1e8f615baaa5474e0701d86dd6f1dede66726462bbd/pillow-11.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874", size = 6083789 }, { url = "https://files.pythonhosted.org/packages/fe/54/86b0cd9dbb683a9d5e960b66c7379e821a19be4ac5810e2e5a715c09a0c0/pillow-11.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:98a9afa7b9007c67ed84c57c9e0ad86a6000da96eaa638e4f8abe5b65ff83f0a", size = 6720386 }, { url = "https://files.pythonhosted.org/packages/e7/95/88efcaf384c3588e24259c4203b909cbe3e3c2d887af9e938c2022c9dd48/pillow-11.3.0-cp314-cp314-win32.whl", hash = "sha256:02a723e6bf909e7cea0dac1b0e0310be9d7650cd66222a5f1c571455c0a45214", size = 6370911 }, { url = "https://files.pythonhosted.org/packages/2e/cc/934e5820850ec5eb107e7b1a72dd278140731c669f396110ebc326f2a503/pillow-11.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:a418486160228f64dd9e9efcd132679b7a02a5f22c982c78b6fc7dab3fefb635", size = 7117383 }, { url = "https://files.pythonhosted.org/packages/d6/e9/9c0a616a71da2a5d163aa37405e8aced9a906d574b4a214bede134e731bc/pillow-11.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6", size = 2511385 }, { url = "https://files.pythonhosted.org/packages/1a/33/c88376898aff369658b225262cd4f2659b13e8178e7534df9e6e1fa289f6/pillow-11.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae", size = 5281129 }, { url = "https://files.pythonhosted.org/packages/1f/70/d376247fb36f1844b42910911c83a02d5544ebd2a8bad9efcc0f707ea774/pillow-11.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653", size = 4689580 }, { url = "https://files.pythonhosted.org/packages/eb/1c/537e930496149fbac69efd2fc4329035bbe2e5475b4165439e3be9cb183b/pillow-11.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6", size = 5902860 }, { url = "https://files.pythonhosted.org/packages/bd/57/80f53264954dcefeebcf9dae6e3eb1daea1b488f0be8b8fef12f79a3eb10/pillow-11.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36", size = 7670694 }, { url = 
"https://files.pythonhosted.org/packages/70/ff/4727d3b71a8578b4587d9c276e90efad2d6fe0335fd76742a6da08132e8c/pillow-11.3.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b", size = 6005888 }, { url = "https://files.pythonhosted.org/packages/05/ae/716592277934f85d3be51d7256f3636672d7b1abfafdc42cf3f8cbd4b4c8/pillow-11.3.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477", size = 6670330 }, { url = "https://files.pythonhosted.org/packages/e7/bb/7fe6cddcc8827b01b1a9766f5fdeb7418680744f9082035bdbabecf1d57f/pillow-11.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50", size = 6114089 }, { url = "https://files.pythonhosted.org/packages/8b/f5/06bfaa444c8e80f1a8e4bff98da9c83b37b5be3b1deaa43d27a0db37ef84/pillow-11.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a1bc6ba083b145187f648b667e05a2534ecc4b9f2784c2cbe3089e44868f2b9b", size = 6748206 }, { url = "https://files.pythonhosted.org/packages/f0/77/bc6f92a3e8e6e46c0ca78abfffec0037845800ea38c73483760362804c41/pillow-11.3.0-cp314-cp314t-win32.whl", hash = "sha256:118ca10c0d60b06d006be10a501fd6bbdfef559251ed31b794668ed569c87e12", size = 6377370 }, { url = "https://files.pythonhosted.org/packages/4a/82/3a721f7d69dca802befb8af08b7c79ebcab461007ce1c18bd91a5d5896f9/pillow-11.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:8924748b688aa210d79883357d102cd64690e56b923a186f35a82cbc10f997db", size = 7121500 }, { url = "https://files.pythonhosted.org/packages/89/c7/5572fa4a3f45740eaab6ae86fcdf7195b55beac1371ac8c619d880cfe948/pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa", size = 2512835 }, { url = "https://files.pythonhosted.org/packages/9e/e3/6fa84033758276fb31da12e5fb66ad747ae83b93c67af17f8c6ff4cc8f34/pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6", size = 5270566 }, { url = "https://files.pythonhosted.org/packages/5b/ee/e8d2e1ab4892970b561e1ba96cbd59c0d28cf66737fc44abb2aec3795a4e/pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438", size = 4654618 }, { url = "https://files.pythonhosted.org/packages/f2/6d/17f80f4e1f0761f02160fc433abd4109fa1548dcfdca46cfdadaf9efa565/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3", size = 4874248 }, { url = "https://files.pythonhosted.org/packages/de/5f/c22340acd61cef960130585bbe2120e2fd8434c214802f07e8c03596b17e/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c", size = 6583963 }, { url = "https://files.pythonhosted.org/packages/31/5e/03966aedfbfcbb4d5f8aa042452d3361f325b963ebbadddac05b122e47dd/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361", size = 4957170 }, { url = "https://files.pythonhosted.org/packages/cc/2d/e082982aacc927fc2cab48e1e731bdb1643a1406acace8bed0900a61464e/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7", size = 5581505 }, { url = "https://files.pythonhosted.org/packages/34/e7/ae39f538fd6844e982063c3a5e4598b8ced43b9633baa3a85ef33af8c05c/pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8", size = 6984598 }, ] [[distribution]] name = "propcache" version = "0.4.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442 } wheels = [ { url = "https://files.pythonhosted.org/packages/8c/d4/4e2c9aaf7ac2242b9358f98dccd8f90f2605402f5afeff6c578682c2c491/propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf", size = 80208 }, { url = "https://files.pythonhosted.org/packages/c2/21/d7b68e911f9c8e18e4ae43bdbc1e1e9bbd971f8866eb81608947b6f585ff/propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5", size = 45777 }, { url = "https://files.pythonhosted.org/packages/d3/1d/11605e99ac8ea9435651ee71ab4cb4bf03f0949586246476a25aadfec54a/propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e", size = 47647 }, { url = "https://files.pythonhosted.org/packages/58/1a/3c62c127a8466c9c843bccb503d40a273e5cc69838805f322e2826509e0d/propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566", size = 214929 }, { url = "https://files.pythonhosted.org/packages/56/b9/8fa98f850960b367c4b8fe0592e7fc341daa7a9462e925228f10a60cf74f/propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165", size = 221778 }, { url = "https://files.pythonhosted.org/packages/46/a6/0ab4f660eb59649d14b3d3d65c439421cf2f87fe5dd68591cbe3c1e78a89/propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc", size = 228144 }, { url = "https://files.pythonhosted.org/packages/52/6a/57f43e054fb3d3a56ac9fc532bc684fc6169a26c75c353e65425b3e56eef/propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48", size = 210030 }, { url = "https://files.pythonhosted.org/packages/40/e2/27e6feebb5f6b8408fa29f5efbb765cd54c153ac77314d27e457a3e993b7/propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570", size = 208252 }, { url = "https://files.pythonhosted.org/packages/9e/f8/91c27b22ccda1dbc7967f921c42825564fa5336a01ecd72eb78a9f4f53c2/propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85", size = 202064 }, { url = "https://files.pythonhosted.org/packages/f2/26/7f00bd6bd1adba5aafe5f4a66390f243acab58eab24ff1a08bebb2ef9d40/propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e", size = 212429 }, { url = "https://files.pythonhosted.org/packages/84/89/fd108ba7815c1117ddca79c228f3f8a15fc82a73bca8b142eb5de13b2785/propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757", size = 216727 }, { url = "https://files.pythonhosted.org/packages/79/37/3ec3f7e3173e73f1d600495d8b545b53802cbf35506e5732dd8578db3724/propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f", size = 205097 }, { url = "https://files.pythonhosted.org/packages/61/b0/b2631c19793f869d35f47d5a3a56fb19e9160d3c119f15ac7344fc3ccae7/propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1", size = 38084 }, { url = "https://files.pythonhosted.org/packages/f4/78/6cce448e2098e9f3bfc91bb877f06aa24b6ccace872e39c53b2f707c4648/propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6", size = 41637 }, { url = "https://files.pythonhosted.org/packages/9c/e9/754f180cccd7f51a39913782c74717c581b9cc8177ad0e949f4d51812383/propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239", size = 38064 }, { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061 }, { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037 }, { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324 }, { url = "https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505 }, { url = "https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242 }, { url = "https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474 }, { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575 }, { url = 
"https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736 }, { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size = 213019 }, { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376 }, { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988 }, { url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615 }, { url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066 }, { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655 }, { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789 }, { url = "https://files.pythonhosted.org/packages/bf/df/6d9c1b6ac12b003837dde8a10231a7344512186e87b36e855bef32241942/propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf", size = 77750 }, { url = "https://files.pythonhosted.org/packages/8b/e8/677a0025e8a2acf07d3418a2e7ba529c9c33caf09d3c1f25513023c1db56/propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311", size = 44780 }, { url = "https://files.pythonhosted.org/packages/89/a4/92380f7ca60f99ebae761936bc48a72a639e8a47b29050615eef757cb2a7/propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74", size = 46308 }, { url = "https://files.pythonhosted.org/packages/2d/48/c5ac64dee5262044348d1d78a5f85dd1a57464a60d30daee946699963eb3/propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe", size = 208182 }, { url = "https://files.pythonhosted.org/packages/c6/0c/cd762dd011a9287389a6a3eb43aa30207bde253610cca06824aeabfe9653/propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af", size = 211215 }, { 
url = "https://files.pythonhosted.org/packages/30/3e/49861e90233ba36890ae0ca4c660e95df565b2cd15d4a68556ab5865974e/propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c", size = 218112 }, { url = "https://files.pythonhosted.org/packages/f1/8b/544bc867e24e1bd48f3118cecd3b05c694e160a168478fa28770f22fd094/propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f", size = 204442 }, { url = "https://files.pythonhosted.org/packages/50/a6/4282772fd016a76d3e5c0df58380a5ea64900afd836cec2c2f662d1b9bb3/propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1", size = 199398 }, { url = "https://files.pythonhosted.org/packages/3e/ec/d8a7cd406ee1ddb705db2139f8a10a8a427100347bd698e7014351c7af09/propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24", size = 196920 }, { url = "https://files.pythonhosted.org/packages/f6/6c/f38ab64af3764f431e359f8baf9e0a21013e24329e8b85d2da32e8ed07ca/propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa", size = 203748 }, { url = "https://files.pythonhosted.org/packages/d6/e3/fa846bd70f6534d647886621388f0a265254d30e3ce47e5c8e6e27dbf153/propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61", size = 205877 }, { url = "https://files.pythonhosted.org/packages/e2/39/8163fc6f3133fea7b5f2827e8eba2029a0277ab2c5beee6c1db7b10fc23d/propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66", size = 199437 }, { url = "https://files.pythonhosted.org/packages/93/89/caa9089970ca49c7c01662bd0eeedfe85494e863e8043565aeb6472ce8fe/propcache-0.4.1-cp313-cp313-win32.whl", hash = "sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81", size = 37586 }, { url = "https://files.pythonhosted.org/packages/f5/ab/f76ec3c3627c883215b5c8080debb4394ef5a7a29be811f786415fc1e6fd/propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e", size = 40790 }, { url = "https://files.pythonhosted.org/packages/59/1b/e71ae98235f8e2ba5004d8cb19765a74877abf189bc53fc0c80d799e56c3/propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1", size = 37158 }, { url = "https://files.pythonhosted.org/packages/83/ce/a31bbdfc24ee0dcbba458c8175ed26089cf109a55bbe7b7640ed2470cfe9/propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b", size = 81451 }, { url = "https://files.pythonhosted.org/packages/25/9c/442a45a470a68456e710d96cacd3573ef26a1d0a60067e6a7d5e655621ed/propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566", size = 46374 }, { url = "https://files.pythonhosted.org/packages/f4/bf/b1d5e21dbc3b2e889ea4327044fb16312a736d97640fb8b6aa3f9c7b3b65/propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835", size = 48396 }, { 
url = "https://files.pythonhosted.org/packages/f4/04/5b4c54a103d480e978d3c8a76073502b18db0c4bc17ab91b3cb5092ad949/propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e", size = 275950 }, { url = "https://files.pythonhosted.org/packages/b4/c1/86f846827fb969c4b78b0af79bba1d1ea2156492e1b83dea8b8a6ae27395/propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859", size = 273856 }, { url = "https://files.pythonhosted.org/packages/36/1d/fc272a63c8d3bbad6878c336c7a7dea15e8f2d23a544bda43205dfa83ada/propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b", size = 280420 }, { url = "https://files.pythonhosted.org/packages/07/0c/01f2219d39f7e53d52e5173bcb09c976609ba30209912a0680adfb8c593a/propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0", size = 263254 }, { url = "https://files.pythonhosted.org/packages/2d/18/cd28081658ce597898f0c4d174d4d0f3c5b6d4dc27ffafeef835c95eb359/propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af", size = 261205 }, { url = "https://files.pythonhosted.org/packages/7a/71/1f9e22eb8b8316701c2a19fa1f388c8a3185082607da8e406a803c9b954e/propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393", size = 247873 }, { url = "https://files.pythonhosted.org/packages/4a/65/3d4b61f36af2b4eddba9def857959f1016a51066b4f1ce348e0cf7881f58/propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874", size = 262739 }, { url = "https://files.pythonhosted.org/packages/2a/42/26746ab087faa77c1c68079b228810436ccd9a5ce9ac85e2b7307195fd06/propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7", size = 263514 }, { url = "https://files.pythonhosted.org/packages/94/13/630690fe201f5502d2403dd3cfd451ed8858fe3c738ee88d095ad2ff407b/propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1", size = 257781 }, { url = "https://files.pythonhosted.org/packages/92/f7/1d4ec5841505f423469efbfc381d64b7b467438cd5a4bbcbb063f3b73d27/propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717", size = 41396 }, { url = "https://files.pythonhosted.org/packages/48/f0/615c30622316496d2cbbc29f5985f7777d3ada70f23370608c1d3e081c1f/propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37", size = 44897 }, { url = "https://files.pythonhosted.org/packages/fd/ca/6002e46eccbe0e33dcd4069ef32f7f1c9e243736e07adca37ae8c4830ec3/propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a", size = 39789 }, { url = 
"https://files.pythonhosted.org/packages/8e/5c/bca52d654a896f831b8256683457ceddd490ec18d9ec50e97dfd8fc726a8/propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12", size = 78152 }, { url = "https://files.pythonhosted.org/packages/65/9b/03b04e7d82a5f54fb16113d839f5ea1ede58a61e90edf515f6577c66fa8f/propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c", size = 44869 }, { url = "https://files.pythonhosted.org/packages/b2/fa/89a8ef0468d5833a23fff277b143d0573897cf75bd56670a6d28126c7d68/propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded", size = 46596 }, { url = "https://files.pythonhosted.org/packages/86/bd/47816020d337f4a746edc42fe8d53669965138f39ee117414c7d7a340cfe/propcache-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641", size = 206981 }, { url = "https://files.pythonhosted.org/packages/df/f6/c5fa1357cc9748510ee55f37173eb31bfde6d94e98ccd9e6f033f2fc06e1/propcache-0.4.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4", size = 211490 }, { url = "https://files.pythonhosted.org/packages/80/1e/e5889652a7c4a3846683401a48f0f2e5083ce0ec1a8a5221d8058fbd1adf/propcache-0.4.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44", size = 215371 }, { url = "https://files.pythonhosted.org/packages/b2/f2/889ad4b2408f72fe1a4f6a19491177b30ea7bf1a0fd5f17050ca08cfc882/propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d", size = 201424 }, { url = "https://files.pythonhosted.org/packages/27/73/033d63069b57b0812c8bd19f311faebeceb6ba31b8f32b73432d12a0b826/propcache-0.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b", size = 197566 }, { url = "https://files.pythonhosted.org/packages/dc/89/ce24f3dc182630b4e07aa6d15f0ff4b14ed4b9955fae95a0b54c58d66c05/propcache-0.4.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e", size = 193130 }, { url = "https://files.pythonhosted.org/packages/a9/24/ef0d5fd1a811fb5c609278d0209c9f10c35f20581fcc16f818da959fc5b4/propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f", size = 202625 }, { url = "https://files.pythonhosted.org/packages/f5/02/98ec20ff5546f68d673df2f7a69e8c0d076b5abd05ca882dc7ee3a83653d/propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49", size = 204209 }, { url = "https://files.pythonhosted.org/packages/a0/87/492694f76759b15f0467a2a93ab68d32859672b646aa8a04ce4864e7932d/propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144", size = 197797 }, { url = 
"https://files.pythonhosted.org/packages/ee/36/66367de3575db1d2d3f3d177432bd14ee577a39d3f5d1b3d5df8afe3b6e2/propcache-0.4.1-cp314-cp314-win32.whl", hash = "sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f", size = 38140 }, { url = "https://files.pythonhosted.org/packages/0c/2a/a758b47de253636e1b8aef181c0b4f4f204bf0dd964914fb2af90a95b49b/propcache-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153", size = 41257 }, { url = "https://files.pythonhosted.org/packages/34/5e/63bd5896c3fec12edcbd6f12508d4890d23c265df28c74b175e1ef9f4f3b/propcache-0.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992", size = 38097 }, { url = "https://files.pythonhosted.org/packages/99/85/9ff785d787ccf9bbb3f3106f79884a130951436f58392000231b4c737c80/propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f", size = 81455 }, { url = "https://files.pythonhosted.org/packages/90/85/2431c10c8e7ddb1445c1f7c4b54d886e8ad20e3c6307e7218f05922cad67/propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393", size = 46372 }, { url = "https://files.pythonhosted.org/packages/01/20/b0972d902472da9bcb683fa595099911f4d2e86e5683bcc45de60dd05dc3/propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0", size = 48411 }, { url = "https://files.pythonhosted.org/packages/e2/e3/7dc89f4f21e8f99bad3d5ddb3a3389afcf9da4ac69e3deb2dcdc96e74169/propcache-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a", size = 275712 }, { url = "https://files.pythonhosted.org/packages/20/67/89800c8352489b21a8047c773067644e3897f02ecbbd610f4d46b7f08612/propcache-0.4.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be", size = 273557 }, { url = "https://files.pythonhosted.org/packages/e2/a1/b52b055c766a54ce6d9c16d9aca0cad8059acd9637cdf8aa0222f4a026ef/propcache-0.4.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc", size = 280015 }, { url = "https://files.pythonhosted.org/packages/48/c8/33cee30bd890672c63743049f3c9e4be087e6780906bfc3ec58528be59c1/propcache-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a", size = 262880 }, { url = "https://files.pythonhosted.org/packages/0c/b1/8f08a143b204b418285c88b83d00edbd61afbc2c6415ffafc8905da7038b/propcache-0.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89", size = 260938 }, { url = "https://files.pythonhosted.org/packages/cf/12/96e4664c82ca2f31e1c8dff86afb867348979eb78d3cb8546a680287a1e9/propcache-0.4.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726", size = 247641 }, { url = "https://files.pythonhosted.org/packages/18/ed/e7a9cfca28133386ba52278136d42209d3125db08d0a6395f0cba0c0285c/propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash 
= "sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367", size = 262510 }, { url = "https://files.pythonhosted.org/packages/f5/76/16d8bf65e8845dd62b4e2b57444ab81f07f40caa5652b8969b87ddcf2ef6/propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36", size = 263161 }, { url = "https://files.pythonhosted.org/packages/e7/70/c99e9edb5d91d5ad8a49fa3c1e8285ba64f1476782fed10ab251ff413ba1/propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455", size = 257393 }, { url = "https://files.pythonhosted.org/packages/08/02/87b25304249a35c0915d236575bc3574a323f60b47939a2262b77632a3ee/propcache-0.4.1-cp314-cp314t-win32.whl", hash = "sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85", size = 42546 }, { url = "https://files.pythonhosted.org/packages/cb/ef/3c6ecf8b317aa982f309835e8f96987466123c6e596646d4e6a1dfcd080f/propcache-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1", size = 46259 }, { url = "https://files.pythonhosted.org/packages/c4/2d/346e946d4951f37eca1e4f55be0f0174c52cd70720f84029b02f296f4a38/propcache-0.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9", size = 40428 }, { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305 }, ] [[distribution]] name = "pycryptodome" version = "3.23.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/8e/a6/8452177684d5e906854776276ddd34eca30d1b1e15aa1ee9cefc289a33f5/pycryptodome-3.23.0.tar.gz", hash = "sha256:447700a657182d60338bab09fdb27518f8856aecd80ae4c6bdddb67ff5da44ef", size = 4921276 } wheels = [ { url = "https://files.pythonhosted.org/packages/04/5d/bdb09489b63cd34a976cc9e2a8d938114f7a53a74d3dd4f125ffa49dce82/pycryptodome-3.23.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:0011f7f00cdb74879142011f95133274741778abba114ceca229adbf8e62c3e4", size = 2495152 }, { url = "https://files.pythonhosted.org/packages/a7/ce/7840250ed4cc0039c433cd41715536f926d6e86ce84e904068eb3244b6a6/pycryptodome-3.23.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:90460fc9e088ce095f9ee8356722d4f10f86e5be06e2354230a9880b9c549aae", size = 1639348 }, { url = "https://files.pythonhosted.org/packages/ee/f0/991da24c55c1f688d6a3b5a11940567353f74590734ee4a64294834ae472/pycryptodome-3.23.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4764e64b269fc83b00f682c47443c2e6e85b18273712b98aa43bcb77f8570477", size = 2184033 }, { url = "https://files.pythonhosted.org/packages/54/16/0e11882deddf00f68b68dd4e8e442ddc30641f31afeb2bc25588124ac8de/pycryptodome-3.23.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb8f24adb74984aa0e5d07a2368ad95276cf38051fe2dc6605cbcf482e04f2a7", size = 2270142 }, { url = "https://files.pythonhosted.org/packages/d5/fc/4347fea23a3f95ffb931f383ff28b3f7b1fe868739182cb76718c0da86a1/pycryptodome-3.23.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d97618c9c6684a97ef7637ba43bdf6663a2e2e77efe0f863cce97a76af396446", size = 2309384 }, { url = 
"https://files.pythonhosted.org/packages/6e/d9/c5261780b69ce66d8cfab25d2797bd6e82ba0241804694cd48be41add5eb/pycryptodome-3.23.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9a53a4fe5cb075075d515797d6ce2f56772ea7e6a1e5e4b96cf78a14bac3d265", size = 2183237 }, { url = "https://files.pythonhosted.org/packages/5a/6f/3af2ffedd5cfa08c631f89452c6648c4d779e7772dfc388c77c920ca6bbf/pycryptodome-3.23.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:763d1d74f56f031788e5d307029caef067febf890cd1f8bf61183ae142f1a77b", size = 2343898 }, { url = "https://files.pythonhosted.org/packages/9a/dc/9060d807039ee5de6e2f260f72f3d70ac213993a804f5e67e0a73a56dd2f/pycryptodome-3.23.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:954af0e2bd7cea83ce72243b14e4fb518b18f0c1649b576d114973e2073b273d", size = 2269197 }, { url = "https://files.pythonhosted.org/packages/f9/34/e6c8ca177cb29dcc4967fef73f5de445912f93bd0343c9c33c8e5bf8cde8/pycryptodome-3.23.0-cp313-cp313t-win32.whl", hash = "sha256:257bb3572c63ad8ba40b89f6fc9d63a2a628e9f9708d31ee26560925ebe0210a", size = 1768600 }, { url = "https://files.pythonhosted.org/packages/e4/1d/89756b8d7ff623ad0160f4539da571d1f594d21ee6d68be130a6eccb39a4/pycryptodome-3.23.0-cp313-cp313t-win_amd64.whl", hash = "sha256:6501790c5b62a29fcb227bd6b62012181d886a767ce9ed03b303d1f22eb5c625", size = 1799740 }, { url = "https://files.pythonhosted.org/packages/5d/61/35a64f0feaea9fd07f0d91209e7be91726eb48c0f1bfc6720647194071e4/pycryptodome-3.23.0-cp313-cp313t-win_arm64.whl", hash = "sha256:9a77627a330ab23ca43b48b130e202582e91cc69619947840ea4d2d1be21eb39", size = 1703685 }, { url = "https://files.pythonhosted.org/packages/db/6c/a1f71542c969912bb0e106f64f60a56cc1f0fabecf9396f45accbe63fa68/pycryptodome-3.23.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:187058ab80b3281b1de11c2e6842a357a1f71b42cb1e15bce373f3d238135c27", size = 2495627 }, { url = "https://files.pythonhosted.org/packages/6e/4e/a066527e079fc5002390c8acdd3aca431e6ea0a50ffd7201551175b47323/pycryptodome-3.23.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:cfb5cd445280c5b0a4e6187a7ce8de5a07b5f3f897f235caa11f1f435f182843", size = 1640362 }, { url = "https://files.pythonhosted.org/packages/50/52/adaf4c8c100a8c49d2bd058e5b551f73dfd8cb89eb4911e25a0c469b6b4e/pycryptodome-3.23.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67bd81fcbe34f43ad9422ee8fd4843c8e7198dd88dd3d40e6de42ee65fbe1490", size = 2182625 }, { url = "https://files.pythonhosted.org/packages/5f/e9/a09476d436d0ff1402ac3867d933c61805ec2326c6ea557aeeac3825604e/pycryptodome-3.23.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8987bd3307a39bc03df5c8e0e3d8be0c4c3518b7f044b0f4c15d1aa78f52575", size = 2268954 }, { url = "https://files.pythonhosted.org/packages/f9/c5/ffe6474e0c551d54cab931918127c46d70cab8f114e0c2b5a3c071c2f484/pycryptodome-3.23.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa0698f65e5b570426fc31b8162ed4603b0c2841cbb9088e2b01641e3065915b", size = 2308534 }, { url = "https://files.pythonhosted.org/packages/18/28/e199677fc15ecf43010f2463fde4c1a53015d1fe95fb03bca2890836603a/pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:53ecbafc2b55353edcebd64bf5da94a2a2cdf5090a6915bcca6eca6cc452585a", size = 2181853 }, { url = "https://files.pythonhosted.org/packages/ce/ea/4fdb09f2165ce1365c9eaefef36625583371ee514db58dc9b65d3a255c4c/pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_i686.whl", hash = 
"sha256:156df9667ad9f2ad26255926524e1c136d6664b741547deb0a86a9acf5ea631f", size = 2342465 }, { url = "https://files.pythonhosted.org/packages/22/82/6edc3fc42fe9284aead511394bac167693fb2b0e0395b28b8bedaa07ef04/pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:dea827b4d55ee390dc89b2afe5927d4308a8b538ae91d9c6f7a5090f397af1aa", size = 2267414 }, { url = "https://files.pythonhosted.org/packages/59/fe/aae679b64363eb78326c7fdc9d06ec3de18bac68be4b612fc1fe8902693c/pycryptodome-3.23.0-cp37-abi3-win32.whl", hash = "sha256:507dbead45474b62b2bbe318eb1c4c8ee641077532067fec9c1aa82c31f84886", size = 1768484 }, { url = "https://files.pythonhosted.org/packages/54/2f/e97a1b8294db0daaa87012c24a7bb714147c7ade7656973fd6c736b484ff/pycryptodome-3.23.0-cp37-abi3-win_amd64.whl", hash = "sha256:c75b52aacc6c0c260f204cbdd834f76edc9fb0d8e0da9fbf8352ef58202564e2", size = 1799636 }, { url = "https://files.pythonhosted.org/packages/18/3d/f9441a0d798bf2b1e645adc3265e55706aead1255ccdad3856dbdcffec14/pycryptodome-3.23.0-cp37-abi3-win_arm64.whl", hash = "sha256:11eeeb6917903876f134b56ba11abe95c0b0fd5e3330def218083c7d98bbcb3c", size = 1703675 }, { url = "https://files.pythonhosted.org/packages/9f/7c/f5b0556590e7b4e710509105e668adb55aa9470a9f0e4dea9c40a4a11ce1/pycryptodome-3.23.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:350ebc1eba1da729b35ab7627a833a1a355ee4e852d8ba0447fafe7b14504d56", size = 1705791 }, { url = "https://files.pythonhosted.org/packages/33/38/dcc795578d610ea1aaffef4b148b8cafcfcf4d126b1e58231ddc4e475c70/pycryptodome-3.23.0-pp27-pypy_73-win32.whl", hash = "sha256:93837e379a3e5fd2bb00302a47aee9fdf7940d83595be3915752c74033d17ca7", size = 1780265 }, ] [[distribution]] name = "pycryptodomex" version = "3.23.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/c9/85/e24bf90972a30b0fcd16c73009add1d7d7cd9140c2498a68252028899e41/pycryptodomex-3.23.0.tar.gz", hash = "sha256:71909758f010c82bc99b0abf4ea12012c98962fbf0583c2164f8b84533c2e4da", size = 4922157 } wheels = [ { url = "https://files.pythonhosted.org/packages/2e/00/10edb04777069a42490a38c137099d4b17ba6e36a4e6e28bdc7470e9e853/pycryptodomex-3.23.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:7b37e08e3871efe2187bc1fd9320cc81d87caf19816c648f24443483005ff886", size = 2498764 }, { url = "https://files.pythonhosted.org/packages/6b/3f/2872a9c2d3a27eac094f9ceaa5a8a483b774ae69018040ea3240d5b11154/pycryptodomex-3.23.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:91979028227543010d7b2ba2471cf1d1e398b3f183cb105ac584df0c36dac28d", size = 1643012 }, { url = "https://files.pythonhosted.org/packages/70/af/774c2e2b4f6570fbf6a4972161adbb183aeeaa1863bde31e8706f123bf92/pycryptodomex-3.23.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b8962204c47464d5c1c4038abeadd4514a133b28748bcd9fa5b6d62e3cec6fa", size = 2187643 }, { url = "https://files.pythonhosted.org/packages/de/a3/71065b24cb889d537954cedc3ae5466af00a2cabcff8e29b73be047e9a19/pycryptodomex-3.23.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a33986a0066860f7fcf7c7bd2bc804fa90e434183645595ae7b33d01f3c91ed8", size = 2273762 }, { url = "https://files.pythonhosted.org/packages/c9/0b/ff6f43b7fbef4d302c8b981fe58467b8871902cdc3eb28896b52421422cc/pycryptodomex-3.23.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7947ab8d589e3178da3d7cdeabe14f841b391e17046954f2fbcd941705762b5", size = 2313012 }, { url = 
"https://files.pythonhosted.org/packages/02/de/9d4772c0506ab6da10b41159493657105d3f8bb5c53615d19452afc6b315/pycryptodomex-3.23.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c25e30a20e1b426e1f0fa00131c516f16e474204eee1139d1603e132acffc314", size = 2186856 }, { url = "https://files.pythonhosted.org/packages/28/ad/8b30efcd6341707a234e5eba5493700a17852ca1ac7a75daa7945fcf6427/pycryptodomex-3.23.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:da4fa650cef02db88c2b98acc5434461e027dce0ae8c22dd5a69013eaf510006", size = 2347523 }, { url = "https://files.pythonhosted.org/packages/0f/02/16868e9f655b7670dbb0ac4f2844145cbc42251f916fc35c414ad2359849/pycryptodomex-3.23.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:58b851b9effd0d072d4ca2e4542bf2a4abcf13c82a29fd2c93ce27ee2a2e9462", size = 2272825 }, { url = "https://files.pythonhosted.org/packages/ca/18/4ca89ac737230b52ac8ffaca42f9c6f1fd07c81a6cd821e91af79db60632/pycryptodomex-3.23.0-cp313-cp313t-win32.whl", hash = "sha256:a9d446e844f08299236780f2efa9898c818fe7e02f17263866b8550c7d5fb328", size = 1772078 }, { url = "https://files.pythonhosted.org/packages/73/34/13e01c322db027682e00986873eca803f11c56ade9ba5bbf3225841ea2d4/pycryptodomex-3.23.0-cp313-cp313t-win_amd64.whl", hash = "sha256:bc65bdd9fc8de7a35a74cab1c898cab391a4add33a8fe740bda00f5976ca4708", size = 1803656 }, { url = "https://files.pythonhosted.org/packages/54/68/9504c8796b1805d58f4425002bcca20f12880e6fa4dc2fc9a668705c7a08/pycryptodomex-3.23.0-cp313-cp313t-win_arm64.whl", hash = "sha256:c885da45e70139464f082018ac527fdaad26f1657a99ee13eecdce0f0ca24ab4", size = 1707172 }, { url = "https://files.pythonhosted.org/packages/dd/9c/1a8f35daa39784ed8adf93a694e7e5dc15c23c741bbda06e1d45f8979e9e/pycryptodomex-3.23.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:06698f957fe1ab229a99ba2defeeae1c09af185baa909a31a5d1f9d42b1aaed6", size = 2499240 }, { url = "https://files.pythonhosted.org/packages/7a/62/f5221a191a97157d240cf6643747558759126c76ee92f29a3f4aee3197a5/pycryptodomex-3.23.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:b2c2537863eccef2d41061e82a881dcabb04944c5c06c5aa7110b577cc487545", size = 1644042 }, { url = "https://files.pythonhosted.org/packages/8c/fd/5a054543c8988d4ed7b612721d7e78a4b9bf36bc3c5ad45ef45c22d0060e/pycryptodomex-3.23.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43c446e2ba8df8889e0e16f02211c25b4934898384c1ec1ec04d7889c0333587", size = 2186227 }, { url = "https://files.pythonhosted.org/packages/c8/a9/8862616a85cf450d2822dbd4fff1fcaba90877907a6ff5bc2672cafe42f8/pycryptodomex-3.23.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f489c4765093fb60e2edafdf223397bc716491b2b69fe74367b70d6999257a5c", size = 2272578 }, { url = "https://files.pythonhosted.org/packages/46/9f/bda9c49a7c1842820de674ab36c79f4fbeeee03f8ff0e4f3546c3889076b/pycryptodomex-3.23.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdc69d0d3d989a1029df0eed67cc5e8e5d968f3724f4519bd03e0ec68df7543c", size = 2312166 }, { url = "https://files.pythonhosted.org/packages/03/cc/870b9bf8ca92866ca0186534801cf8d20554ad2a76ca959538041b7a7cf4/pycryptodomex-3.23.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6bbcb1dd0f646484939e142462d9e532482bc74475cecf9c4903d4e1cd21f003", size = 2185467 }, { url = "https://files.pythonhosted.org/packages/96/e3/ce9348236d8e669fea5dd82a90e86be48b9c341210f44e25443162aba187/pycryptodomex-3.23.0-cp37-abi3-musllinux_1_2_i686.whl", hash = 
"sha256:8a4fcd42ccb04c31268d1efeecfccfd1249612b4de6374205376b8f280321744", size = 2346104 }, { url = "https://files.pythonhosted.org/packages/a5/e9/e869bcee87beb89040263c416a8a50204f7f7a83ac11897646c9e71e0daf/pycryptodomex-3.23.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:55ccbe27f049743a4caf4f4221b166560d3438d0b1e5ab929e07ae1702a4d6fd", size = 2271038 }, { url = "https://files.pythonhosted.org/packages/8d/67/09ee8500dd22614af5fbaa51a4aee6e342b5fa8aecf0a6cb9cbf52fa6d45/pycryptodomex-3.23.0-cp37-abi3-win32.whl", hash = "sha256:189afbc87f0b9f158386bf051f720e20fa6145975f1e76369303d0f31d1a8d7c", size = 1771969 }, { url = "https://files.pythonhosted.org/packages/69/96/11f36f71a865dd6df03716d33bd07a67e9d20f6b8d39820470b766af323c/pycryptodomex-3.23.0-cp37-abi3-win_amd64.whl", hash = "sha256:52e5ca58c3a0b0bd5e100a9fbc8015059b05cffc6c66ce9d98b4b45e023443b9", size = 1803124 }, { url = "https://files.pythonhosted.org/packages/f9/93/45c1cdcbeb182ccd2e144c693eaa097763b08b38cded279f0053ed53c553/pycryptodomex-3.23.0-cp37-abi3-win_arm64.whl", hash = "sha256:02d87b80778c171445d67e23d1caef279bf4b25c3597050ccd2e13970b57fd51", size = 1707161 }, { url = "https://files.pythonhosted.org/packages/da/5c/fbfa398a593ba914c9c9cd44194357c7f84a496f6a80a76bb03a7f032492/pycryptodomex-3.23.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:febec69c0291efd056c65691b6d9a339f8b4bc43c6635b8699471248fe897fea", size = 1709255 }, { url = "https://files.pythonhosted.org/packages/3a/08/598eba09b39626501d044b7689a4beaa5ba421b9e30a0a4f2d87f9332a4f/pycryptodomex-3.23.0-pp27-pypy_73-win32.whl", hash = "sha256:c84b239a1f4ec62e9c789aafe0543f0594f0acd90c8d9e15bcece3efe55eca66", size = 1783750 }, ] [[distribution]] name = "pyrate-limiter" version = "3.9.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/ec/da/f682c5c5f9f0a5414363eb4397e6b07d84a02cde69c4ceadcbf32c85537c/pyrate_limiter-3.9.0.tar.gz", hash = "sha256:6b882e2c77cda07a241d3730975daea4258344b39c878f1dd8849df73f70b0ce", size = 289308 } wheels = [ { url = "https://files.pythonhosted.org/packages/04/af/d8bf0959ece9bc4679bd203908c31019556a421d76d8143b0c6871c7f614/pyrate_limiter-3.9.0-py3-none-any.whl", hash = "sha256:77357840c8cf97a36d67005d4e090787043f54000c12c2b414ff65657653e378", size = 33628 }, ] [[distribution]] name = "pyshark" version = "0.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "appdirs" }, { name = "lxml" }, { name = "packaging" }, { name = "termcolor" }, ] sdist = { url = "https://files.pythonhosted.org/packages/e1/3c/0e6f306a8a0490bfe58d0683553b4c60c6dfcd2cd2c6a68b46673b177dd0/pyshark-0.6.tar.gz", hash = "sha256:a424d83e0ca6224a96bbe30cd3f89d5491654d783faaaf90adaf45867a0bcb17", size = 27053 } wheels = [ { url = "https://files.pythonhosted.org/packages/1d/d9/7884ff926c3d05ec65fdf84ac499fb0f088e440d02c606b0ac41645605de/pyshark-0.6-py3-none-any.whl", hash = "sha256:98e8a1ebdcbfbb6e8defd0c96736ea51bf8234339f980b15dd3545f87f5146d4", size = 41359 }, ] [[distribution]] name = "python-roborock" version = "2.54.0" source = { editable = "." 
} dependencies = [ { name = "aiohttp" }, { name = "aiomqtt" }, { name = "click" }, { name = "click-shell" }, { name = "construct" }, { name = "paho-mqtt" }, { name = "pycryptodome" }, { name = "pycryptodomex", marker = "sys_platform == 'darwin'" }, { name = "pyrate-limiter" }, { name = "vacuum-map-parser-roborock" }, ] [distribution.dev-dependencies] dev = [ { name = "pyshark" }, { name = "pyyaml" }, ] [[distribution]] name = "pyyaml" version = "6.0.3" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960 } wheels = [ { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826 }, { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577 }, { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556 }, { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114 }, { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638 }, { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463 }, { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986 }, { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543 }, { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763 }, { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063 }, { url = 
"https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973 }, { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116 }, { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011 }, { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870 }, { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089 }, { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181 }, { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658 }, { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003 }, { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344 }, { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669 }, { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252 }, { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081 }, { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159 }, { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626 }, { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613 }, { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115 }, { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427 }, { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090 }, { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246 }, { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814 }, { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809 }, { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454 }, { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355 }, { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175 }, { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228 }, { url = 
"https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194 }, { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429 }, { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912 }, { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108 }, { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641 }, { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901 }, { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132 }, { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261 }, { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272 }, { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923 }, { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062 }, { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341 }, ] [[distribution]] name = "termcolor" version = "3.1.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/ca/6c/3d75c196ac07ac8749600b60b03f4f6094d54e132c4d94ebac6ee0e0add0/termcolor-3.1.0.tar.gz", hash = 
"sha256:6a6dd7fbee581909eeec6a756cff1d7f7c376063b14e4a298dc4980309e55970", size = 14324 } wheels = [ { url = "https://files.pythonhosted.org/packages/4f/bd/de8d508070629b6d84a30d01d57e4a65c69aa7f5abe7560b8fad3b50ea59/termcolor-3.1.0-py3-none-any.whl", hash = "sha256:591dd26b5c2ce03b9e43f391264626557873ce1d379019786f99b0c2bee140aa", size = 7684 }, ] [[distribution]] name = "typing-extensions" version = "4.15.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391 } wheels = [ { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614 }, ] [[distribution]] name = "vacuum-map-parser-base" version = "0.1.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pillow" }, ] sdist = { url = "https://files.pythonhosted.org/packages/8f/f8/5633c2d294ce0102bdfc008684a937d50cf59bab48286de962997e108db3/vacuum_map_parser_base-0.1.5.tar.gz", hash = "sha256:efbf889ae7a7a8fe6478354a1711e857ee781c2d7f3a09e5b30e714b60036c4a", size = 18330 } wheels = [ { url = "https://files.pythonhosted.org/packages/c5/5c/6d16e20b76504ff7694d405bbfdc587ab6fe1d096940e505d33c632e4b8e/vacuum_map_parser_base-0.1.5-py3-none-any.whl", hash = "sha256:cdbbe1905ab7b3e5929a1aefaa80b6972f796d46d53ccccdfde13d4afb510b59", size = 19076 }, ] [[distribution]] name = "vacuum-map-parser-roborock" version = "0.1.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pillow" }, { name = "vacuum-map-parser-base" }, ] sdist = { url = "https://files.pythonhosted.org/packages/39/b4/82583167a6b667151a6432fe9084232a090b36985751cd5c428998b2d080/vacuum_map_parser_roborock-0.1.4.tar.gz", hash = "sha256:07ab7cd8aaf0e94da62d2a228013b2f6b8acb0e6d2215b697b6441ffdfd70e89", size = 15315 } wheels = [ { url = "https://files.pythonhosted.org/packages/2a/56/e80291e0bfd38078bf9338fe379076d1fd55dea0174eee71897e55a0c9dc/vacuum_map_parser_roborock-0.1.4-py3-none-any.whl", hash = "sha256:8b5a00484a88c5d103a99ed7580677939c0801430f04752d9ae6265dfcec5969", size = 13758 }, ] [[distribution]] name = "yarl" version = "1.22.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "idna" }, { name = "multidict" }, { name = "propcache" }, ] sdist = { url = "https://files.pythonhosted.org/packages/57/63/0c6ebca57330cd313f6102b16dd57ffaf3ec4c83403dcb45dbd15c6f3ea1/yarl-1.22.0.tar.gz", hash = "sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71", size = 187169 } wheels = [ { url = "https://files.pythonhosted.org/packages/4d/27/5ab13fc84c76a0250afd3d26d5936349a35be56ce5785447d6c423b26d92/yarl-1.22.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ab72135b1f2db3fed3997d7e7dc1b80573c67138023852b6efb336a5eae6511", size = 141607 }, { url = "https://files.pythonhosted.org/packages/6a/a1/d065d51d02dc02ce81501d476b9ed2229d9a990818332242a882d5d60340/yarl-1.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:669930400e375570189492dc8d8341301578e8493aec04aebc20d4717f899dd6", size = 94027 }, { url = 
"https://files.pythonhosted.org/packages/c1/da/8da9f6a53f67b5106ffe902c6fa0164e10398d4e150d85838b82f424072a/yarl-1.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:792a2af6d58177ef7c19cbf0097aba92ca1b9cb3ffdd9c7470e156c8f9b5e028", size = 94963 }, { url = "https://files.pythonhosted.org/packages/68/fe/2c1f674960c376e29cb0bec1249b117d11738db92a6ccc4a530b972648db/yarl-1.22.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ea66b1c11c9150f1372f69afb6b8116f2dd7286f38e14ea71a44eee9ec51b9d", size = 368406 }, { url = "https://files.pythonhosted.org/packages/95/26/812a540e1c3c6418fec60e9bbd38e871eaba9545e94fa5eff8f4a8e28e1e/yarl-1.22.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3e2daa88dc91870215961e96a039ec73e4937da13cf77ce17f9cad0c18df3503", size = 336581 }, { url = "https://files.pythonhosted.org/packages/0b/f5/5777b19e26fdf98563985e481f8be3d8a39f8734147a6ebf459d0dab5a6b/yarl-1.22.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba440ae430c00eee41509353628600212112cd5018d5def7e9b05ea7ac34eb65", size = 388924 }, { url = "https://files.pythonhosted.org/packages/86/08/24bd2477bd59c0bbd994fe1d93b126e0472e4e3df5a96a277b0a55309e89/yarl-1.22.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e6438cc8f23a9c1478633d216b16104a586b9761db62bfacb6425bac0a36679e", size = 392890 }, { url = "https://files.pythonhosted.org/packages/46/00/71b90ed48e895667ecfb1eaab27c1523ee2fa217433ed77a73b13205ca4b/yarl-1.22.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c52a6e78aef5cf47a98ef8e934755abf53953379b7d53e68b15ff4420e6683d", size = 365819 }, { url = "https://files.pythonhosted.org/packages/30/2d/f715501cae832651d3282387c6a9236cd26bd00d0ff1e404b3dc52447884/yarl-1.22.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3b06bcadaac49c70f4c88af4ffcfbe3dc155aab3163e75777818092478bcbbe7", size = 363601 }, { url = "https://files.pythonhosted.org/packages/f8/f9/a678c992d78e394e7126ee0b0e4e71bd2775e4334d00a9278c06a6cce96a/yarl-1.22.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:6944b2dc72c4d7f7052683487e3677456050ff77fcf5e6204e98caf785ad1967", size = 358072 }, { url = "https://files.pythonhosted.org/packages/2c/d1/b49454411a60edb6fefdcad4f8e6dbba7d8019e3a508a1c5836cba6d0781/yarl-1.22.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d5372ca1df0f91a86b047d1277c2aaf1edb32d78bbcefffc81b40ffd18f027ed", size = 385311 }, { url = "https://files.pythonhosted.org/packages/87/e5/40d7a94debb8448c7771a916d1861d6609dddf7958dc381117e7ba36d9e8/yarl-1.22.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:51af598701f5299012b8416486b40fceef8c26fc87dc6d7d1f6fc30609ea0aa6", size = 381094 }, { url = "https://files.pythonhosted.org/packages/35/d8/611cc282502381ad855448643e1ad0538957fc82ae83dfe7762c14069e14/yarl-1.22.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b266bd01fedeffeeac01a79ae181719ff848a5a13ce10075adbefc8f1daee70e", size = 370944 }, { url = "https://files.pythonhosted.org/packages/2d/df/fadd00fb1c90e1a5a8bd731fa3d3de2e165e5a3666a095b04e31b04d9cb6/yarl-1.22.0-cp311-cp311-win32.whl", hash = "sha256:a9b1ba5610a4e20f655258d5a1fdc7ebe3d837bb0e45b581398b99eb98b1f5ca", size = 81804 }, { url = "https://files.pythonhosted.org/packages/b5/f7/149bb6f45f267cb5c074ac40c01c6b3ea6d8a620d34b337f6321928a1b4d/yarl-1.22.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:078278b9b0b11568937d9509b589ee83ef98ed6d561dfe2020e24a9fd08eaa2b", size = 86858 }, { url = "https://files.pythonhosted.org/packages/2b/13/88b78b93ad3f2f0b78e13bfaaa24d11cbc746e93fe76d8c06bf139615646/yarl-1.22.0-cp311-cp311-win_arm64.whl", hash = "sha256:b6a6f620cfe13ccec221fa312139135166e47ae169f8253f72a0abc0dae94376", size = 81637 }, { url = "https://files.pythonhosted.org/packages/75/ff/46736024fee3429b80a165a732e38e5d5a238721e634ab41b040d49f8738/yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f", size = 142000 }, { url = "https://files.pythonhosted.org/packages/5a/9a/b312ed670df903145598914770eb12de1bac44599549b3360acc96878df8/yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2", size = 94338 }, { url = "https://files.pythonhosted.org/packages/ba/f5/0601483296f09c3c65e303d60c070a5c19fcdbc72daa061e96170785bc7d/yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74", size = 94909 }, { url = "https://files.pythonhosted.org/packages/60/41/9a1fe0b73dbcefce72e46cf149b0e0a67612d60bfc90fb59c2b2efdfbd86/yarl-1.22.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1651bf8e0398574646744c1885a41198eba53dc8a9312b954073f845c90a8df", size = 372940 }, { url = "https://files.pythonhosted.org/packages/17/7a/795cb6dfee561961c30b800f0ed616b923a2ec6258b5def2a00bf8231334/yarl-1.22.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b8a0588521a26bf92a57a1705b77b8b59044cdceccac7151bd8d229e66b8dedb", size = 345825 }, { url = "https://files.pythonhosted.org/packages/d7/93/a58f4d596d2be2ae7bab1a5846c4d270b894958845753b2c606d666744d3/yarl-1.22.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:42188e6a615c1a75bcaa6e150c3fe8f3e8680471a6b10150c5f7e83f47cc34d2", size = 386705 }, { url = "https://files.pythonhosted.org/packages/61/92/682279d0e099d0e14d7fd2e176bd04f48de1484f56546a3e1313cd6c8e7c/yarl-1.22.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f6d2cb59377d99718913ad9a151030d6f83ef420a2b8f521d94609ecc106ee82", size = 396518 }, { url = "https://files.pythonhosted.org/packages/db/0f/0d52c98b8a885aeda831224b78f3be7ec2e1aa4a62091f9f9188c3c65b56/yarl-1.22.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50678a3b71c751d58d7908edc96d332af328839eea883bb554a43f539101277a", size = 377267 }, { url = "https://files.pythonhosted.org/packages/22/42/d2685e35908cbeaa6532c1fc73e89e7f2efb5d8a7df3959ea8e37177c5a3/yarl-1.22.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e8fbaa7cec507aa24ea27a01456e8dd4b6fab829059b69844bd348f2d467124", size = 365797 }, { url = "https://files.pythonhosted.org/packages/a2/83/cf8c7bcc6355631762f7d8bdab920ad09b82efa6b722999dfb05afa6cfac/yarl-1.22.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:433885ab5431bc3d3d4f2f9bd15bfa1614c522b0f1405d62c4f926ccd69d04fa", size = 365535 }, { url = "https://files.pythonhosted.org/packages/25/e1/5302ff9b28f0c59cac913b91fe3f16c59a033887e57ce9ca5d41a3a94737/yarl-1.22.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b790b39c7e9a4192dc2e201a282109ed2985a1ddbd5ac08dc56d0e121400a8f7", size = 382324 }, { url = 
"https://files.pythonhosted.org/packages/bf/cd/4617eb60f032f19ae3a688dc990d8f0d89ee0ea378b61cac81ede3e52fae/yarl-1.22.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31f0b53913220599446872d757257be5898019c85e7971599065bc55065dc99d", size = 383803 }, { url = "https://files.pythonhosted.org/packages/59/65/afc6e62bb506a319ea67b694551dab4a7e6fb7bf604e9bd9f3e11d575fec/yarl-1.22.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a49370e8f711daec68d09b821a34e1167792ee2d24d405cbc2387be4f158b520", size = 374220 }, { url = "https://files.pythonhosted.org/packages/e7/3d/68bf18d50dc674b942daec86a9ba922d3113d8399b0e52b9897530442da2/yarl-1.22.0-cp312-cp312-win32.whl", hash = "sha256:70dfd4f241c04bd9239d53b17f11e6ab672b9f1420364af63e8531198e3f5fe8", size = 81589 }, { url = "https://files.pythonhosted.org/packages/c8/9a/6ad1a9b37c2f72874f93e691b2e7ecb6137fb2b899983125db4204e47575/yarl-1.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:8884d8b332a5e9b88e23f60bb166890009429391864c685e17bd73a9eda9105c", size = 87213 }, { url = "https://files.pythonhosted.org/packages/44/c5/c21b562d1680a77634d748e30c653c3ca918beb35555cff24986fff54598/yarl-1.22.0-cp312-cp312-win_arm64.whl", hash = "sha256:ea70f61a47f3cc93bdf8b2f368ed359ef02a01ca6393916bc8ff877427181e74", size = 81330 }, { url = "https://files.pythonhosted.org/packages/ea/f3/d67de7260456ee105dc1d162d43a019ecad6b91e2f51809d6cddaa56690e/yarl-1.22.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8dee9c25c74997f6a750cd317b8ca63545169c098faee42c84aa5e506c819b53", size = 139980 }, { url = "https://files.pythonhosted.org/packages/01/88/04d98af0b47e0ef42597b9b28863b9060bb515524da0a65d5f4db160b2d5/yarl-1.22.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01e73b85a5434f89fc4fe27dcda2aff08ddf35e4d47bbbea3bdcd25321af538a", size = 93424 }, { url = "https://files.pythonhosted.org/packages/18/91/3274b215fd8442a03975ce6bee5fe6aa57a8326b29b9d3d56234a1dca244/yarl-1.22.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:22965c2af250d20c873cdbee8ff958fb809940aeb2e74ba5f20aaf6b7ac8c70c", size = 93821 }, { url = "https://files.pythonhosted.org/packages/61/3a/caf4e25036db0f2da4ca22a353dfeb3c9d3c95d2761ebe9b14df8fc16eb0/yarl-1.22.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4f15793aa49793ec8d1c708ab7f9eded1aa72edc5174cae703651555ed1b601", size = 373243 }, { url = "https://files.pythonhosted.org/packages/6e/9e/51a77ac7516e8e7803b06e01f74e78649c24ee1021eca3d6a739cb6ea49c/yarl-1.22.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5542339dcf2747135c5c85f68680353d5cb9ffd741c0f2e8d832d054d41f35a", size = 342361 }, { url = "https://files.pythonhosted.org/packages/d4/f8/33b92454789dde8407f156c00303e9a891f1f51a0330b0fad7c909f87692/yarl-1.22.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5c401e05ad47a75869c3ab3e35137f8468b846770587e70d71e11de797d113df", size = 387036 }, { url = "https://files.pythonhosted.org/packages/d9/9a/c5db84ea024f76838220280f732970aa4ee154015d7f5c1bfb60a267af6f/yarl-1.22.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:243dda95d901c733f5b59214d28b0120893d91777cb8aa043e6ef059d3cddfe2", size = 397671 }, { url = "https://files.pythonhosted.org/packages/11/c9/cd8538dc2e7727095e0c1d867bad1e40c98f37763e6d995c1939f5fdc7b1/yarl-1.22.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:bec03d0d388060058f5d291a813f21c011041938a441c593374da6077fe21b1b", size = 377059 }, { url = "https://files.pythonhosted.org/packages/a1/b9/ab437b261702ced75122ed78a876a6dec0a1b0f5e17a4ac7a9a2482d8abe/yarl-1.22.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0748275abb8c1e1e09301ee3cf90c8a99678a4e92e4373705f2a2570d581273", size = 365356 }, { url = "https://files.pythonhosted.org/packages/b2/9d/8e1ae6d1d008a9567877b08f0ce4077a29974c04c062dabdb923ed98e6fe/yarl-1.22.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:47fdb18187e2a4e18fda2c25c05d8251a9e4a521edaed757fef033e7d8498d9a", size = 361331 }, { url = "https://files.pythonhosted.org/packages/ca/5a/09b7be3905962f145b73beb468cdd53db8aa171cf18c80400a54c5b82846/yarl-1.22.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c7044802eec4524fde550afc28edda0dd5784c4c45f0be151a2d3ba017daca7d", size = 382590 }, { url = "https://files.pythonhosted.org/packages/aa/7f/59ec509abf90eda5048b0bc3e2d7b5099dffdb3e6b127019895ab9d5ef44/yarl-1.22.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:139718f35149ff544caba20fce6e8a2f71f1e39b92c700d8438a0b1d2a631a02", size = 385316 }, { url = "https://files.pythonhosted.org/packages/e5/84/891158426bc8036bfdfd862fabd0e0fa25df4176ec793e447f4b85cf1be4/yarl-1.22.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e1b51bebd221006d3d2f95fbe124b22b247136647ae5dcc8c7acafba66e5ee67", size = 374431 }, { url = "https://files.pythonhosted.org/packages/bb/49/03da1580665baa8bef5e8ed34c6df2c2aca0a2f28bf397ed238cc1bbc6f2/yarl-1.22.0-cp313-cp313-win32.whl", hash = "sha256:d3e32536234a95f513bd374e93d717cf6b2231a791758de6c509e3653f234c95", size = 81555 }, { url = "https://files.pythonhosted.org/packages/9a/ee/450914ae11b419eadd067c6183ae08381cfdfcb9798b90b2b713bbebddda/yarl-1.22.0-cp313-cp313-win_amd64.whl", hash = "sha256:47743b82b76d89a1d20b83e60d5c20314cbd5ba2befc9cda8f28300c4a08ed4d", size = 86965 }, { url = "https://files.pythonhosted.org/packages/98/4d/264a01eae03b6cf629ad69bae94e3b0e5344741e929073678e84bf7a3e3b/yarl-1.22.0-cp313-cp313-win_arm64.whl", hash = "sha256:5d0fcda9608875f7d052eff120c7a5da474a6796fe4d83e152e0e4d42f6d1a9b", size = 81205 }, { url = "https://files.pythonhosted.org/packages/88/fc/6908f062a2f77b5f9f6d69cecb1747260831ff206adcbc5b510aff88df91/yarl-1.22.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:719ae08b6972befcba4310e49edb1161a88cdd331e3a694b84466bd938a6ab10", size = 146209 }, { url = "https://files.pythonhosted.org/packages/65/47/76594ae8eab26210b4867be6f49129861ad33da1f1ebdf7051e98492bf62/yarl-1.22.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:47d8a5c446df1c4db9d21b49619ffdba90e77c89ec6e283f453856c74b50b9e3", size = 95966 }, { url = "https://files.pythonhosted.org/packages/ab/ce/05e9828a49271ba6b5b038b15b3934e996980dd78abdfeb52a04cfb9467e/yarl-1.22.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cfebc0ac8333520d2d0423cbbe43ae43c8838862ddb898f5ca68565e395516e9", size = 97312 }, { url = "https://files.pythonhosted.org/packages/d1/c5/7dffad5e4f2265b29c9d7ec869c369e4223166e4f9206fc2243ee9eea727/yarl-1.22.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4398557cbf484207df000309235979c79c4356518fd5c99158c7d38203c4da4f", size = 361967 }, { url = "https://files.pythonhosted.org/packages/50/b2/375b933c93a54bff7fc041e1a6ad2c0f6f733ffb0c6e642ce56ee3b39970/yarl-1.22.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:2ca6fd72a8cd803be290d42f2dec5cdcd5299eeb93c2d929bf060ad9efaf5de0", size = 323949 }, { url = "https://files.pythonhosted.org/packages/66/50/bfc2a29a1d78644c5a7220ce2f304f38248dc94124a326794e677634b6cf/yarl-1.22.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca1f59c4e1ab6e72f0a23c13fca5430f889634166be85dbf1013683e49e3278e", size = 361818 }, { url = "https://files.pythonhosted.org/packages/46/96/f3941a46af7d5d0f0498f86d71275696800ddcdd20426298e572b19b91ff/yarl-1.22.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c5010a52015e7c70f86eb967db0f37f3c8bd503a695a49f8d45700144667708", size = 372626 }, { url = "https://files.pythonhosted.org/packages/c1/42/8b27c83bb875cd89448e42cd627e0fb971fa1675c9ec546393d18826cb50/yarl-1.22.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d7672ecf7557476642c88497c2f8d8542f8e36596e928e9bcba0e42e1e7d71f", size = 341129 }, { url = "https://files.pythonhosted.org/packages/49/36/99ca3122201b382a3cf7cc937b95235b0ac944f7e9f2d5331d50821ed352/yarl-1.22.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b7c88eeef021579d600e50363e0b6ee4f7f6f728cd3486b9d0f3ee7b946398d", size = 346776 }, { url = "https://files.pythonhosted.org/packages/85/b4/47328bf996acd01a4c16ef9dcd2f59c969f495073616586f78cd5f2efb99/yarl-1.22.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f4afb5c34f2c6fecdcc182dfcfc6af6cccf1aa923eed4d6a12e9d96904e1a0d8", size = 334879 }, { url = "https://files.pythonhosted.org/packages/c2/ad/b77d7b3f14a4283bffb8e92c6026496f6de49751c2f97d4352242bba3990/yarl-1.22.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:59c189e3e99a59cf8d83cbb31d4db02d66cda5a1a4374e8a012b51255341abf5", size = 350996 }, { url = "https://files.pythonhosted.org/packages/81/c8/06e1d69295792ba54d556f06686cbd6a7ce39c22307100e3fb4a2c0b0a1d/yarl-1.22.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:5a3bf7f62a289fa90f1990422dc8dff5a458469ea71d1624585ec3a4c8d6960f", size = 356047 }, { url = "https://files.pythonhosted.org/packages/4b/b8/4c0e9e9f597074b208d18cef227d83aac36184bfbc6eab204ea55783dbc5/yarl-1.22.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:de6b9a04c606978fdfe72666fa216ffcf2d1a9f6a381058d4378f8d7b1e5de62", size = 342947 }, { url = "https://files.pythonhosted.org/packages/e0/e5/11f140a58bf4c6ad7aca69a892bff0ee638c31bea4206748fc0df4ebcb3a/yarl-1.22.0-cp313-cp313t-win32.whl", hash = "sha256:1834bb90991cc2999f10f97f5f01317f99b143284766d197e43cd5b45eb18d03", size = 86943 }, { url = "https://files.pythonhosted.org/packages/31/74/8b74bae38ed7fe6793d0c15a0c8207bbb819cf287788459e5ed230996cdd/yarl-1.22.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff86011bd159a9d2dfc89c34cfd8aff12875980e3bd6a39ff097887520e60249", size = 93715 }, { url = "https://files.pythonhosted.org/packages/69/66/991858aa4b5892d57aef7ee1ba6b4d01ec3b7eb3060795d34090a3ca3278/yarl-1.22.0-cp313-cp313t-win_arm64.whl", hash = "sha256:7861058d0582b847bc4e3a4a4c46828a410bca738673f35a29ba3ca5db0b473b", size = 83857 }, { url = "https://files.pythonhosted.org/packages/46/b3/e20ef504049f1a1c54a814b4b9bed96d1ac0e0610c3b4da178f87209db05/yarl-1.22.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:34b36c2c57124530884d89d50ed2c1478697ad7473efd59cfd479945c95650e4", size = 140520 }, { url = 
"https://files.pythonhosted.org/packages/e4/04/3532d990fdbab02e5ede063676b5c4260e7f3abea2151099c2aa745acc4c/yarl-1.22.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:0dd9a702591ca2e543631c2a017e4a547e38a5c0f29eece37d9097e04a7ac683", size = 93504 }, { url = "https://files.pythonhosted.org/packages/11/63/ff458113c5c2dac9a9719ac68ee7c947cb621432bcf28c9972b1c0e83938/yarl-1.22.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:594fcab1032e2d2cc3321bb2e51271e7cd2b516c7d9aee780ece81b07ff8244b", size = 94282 }, { url = "https://files.pythonhosted.org/packages/a7/bc/315a56aca762d44a6aaaf7ad253f04d996cb6b27bad34410f82d76ea8038/yarl-1.22.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3d7a87a78d46a2e3d5b72587ac14b4c16952dd0887dbb051451eceac774411e", size = 372080 }, { url = "https://files.pythonhosted.org/packages/3f/3f/08e9b826ec2e099ea6e7c69a61272f4f6da62cb5b1b63590bb80ca2e4a40/yarl-1.22.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:852863707010316c973162e703bddabec35e8757e67fcb8ad58829de1ebc8590", size = 338696 }, { url = "https://files.pythonhosted.org/packages/e3/9f/90360108e3b32bd76789088e99538febfea24a102380ae73827f62073543/yarl-1.22.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:131a085a53bfe839a477c0845acf21efc77457ba2bcf5899618136d64f3303a2", size = 387121 }, { url = "https://files.pythonhosted.org/packages/98/92/ab8d4657bd5b46a38094cfaea498f18bb70ce6b63508fd7e909bd1f93066/yarl-1.22.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:078a8aefd263f4d4f923a9677b942b445a2be970ca24548a8102689a3a8ab8da", size = 394080 }, { url = "https://files.pythonhosted.org/packages/f5/e7/d8c5a7752fef68205296201f8ec2bf718f5c805a7a7e9880576c67600658/yarl-1.22.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bca03b91c323036913993ff5c738d0842fc9c60c4648e5c8d98331526df89784", size = 372661 }, { url = "https://files.pythonhosted.org/packages/b6/2e/f4d26183c8db0bb82d491b072f3127fb8c381a6206a3a56332714b79b751/yarl-1.22.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:68986a61557d37bb90d3051a45b91fa3d5c516d177dfc6dd6f2f436a07ff2b6b", size = 364645 }, { url = "https://files.pythonhosted.org/packages/80/7c/428e5812e6b87cd00ee8e898328a62c95825bf37c7fa87f0b6bb2ad31304/yarl-1.22.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:4792b262d585ff0dff6bcb787f8492e40698443ec982a3568c2096433660c694", size = 355361 }, { url = "https://files.pythonhosted.org/packages/ec/2a/249405fd26776f8b13c067378ef4d7dd49c9098d1b6457cdd152a99e96a9/yarl-1.22.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ebd4549b108d732dba1d4ace67614b9545b21ece30937a63a65dd34efa19732d", size = 381451 }, { url = "https://files.pythonhosted.org/packages/67/a8/fb6b1adbe98cf1e2dd9fad71003d3a63a1bc22459c6e15f5714eb9323b93/yarl-1.22.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f87ac53513d22240c7d59203f25cc3beac1e574c6cd681bbfd321987b69f95fd", size = 383814 }, { url = "https://files.pythonhosted.org/packages/d9/f9/3aa2c0e480fb73e872ae2814c43bc1e734740bb0d54e8cb2a95925f98131/yarl-1.22.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:22b029f2881599e2f1b06f8f1db2ee63bd309e2293ba2d566e008ba12778b8da", size = 370799 }, { url = "https://files.pythonhosted.org/packages/50/3c/af9dba3b8b5eeb302f36f16f92791f3ea62e3f47763406abf6d5a4a3333b/yarl-1.22.0-cp314-cp314-win32.whl", 
hash = "sha256:6a635ea45ba4ea8238463b4f7d0e721bad669f80878b7bfd1f89266e2ae63da2", size = 82990 }, { url = "https://files.pythonhosted.org/packages/ac/30/ac3a0c5bdc1d6efd1b41fa24d4897a4329b3b1e98de9449679dd327af4f0/yarl-1.22.0-cp314-cp314-win_amd64.whl", hash = "sha256:0d6e6885777af0f110b0e5d7e5dda8b704efed3894da26220b7f3d887b839a79", size = 88292 }, { url = "https://files.pythonhosted.org/packages/df/0a/227ab4ff5b998a1b7410abc7b46c9b7a26b0ca9e86c34ba4b8d8bc7c63d5/yarl-1.22.0-cp314-cp314-win_arm64.whl", hash = "sha256:8218f4e98d3c10d683584cb40f0424f4b9fd6e95610232dd75e13743b070ee33", size = 82888 }, { url = "https://files.pythonhosted.org/packages/06/5e/a15eb13db90abd87dfbefb9760c0f3f257ac42a5cac7e75dbc23bed97a9f/yarl-1.22.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:45c2842ff0e0d1b35a6bf1cd6c690939dacb617a70827f715232b2e0494d55d1", size = 146223 }, { url = "https://files.pythonhosted.org/packages/18/82/9665c61910d4d84f41a5bf6837597c89e665fa88aa4941080704645932a9/yarl-1.22.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:d947071e6ebcf2e2bee8fce76e10faca8f7a14808ca36a910263acaacef08eca", size = 95981 }, { url = "https://files.pythonhosted.org/packages/5d/9a/2f65743589809af4d0a6d3aa749343c4b5f4c380cc24a8e94a3c6625a808/yarl-1.22.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:334b8721303e61b00019474cc103bdac3d7b1f65e91f0bfedeec2d56dfe74b53", size = 97303 }, { url = "https://files.pythonhosted.org/packages/b0/ab/5b13d3e157505c43c3b43b5a776cbf7b24a02bc4cccc40314771197e3508/yarl-1.22.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e7ce67c34138a058fd092f67d07a72b8e31ff0c9236e751957465a24b28910c", size = 361820 }, { url = "https://files.pythonhosted.org/packages/fb/76/242a5ef4677615cf95330cfc1b4610e78184400699bdda0acb897ef5e49a/yarl-1.22.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d77e1b2c6d04711478cb1c4ab90db07f1609ccf06a287d5607fcd90dc9863acf", size = 323203 }, { url = "https://files.pythonhosted.org/packages/8c/96/475509110d3f0153b43d06164cf4195c64d16999e0c7e2d8a099adcd6907/yarl-1.22.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4647674b6150d2cae088fc07de2738a84b8bcedebef29802cf0b0a82ab6face", size = 363173 }, { url = "https://files.pythonhosted.org/packages/c9/66/59db471aecfbd559a1fd48aedd954435558cd98c7d0da8b03cc6c140a32c/yarl-1.22.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efb07073be061c8f79d03d04139a80ba33cbd390ca8f0297aae9cce6411e4c6b", size = 373562 }, { url = "https://files.pythonhosted.org/packages/03/1f/c5d94abc91557384719da10ff166b916107c1b45e4d0423a88457071dd88/yarl-1.22.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e51ac5435758ba97ad69617e13233da53908beccc6cfcd6c34bbed8dcbede486", size = 339828 }, { url = "https://files.pythonhosted.org/packages/5f/97/aa6a143d3afba17b6465733681c70cf175af89f76ec8d9286e08437a7454/yarl-1.22.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:33e32a0dd0c8205efa8e83d04fc9f19313772b78522d1bdc7d9aed706bfd6138", size = 347551 }, { url = "https://files.pythonhosted.org/packages/43/3c/45a2b6d80195959239a7b2a8810506d4eea5487dce61c2a3393e7fc3c52e/yarl-1.22.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:bf4a21e58b9cde0e401e683ebd00f6ed30a06d14e93f7c8fd059f8b6e8f87b6a", size = 334512 }, { url = 
"https://files.pythonhosted.org/packages/86/a0/c2ab48d74599c7c84cb104ebd799c5813de252bea0f360ffc29d270c2caa/yarl-1.22.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:e4b582bab49ac33c8deb97e058cd67c2c50dac0dd134874106d9c774fd272529", size = 352400 }, { url = "https://files.pythonhosted.org/packages/32/75/f8919b2eafc929567d3d8411f72bdb1a2109c01caaab4ebfa5f8ffadc15b/yarl-1.22.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:0b5bcc1a9c4839e7e30b7b30dd47fe5e7e44fb7054ec29b5bb8d526aa1041093", size = 357140 }, { url = "https://files.pythonhosted.org/packages/cf/72/6a85bba382f22cf78add705d8c3731748397d986e197e53ecc7835e76de7/yarl-1.22.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c0232bce2170103ec23c454e54a57008a9a72b5d1c3105dc2496750da8cfa47c", size = 341473 }, { url = "https://files.pythonhosted.org/packages/35/18/55e6011f7c044dc80b98893060773cefcfdbf60dfefb8cb2f58b9bacbd83/yarl-1.22.0-cp314-cp314t-win32.whl", hash = "sha256:8009b3173bcd637be650922ac455946197d858b3630b6d8787aa9e5c4564533e", size = 89056 }, { url = "https://files.pythonhosted.org/packages/f9/86/0f0dccb6e59a9e7f122c5afd43568b1d31b8ab7dda5f1b01fb5c7025c9a9/yarl-1.22.0-cp314-cp314t-win_amd64.whl", hash = "sha256:9fb17ea16e972c63d25d4a97f016d235c78dd2344820eb35bc034bc32012ee27", size = 96292 }, { url = "https://files.pythonhosted.org/packages/48/b7/503c98092fb3b344a179579f55814b613c1fbb1c23b3ec14a7b008a66a6e/yarl-1.22.0-cp314-cp314t-win_arm64.whl", hash = "sha256:9f6d73c1436b934e3f01df1e1b21ff765cd1d28c77dfb9ace207f746d4610ee1", size = 85171 }, { url = "https://files.pythonhosted.org/packages/73/ae/b48f95715333080afb75a4504487cbe142cae1268afc482d06692d605ae6/yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff", size = 46814 }, ]