python-otbr-api-2.7.0/.github/dependabot.yml
version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: daily
    open-pull-requests-limit: 10
  - package-ecosystem: pip
    directory: "/"
    schedule:
      interval: weekly
    open-pull-requests-limit: 10

python-otbr-api-2.7.0/.github/release-drafter.yml
categories:
  - title: "⬆️ Dependencies"
    collapse-after: 1
    labels:
      - "dependencies"
template: |
  ## What's Changed
  $CHANGES

python-otbr-api-2.7.0/.github/workflows/pythonpublish.yml
# This workflow will upload a Python Package using Twine when a release is created
# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries

name: Upload Python Package

on:
  release:
    types:
      - published

jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4.2.2
      - name: Set up Python
        uses: actions/setup-python@v5.3.0
        with:
          python-version: '3.x'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install build wheel twine
      - name: Build and publish
        env:
          TWINE_USERNAME: __token__
          TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
        run: |
          python -m build
          twine upload dist/*

python-otbr-api-2.7.0/.github/workflows/release-drafter.yml
name: Release Drafter

on:
  push:
    branches:
      - main

jobs:
  update_release_draft:
    runs-on: ubuntu-latest
    steps:
      # Drafts your next Release notes as Pull Requests are merged into "main"
      - uses: release-drafter/release-drafter@v6.0.0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

python-otbr-api-2.7.0/.github/workflows/test.yml
# This workflow will install Python dependencies, run tests and lint with a single version of Python
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions

name: Run Tests

on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4.2.2
      - name: Set up Python 3.9
        uses: actions/setup-python@v5.3.0
        with:
          python-version: 3.9
      - name: Install dependencies
        run: |
          pip install -r requirements.txt
          pip install -r requirements-test.txt
      - name: Lint with flake8
        run: |
          flake8 python_otbr_api tests
      - name: Check formatting with black
        run: |
          black python_otbr_api tests --check --diff
      - name: Lint with mypy
        run: |
          mypy python_otbr_api tests
      - name: Lint with pylint
        run: |
          pylint python_otbr_api tests
      - name: Run tests
        run: |
          pytest tests

python-otbr-api-2.7.0/.gitignore
.DS_Store
.idea
*.log
tmp/
*.py[cod]
*.egg
htmlcov
.projectile
.venv/
venv/
.mypy_cache/
*.egg-info/
# Visual
Studio Code .vscode/* dist python-otbr-api-2.7.0/LICENSE000066400000000000000000000020501474155026400155660ustar00rootroot00000000000000MIT License Copyright (c) 2023 ESPHome Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. python-otbr-api-2.7.0/README.md000066400000000000000000000001141474155026400160370ustar00rootroot00000000000000# Python OTBR API Python package to interact with an OTBR via its REST API python-otbr-api-2.7.0/pyproject.toml000066400000000000000000000016471474155026400175100ustar00rootroot00000000000000[build-system] requires = ["setuptools>=65.6"] build-backend = "setuptools.build_meta" [project] name = "python-otbr-api" version = "2.7.0" license = {text = "MIT"} description = "API to interact with an OTBR via its REST API" readme = "README.md" authors = [ {name = "The Home Assistant Authors", email = "hello@home-assistant.io"} ] requires-python = ">=3.9.0" dependencies = [ "aiohttp", "bitstruct", "cryptography", "typing_extensions", "voluptuous", ] [project.urls] "Homepage" = "https://github.com/home-assistant-libs/python-otbr-api" [tool.pylint.BASIC] class-const-naming-style = "any" good-names = [ "c", "i", ] [tool.pytest.ini_options] asyncio_mode = "auto" [tool.setuptools] platforms = ["any"] zip-safe = true include-package-data = true [tool.setuptools.packages.find] include = ["python_otbr_api*"] [tool.setuptools.package-data] "*" = ["py.typed"] python-otbr-api-2.7.0/python_otbr_api/000077500000000000000000000000001474155026400177645ustar00rootroot00000000000000python-otbr-api-2.7.0/python_otbr_api/__init__.py000066400000000000000000000252271474155026400221050ustar00rootroot00000000000000"""API to interact with the Open Thread Border Router REST API.""" from __future__ import annotations from http import HTTPStatus import json import aiohttp import voluptuous as vol # type:ignore[import] from .models import ActiveDataSet, PendingDataSet, Timestamp # 5 minutes as recommended by # https://github.com/openthread/openthread/discussions/8567#discussioncomment-4468920 PENDING_DATASET_DELAY_TIMER = 5 * 60 * 1000 class OTBRError(Exception): """Raised on error.""" class FactoryResetNotSupportedError(OTBRError): """Raised when attempting to factory reset a router which does not support it.""" class GetBorderAgentIdNotSupportedError(OTBRError): """Raised when attempting to get the agent ID if the router does not support it.""" class ThreadNetworkActiveError(OTBRError): """Raised on attempts to modify the active dataset when thread network is active.""" class OTBR: # pylint: disable=too-few-public-methods """Class to interact 
with the Open Thread Border Router REST API.""" def __init__( self, url: str, session: aiohttp.ClientSession, timeout: int = 10 ) -> None: """Initialize.""" self._session = session self._url = url self._timeout = timeout async def factory_reset(self) -> None: """Factory reset the router.""" response = await self._session.delete( f"{self._url}/node", timeout=aiohttp.ClientTimeout(total=10), ) if response.status == HTTPStatus.METHOD_NOT_ALLOWED: raise FactoryResetNotSupportedError if response.status != HTTPStatus.OK: raise OTBRError(f"unexpected http status {response.status}") async def get_border_agent_id(self) -> bytes: """Get the border agent ID.""" response = await self._session.get( f"{self._url}/node/ba-id", timeout=aiohttp.ClientTimeout(total=self._timeout), ) if response.status == HTTPStatus.NOT_FOUND: raise GetBorderAgentIdNotSupportedError if response.status != HTTPStatus.OK: raise OTBRError(f"unexpected http status {response.status}") try: return bytes.fromhex(await response.json()) except ValueError as exc: raise OTBRError("unexpected API response") from exc async def set_enabled(self, enabled: bool) -> None: """Enable or disable the router.""" response = await self._session.put( f"{self._url}/node/state", json="enable" if enabled else "disable", timeout=aiohttp.ClientTimeout(total=10), ) if response.status != HTTPStatus.OK: raise OTBRError(f"unexpected http status {response.status}") async def get_active_dataset(self) -> ActiveDataSet | None: """Get current active operational dataset. Returns None if there is no active operational dataset. Raises if the http status is 400 or higher or if the response is invalid. """ response = await self._session.get( f"{self._url}/node/dataset/active", timeout=aiohttp.ClientTimeout(total=self._timeout), ) if response.status == HTTPStatus.NO_CONTENT: return None if response.status != HTTPStatus.OK: raise OTBRError(f"unexpected http status {response.status}") try: return ActiveDataSet.from_json(await response.json()) except (json.JSONDecodeError, vol.Error) as exc: raise OTBRError("unexpected API response") from exc async def get_active_dataset_tlvs(self) -> bytes | None: """Get current active operational dataset in TLVS format, or None. Returns None if there is no active operational dataset. Raises if the http status is 400 or higher or if the response is invalid. """ response = await self._session.get( f"{self._url}/node/dataset/active", headers={"Accept": "text/plain"}, timeout=aiohttp.ClientTimeout(total=self._timeout), ) if response.status == HTTPStatus.NO_CONTENT: return None if response.status != HTTPStatus.OK: raise OTBRError(f"unexpected http status {response.status}") try: return bytes.fromhex(await response.text("ASCII")) except ValueError as exc: raise OTBRError("unexpected API response") from exc async def get_pending_dataset_tlvs(self) -> bytes | None: """Get current pending operational dataset in TLVS format, or None. Returns None if there is no pending operational dataset. Raises if the http status is 400 or higher or if the response is invalid. 
""" response = await self._session.get( f"{self._url}/node/dataset/pending", headers={"Accept": "text/plain"}, timeout=aiohttp.ClientTimeout(total=self._timeout), ) if response.status == HTTPStatus.NO_CONTENT: return None if response.status != HTTPStatus.OK: raise OTBRError(f"unexpected http status {response.status}") try: return bytes.fromhex(await response.text("ASCII")) except ValueError as exc: raise OTBRError("unexpected API response") from exc async def create_active_dataset(self, dataset: ActiveDataSet) -> None: """Create active operational dataset. The passed in ActiveDataSet does not need to be fully populated, any fields not set will be automatically set by the open thread border router. Raises if the http status is 400 or higher or if the response is invalid. """ response = await self._session.put( f"{self._url}/node/dataset/active", json=dataset.as_json(), timeout=aiohttp.ClientTimeout(total=self._timeout), ) if response.status == HTTPStatus.CONFLICT: raise ThreadNetworkActiveError if response.status not in (HTTPStatus.CREATED, HTTPStatus.OK): raise OTBRError(f"unexpected http status {response.status}") async def delete_active_dataset(self) -> None: """Delete active operational dataset.""" response = await self._session.delete( f"{self._url}/node/dataset/active", timeout=aiohttp.ClientTimeout(total=self._timeout), ) if response.status == HTTPStatus.CONFLICT: raise ThreadNetworkActiveError if response.status != HTTPStatus.OK: raise OTBRError(f"unexpected http status {response.status}") async def create_pending_dataset(self, dataset: PendingDataSet) -> None: """Create pending operational dataset. The passed in PendingDataSet does not need to be fully populated, any fields not set will be automatically set by the open thread border router. Raises if the http status is 400 or higher or if the response is invalid. """ response = await self._session.put( f"{self._url}/node/dataset/pending", json=dataset.as_json(), timeout=aiohttp.ClientTimeout(total=self._timeout), ) if response.status == HTTPStatus.CONFLICT: raise ThreadNetworkActiveError if response.status not in (HTTPStatus.CREATED, HTTPStatus.OK): raise OTBRError(f"unexpected http status {response.status}") async def delete_pending_dataset(self) -> None: """Delete pending operational dataset.""" response = await self._session.delete( f"{self._url}/node/dataset/pending", timeout=aiohttp.ClientTimeout(total=self._timeout), ) if response.status == HTTPStatus.CONFLICT: raise ThreadNetworkActiveError if response.status != HTTPStatus.OK: raise OTBRError(f"unexpected http status {response.status}") async def set_active_dataset_tlvs(self, dataset: bytes) -> None: """Set current active operational dataset. Raises if the http status is 400 or higher or if the response is invalid. """ response = await self._session.put( f"{self._url}/node/dataset/active", data=dataset.hex(), headers={"Content-Type": "text/plain"}, timeout=aiohttp.ClientTimeout(total=10), ) if response.status == HTTPStatus.CONFLICT: raise ThreadNetworkActiveError if response.status not in (HTTPStatus.CREATED, HTTPStatus.OK): raise OTBRError(f"unexpected http status {response.status}") async def set_channel( self, channel: int, delay: int = PENDING_DATASET_DELAY_TIMER ) -> None: """Change the channel The channel is changed by creating a new pending dataset based on the active dataset. 
""" if not 11 <= channel <= 26: raise OTBRError(f"invalid channel {channel}") if not (dataset := await self.get_active_dataset()): raise OTBRError("router has no active dataset") if dataset.active_timestamp and dataset.active_timestamp.seconds is not None: dataset.active_timestamp.seconds += 1 else: dataset.active_timestamp = Timestamp(False, 1, 0) dataset.channel = channel pending_dataset = PendingDataSet(active_dataset=dataset, delay=delay) await self.create_pending_dataset(pending_dataset) async def get_extended_address(self) -> bytes: """Get extended address (EUI-64). Raises if the http status is not 200 or if the response is invalid. """ response = await self._session.get( f"{self._url}/node/ext-address", headers={"Accept": "application/json"}, timeout=aiohttp.ClientTimeout(total=self._timeout), ) if response.status != HTTPStatus.OK: raise OTBRError(f"unexpected http status {response.status}") try: return bytes.fromhex(await response.json()) except ValueError as exc: raise OTBRError("unexpected API response") from exc async def get_coprocessor_version(self) -> str: """Get the coprocessor firmware version. Raises if the http status is not 200 or if the response is invalid. """ response = await self._session.get( f"{self._url}/node/coprocessor/version", headers={"Accept": "application/json"}, timeout=aiohttp.ClientTimeout(total=self._timeout), ) if response.status != HTTPStatus.OK: raise OTBRError(f"unexpected http status {response.status}") try: return await response.json() except ValueError as exc: raise OTBRError("unexpected API response") from exc python-otbr-api-2.7.0/python_otbr_api/mdns.py000066400000000000000000000033741474155026400213060ustar00rootroot00000000000000"""Utility function to decode fields in _meshcop._udp.local. mDNS services. 
The implementation is based on the Open Thread implementation: https://github.com/openthread/ot-br-posix/blob/8a8b2411abcf68659c25bb97672bdd2e5e724dcc/src/border_agent/border_agent.cpp#L109 """ from dataclasses import dataclass from enum import IntEnum import bitstruct # type: ignore[import] from typing_extensions import Self class ConnectionMode(IntEnum): """Connection mode.""" DISABLED = 0 PSKC = 1 PSKD = 2 VENDOR = 3 X509 = 4 class ThreadInterfaceStatus(IntEnum): """Thread interface status.""" NOT_INITIALIZED = 0 INITIALIZED = 1 ACTIVE = 2 class Availability(IntEnum): """Availability.""" INFREQUENT = 0 HIGH = 1 STATE_BITMAP_FORMAT = "u23u1u1u2u2u3" @dataclass class StateBitmap: """State bitmap.""" connection_mode: ConnectionMode thread_interface_status: ThreadInterfaceStatus availability: Availability is_active: bool is_primary: bool @classmethod def from_bytes(cls, data: bytes) -> Self: """Decode from bytes.""" if len(data) != 4: raise ValueError("Incorrect length") ( padding, is_primary, is_active, availability, thread_if_status, connection_mode, ) = bitstruct.unpack(STATE_BITMAP_FORMAT, data) if padding != 0: raise ValueError(f"Could not decode '{data.hex}'") return cls( connection_mode=ConnectionMode(connection_mode), thread_interface_status=ThreadInterfaceStatus(thread_if_status), availability=Availability(availability), is_active=is_active, is_primary=is_primary, ) python-otbr-api-2.7.0/python_otbr_api/models.py000066400000000000000000000205661474155026400216320ustar00rootroot00000000000000"""Data models.""" from __future__ import annotations from dataclasses import dataclass from typing import Any import voluptuous as vol # type:ignore[import] @dataclass class Timestamp: """Timestamp.""" SCHEMA = vol.Schema( { vol.Optional("Authoritative"): bool, vol.Optional("Seconds"): int, vol.Optional("Ticks"): int, } ) authoritative: bool | None = None seconds: int | None = None ticks: int | None = None def as_json(self) -> dict: """Serialize to JSON.""" result: dict[str, Any] = {} if self.authoritative is not None: result["Authoritative"] = self.authoritative if self.seconds is not None: result["Seconds"] = self.seconds if self.ticks is not None: result["Ticks"] = self.ticks return result @classmethod def from_json(cls, json_data: Any) -> Timestamp: """Deserialize from JSON.""" cls.SCHEMA(json_data) return cls( json_data.get("Authoritative"), json_data.get("Seconds"), json_data.get("Ticks"), ) @dataclass class SecurityPolicy: # pylint: disable=too-many-instance-attributes """Security policy.""" SCHEMA = vol.Schema( { vol.Optional("AutonomousEnrollment"): bool, vol.Optional("CommercialCommissioning"): bool, vol.Optional("ExternalCommissioning"): bool, vol.Optional("NativeCommissioning"): bool, vol.Optional("NetworkKeyProvisioning"): bool, vol.Optional("NonCcmRouters"): bool, vol.Optional("ObtainNetworkKey"): bool, vol.Optional("RotationTime"): int, vol.Optional("Routers"): bool, vol.Optional("TobleLink"): bool, } ) autonomous_enrollment: bool | None = None commercial_commissioning: bool | None = None external_commissioning: bool | None = None native_commissioning: bool | None = None network_key_provisioning: bool | None = None non_ccm_routers: bool | None = None obtain_network_key: bool | None = None rotation_time: int | None = None routers: bool | None = None to_ble_link: bool | None = None def as_json(self) -> dict: """Serialize to JSON.""" result: dict[str, Any] = {} if self.autonomous_enrollment is not None: result["AutonomousEnrollment"] = self.autonomous_enrollment if 
self.commercial_commissioning is not None: result["CommercialCommissioning"] = self.commercial_commissioning if self.external_commissioning is not None: result["ExternalCommissioning"] = self.external_commissioning if self.native_commissioning is not None: result["NativeCommissioning"] = self.native_commissioning if self.network_key_provisioning is not None: result["NetworkKeyProvisioning"] = self.network_key_provisioning if self.non_ccm_routers is not None: result["NonCcmRouters"] = self.non_ccm_routers if self.obtain_network_key is not None: result["ObtainNetworkKey"] = self.obtain_network_key if self.rotation_time is not None: result["RotationTime"] = self.rotation_time if self.routers is not None: result["Routers"] = self.routers if self.to_ble_link is not None: result["TobleLink"] = self.to_ble_link return result @classmethod def from_json(cls, json_data: Any) -> SecurityPolicy: """Deserialize from JSON.""" cls.SCHEMA(json_data) return cls( json_data.get("AutonomousEnrollment"), json_data.get("CommercialCommissioning"), json_data.get("ExternalCommissioning"), json_data.get("NativeCommissioning"), json_data.get("NetworkKeyProvisioning"), json_data.get("NonCcmRouters"), json_data.get("ObtainNetworkKey"), json_data.get("RotationTime"), json_data.get("Routers"), json_data.get("TobleLink"), ) @dataclass class ActiveDataSet: # pylint: disable=too-many-instance-attributes """Operational dataset.""" SCHEMA = vol.Schema( { vol.Optional("ActiveTimestamp"): dict, vol.Optional("ChannelMask"): int, vol.Optional("Channel"): int, vol.Optional("ExtPanId"): str, vol.Optional("MeshLocalPrefix"): str, vol.Optional("NetworkKey"): str, vol.Optional("NetworkName"): str, vol.Optional("PanId"): int, vol.Optional("PSKc"): str, vol.Optional("SecurityPolicy"): dict, } ) active_timestamp: Timestamp | None = None channel_mask: int | None = None channel: int | None = None extended_pan_id: str | None = None mesh_local_prefix: str | None = None network_key: str | None = None network_name: str | None = None pan_id: int | None = None psk_c: str | None = None security_policy: SecurityPolicy | None = None def as_json(self) -> dict: """Serialize to JSON.""" result: dict[str, Any] = {} if self.active_timestamp is not None: result["ActiveTimestamp"] = self.active_timestamp.as_json() if self.channel_mask is not None: result["ChannelMask"] = self.channel_mask if self.channel is not None: result["Channel"] = self.channel if self.extended_pan_id is not None: result["ExtPanId"] = self.extended_pan_id if self.mesh_local_prefix is not None: result["MeshLocalPrefix"] = self.mesh_local_prefix if self.network_key is not None: result["NetworkKey"] = self.network_key if self.network_name is not None: result["NetworkName"] = self.network_name if self.pan_id is not None: result["PanId"] = self.pan_id if self.psk_c is not None: result["PSKc"] = self.psk_c if self.security_policy is not None: result["SecurityPolicy"] = self.security_policy.as_json() return result @classmethod def from_json(cls, json_data: Any) -> ActiveDataSet: """Deserialize from JSON.""" cls.SCHEMA(json_data) active_timestamp = None security_policy = None if "ActiveTimestamp" in json_data: active_timestamp = Timestamp.from_json(json_data["ActiveTimestamp"]) if "SecurityPolicy" in json_data: security_policy = SecurityPolicy.from_json(json_data["SecurityPolicy"]) return ActiveDataSet( active_timestamp, json_data.get("ChannelMask"), json_data.get("Channel"), json_data.get("ExtPanId"), json_data.get("MeshLocalPrefix"), json_data.get("NetworkKey"), 
json_data.get("NetworkName"), json_data.get("PanId"), json_data.get("PSKc"), security_policy, ) @dataclass class PendingDataSet: # pylint: disable=too-many-instance-attributes """Operational dataset.""" SCHEMA = vol.Schema( { vol.Optional("ActiveDataset"): dict, vol.Optional("Delay"): int, vol.Optional("PendingTimestamp"): dict, } ) active_dataset: ActiveDataSet | None = None delay: int | None = None pending_timestamp: Timestamp | None = None def as_json(self) -> dict: """Serialize to JSON.""" result: dict[str, Any] = {} if self.active_dataset is not None: result["ActiveDataset"] = self.active_dataset.as_json() if self.delay is not None: result["Delay"] = self.delay if self.pending_timestamp is not None: result["PendingTimestamp"] = self.pending_timestamp.as_json() return result @classmethod def from_json(cls, json_data: Any) -> PendingDataSet: """Deserialize from JSON.""" cls.SCHEMA(json_data) active_dataset = None pending_timestamp = None if "ActiveDataset" in json_data: active_dataset = ActiveDataSet.from_json(json_data["ActiveDataset"]) if "PendingTimestamp" in json_data: pending_timestamp = Timestamp.from_json(json_data["PendingTimestamp"]) return PendingDataSet( active_dataset, json_data.get("Delay"), pending_timestamp, ) python-otbr-api-2.7.0/python_otbr_api/pskc.py000066400000000000000000000026161474155026400213030ustar00rootroot00000000000000"""Calculate Thread PSKc. Based on https://github.com/openthread/ot-br-posix/blob/main/src/utils/pskc.cpp """ import struct from cryptography.hazmat.primitives import cmac from cryptography.hazmat.primitives.ciphers import algorithms AES_128_KEY_LEN = 16 ITERATION_COUNTS = 16384 BLKSIZE = 16 SALT_PREFIX = "Thread".encode() def _derive_key(passphrase: str) -> bytes: """Derive key from passphrase according to RFC 4615.""" passphrase_bytes = passphrase.encode() if len(passphrase_bytes) == AES_128_KEY_LEN: return passphrase_bytes c = cmac.CMAC(algorithms.AES128(b"\0" * AES_128_KEY_LEN)) c.update(passphrase_bytes) return c.finalize() def compute_pskc(ext_pan_id: bytes, network_name: str, passphrase: str) -> bytes: """Compute Thread PSKc.""" salt = SALT_PREFIX + ext_pan_id + network_name.encode() key = _derive_key(passphrase) block_counter = 1 prf_input = salt + struct.pack("!L", block_counter) # Calculate U_1 c = cmac.CMAC(algorithms.AES128(key)) c.update(prf_input) prf_output = c.finalize() pskc = bytearray(prf_output) for _ in range(ITERATION_COUNTS - 1): prf_input = prf_output # Calculate U_i c = cmac.CMAC(algorithms.AES128(key)) c.update(prf_input) prf_output = c.finalize() # xor for i in range(BLKSIZE): pskc[i] ^= prf_output[i] return pskc python-otbr-api-2.7.0/python_otbr_api/py.typed000066400000000000000000000000001474155026400214510ustar00rootroot00000000000000python-otbr-api-2.7.0/python_otbr_api/tlv_parser.py000066400000000000000000000107011474155026400225160ustar00rootroot00000000000000"""Parse datasets TLV encoded as specified by Thread.""" from __future__ import annotations from dataclasses import dataclass, field from enum import IntEnum import struct class TLVError(Exception): """TLV error.""" class MeshcopTLVType(IntEnum): """Types.""" CHANNEL = 0 PANID = 1 EXTPANID = 2 NETWORKNAME = 3 PSKC = 4 NETWORKKEY = 5 NETWORK_KEY_SEQUENCE = 6 MESHLOCALPREFIX = 7 STEERING_DATA = 8 BORDER_AGENT_RLOC = 9 COMMISSIONER_ID = 10 COMM_SESSION_ID = 11 SECURITYPOLICY = 12 GET = 13 ACTIVETIMESTAMP = 14 COMMISSIONER_UDP_PORT = 15 STATE = 16 JOINER_DTLS = 17 JOINER_UDP_PORT = 18 JOINER_IID = 19 JOINER_RLOC = 20 JOINER_ROUTER_KEK = 21 PROVISIONING_URL = 
32 VENDOR_NAME_TLV = 33 VENDOR_MODEL_TLV = 34 VENDOR_SW_VERSION_TLV = 35 VENDOR_DATA_TLV = 36 VENDOR_STACK_VERSION_TLV = 37 UDP_ENCAPSULATION_TLV = 48 IPV6_ADDRESS_TLV = 49 PENDINGTIMESTAMP = 51 DELAYTIMER = 52 CHANNELMASK = 53 COUNT = 54 PERIOD = 55 SCAN_DURATION = 56 ENERGY_LIST = 57 # Seen in a dataset imported through iOS companion app APPLE_TAG_UNKNOWN = 74 DISCOVERYREQUEST = 128 DISCOVERYRESPONSE = 129 JOINERADVERTISEMENT = 241 @dataclass class MeshcopTLVItem: """Base class for TLV items.""" tag: int data: bytes def __str__(self) -> str: """Return a string representation.""" return self.data.hex() @dataclass class Channel(MeshcopTLVItem): """Channel.""" channel: int = field(init=False) def __post_init__(self) -> None: """Decode the channel.""" self.channel = int.from_bytes(self.data, "big") if not self.channel: raise TLVError(f"invalid channel '{self.channel}'") @dataclass class NetworkName(MeshcopTLVItem): """Network name.""" name: str = field(init=False) def __post_init__(self) -> None: """Decode the name.""" try: self.name = self.data.decode() except UnicodeDecodeError as err: raise TLVError(f"invalid network name '{self.data.hex()}'") from err def __str__(self) -> str: return self.name @dataclass class Timestamp(MeshcopTLVItem): """Timestamp.""" authoritative: bool = field(init=False) seconds: int = field(init=False) ticks: int = field(init=False) def __post_init__(self) -> None: """Decode the timestamp.""" # The timestamps are packed in 8 bytes: # [seconds 48 bits][ticks 15 bits][authoritative flag 1 bit] unpacked: int = struct.unpack("!Q", self.data)[0] self.authoritative = bool(unpacked & 1) self.seconds = unpacked >> 16 self.ticks = (unpacked >> 1) & 0x7FF def _encode_item(item: MeshcopTLVItem) -> bytes: """Encode a dataset item to TLV format.""" data_len = len(item.data) return struct.pack(f"!BB{data_len}s", item.tag, data_len, item.data) def encode_tlv(items: dict[MeshcopTLVType, MeshcopTLVItem]) -> str: """Encode a TLV encoded dataset to a hex string. Raises if the TLV is invalid. """ result = b"" for item in items.values(): result += _encode_item(item) return result.hex() def _parse_item(tag: MeshcopTLVType, data: bytes) -> MeshcopTLVItem: """Parse a TLV encoded dataset item.""" if tag == MeshcopTLVType.ACTIVETIMESTAMP: return Timestamp(tag, data) if tag == MeshcopTLVType.CHANNEL: return Channel(tag, data) if tag == MeshcopTLVType.NETWORKNAME: return NetworkName(tag, data) return MeshcopTLVItem(tag, data) def parse_tlv(data: str) -> dict[MeshcopTLVType, MeshcopTLVItem]: """Parse a TLV encoded dataset. Raises if the TLV is invalid. 
""" try: data_bytes = bytes.fromhex(data) except ValueError as err: raise TLVError("invalid tlvs") from err result = {} pos = 0 while pos < len(data_bytes): try: tag = MeshcopTLVType(data_bytes[pos]) except ValueError as err: raise TLVError(f"unknown type {data_bytes[pos]}") from err pos += 1 _len = data_bytes[pos] pos += 1 val = data_bytes[pos : pos + _len] if len(val) < _len: raise TLVError(f"expected {_len} bytes for {tag.name}, got {len(val)}") pos += _len if tag in result: raise TLVError(f"duplicated tag {tag.name}") result[tag] = _parse_item(tag, val) return result python-otbr-api-2.7.0/requirements-test.txt000066400000000000000000000001351474155026400210240ustar00rootroot00000000000000black==24.10.0 flake8==7.1.1 mypy==1.14.1 pylint==3.3.3 pytest-asyncio==0.25.2 pytest==8.3.4 python-otbr-api-2.7.0/requirements.txt000066400000000000000000000000741474155026400200510ustar00rootroot00000000000000aiohttp bitstruct cryptography typing_extensions voluptuous python-otbr-api-2.7.0/setup.cfg000066400000000000000000000001461474155026400164060ustar00rootroot00000000000000[flake8] # To work with Black max-line-length = 88 # E203: Whitespace before ':' extend-ignore = E203 python-otbr-api-2.7.0/tests/000077500000000000000000000000001474155026400157265ustar00rootroot00000000000000python-otbr-api-2.7.0/tests/__init__.py000066400000000000000000000000151474155026400200330ustar00rootroot00000000000000"""Tests.""" python-otbr-api-2.7.0/tests/conftest.py000066400000000000000000000005451474155026400201310ustar00rootroot00000000000000"""Test fixtures.""" from collections.abc import Generator import pytest from tests.test_util.aiohttp import AiohttpClientMocker, mock_aiohttp_client @pytest.fixture def aioclient_mock() -> Generator[AiohttpClientMocker, None, None]: """Fixture to mock aioclient calls.""" with mock_aiohttp_client() as mock_session: yield mock_session python-otbr-api-2.7.0/tests/test_init.py000066400000000000000000000623211474155026400203060ustar00rootroot00000000000000"""Test the OTBR REST API client.""" from http import HTTPStatus from typing import Any import pytest import python_otbr_api from tests.test_util.aiohttp import AiohttpClientMocker BASE_URL = "http://core-silabs-multiprotocol:8081" DATASET_JSON: dict[str, Any] = { "ActiveTimestamp": { "Authoritative": False, "Seconds": 1, "Ticks": 0, }, "ChannelMask": 134215680, "Channel": 15, "ExtPanId": "8478E3379E047B92", "MeshLocalPrefix": "fd89:bde7:42ed:a901::/64", "NetworkKey": "96271D6ECC78749114AB6A591E0D06F1", "NetworkName": "OpenThread HA", "PanId": 33991, "PSKc": "9760C89414D461AC717DCD105EB87E5B", "SecurityPolicy": { "AutonomousEnrollment": False, "CommercialCommissioning": False, "ExternalCommissioning": True, "NativeCommissioning": True, "NetworkKeyProvisioning": False, "NonCcmRouters": False, "ObtainNetworkKey": True, "RotationTime": 672, "Routers": True, "TobleLink": True, }, } async def test_factory_reset(aioclient_mock: AiohttpClientMocker) -> None: """Test factory_reset.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.delete(f"{BASE_URL}/node", status=HTTPStatus.OK) await otbr.factory_reset() assert aioclient_mock.call_count == 1 assert aioclient_mock.mock_calls[-1][0] == "DELETE" assert aioclient_mock.mock_calls[-1][1].path == "/node" assert aioclient_mock.mock_calls[-1][2] is None async def test_factory_reset_unsupported(aioclient_mock: AiohttpClientMocker) -> None: """Test factory_reset is unsupported.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) 
aioclient_mock.delete(f"{BASE_URL}/node", status=HTTPStatus.METHOD_NOT_ALLOWED) with pytest.raises(python_otbr_api.FactoryResetNotSupportedError): await otbr.factory_reset() async def test_factory_reset_201(aioclient_mock: AiohttpClientMocker) -> None: """Test factory_reset with error.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.delete(f"{BASE_URL}/node", status=HTTPStatus.CREATED) with pytest.raises(python_otbr_api.OTBRError): await otbr.factory_reset() async def test_get_border_agent_id(aioclient_mock: AiohttpClientMocker) -> None: """Test get_border_agent_id.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) mock_response = "230C6A1AC57F6F4BE262ACF32E5EF52C" aioclient_mock.get(f"{BASE_URL}/node/ba-id", json=mock_response) assert await otbr.get_border_agent_id() == bytes.fromhex(mock_response) async def test_get_border_agent_id_unsupported( aioclient_mock: AiohttpClientMocker, ) -> None: """Test get_border_agent_id with error.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.get(f"{BASE_URL}/node/ba-id", status=HTTPStatus.NOT_FOUND) with pytest.raises(python_otbr_api.GetBorderAgentIdNotSupportedError): await otbr.get_border_agent_id() async def test_get_border_agent_id_invalid(aioclient_mock: AiohttpClientMocker) -> None: """Test get_border_agent_id with error.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.get(f"{BASE_URL}/node/ba-id", text="unexpected") with pytest.raises(python_otbr_api.OTBRError): await otbr.get_border_agent_id() async def test_get_border_agent_id_201(aioclient_mock: AiohttpClientMocker) -> None: """Test get_border_agent_id with error.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.get(f"{BASE_URL}/node/ba-id", status=HTTPStatus.CREATED) with pytest.raises(python_otbr_api.OTBRError): await otbr.get_border_agent_id() async def test_set_enabled(aioclient_mock: AiohttpClientMocker) -> None: """Test set_enabled.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.put(f"{BASE_URL}/node/state", status=HTTPStatus.OK) await otbr.set_enabled(True) assert aioclient_mock.call_count == 1 assert aioclient_mock.mock_calls[-1][0] == "PUT" assert aioclient_mock.mock_calls[-1][1].path == "/node/state" assert aioclient_mock.mock_calls[-1][2] == "enable" await otbr.set_enabled(False) assert aioclient_mock.call_count == 2 assert aioclient_mock.mock_calls[-1][0] == "PUT" assert aioclient_mock.mock_calls[-1][1].path == "/node/state" assert aioclient_mock.mock_calls[-1][2] == "disable" async def test_get_active_dataset(aioclient_mock: AiohttpClientMocker): """Test get_active_dataset.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.get(f"{BASE_URL}/node/dataset/active", json=DATASET_JSON) active_timestamp = python_otbr_api.models.Timestamp( DATASET_JSON["ActiveTimestamp"]["Authoritative"], DATASET_JSON["ActiveTimestamp"]["Seconds"], DATASET_JSON["ActiveTimestamp"]["Ticks"], ) security_policy = python_otbr_api.models.SecurityPolicy( DATASET_JSON["SecurityPolicy"]["AutonomousEnrollment"], DATASET_JSON["SecurityPolicy"]["CommercialCommissioning"], DATASET_JSON["SecurityPolicy"]["ExternalCommissioning"], DATASET_JSON["SecurityPolicy"]["NativeCommissioning"], DATASET_JSON["SecurityPolicy"]["NetworkKeyProvisioning"], DATASET_JSON["SecurityPolicy"]["NonCcmRouters"], DATASET_JSON["SecurityPolicy"]["ObtainNetworkKey"], 
DATASET_JSON["SecurityPolicy"]["RotationTime"], DATASET_JSON["SecurityPolicy"]["Routers"], DATASET_JSON["SecurityPolicy"]["TobleLink"], ) active_dataset = await otbr.get_active_dataset() assert active_dataset == python_otbr_api.ActiveDataSet( active_timestamp, DATASET_JSON["ChannelMask"], DATASET_JSON["Channel"], DATASET_JSON["ExtPanId"], DATASET_JSON["MeshLocalPrefix"], DATASET_JSON["NetworkKey"], DATASET_JSON["NetworkName"], DATASET_JSON["PanId"], DATASET_JSON["PSKc"], security_policy, ) assert active_dataset.as_json() == DATASET_JSON async def test_get_active_dataset_empty(aioclient_mock: AiohttpClientMocker): """Test get_active_dataset.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.get(f"{BASE_URL}/node/dataset/active", status=HTTPStatus.NO_CONTENT) assert await otbr.get_active_dataset() is None async def test_get_active_dataset_tlvs(aioclient_mock: AiohttpClientMocker) -> None: """Test get_active_dataset_tlvs.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) mock_response = ( "0E080000000000010000000300001035060004001FFFE00208F642646DA209B1C00708FDF57B5A" "0FE2AAF60510DE98B5BA1A528FEE049D4B4B01835375030D4F70656E5468726561642048410102" "25A40410F5DD18371BFD29E1A601EF6FFAD94C030C0402A0F7F8" ) aioclient_mock.get(f"{BASE_URL}/node/dataset/active", text=mock_response) assert await otbr.get_active_dataset_tlvs() == bytes.fromhex(mock_response) async def test_get_active_dataset_tlvs_empty(aioclient_mock: AiohttpClientMocker): """Test get_active_dataset_tlvs.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.get(f"{BASE_URL}/node/dataset/active", status=HTTPStatus.NO_CONTENT) assert await otbr.get_active_dataset_tlvs() is None async def test_get_pending_dataset_tlvs(aioclient_mock: AiohttpClientMocker) -> None: """Test get_pending_dataset_tlvs.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) mock_response = ( "0E080000000000010000340400006699000300000C35060004001FFFE00208057B7CD3D6CC9F65" "0708FD17C9D59809B27A05107546326F20BCCFD946609FBAF7F39AD5030F4F70656E5468726561" "642D32366363010226CC0410FA7EC34EBE58DD1FD74F13F65D021C5B0C0402A0F7F8" ) aioclient_mock.get(f"{BASE_URL}/node/dataset/pending", text=mock_response) assert await otbr.get_pending_dataset_tlvs() == bytes.fromhex(mock_response) async def test_get_pending_dataset_tlvs_empty(aioclient_mock: AiohttpClientMocker): """Test get_pending_dataset_tlvs.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.get(f"{BASE_URL}/node/dataset/pending", status=HTTPStatus.NO_CONTENT) assert await otbr.get_pending_dataset_tlvs() is None async def test_create_active_dataset(aioclient_mock: AiohttpClientMocker): """Test create_active_dataset.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.put(f"{BASE_URL}/node/dataset/active", status=HTTPStatus.CREATED) await otbr.create_active_dataset(python_otbr_api.ActiveDataSet()) assert aioclient_mock.call_count == 1 assert aioclient_mock.mock_calls[-1][0] == "PUT" assert aioclient_mock.mock_calls[-1][1].path == "/node/dataset/active" assert aioclient_mock.mock_calls[-1][2] == {} await otbr.create_active_dataset( python_otbr_api.ActiveDataSet(network_name="OpenThread HA") ) assert aioclient_mock.call_count == 2 assert aioclient_mock.mock_calls[-1][0] == "PUT" assert aioclient_mock.mock_calls[-1][1].path == "/node/dataset/active" assert aioclient_mock.mock_calls[-1][2] == {"NetworkName": "OpenThread HA"} await 
otbr.create_active_dataset( python_otbr_api.ActiveDataSet(network_name="OpenThread HA", channel=15) ) assert aioclient_mock.call_count == 3 assert aioclient_mock.mock_calls[-1][0] == "PUT" assert aioclient_mock.mock_calls[-1][1].path == "/node/dataset/active" assert aioclient_mock.mock_calls[-1][2] == { "NetworkName": "OpenThread HA", "Channel": 15, } async def test_delete_active_dataset(aioclient_mock: AiohttpClientMocker): """Test delete_active_dataset.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.delete(f"{BASE_URL}/node/dataset/active", status=HTTPStatus.OK) await otbr.delete_active_dataset() assert aioclient_mock.call_count == 1 assert aioclient_mock.mock_calls[-1][0] == "DELETE" assert aioclient_mock.mock_calls[-1][1].path == "/node/dataset/active" assert aioclient_mock.mock_calls[-1][2] is None async def test_create_pending_dataset(aioclient_mock: AiohttpClientMocker): """Test create_pending_dataset.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.put(f"{BASE_URL}/node/dataset/pending", status=HTTPStatus.CREATED) await otbr.create_pending_dataset(python_otbr_api.PendingDataSet()) assert aioclient_mock.call_count == 1 assert aioclient_mock.mock_calls[-1][0] == "PUT" assert aioclient_mock.mock_calls[-1][1].path == "/node/dataset/pending" assert aioclient_mock.mock_calls[-1][2] == {} await otbr.create_pending_dataset( python_otbr_api.PendingDataSet( python_otbr_api.ActiveDataSet(network_name="OpenThread HA"), 12345, python_otbr_api.Timestamp(), ) ) assert aioclient_mock.call_count == 2 assert aioclient_mock.mock_calls[-1][0] == "PUT" assert aioclient_mock.mock_calls[-1][1].path == "/node/dataset/pending" assert aioclient_mock.mock_calls[-1][2] == { "ActiveDataset": { "NetworkName": "OpenThread HA", }, "Delay": 12345, "PendingTimestamp": {}, } await otbr.create_pending_dataset( python_otbr_api.PendingDataSet( python_otbr_api.ActiveDataSet(network_name="OpenThread HA", channel=15), 23456, ) ) assert aioclient_mock.call_count == 3 assert aioclient_mock.mock_calls[-1][0] == "PUT" assert aioclient_mock.mock_calls[-1][1].path == "/node/dataset/pending" assert aioclient_mock.mock_calls[-1][2] == { "ActiveDataset": { "Channel": 15, "NetworkName": "OpenThread HA", }, "Delay": 23456, } async def test_delete_pending_dataset(aioclient_mock: AiohttpClientMocker): """Test delete_pending_dataset.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.delete(f"{BASE_URL}/node/dataset/pending", status=HTTPStatus.OK) await otbr.delete_pending_dataset() assert aioclient_mock.call_count == 1 assert aioclient_mock.mock_calls[-1][0] == "DELETE" assert aioclient_mock.mock_calls[-1][1].path == "/node/dataset/pending" assert aioclient_mock.mock_calls[-1][2] is None async def test_set_channel(aioclient_mock: AiohttpClientMocker) -> None: """Test set_channel.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.get(f"{BASE_URL}/node/dataset/active", json=DATASET_JSON) aioclient_mock.put(f"{BASE_URL}/node/dataset/pending", status=HTTPStatus.CREATED) new_channel = 16 expected_active_timestamp = DATASET_JSON["ActiveTimestamp"] | {"Seconds": 2} expected_pending_dataset = { "ActiveDataset": DATASET_JSON | { "ActiveTimestamp": expected_active_timestamp, "Channel": new_channel, }, "Delay": 1234, } assert new_channel != DATASET_JSON["Channel"] await otbr.set_channel(new_channel, 1234) assert aioclient_mock.call_count == 2 assert aioclient_mock.mock_calls[0][0] == 
"GET" assert aioclient_mock.mock_calls[0][1].path == "/node/dataset/active" assert aioclient_mock.mock_calls[1][0] == "PUT" assert aioclient_mock.mock_calls[1][1].path == "/node/dataset/pending" assert aioclient_mock.mock_calls[1][2] == expected_pending_dataset async def test_set_channel_default_delay(aioclient_mock: AiohttpClientMocker) -> None: """Test set_channel.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.get(f"{BASE_URL}/node/dataset/active", json=DATASET_JSON) aioclient_mock.put(f"{BASE_URL}/node/dataset/pending", status=HTTPStatus.CREATED) new_channel = 16 expected_active_timestamp = DATASET_JSON["ActiveTimestamp"] | {"Seconds": 2} expected_pending_dataset = { "ActiveDataset": DATASET_JSON | { "ActiveTimestamp": expected_active_timestamp, "Channel": new_channel, }, "Delay": 300000, } assert new_channel != DATASET_JSON["Channel"] await otbr.set_channel(new_channel) assert aioclient_mock.call_count == 2 assert aioclient_mock.mock_calls[0][0] == "GET" assert aioclient_mock.mock_calls[0][1].path == "/node/dataset/active" assert aioclient_mock.mock_calls[1][0] == "PUT" assert aioclient_mock.mock_calls[1][1].path == "/node/dataset/pending" assert aioclient_mock.mock_calls[1][2] == expected_pending_dataset async def test_set_channel_no_timestamp(aioclient_mock: AiohttpClientMocker) -> None: """Test set_channel.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) dataset_json = dict(DATASET_JSON) dataset_json.pop("ActiveTimestamp") aioclient_mock.get(f"{BASE_URL}/node/dataset/active", json=dataset_json) aioclient_mock.put(f"{BASE_URL}/node/dataset/pending", status=HTTPStatus.CREATED) new_channel = 16 expected_active_timestamp = {"Authoritative": False, "Seconds": 1, "Ticks": 0} expected_pending_dataset = { "ActiveDataset": DATASET_JSON | { "ActiveTimestamp": expected_active_timestamp, "Channel": new_channel, }, "Delay": 300000, } assert new_channel != DATASET_JSON["Channel"] await otbr.set_channel(new_channel) assert aioclient_mock.call_count == 2 assert aioclient_mock.mock_calls[0][0] == "GET" assert aioclient_mock.mock_calls[0][1].path == "/node/dataset/active" assert aioclient_mock.mock_calls[1][0] == "PUT" assert aioclient_mock.mock_calls[1][1].path == "/node/dataset/pending" assert aioclient_mock.mock_calls[1][2] == expected_pending_dataset async def test_set_channel_invalid_channel(aioclient_mock: AiohttpClientMocker) -> None: """Test set_channel.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) with pytest.raises(python_otbr_api.OTBRError): await otbr.set_channel(123) async def test_set_channel_no_dataset(aioclient_mock: AiohttpClientMocker) -> None: """Test set_channel.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.get(f"{BASE_URL}/node/dataset/active", status=HTTPStatus.NO_CONTENT) with pytest.raises(python_otbr_api.OTBRError): await otbr.set_channel(16) async def test_get_extended_address(aioclient_mock: AiohttpClientMocker) -> None: """Test get_active_dataset_tlvs.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) mock_response = "4EF6C4F3FF750626" aioclient_mock.get(f"{BASE_URL}/node/ext-address", json=mock_response) assert await otbr.get_extended_address() == bytes.fromhex(mock_response) async def test_get_coprocessor_version(aioclient_mock: AiohttpClientMocker) -> None: """Test get_coprocessor_version.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) mock_response = ( 
"OPENTHREAD/thread-reference-20200818-1740-g33cc75ed3;" " NRF52840; Jun 2 2022 14:25:49" ) aioclient_mock.get(f"{BASE_URL}/node/coprocessor/version", json=mock_response) assert await otbr.get_coprocessor_version() == mock_response async def test_set_enabled_201(aioclient_mock: AiohttpClientMocker) -> None: """Test set_enabled.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.put(f"{BASE_URL}/node/state", status=HTTPStatus.CREATED) with pytest.raises(python_otbr_api.OTBRError): await otbr.set_enabled(True) async def test_get_active_dataset_201(aioclient_mock: AiohttpClientMocker): """Test get_active_dataset with error.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.get(f"{BASE_URL}/node/dataset/active", status=HTTPStatus.CREATED) with pytest.raises(python_otbr_api.OTBRError): await otbr.get_active_dataset() async def test_get_active_dataset_invalid(aioclient_mock: AiohttpClientMocker): """Test get_active_dataset with error.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.get(f"{BASE_URL}/node/dataset/active", text="unexpected") with pytest.raises(python_otbr_api.OTBRError): await otbr.get_active_dataset() async def test_get_active_dataset_tlvs_201(aioclient_mock: AiohttpClientMocker): """Test get_active_dataset_tlvs with error.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.get(f"{BASE_URL}/node/dataset/active", status=HTTPStatus.CREATED) with pytest.raises(python_otbr_api.OTBRError): await otbr.get_active_dataset_tlvs() async def test_get_active_dataset_tlvs_invalid(aioclient_mock: AiohttpClientMocker): """Test get_active_dataset_tlvs with error.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.get(f"{BASE_URL}/node/dataset/active", text="unexpected") with pytest.raises(python_otbr_api.OTBRError): await otbr.get_active_dataset_tlvs() async def test_get_pending_dataset_tlvs_201(aioclient_mock: AiohttpClientMocker): """Test test_get_pending_dataset_tlvs_201 with error.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.get(f"{BASE_URL}/node/dataset/pending", status=HTTPStatus.CREATED) with pytest.raises(python_otbr_api.OTBRError): await otbr.get_pending_dataset_tlvs() async def test_test_get_pending_dataset_tlvs_201_invalid( aioclient_mock: AiohttpClientMocker, ): """Test get_pending_dataset_tlvs with error.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.get(f"{BASE_URL}/node/dataset/pending", text="unexpected") with pytest.raises(python_otbr_api.OTBRError): await otbr.get_pending_dataset_tlvs() async def test_create_active_dataset_thread_active(aioclient_mock: AiohttpClientMocker): """Test create_active_dataset with error.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.put(f"{BASE_URL}/node/dataset/active", status=HTTPStatus.CONFLICT) with pytest.raises(python_otbr_api.ThreadNetworkActiveError): await otbr.create_active_dataset(python_otbr_api.ActiveDataSet()) async def test_create_active_dataset_202(aioclient_mock: AiohttpClientMocker): """Test create_active_dataset with error.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.put(f"{BASE_URL}/node/dataset/active", status=HTTPStatus.ACCEPTED) with pytest.raises(python_otbr_api.OTBRError): await otbr.create_active_dataset(python_otbr_api.ActiveDataSet()) async def 
test_delete_active_dataset_thread_active(aioclient_mock: AiohttpClientMocker): """Test delete_active_dataset with error.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.delete(f"{BASE_URL}/node/dataset/active", status=HTTPStatus.CONFLICT) with pytest.raises(python_otbr_api.ThreadNetworkActiveError): await otbr.delete_active_dataset() async def test_delete_active_dataset_202(aioclient_mock: AiohttpClientMocker): """Test delete_active_dataset with error.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.delete(f"{BASE_URL}/node/dataset/active", status=HTTPStatus.ACCEPTED) with pytest.raises(python_otbr_api.OTBRError): await otbr.delete_active_dataset() async def test_create_pending_dataset_thread_active( aioclient_mock: AiohttpClientMocker, ): """Test create_pending_dataset with error.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.put(f"{BASE_URL}/node/dataset/pending", status=HTTPStatus.CONFLICT) with pytest.raises(python_otbr_api.ThreadNetworkActiveError): await otbr.create_pending_dataset(python_otbr_api.PendingDataSet()) async def test_create_pending_dataset_202(aioclient_mock: AiohttpClientMocker): """Test create_pending_dataset with error.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.put(f"{BASE_URL}/node/dataset/pending", status=HTTPStatus.ACCEPTED) with pytest.raises(python_otbr_api.OTBRError): await otbr.create_pending_dataset(python_otbr_api.PendingDataSet()) async def test_delete_pending_dataset_thread_active( aioclient_mock: AiohttpClientMocker, ): """Test delete_pending_dataset with error.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.delete( f"{BASE_URL}/node/dataset/pending", status=HTTPStatus.CONFLICT ) with pytest.raises(python_otbr_api.ThreadNetworkActiveError): await otbr.delete_pending_dataset() async def test_delete_pending_dataset_202(aioclient_mock: AiohttpClientMocker): """Test delete_pending_dataset with error.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.delete( f"{BASE_URL}/node/dataset/pending", status=HTTPStatus.ACCEPTED ) with pytest.raises(python_otbr_api.OTBRError): await otbr.delete_pending_dataset() async def test_set_active_dataset_tlvs_thread_active( aioclient_mock: AiohttpClientMocker, ): """Test set_active_dataset with error.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.put(f"{BASE_URL}/node/dataset/active", status=HTTPStatus.CONFLICT) with pytest.raises(python_otbr_api.ThreadNetworkActiveError): await otbr.set_active_dataset_tlvs(b"") async def test_set_active_dataset_tlvs_202(aioclient_mock: AiohttpClientMocker): """Test set_active_dataset with error.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.put(f"{BASE_URL}/node/dataset/active", status=HTTPStatus.ACCEPTED) with pytest.raises(python_otbr_api.OTBRError): await otbr.set_active_dataset_tlvs(b"") async def test_get_extended_address_201(aioclient_mock: AiohttpClientMocker) -> None: """Test get_extended_address with error.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.get(f"{BASE_URL}/node/ext-address", status=HTTPStatus.CREATED) with pytest.raises(python_otbr_api.OTBRError): await otbr.get_extended_address() async def test_get_extended_address_invalid(aioclient_mock: AiohttpClientMocker): """Test get_extended_address with 
error.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.get(f"{BASE_URL}/node/ext-address", text="unexpected") with pytest.raises(python_otbr_api.OTBRError): await otbr.get_extended_address() async def test_get_coprocessor_version_invalid(aioclient_mock: AiohttpClientMocker): """Test get_coprocessor_version with error.""" otbr = python_otbr_api.OTBR(BASE_URL, aioclient_mock.create_session()) aioclient_mock.get( f"{BASE_URL}/node/coprocessor/version", status=HTTPStatus.NOT_FOUND ) with pytest.raises(python_otbr_api.OTBRError): await otbr.get_coprocessor_version() python-otbr-api-2.7.0/tests/test_mdns.py000066400000000000000000000032711474155026400203030ustar00rootroot00000000000000"""Test decoding fields in _meshcop._udp.local. services.""" import pytest from python_otbr_api.mdns import ( Availability, ConnectionMode, StateBitmap, ThreadInterfaceStatus, ) @pytest.mark.parametrize( "encoded, decoded", [ ( b"\x00\x00\x01\xb1", StateBitmap( connection_mode=ConnectionMode.PSKC, thread_interface_status=ThreadInterfaceStatus.ACTIVE, availability=Availability.HIGH, is_active=True, is_primary=True, ), ), ( b"\x00\x00\x00!", StateBitmap( connection_mode=ConnectionMode.PSKC, thread_interface_status=ThreadInterfaceStatus.NOT_INITIALIZED, availability=Availability.HIGH, is_active=False, is_primary=False, ), ), ], ) def test_decode_state_bitmap(encoded, decoded) -> None: """Test the TLV parser.""" assert StateBitmap.from_bytes(encoded) == decoded @pytest.mark.parametrize( "encoded, error", [ # Input not bytes ("blah", TypeError), # Wrong length (b"\x00\x01\xb1", ValueError), # Padding not zeroed (b"\xff\x00\x01\xb1", ValueError), # Invalid ConnectionMode (b"\x00\x00\x01\xb5", ValueError), # Invalid ThreadInterfaceStatus (b"\x00\x00\x01\xb9", ValueError), # Invalid Availability (b"\x00\x00\x01\xf1", ValueError), ], ) def test_decode_state_bitmap_error(encoded, error) -> None: """Test the TLV parser.""" with pytest.raises(error): StateBitmap.from_bytes(encoded) python-otbr-api-2.7.0/tests/test_models.py000066400000000000000000000010311474155026400206150ustar00rootroot00000000000000"""Test data models.""" import python_otbr_api def test_deserialize_pending_dataset(): """Test deserializing a pending dataset.""" assert python_otbr_api.PendingDataSet.from_json( { "ActiveDataset": { "NetworkName": "OpenThread HA", }, "Delay": 12345, "PendingTimestamp": {}, } ) == python_otbr_api.PendingDataSet( python_otbr_api.ActiveDataSet(network_name="OpenThread HA"), 12345, python_otbr_api.Timestamp(), ) python-otbr-api-2.7.0/tests/test_pskc.py000066400000000000000000000020241474155026400202750ustar00rootroot00000000000000"""Test calculating PSKc.""" import pytest from python_otbr_api.pskc import compute_pskc @pytest.mark.parametrize( "ext_pan_id, network_name, passphrase, expected_pskc", [ # Example from https://openthread.io/guides/border-router/tools#pskc_generator ( bytes.fromhex("1234AAAA1234BBBB"), "MyOTBRNetwork", "J01NME", "ee4fb64e9341e13846bbe7e1c52b6785", ), # OTBR Web UI default ( bytes.fromhex("1111111122222222"), "OpenThreadDemo", "j01Nme", "445f2b5ca6f2a93a55ce570a70efeecb", ), # 128 bit key ( bytes.fromhex("1234AAAA1234BBBB"), "MyOTBRNetwork", "0123456789ABCDEF", "f1927f0ec11da1ac7ef4ee05e81fe0ce", ), ], ) def test_compute_pskc(ext_pan_id, network_name, passphrase, expected_pskc) -> None: """Test the TLV parser.""" assert expected_pskc == compute_pskc(ext_pan_id, network_name, passphrase).hex() 
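
The snippet below is an illustrative, self-contained sketch and not a file in this repository. It shows a typical way to drive the OTBR REST client end to end: create an active dataset if none exists, enable the Thread interface, read a few node properties and schedule a channel change via a pending dataset. The border router URL is an assumption; adjust it for your setup.

import asyncio

import aiohttp

import python_otbr_api


async def main() -> None:
    async with aiohttp.ClientSession() as session:
        # URL of the border router REST API (assumed value).
        otbr = python_otbr_api.OTBR("http://localhost:8081", session, timeout=10)

        if await otbr.get_active_dataset() is None:
            # No Thread network configured yet: let the OTBR fill in any unset
            # fields of the dataset, then bring the Thread interface up.
            await otbr.create_active_dataset(
                python_otbr_api.ActiveDataSet(network_name="OpenThread HA")
            )
            await otbr.set_enabled(True)

        print("extended address:", (await otbr.get_extended_address()).hex())
        print("active dataset TLVs:", (await otbr.get_active_dataset_tlvs()).hex())

        # Schedule a channel change through a pending dataset; valid channels
        # are 11..26 and the default delay is 5 minutes.
        await otbr.set_channel(16)


asyncio.run(main())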
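
The next sketch (also not part of the repository) works entirely offline and combines the pskc and tlv_parser helpers: parse an operational dataset TLV string, derive a PSKc from the commissioning credentials it contains, and write the PSKc back before re-encoding. The dataset hex string and the passphrase "j01Nme" are the example values used in the tests above; everything else is an assumption for illustration.

from python_otbr_api.pskc import compute_pskc
from python_otbr_api.tlv_parser import (
    MeshcopTLVItem,
    MeshcopTLVType,
    encode_tlv,
    parse_tlv,
)

# Example dataset TLVs (same value as used in test_tlv_parser.py).
DATASET_TLV = (
    "0E080000000000010000000300000F35060004001FFFE0020811111111222222220708FDAD70BF"
    "E5AA15DD051000112233445566778899AABBCCDDEEFF030E4F70656E54687265616444656D6F01"
    "0212340410445F2B5CA6F2A93A55CE570A70EFEECB0C0402A0F7F8"
)

dataset = parse_tlv(DATASET_TLV)
ext_pan_id = dataset[MeshcopTLVType.EXTPANID].data
network_name = str(dataset[MeshcopTLVType.NETWORKNAME])

# Derive the PSKc from a commissioner passphrase. compute_pskc returns a
# bytearray, so convert to bytes before storing it in the TLV item.
pskc = bytes(compute_pskc(ext_pan_id, network_name, "j01Nme"))
dataset[MeshcopTLVType.PSKC] = MeshcopTLVItem(MeshcopTLVType.PSKC, pskc)

# Re-encode; for these example values this reproduces DATASET_TLV (lower-cased).
print(encode_tlv(dataset))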
python-otbr-api-2.7.0/tests/test_tlv_parser.py000066400000000000000000000124641474155026400215270ustar00rootroot00000000000000"""Test the Thread TLV parser.""" import pytest from python_otbr_api.tlv_parser import ( Timestamp, Channel, MeshcopTLVItem, MeshcopTLVType, NetworkName, TLVError, encode_tlv, parse_tlv, ) def test_encode_tlv() -> None: """Test the TLV parser.""" dataset = { MeshcopTLVType.ACTIVETIMESTAMP: MeshcopTLVItem( MeshcopTLVType.ACTIVETIMESTAMP, bytes.fromhex("0000000000010000") ), MeshcopTLVType.CHANNEL: MeshcopTLVItem( MeshcopTLVType.CHANNEL, bytes.fromhex("00000f") ), MeshcopTLVType.CHANNELMASK: MeshcopTLVItem( MeshcopTLVType.CHANNELMASK, bytes.fromhex("0004001fffe0") ), MeshcopTLVType.EXTPANID: MeshcopTLVItem( MeshcopTLVType.EXTPANID, bytes.fromhex("1111111122222222") ), MeshcopTLVType.MESHLOCALPREFIX: MeshcopTLVItem( MeshcopTLVType.MESHLOCALPREFIX, bytes.fromhex("fdad70bfe5aa15dd") ), MeshcopTLVType.NETWORKKEY: MeshcopTLVItem( MeshcopTLVType.NETWORKKEY, bytes.fromhex("00112233445566778899aabbccddeeff") ), MeshcopTLVType.NETWORKNAME: NetworkName( MeshcopTLVType.NETWORKNAME, "OpenThreadDemo".encode() ), MeshcopTLVType.PANID: MeshcopTLVItem( MeshcopTLVType.PANID, bytes.fromhex("1234") ), MeshcopTLVType.PSKC: MeshcopTLVItem( MeshcopTLVType.PSKC, bytes.fromhex("445f2b5ca6f2a93a55ce570a70efeecb") ), MeshcopTLVType.SECURITYPOLICY: MeshcopTLVItem( MeshcopTLVType.SECURITYPOLICY, bytes.fromhex("02a0f7f8") ), } dataset_tlv = encode_tlv(dataset) assert ( dataset_tlv == ( "0E080000000000010000000300000F35060004001FFFE0020811111111222222220708FDAD" "70BFE5AA15DD051000112233445566778899AABBCCDDEEFF030E4F70656E54687265616444" "656D6F010212340410445F2B5CA6F2A93A55CE570A70EFEECB0C0402A0F7F8" ).lower() ) def test_parse_tlv() -> None: """Test the TLV parser.""" dataset_tlv = ( "0E080000000000010000000300000F35060004001FFFE0020811111111222222220708FDAD70BF" "E5AA15DD051000112233445566778899AABBCCDDEEFF030E4F70656E54687265616444656D6F01" "0212340410445F2B5CA6F2A93A55CE570A70EFEECB0C0402A0F7F8" ) dataset = parse_tlv(dataset_tlv) assert dataset == { MeshcopTLVType.CHANNEL: Channel( MeshcopTLVType.CHANNEL, bytes.fromhex("00000f") ), MeshcopTLVType.PANID: MeshcopTLVItem( MeshcopTLVType.PANID, bytes.fromhex("1234") ), MeshcopTLVType.EXTPANID: MeshcopTLVItem( MeshcopTLVType.EXTPANID, bytes.fromhex("1111111122222222") ), MeshcopTLVType.NETWORKNAME: NetworkName( MeshcopTLVType.NETWORKNAME, "OpenThreadDemo".encode() ), MeshcopTLVType.PSKC: MeshcopTLVItem( MeshcopTLVType.PSKC, bytes.fromhex("445f2b5ca6f2a93a55ce570a70efeecb") ), MeshcopTLVType.NETWORKKEY: MeshcopTLVItem( MeshcopTLVType.NETWORKKEY, bytes.fromhex("00112233445566778899aabbccddeeff") ), MeshcopTLVType.MESHLOCALPREFIX: MeshcopTLVItem( MeshcopTLVType.MESHLOCALPREFIX, bytes.fromhex("fdad70bfe5aa15dd") ), MeshcopTLVType.SECURITYPOLICY: MeshcopTLVItem( MeshcopTLVType.SECURITYPOLICY, bytes.fromhex("02a0f7f8") ), MeshcopTLVType.ACTIVETIMESTAMP: Timestamp( MeshcopTLVType.ACTIVETIMESTAMP, bytes.fromhex("0000000000010000") ), MeshcopTLVType.CHANNELMASK: MeshcopTLVItem( MeshcopTLVType.CHANNELMASK, bytes.fromhex("0004001fffe0") ), } def test_parse_tlv_apple() -> None: """Test the TLV parser from a (truncated) dataset from an Apple BR.""" dataset_tlv = ( "0e08000065901a07000000030000194a0300000f35060004001fffc003104d79486f6d65313233" "31323331323334" ) dataset = parse_tlv(dataset_tlv) assert dataset == { MeshcopTLVType.ACTIVETIMESTAMP: Timestamp( MeshcopTLVType.ACTIVETIMESTAMP, bytes.fromhex("000065901a070000") ), MeshcopTLVType.CHANNEL: Channel( 
MeshcopTLVType.CHANNEL, bytes.fromhex("000019") ), MeshcopTLVType.APPLE_TAG_UNKNOWN: MeshcopTLVItem( MeshcopTLVType.APPLE_TAG_UNKNOWN, bytes.fromhex("00000f") ), MeshcopTLVType.CHANNELMASK: MeshcopTLVItem( MeshcopTLVType.CHANNELMASK, bytes.fromhex("0004001fffc0") ), MeshcopTLVType.NETWORKNAME: NetworkName( MeshcopTLVType.NETWORKNAME, "MyHome1231231234".encode() ), } @pytest.mark.parametrize( "tlv, error, msg", ( ( "killevippen", TLVError, "invalid tlvs", ), ( "FF", TLVError, "unknown type 255", ), ( "030E4F70656E54687265616444656D", TLVError, "expected 14 bytes for NETWORKNAME, got 13", ), ( "030E4F70656E54687265616444656DFF", TLVError, "invalid network name '4f70656e54687265616444656dff'", ), ), ) def test_parse_tlv_error(tlv, error, msg) -> None: """Test the TLV parser error handling.""" with pytest.raises(error, match=msg): parse_tlv(tlv) python-otbr-api-2.7.0/tests/test_util/000077500000000000000000000000001474155026400177425ustar00rootroot00000000000000python-otbr-api-2.7.0/tests/test_util/__init__.py000066400000000000000000000000221474155026400220450ustar00rootroot00000000000000"""Test utils.""" python-otbr-api-2.7.0/tests/test_util/aiohttp.py000066400000000000000000000214701474155026400217700ustar00rootroot00000000000000"""Aiohttp test utils.""" import asyncio from contextlib import contextmanager from http import HTTPStatus from json import dumps as json_dumps, loads as json_loads import re from unittest import mock from urllib.parse import parse_qs from aiohttp import ClientSession from aiohttp.client_exceptions import ClientError, ClientResponseError from aiohttp.streams import StreamReader from multidict import CIMultiDict from yarl import URL RETYPE = type(re.compile("")) # pylint: disable=unused-argument, too-many-arguments, too-many-instance-attributes def mock_stream(data): """Mock a stream with data.""" protocol = mock.Mock(_reading_paused=False) stream = StreamReader(protocol, limit=2**16) stream.feed_data(data) stream.feed_eof() return stream class AiohttpClientMocker: """Mock Aiohttp client requests.""" def __init__(self): """Initialize the request mocker.""" self._mocks = [] self._cookies = {} self.mock_calls = [] def request( self, method, url, *, auth=None, status=HTTPStatus.OK, text=None, data=None, content=None, json=None, params=None, headers=None, exc=None, cookies=None, side_effect=None, ): """Mock a request.""" if headers is None: headers = {} if not isinstance(url, RETYPE): url = URL(url) if params: url = url.with_query(params) self._mocks.append( AiohttpClientMockResponse( method=method, url=url, status=status, response=content, json=json, text=text, cookies=cookies, exc=exc, headers=headers, side_effect=side_effect, ) ) def get(self, *args, **kwargs): """Register a mock get request.""" self.request("get", *args, **kwargs) def put(self, *args, **kwargs): """Register a mock put request.""" self.request("put", *args, **kwargs) def post(self, *args, **kwargs): """Register a mock post request.""" self.request("post", *args, **kwargs) def delete(self, *args, **kwargs): """Register a mock delete request.""" self.request("delete", *args, **kwargs) def options(self, *args, **kwargs): """Register a mock options request.""" self.request("options", *args, **kwargs) def patch(self, *args, **kwargs): """Register a mock patch request.""" self.request("patch", *args, **kwargs) @property def call_count(self): """Return the number of requests made.""" return len(self.mock_calls) def clear_requests(self): """Reset mock calls.""" self._mocks.clear() self._cookies.clear() 
self.mock_calls.clear() def create_session(self): """Create a ClientSession that is bound to this mocker.""" session = ClientSession(json_serialize=json_dumps) # Setting directly on `session` will raise deprecation warning object.__setattr__(session, "_request", self.match_request) return session async def match_request( self, method, url, *, data=None, auth=None, params=None, headers=None, allow_redirects=None, timeout=None, json=None, cookies=None, **kwargs, ): """Match a request against pre-registered requests.""" data = data or json url = URL(url) if params: url = url.with_query(params) for response in self._mocks: if response.match_request(method, url, params): self.mock_calls.append((method, url, data, headers)) if response.side_effect: response = await response.side_effect(method, url, data) if response.exc: raise response.exc return response raise AssertionError(f"No mock registered for {method.upper()} {url} {params}") class AiohttpClientMockResponse: """Mock Aiohttp client response.""" def __init__( self, method, url, *, status=HTTPStatus.OK, response=None, json=None, text=None, cookies=None, exc=None, headers=None, side_effect=None, ): """Initialize a fake response.""" if json is not None: text = json_dumps(json) if text is not None: response = text.encode("utf-8") if response is None: response = b"" self.charset = "utf-8" self.method = method self._url = url self.status = status self.response = response self.exc = exc self.side_effect = side_effect self._headers = CIMultiDict(headers or {}) self._cookies = {} if cookies: for name, data in cookies.items(): cookie = mock.MagicMock() cookie.value = data self._cookies[name] = cookie def match_request(self, method, url, params=None): """Test if response answers request.""" if method.lower() != self.method.lower(): return False # regular expression matching if isinstance(self._url, RETYPE): return self._url.search(str(url)) is not None if ( self._url.scheme != url.scheme or self._url.host != url.host or self._url.path != url.path ): return False # Ensure all query components in matcher are present in the request request_qs = parse_qs(url.query_string) matcher_qs = parse_qs(self._url.query_string) for key, vals in matcher_qs.items(): for val in vals: try: request_qs.get(key, []).remove(val) except ValueError: return False return True @property def headers(self): """Return content_type.""" return self._headers @property def cookies(self): """Return dict of cookies.""" return self._cookies @property def url(self): """Return yarl of URL.""" return self._url @property def content_type(self): """Return yarl of URL.""" return self._headers.get("content-type") @property def content(self): """Return content.""" return mock_stream(self.response) async def read(self): """Return mock response.""" return self.response async def text(self, encoding="utf-8", errors="strict"): """Return mock response as a string.""" return self.response.decode(encoding, errors=errors) async def json(self, encoding="utf-8", content_type=None, loads=json_loads): """Return mock response as a json.""" return loads(self.response.decode(encoding)) def release(self): """Mock release.""" def raise_for_status(self): """Raise error if status is 400 or higher.""" if self.status >= 400: request_info = mock.Mock(real_url="http://example.com") raise ClientResponseError( request_info=request_info, history=None, status=self.status, headers=self.headers, ) def close(self): """Mock close.""" @contextmanager def mock_aiohttp_client(): """Context manager to mock aiohttp client.""" 
    mocker = AiohttpClientMocker()
    yield mocker


class MockLongPollSideEffect:
    """Imitate a long_poll request.

    It should be created and used as a side effect for a GET/PUT/etc. request.
    Once created, actual responses are queued with queue_response.
    If the queue is empty, the request will wait until a response is queued.
    """

    def __init__(self):
        """Initialize the queue."""
        self.semaphore = asyncio.Semaphore(0)
        self.response_list = []
        self.stopping = False

    async def __call__(self, method, url, data):
        """Fetch the next response from the queue or wait until the queue has items."""
        if self.stopping:
            raise ClientError()
        await self.semaphore.acquire()
        kwargs = self.response_list.pop(0)
        return AiohttpClientMockResponse(method=method, url=url, **kwargs)

    def queue_response(self, **kwargs):
        """Add a response to the long_poll queue."""
        self.response_list.append(kwargs)
        self.semaphore.release()

    def stop(self):
        """Stop the current request and future ones.

        This avoids an exception if someone is still waiting when exiting a test.
        """
        self.stopping = True
        self.queue_response(exc=ClientError())
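

# Illustrative sketch (assumed usage, not part of any test above): shows how
# MockLongPollSideEffect can be registered with AiohttpClientMocker via the
# side_effect keyword that match_request awaits. The URL and queued payload
# are hypothetical values chosen only for the example.
def _example_long_poll_setup():
    """Wire a long-poll side effect into a mocker and queue one response."""
    mocker = AiohttpClientMocker()
    long_poll = MockLongPollSideEffect()
    # Every GET to this URL blocks until a response has been queued.
    mocker.get("http://example.com/long-poll", side_effect=long_poll)
    # Release the first waiting request with a JSON body.
    long_poll.queue_response(json={"status": "ok"})
    # Tests should call long_poll.stop() during teardown to release any waiter.
    return mocker, long_poll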